Compare commits


2 Commits

Author SHA1 Message Date
Igor Loskutov
1aabe4f880 feat: post Hatchet DAG status to Zulip with live updates
Post a rendered DAG status message to a configurable Zulip stream/topic
when a Hatchet workflow is dispatched. Update it after each task
completes (success or failure), and delete it when post_zulip runs
so the final transcript notification replaces it.

On pipeline failure, appends an error banner (:cross_mark:) with the
failing step name and error message.

New settings: ZULIP_DAG_STREAM, ZULIP_DAG_TOPIC, ZULIP_HOST_HEADER.
New module: reflector/hatchet/dag_zulip.py (create/update/delete).
2026-02-09 10:03:31 -05:00
Igor Loskutov
42e7c0b8fd feat: add CLI tool to render Hatchet workflow runs as text DAG
Cherry-picked from feat/hatchet-run-renderer branch.
2026-02-06 23:25:44 -05:00
52 changed files with 2239 additions and 2155 deletions

View File

@@ -1,219 +0,0 @@
# Self-contained standalone compose for fully local deployment (no external dependencies).
# Usage: docker compose -f docker-compose.standalone.yml up -d
#
# On Linux with NVIDIA GPU, also pass: --profile ollama-gpu
# On Linux without GPU: --profile ollama-cpu
# On Mac: Ollama runs natively (Metal GPU) — no profile needed, services here unused.
services:
  server:
    build:
      context: server
    ports:
      - "1250:1250"
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - ./server/:/app/
      - /app/.venv
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: server
      # Docker DNS names instead of localhost
      DATABASE_URL: postgresql+asyncpg://reflector:reflector@postgres:5432/reflector
      REDIS_HOST: redis
      CELERY_BROKER_URL: redis://redis:6379/1
      CELERY_RESULT_BACKEND: redis://redis:6379/1
      # Standalone doesn't run Hatchet
      HATCHET_CLIENT_SERVER_URL: ""
      HATCHET_CLIENT_HOST_PORT: ""
      # Self-hosted transcription/diarization via CPU service
      TRANSCRIPT_BACKEND: modal
      TRANSCRIPT_URL: http://cpu:8000
      TRANSCRIPT_MODAL_API_KEY: local
      DIARIZATION_BACKEND: modal
      DIARIZATION_URL: http://cpu:8000
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_started
  worker:
    build:
      context: server
    volumes:
      - ./server/:/app/
      - /app/.venv
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: worker
      HATCHET_CLIENT_SERVER_URL: ""
      HATCHET_CLIENT_HOST_PORT: ""
      TRANSCRIPT_BACKEND: modal
      TRANSCRIPT_URL: http://cpu:8000
      TRANSCRIPT_MODAL_API_KEY: local
      DIARIZATION_BACKEND: modal
      DIARIZATION_URL: http://cpu:8000
    depends_on:
      redis:
        condition: service_started
  beat:
    build:
      context: server
    volumes:
      - ./server/:/app/
      - /app/.venv
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: beat
    depends_on:
      redis:
        condition: service_started
  redis:
    image: redis:7.2
    ports:
      - 6379:6379
  postgres:
    image: postgres:17
    command: postgres -c 'max_connections=200'
    ports:
      - 5432:5432
    environment:
      POSTGRES_USER: reflector
      POSTGRES_PASSWORD: reflector
      POSTGRES_DB: reflector
    volumes:
      - ./data/postgres:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d reflector -U reflector"]
      interval: 5s
      timeout: 5s
      retries: 10
      start_period: 15s
  web:
    image: reflector-frontend-standalone
    build:
      context: ./www
    ports:
      - "3000:3000"
    command: ["node", "server.js"]
    environment:
      NODE_ENV: production
      # Browser-facing URLs (host-accessible ports)
      API_URL: http://localhost:1250
      WEBSOCKET_URL: ws://localhost:1250
      SITE_URL: http://localhost:3000
      # Server-side URLs (docker-network internal)
      SERVER_API_URL: http://server:1250
      KV_URL: redis://redis:6379
      KV_USE_TLS: "false"
      # Standalone: no external auth provider
      FEATURE_REQUIRE_LOGIN: "false"
      NEXTAUTH_URL: http://localhost:3000
      NEXTAUTH_SECRET: standalone-local-secret
      # Nullify partial auth vars inherited from base env_file
      AUTHENTIK_ISSUER: ""
      AUTHENTIK_REFRESH_TOKEN_URL: ""
  garage:
    image: dxflrs/garage:v1.1.0
    ports:
      - "3900:3900" # S3 API
      - "3903:3903" # Admin API
    volumes:
      - garage_data:/var/lib/garage/data
      - garage_meta:/var/lib/garage/meta
      - ./data/garage.toml:/etc/garage.toml:ro
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "/garage", "stats"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 5s
  cpu:
    build:
      context: ./gpu/self_hosted
      dockerfile: Dockerfile.cpu
    ports:
      - "8100:8000"
    volumes:
      - gpu_cache:/root/.cache
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/docs"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 120s
  gpu-nvidia:
    build:
      context: ./gpu/self_hosted
    profiles: ["gpu-nvidia"]
    volumes:
      - gpu_cache:/root/.cache
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/docs"]
      interval: 15s
      timeout: 5s
      retries: 10
      start_period: 120s
  ollama:
    image: ollama/ollama:latest
    profiles: ["ollama-gpu"]
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
      interval: 10s
      timeout: 5s
      retries: 5
  ollama-cpu:
    image: ollama/ollama:latest
    profiles: ["ollama-cpu"]
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
      interval: 10s
      timeout: 5s
      retries: 5
volumes:
  garage_data:
  garage_meta:
  ollama_data:
  gpu_cache:

View File

@@ -2,7 +2,8 @@ services:
  server:
    build:
      context: server
    network_mode: host
    ports:
      - 1250:1250
    volumes:
      - ./server/:/app/
      - /app/.venv
@@ -10,12 +11,6 @@ services:
      - ./server/.env
    environment:
      ENTRYPOINT: server
      DATABASE_URL: postgresql+asyncpg://reflector:reflector@localhost:5432/reflector
      REDIS_HOST: localhost
      CELERY_BROKER_URL: redis://localhost:6379/1
      CELERY_RESULT_BACKEND: redis://localhost:6379/1
      HATCHET_CLIENT_SERVER_URL: http://localhost:8889
      HATCHET_CLIENT_HOST_PORT: localhost:7078
  worker:
    build:
@@ -27,11 +22,6 @@ services:
      - ./server/.env
    environment:
      ENTRYPOINT: worker
      HATCHET_CLIENT_SERVER_URL: http://hatchet:8888
      HATCHET_CLIENT_HOST_PORT: hatchet:7077
    depends_on:
      redis:
        condition: service_started
  beat:
    build:
@@ -43,9 +33,6 @@ services:
      - ./server/.env
    environment:
      ENTRYPOINT: beat
    depends_on:
      redis:
        condition: service_started
  hatchet-worker-cpu:
    build:
@@ -57,8 +44,6 @@ services:
      - ./server/.env
    environment:
      ENTRYPOINT: hatchet-worker-cpu
      HATCHET_CLIENT_SERVER_URL: http://hatchet:8888
      HATCHET_CLIENT_HOST_PORT: hatchet:7077
    depends_on:
      hatchet:
        condition: service_healthy
@@ -72,8 +57,6 @@ services:
      - ./server/.env
    environment:
      ENTRYPOINT: hatchet-worker-llm
      HATCHET_CLIENT_SERVER_URL: http://hatchet:8888
      HATCHET_CLIENT_HOST_PORT: hatchet:7077
    depends_on:
      hatchet:
        condition: service_healthy
@@ -92,16 +75,10 @@ services:
    volumes:
      - ./www:/app/
      - /app/node_modules
      - next_cache:/app/.next
    env_file:
      - ./www/.env.local
    environment:
      - NODE_ENV=development
      - SERVER_API_URL=http://host.docker.internal:1250
    extra_hosts:
      - "host.docker.internal:host-gateway"
    depends_on:
      - server
  postgres:
    image: postgres:17
@@ -117,14 +94,13 @@ services:
      - ./server/docker/init-hatchet-db.sql:/docker-entrypoint-initdb.d/init-hatchet-db.sql:ro
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d reflector -U reflector"]
      interval: 5s
      timeout: 5s
      retries: 10
      start_period: 15s
      interval: 10s
      timeout: 10s
      retries: 5
      start_period: 10s
  hatchet:
    image: ghcr.io/hatchet-dev/hatchet/hatchet-lite:latest
    restart: on-failure
    ports:
      - "8889:8888"
      - "7078:7077"
@@ -132,7 +108,7 @@ services:
      postgres:
        condition: service_healthy
    environment:
      DATABASE_URL: "postgresql://reflector:reflector@postgres:5432/hatchet?sslmode=disable&connect_timeout=30"
      DATABASE_URL: "postgresql://reflector:reflector@postgres:5432/hatchet?sslmode=disable"
      SERVER_AUTH_COOKIE_DOMAIN: localhost
      SERVER_AUTH_COOKIE_INSECURE: "t"
      SERVER_GRPC_BIND_ADDRESS: "0.0.0.0"
@@ -152,5 +128,6 @@ services:
      retries: 5
      start_period: 30s
volumes:
  next_cache:
networks:
  default:
    attachable: true

View File

@@ -1,214 +0,0 @@
---
sidebar_position: 2
title: Standalone Local Setup
---
# Standalone Local Setup
**The goal**: a first-time user clones the repo, runs one script, and has a working Reflector instance locally. No cloud accounts, no API keys, no manual env file editing.
```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector
./scripts/setup-standalone.sh
```
The script is idempotent — safe to re-run at any time. It detects what's already set up and skips completed steps.
## Prerequisites
- Docker / OrbStack / Docker Desktop (any)
- Mac (Apple Silicon) or Linux
- 16GB+ RAM (32GB recommended for 14B LLM models)
- **Mac only**: [Ollama](https://ollama.com/download) installed (`brew install ollama`)
## What the script does
### 1. LLM inference via Ollama
**Mac**: starts Ollama natively (Metal GPU acceleration). Pulls the LLM model. Docker containers reach it via `host.docker.internal:11434`.
**Linux**: starts containerized Ollama via `docker-compose.standalone.yml` profile (`ollama-gpu` with NVIDIA, `ollama-cpu` without). Pulls model inside the container.
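Since Ollama's `/v1` endpoint speaks the OpenAI-compatible API, a plain OpenAI-client call is a quick way to confirm the model is reachable. A minimal sketch, assuming the `openai` Python package and the Mac defaults above (from the host, use `localhost` instead of `host.docker.internal`):
```python
# Sanity-check the Ollama endpoint that LLM_URL points at.
from openai import OpenAI

# From the host; inside containers this would be http://host.docker.internal:11434/v1
client = OpenAI(base_url="http://localhost:11434/v1", api_key="not-needed")

print([m.id for m in client.models.list()])  # should include qwen2.5:14b once pulled
resp = client.chat.completions.create(
    model="qwen2.5:14b",
    messages=[{"role": "user", "content": "Reply with one word: ready?"}],
)
print(resp.choices[0].message.content)
```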
### 2. Environment files
Generates `server/.env` and `www/.env.local` with standalone defaults:
**`server/.env`** — key settings:
| Variable | Value | Why |
|----------|-------|-----|
| `DATABASE_URL` | `postgresql+asyncpg://...@postgres:5432/reflector` | Docker-internal hostname |
| `REDIS_HOST` | `redis` | Docker-internal hostname |
| `CELERY_BROKER_URL` | `redis://redis:6379/1` | Docker-internal hostname |
| `AUTH_BACKEND` | `none` | No Authentik in standalone |
| `TRANSCRIPT_BACKEND` | `modal` | HTTP API to self-hosted CPU service |
| `TRANSCRIPT_URL` | `http://cpu:8000` | Docker-internal CPU service |
| `DIARIZATION_BACKEND` | `modal` | HTTP API to self-hosted CPU service |
| `DIARIZATION_URL` | `http://cpu:8000` | Docker-internal CPU service |
| `TRANSLATION_BACKEND` | `passthrough` | No Modal |
| `LLM_URL` | `http://host.docker.internal:11434/v1` (Mac) | Ollama endpoint |
**`www/.env.local`** — key settings:
| Variable | Value |
|----------|-------|
| `API_URL` | `http://localhost:1250` |
| `SERVER_API_URL` | `http://server:1250` |
| `WEBSOCKET_URL` | `ws://localhost:1250` |
| `FEATURE_REQUIRE_LOGIN` | `false` |
| `NEXTAUTH_SECRET` | `standalone-dev-secret-not-for-production` |
If env files already exist (including symlinks from worktree setup), the script resolves symlinks and ensures all standalone-critical vars are set. Existing vars not related to standalone are preserved.
### 3. Object storage (Garage)
Standalone uses [Garage](https://garagehq.deuxfleurs.fr/) — a lightweight S3-compatible object store running in Docker. The setup script starts Garage, initializes the layout, creates a bucket and access key, and writes the credentials to `server/.env`.
**`server/.env`** — storage settings added by the script:
| Variable | Value | Why |
|----------|-------|-----|
| `TRANSCRIPT_STORAGE_BACKEND` | `aws` | Uses the S3-compatible storage driver |
| `TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL` | `http://garage:3900` | Docker-internal Garage S3 API |
| `TRANSCRIPT_STORAGE_AWS_BUCKET_NAME` | `reflector-media` | Created by the script |
| `TRANSCRIPT_STORAGE_AWS_REGION` | `garage` | Must match Garage config |
| `TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID` | *(auto-generated)* | Created by `garage key create` |
| `TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY` | *(auto-generated)* | Created by `garage key create` |
The `TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL` setting enables S3-compatible backends. When set, the storage driver uses path-style addressing and routes all requests to the custom endpoint. When unset (production AWS), behavior is unchanged.
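In terms of the underlying S3 client, this amounts to passing the endpoint and path-style addressing at construction time. A rough sketch of the equivalent configuration, using synchronous `boto3` for brevity (the server itself uses `aioboto3`); the key values are placeholders:
```python
import boto3
from botocore.config import Config

# Values mirror the env vars the setup script writes to server/.env.
s3 = boto3.client(
    "s3",
    endpoint_url="http://garage:3900",  # TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL
    region_name="garage",               # must match s3_region in garage.toml
    aws_access_key_id="GK...",          # placeholder, from `garage key create`
    aws_secret_access_key="...",        # placeholder
    # Path-style requests look like http://garage:3900/reflector-media/<key>,
    # which a single-node Garage serves without virtual-hosted DNS names.
    config=Config(s3={"addressing_style": "path"}),
)
s3.list_objects_v2(Bucket="reflector-media")
```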
Garage config template lives at `scripts/garage.toml`. The setup script generates `data/garage.toml` (gitignored) with a random RPC secret and mounts it read-only into the container. Single-node, `replication_factor=1`.
> **Note**: Presigned URLs embed the Garage Docker hostname (`http://garage:3900`). This is fine — the server proxies S3 responses to the browser. Modal GPU workers cannot reach internal Garage, but standalone doesn't use Modal.
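To see why the note above holds, presign a URL with a client configured against the Docker-internal endpoint: the signed URL inherits that host. A sketch reusing the `s3` client from the previous example; the object key is made up:
```python
# Presigned URLs are derived from the client's endpoint_url, so the host is
# garage:3900, resolvable only on the compose network, hence the proxying.
url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "reflector-media", "Key": "audio/example.mp3"},  # hypothetical key
    ExpiresIn=3600,
)
print(url)  # http://garage:3900/reflector-media/audio/example.mp3?X-Amz-...
```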
### 4. Transcription and diarization
Standalone runs the self-hosted ML service (`gpu/self_hosted/`) in a CPU-only Docker container named `cpu`. This is the same FastAPI service used for Modal.com GPU deployments, but built with `Dockerfile.cpu` (no NVIDIA CUDA dependencies). The compose service is named `cpu` (not `gpu`) to make clear it runs without GPU acceleration; the source code lives in `gpu/self_hosted/` because it's shared with the GPU deployment.
The `modal` backend name is reused — it just means "HTTP API client". Setting `TRANSCRIPT_URL` / `DIARIZATION_URL` to `http://cpu:8000` routes requests to the local container instead of Modal.com.
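Conceptually, the backend boils down to an authenticated HTTP POST against the FastAPI service. A hand-wavy sketch with `httpx`: the route and multipart field names are hypothetical (check `gpu/self_hosted/app` for the real ones); only the base URL and bearer key come from the standalone config:
```python
import httpx

TRANSCRIPT_URL = "http://cpu:8000"  # docker-internal; the host maps it to :8100

async def transcribe(path: str) -> dict:
    async with httpx.AsyncClient(timeout=None) as client:
        with open(path, "rb") as f:
            resp = await client.post(
                f"{TRANSCRIPT_URL}/transcribe",  # hypothetical route name
                files={"file": f},               # hypothetical field name
                headers={"Authorization": "Bearer local"},  # TRANSCRIPT_MODAL_API_KEY
            )
    resp.raise_for_status()
    return resp.json()
```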
On first start, the service downloads pyannote speaker diarization models (~1GB) from a public S3 bundle. Models are cached in a Docker volume (`gpu_cache`) so subsequent starts are fast. No HuggingFace token or API key needed.
> **Performance**: CPU-only transcription and diarization work but are slow (~15 min for a 3 min file). For faster processing on Linux with NVIDIA GPU, use `--profile gpu-nvidia` instead (see `docker-compose.standalone.yml`).
### 5. Docker services
```bash
docker compose up -d postgres redis garage cpu server worker beat web
```
All services start in a single command. Garage and `cpu` are already started by earlier steps but included for idempotency. No Hatchet in standalone mode — LLM processing (summaries, topics, titles) runs via Celery tasks.
### 6. Database migrations
Run automatically by the `server` container on startup (`runserver.sh` calls `alembic upgrade head`). No manual step needed.
### 7. Health check
Verifies:
- CPU service responds (transcription + diarization ready)
- Server responds at `http://localhost:1250/health`
- Frontend serves at `http://localhost:3000`
- LLM endpoint reachable from inside containers
## Services
| Service | Port | Purpose |
|---------|------|---------|
| `server` | 1250 | FastAPI backend (runs migrations on start) |
| `web` | 3000 | Next.js frontend |
| `postgres` | 5432 | PostgreSQL database |
| `redis` | 6379 | Cache + Celery broker |
| `garage` | 3900, 3903 | S3-compatible object storage (S3 API + admin API) |
| `cpu` | — | Self-hosted transcription + diarization (CPU-only) |
| `worker` | — | Celery worker (live pipeline post-processing) |
| `beat` | — | Celery beat (scheduled tasks) |
## Testing programmatically
After the setup script completes, verify the full pipeline (upload, transcription, diarization, LLM summary) via the API:
```bash
# 1. Create a transcript
TRANSCRIPT_ID=$(curl -s -X POST 'http://localhost:1250/v1/transcripts' \
-H 'Content-Type: application/json' \
-d '{"name":"test-upload"}' | python3 -c "import sys,json; print(json.load(sys.stdin)['id'])")
echo "Created: $TRANSCRIPT_ID"
# 2. Upload an audio file (single-chunk upload)
curl -s "http://localhost:1250/v1/transcripts/${TRANSCRIPT_ID}/record/upload?chunk_number=0&total_chunks=1" \
-X POST -F "chunk=@/path/to/audio.mp3"
# 3. Poll until processing completes (status: ended or error)
while true; do
STATUS=$(curl -s "http://localhost:1250/v1/transcripts/${TRANSCRIPT_ID}" \
| python3 -c "import sys,json; print(json.load(sys.stdin)['status'])")
echo "Status: $STATUS"
case "$STATUS" in ended|error) break;; esac
sleep 10
done
# 4. Check the result
curl -s "http://localhost:1250/v1/transcripts/${TRANSCRIPT_ID}" | python3 -m json.tool
```
Expected result: status `ended`, auto-generated `title`, `short_summary`, `long_summary`, and `transcript` text with `Speaker 0` / `Speaker 1` labels.
CPU-only processing is slow (~15 min for a 3 min audio file). Diarization finishes in ~3 min, transcription takes the rest.
## Troubleshooting
### Port conflicts (most common issue)
If the frontend or backend behaves unexpectedly (e.g., env vars seem ignored, changes don't take effect), **check for port conflicts first**:
```bash
# Check what's listening on key ports
lsof -i :3000 # frontend
lsof -i :1250 # backend
lsof -i :5432 # postgres
lsof -i :3900 # Garage S3 API
lsof -i :6379 # Redis
# Kill stale processes on a port
lsof -ti :3000 | xargs kill
```
Common causes:
- A stale `next dev` or `pnpm dev` process from another terminal/worktree
- Another Docker Compose project (different worktree) with containers on the same ports — the setup script only manages its own project; containers from other projects must be stopped manually (`docker ps` to find them, `docker stop` to kill them)
The setup script checks ports 3000, 1250, 5432, 6379, 3900, 3903 for conflicts before starting services. It ignores OrbStack/Docker Desktop port forwarding processes (which always bind these ports but are not real conflicts).
### OrbStack false port-conflict warnings (Mac)
If you use OrbStack as your Docker runtime, `lsof` will show OrbStack binding ports like 3000, 1250, etc. even when no containers are running. This is OrbStack's port forwarding mechanism — not a real conflict. The setup script filters these out automatically.
### Re-enabling authentication
Standalone runs without authentication (`FEATURE_REQUIRE_LOGIN=false`, `AUTH_BACKEND=none`). To re-enable:
1. In `www/.env.local`: set `FEATURE_REQUIRE_LOGIN=true`, uncomment `AUTHENTIK_ISSUER` and `AUTHENTIK_REFRESH_TOKEN_URL`
2. In `server/.env`: set `AUTH_BACKEND=authentik` (or your backend), configure `AUTH_JWT_AUDIENCE`
3. Restart: `docker compose -f docker-compose.standalone.yml up -d --force-recreate web server`
## What's NOT covered
These require external accounts and infrastructure that can't be scripted:
- **Live meeting rooms** — requires Daily.co account, S3 bucket, IAM roles
- **Authentication** — requires Authentik deployment and OAuth configuration
- **Hatchet workflows** — requires separate Hatchet setup for multitrack processing
- **Production deployment** — see [Deployment Guide](./overview)
## Current status
All steps implemented. The setup script handles everything end-to-end:
- Step 1 (Ollama/LLM) — implemented
- Step 2 (environment files) — implemented
- Step 3 (object storage / Garage) — implemented
- Step 4 (transcription/diarization) — implemented (self-hosted service from `gpu/self_hosted/`, run as the CPU container)
- Steps 5-7 (Docker, migrations, health) — implemented
- **Unified script**: `scripts/setup-standalone.sh`

View File

@@ -1,39 +0,0 @@
FROM python:3.12-slim

ENV PYTHONUNBUFFERED=1 \
    UV_LINK_MODE=copy \
    UV_NO_CACHE=1

WORKDIR /tmp

RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update \
    && apt-get install -y \
        ffmpeg \
        curl \
        ca-certificates \
        gnupg \
        wget

ADD https://astral.sh/uv/install.sh /uv-installer.sh
RUN sh /uv-installer.sh && rm /uv-installer.sh
ENV PATH="/root/.local/bin/:$PATH"

RUN mkdir -p /app
WORKDIR /app

COPY pyproject.toml uv.lock /app/
COPY ./app /app/app
COPY ./main.py /app/
COPY ./runserver.sh /app/

# prevent uv failing with too many open files on big cpus
ENV UV_CONCURRENT_INSTALLS=16

# first install
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --compile-bytecode --locked

EXPOSE 8000
CMD ["sh", "/app/runserver.sh"]

View File

@@ -3,14 +3,14 @@ import os
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


def apikey_auth(apikey: str | None = Depends(oauth2_scheme)):
def apikey_auth(apikey: str = Depends(oauth2_scheme)):
    required_key = os.environ.get("REFLECTOR_GPU_APIKEY")
    if not required_key:
        return
    if apikey and apikey == required_key:
    if apikey == required_key:
        return
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,

View File

@@ -1,65 +1,10 @@
import logging
import os
import tarfile
import threading
from pathlib import Path
from urllib.request import urlopen

import torch
import torchaudio
import yaml
from pyannote.audio import Pipeline

logger = logging.getLogger(__name__)

S3_BUNDLE_URL = "https://reflector-public.s3.us-east-1.amazonaws.com/pyannote-speaker-diarization-3.1.tar.gz"
BUNDLE_CACHE_DIR = Path("/root/.cache/pyannote-bundle")


def _ensure_model(cache_dir: Path) -> str:
    """Download and extract S3 model bundle if not cached."""
    model_dir = cache_dir / "pyannote-speaker-diarization-3.1"
    config_path = model_dir / "config.yaml"
    if config_path.exists():
        logger.info("Using cached model bundle at %s", model_dir)
        return str(model_dir)
    cache_dir.mkdir(parents=True, exist_ok=True)
    tarball_path = cache_dir / "model.tar.gz"
    logger.info("Downloading model bundle from %s", S3_BUNDLE_URL)
    with urlopen(S3_BUNDLE_URL) as response, open(tarball_path, "wb") as f:
        while chunk := response.read(8192):
            f.write(chunk)
    logger.info("Extracting model bundle")
    with tarfile.open(tarball_path, "r:gz") as tar:
        tar.extractall(path=cache_dir, filter="data")
    tarball_path.unlink()
    _patch_config(model_dir, cache_dir)
    return str(model_dir)


def _patch_config(model_dir: Path, cache_dir: Path) -> None:
    """Rewrite config.yaml to reference local pytorch_model.bin paths."""
    config_path = model_dir / "config.yaml"
    with open(config_path) as f:
        config = yaml.safe_load(f)
    config["pipeline"]["params"]["segmentation"] = str(
        cache_dir / "pyannote-segmentation-3.0" / "pytorch_model.bin"
    )
    config["pipeline"]["params"]["embedding"] = str(
        cache_dir / "pyannote-wespeaker-voxceleb-resnet34-LM" / "pytorch_model.bin"
    )
    with open(config_path, "w") as f:
        yaml.dump(config, f)
    logger.info("Patched config.yaml with local model paths")


class PyannoteDiarizationService:
    def __init__(self):
@@ -69,20 +14,10 @@ class PyannoteDiarizationService:
    def load(self):
        self._device = "cuda" if torch.cuda.is_available() else "cpu"
        hf_token = os.environ.get("HF_TOKEN")
        if hf_token:
            logger.info("Loading pyannote model from HuggingFace (HF_TOKEN set)")
            self._pipeline = Pipeline.from_pretrained(
                "pyannote/speaker-diarization-3.1",
                use_auth_token=hf_token,
            )
        else:
            logger.info("HF_TOKEN not set — loading model from S3 bundle")
            model_path = _ensure_model(BUNDLE_CACHE_DIR)
            config_path = Path(model_path) / "config.yaml"
            self._pipeline = Pipeline.from_pretrained(str(config_path))
        self._pipeline = Pipeline.from_pretrained(
            "pyannote/speaker-diarization-3.1",
            use_auth_token=os.environ.get("HF_TOKEN"),
        )
        self._pipeline.to(torch.device(self._device))

    def diarize_file(self, file_path: str, timestamp: float = 0.0) -> dict:

View File

@@ -1,14 +0,0 @@
metadata_dir = "/var/lib/garage/meta"
data_dir = "/var/lib/garage/data"
replication_factor = 1
rpc_secret = "__GARAGE_RPC_SECRET__"
rpc_bind_addr = "[::]:3901"

[s3_api]
api_bind_addr = "[::]:3900"
s3_region = "garage"
root_domain = ".s3.garage.localhost"

[admin]
api_bind_addr = "[::]:3903"

View File

@@ -1,544 +0,0 @@
#!/usr/bin/env bash
#
# Standalone local development setup for Reflector.
# Takes a fresh clone to a working instance — no cloud accounts, no API keys.
#
# Usage:
#   ./scripts/setup-standalone.sh
#
# Idempotent — safe to re-run at any time.
#
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
SERVER_ENV="$ROOT_DIR/server/.env"
WWW_ENV="$ROOT_DIR/www/.env.local"
MODEL="${LLM_MODEL:-qwen2.5:14b}"
OLLAMA_PORT="${OLLAMA_PORT:-11434}"
OS="$(uname -s)"

# --- Colors ---
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'

info() { echo -e "${CYAN}==>${NC} $*"; }
ok()   { echo -e "${GREEN}✓${NC} $*"; }
warn() { echo -e "${YELLOW} !${NC} $*"; }
err()  { echo -e "${RED}✗${NC} $*" >&2; }

# --- Helpers ---
dump_diagnostics() {
    local failed_svc="${1:-}"
    echo ""
    err "========== DIAGNOSTICS =========="
    err "Container status:"
    compose_cmd ps -a --format "table {{.Name}}\t{{.Status}}" 2>/dev/null || true
    echo ""
    # Show logs for any container that exited
    local stopped
    stopped=$(compose_cmd ps -a --format '{{.Name}}\t{{.Status}}' 2>/dev/null \
        | grep -iv 'up\|running' | awk -F'\t' '{print $1}' || true)
    for c in $stopped; do
        err "--- Logs for $c (exited/unhealthy) ---"
        docker logs --tail 30 "$c" 2>&1 || true
        echo ""
    done
    # If a specific service failed, always show its logs
    if [[ -n "$failed_svc" ]]; then
        err "--- Logs for $failed_svc (last 40) ---"
        compose_cmd logs "$failed_svc" --tail 40 2>&1 || true
        echo ""
        # Try health check from inside the container as extra signal
        err "--- Internal health check ($failed_svc) ---"
        compose_cmd exec -T "$failed_svc" \
            curl -sf http://localhost:1250/health 2>&1 || echo "(not reachable internally either)"
    fi
    err "================================="
}
trap 'dump_diagnostics' ERR

# Get the image ID for a compose service (works even when containers are not running).
svc_image_id() {
    local svc="$1"
    # Extract image name from compose config YAML, fall back to <project>-<service>
    local img_name
    img_name=$(compose_cmd config 2>/dev/null \
        | sed -n "/^ ${svc}:/,/^ [a-z]/p" | grep '^\s*image:' | awk '{print $2}')
    img_name="${img_name:-reflector-$svc}"
    docker images -q "$img_name" 2>/dev/null | head -1
}

# Ensure images with build contexts are up-to-date.
# Docker layer cache makes this fast (~seconds) when source hasn't changed.
rebuild_images() {
    local svc
    for svc in web cpu; do
        local old_id
        old_id=$(svc_image_id "$svc")
        old_id="${old_id:-<none>}"
        info "Building $svc..."
        compose_cmd build "$svc"
        local new_id
        new_id=$(svc_image_id "$svc")
        if [[ "$old_id" == "$new_id" ]]; then
            ok "$svc unchanged (${new_id:0:12})"
        else
            ok "$svc rebuilt (${old_id:0:12} -> ${new_id:0:12})"
        fi
    done
}

wait_for_url() {
    local url="$1" label="$2" retries="${3:-30}" interval="${4:-2}"
    for i in $(seq 1 "$retries"); do
        if curl -sf "$url" > /dev/null 2>&1; then
            return 0
        fi
        echo -ne "\r Waiting for $label... ($i/$retries)"
        sleep "$interval"
    done
    echo ""
    err "$label not responding at $url after $retries attempts"
    return 1
}

env_has_key() {
    local file="$1" key="$2"
    grep -q "^${key}=" "$file" 2>/dev/null
}

env_set() {
    local file="$1" key="$2" value="$3"
    if env_has_key "$file" "$key"; then
        # Replace existing value (portable sed)
        if [[ "$OS" == "Darwin" ]]; then
            sed -i '' "s|^${key}=.*|${key}=${value}|" "$file"
        else
            sed -i "s|^${key}=.*|${key}=${value}|" "$file"
        fi
    else
        echo "${key}=${value}" >> "$file"
    fi
}

resolve_symlink() {
    local file="$1"
    if [[ -L "$file" ]]; then
        warn "$(basename "$file") is a symlink — creating standalone copy"
        cp -L "$file" "$file.tmp"
        rm "$file"
        mv "$file.tmp" "$file"
    fi
}

compose_cmd() {
    local compose_files="-f $ROOT_DIR/docker-compose.standalone.yml"
    if [[ "$OS" == "Linux" ]] && [[ -n "${OLLAMA_PROFILE:-}" ]]; then
        docker compose $compose_files --profile "$OLLAMA_PROFILE" "$@"
    else
        docker compose $compose_files "$@"
    fi
}

# =========================================================
# Step 1: LLM / Ollama
# =========================================================
step_llm() {
    info "Step 1: LLM setup (Ollama + $MODEL)"
    case "$OS" in
        Darwin)
            if ! command -v ollama &> /dev/null; then
                err "Ollama not found. Install it:"
                err "  brew install ollama"
                err "  # or https://ollama.com/download"
                exit 1
            fi
            # Start if not running
            if ! curl -sf "http://localhost:$OLLAMA_PORT/api/tags" > /dev/null 2>&1; then
                info "Starting Ollama..."
                ollama serve &
                disown
            fi
            wait_for_url "http://localhost:$OLLAMA_PORT/api/tags" "Ollama"
            echo ""
            # Pull model if not already present
            if ollama list 2>/dev/null | awk '{print $1}' | grep -qxF "$MODEL"; then
                ok "Model $MODEL already pulled"
            else
                info "Pulling model $MODEL (this may take a while)..."
                ollama pull "$MODEL"
            fi
            LLM_URL_VALUE="http://host.docker.internal:$OLLAMA_PORT/v1"
            ;;
        Linux)
            if command -v nvidia-smi &> /dev/null && nvidia-smi > /dev/null 2>&1; then
                ok "NVIDIA GPU detected — using ollama-gpu profile"
                OLLAMA_PROFILE="ollama-gpu"
                OLLAMA_SVC="ollama"
                LLM_URL_VALUE="http://ollama:$OLLAMA_PORT/v1"
            else
                warn "No NVIDIA GPU — using ollama-cpu profile"
                OLLAMA_PROFILE="ollama-cpu"
                OLLAMA_SVC="ollama-cpu"
                LLM_URL_VALUE="http://ollama-cpu:$OLLAMA_PORT/v1"
            fi
            info "Starting Ollama container..."
            compose_cmd up -d
            wait_for_url "http://localhost:$OLLAMA_PORT/api/tags" "Ollama"
            echo ""
            # Pull model inside container
            if compose_cmd exec "$OLLAMA_SVC" ollama list 2>/dev/null | awk '{print $1}' | grep -qxF "$MODEL"; then
                ok "Model $MODEL already pulled"
            else
                info "Pulling model $MODEL inside container (this may take a while)..."
                compose_cmd exec "$OLLAMA_SVC" ollama pull "$MODEL"
            fi
            ;;
        *)
            err "Unsupported OS: $OS"
            exit 1
            ;;
    esac
    ok "LLM ready ($MODEL via Ollama)"
}

# =========================================================
# Step 2: Generate server/.env
# =========================================================
step_server_env() {
    info "Step 2: Generating server/.env"
    resolve_symlink "$SERVER_ENV"
    if [[ -f "$SERVER_ENV" ]]; then
        ok "server/.env already exists — ensuring standalone vars"
    else
        cat > "$SERVER_ENV" << 'ENVEOF'
# Generated by setup-standalone.sh — standalone local development
# Source of truth for settings: server/reflector/settings.py
ENVEOF
        ok "Created server/.env"
    fi
    # Ensure all standalone-critical vars (appends if missing, replaces if present)
    env_set "$SERVER_ENV" "DATABASE_URL" "postgresql+asyncpg://reflector:reflector@postgres:5432/reflector"
    env_set "$SERVER_ENV" "REDIS_HOST" "redis"
    env_set "$SERVER_ENV" "CELERY_BROKER_URL" "redis://redis:6379/1"
    env_set "$SERVER_ENV" "CELERY_RESULT_BACKEND" "redis://redis:6379/1"
    env_set "$SERVER_ENV" "AUTH_BACKEND" "none"
    env_set "$SERVER_ENV" "PUBLIC_MODE" "true"
    # TRANSCRIPT_BACKEND, TRANSCRIPT_URL, DIARIZATION_BACKEND, DIARIZATION_URL
    # are set via docker-compose.standalone.yml `environment:` overrides — not written here
    # so we don't clobber the user's server/.env for non-standalone use.
    env_set "$SERVER_ENV" "TRANSLATION_BACKEND" "passthrough"
    env_set "$SERVER_ENV" "LLM_URL" "$LLM_URL_VALUE"
    env_set "$SERVER_ENV" "LLM_MODEL" "$MODEL"
    env_set "$SERVER_ENV" "LLM_API_KEY" "not-needed"
    ok "Standalone vars set (LLM_URL=$LLM_URL_VALUE)"
}

# =========================================================
# Step 3: Object storage (Garage)
# =========================================================
step_storage() {
    info "Step 3: Object storage (Garage)"
    # Generate garage.toml from template (fill in RPC secret)
    GARAGE_TOML="$ROOT_DIR/scripts/garage.toml"
    GARAGE_TOML_RUNTIME="$ROOT_DIR/data/garage.toml"
    if [[ ! -f "$GARAGE_TOML_RUNTIME" ]]; then
        mkdir -p "$ROOT_DIR/data"
        RPC_SECRET=$(openssl rand -hex 32)
        sed "s|__GARAGE_RPC_SECRET__|${RPC_SECRET}|" "$GARAGE_TOML" > "$GARAGE_TOML_RUNTIME"
    fi
    compose_cmd up -d garage
    wait_for_url "http://localhost:3903/health" "Garage admin API"
    echo ""
    # Layout: get node ID, assign, apply (skip if already applied)
    NODE_ID=$(compose_cmd exec -T garage /garage node id -q 2>/dev/null | tr -d '[:space:]')
    LAYOUT_STATUS=$(compose_cmd exec -T garage /garage layout show 2>&1 || true)
    if echo "$LAYOUT_STATUS" | grep -q "No nodes"; then
        compose_cmd exec -T garage /garage layout assign "$NODE_ID" -c 1G -z dc1
        compose_cmd exec -T garage /garage layout apply --version 1
    fi
    # Create bucket (idempotent — skip if exists)
    if ! compose_cmd exec -T garage /garage bucket info reflector-media &>/dev/null; then
        compose_cmd exec -T garage /garage bucket create reflector-media
    fi
    # Create key (idempotent — skip if exists)
    CREATED_KEY=false
    if compose_cmd exec -T garage /garage key info reflector &>/dev/null; then
        ok "Key 'reflector' already exists"
    else
        KEY_OUTPUT=$(compose_cmd exec -T garage /garage key create reflector)
        CREATED_KEY=true
    fi
    # Grant bucket permissions (idempotent)
    compose_cmd exec -T garage /garage bucket allow reflector-media --read --write --key reflector
    # Set env vars (only parse key on first create — key info redacts the secret)
    env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_BACKEND" "aws"
    env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://garage:3900"
    env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_BUCKET_NAME" "reflector-media"
    env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_REGION" "garage"
    if [[ "$CREATED_KEY" == "true" ]]; then
        KEY_ID=$(echo "$KEY_OUTPUT" | grep -i "key id" | awk '{print $NF}')
        KEY_SECRET=$(echo "$KEY_OUTPUT" | grep -i "secret key" | awk '{print $NF}')
        env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID" "$KEY_ID"
        env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY" "$KEY_SECRET"
    fi
    ok "Object storage ready (Garage)"
}

# =========================================================
# Step 4: Generate www/.env.local
# =========================================================
step_www_env() {
    info "Step 4: Generating www/.env.local"
    resolve_symlink "$WWW_ENV"
    if [[ -f "$WWW_ENV" ]]; then
        ok "www/.env.local already exists — ensuring standalone vars"
    else
        cat > "$WWW_ENV" << 'ENVEOF'
# Generated by setup-standalone.sh — standalone local development
ENVEOF
        ok "Created www/.env.local"
    fi
    env_set "$WWW_ENV" "SITE_URL" "http://localhost:3000"
    env_set "$WWW_ENV" "NEXTAUTH_URL" "http://localhost:3000"
    env_set "$WWW_ENV" "NEXTAUTH_SECRET" "standalone-dev-secret-not-for-production"
    env_set "$WWW_ENV" "API_URL" "http://localhost:1250"
    env_set "$WWW_ENV" "WEBSOCKET_URL" "ws://localhost:1250"
    env_set "$WWW_ENV" "SERVER_API_URL" "http://server:1250"
    env_set "$WWW_ENV" "FEATURE_REQUIRE_LOGIN" "false"
    ok "Standalone www vars set"
}

# =========================================================
# Step 5: Start all services
# =========================================================
step_services() {
    info "Step 5: Starting Docker services"
    # Check for port conflicts — stale processes silently shadow Docker port mappings.
    # OrbStack/Docker Desktop bind ports for forwarding; ignore those PIDs.
    local ports_ok=true
    for port in 3000 1250 5432 6379 3900 3903; do
        local pids
        pids=$(lsof -ti :"$port" 2>/dev/null || true)
        for pid in $pids; do
            local pname
            pname=$(ps -p "$pid" -o comm= 2>/dev/null || true)
            # OrbStack and Docker Desktop own port forwarding — not real conflicts
            if [[ "$pname" == *"OrbStack"* ]] || [[ "$pname" == *"com.docker"* ]] || [[ "$pname" == *"vpnkit"* ]]; then
                continue
            fi
            warn "Port $port already in use by PID $pid ($pname)"
            warn "Kill it with: lsof -ti :$port | xargs kill"
            ports_ok=false
        done
    done
    if [[ "$ports_ok" == "false" ]]; then
        warn "Port conflicts detected — Docker containers may not be reachable"
        warn "Continuing anyway (services will start but may be shadowed)"
    fi
    # Rebuild images if source has changed (Docker layer cache makes this fast when unchanged)
    rebuild_images
    # server runs alembic migrations on startup automatically (see runserver.sh)
    compose_cmd up -d postgres redis garage cpu server worker beat web
    ok "Containers started"
    # Quick sanity check — catch containers that exit immediately (bad image, missing file, etc.)
    sleep 3
    local exited
    exited=$(compose_cmd ps -a --format '{{.Name}} {{.Status}}' 2>/dev/null \
        | grep -i 'exit' || true)
    if [[ -n "$exited" ]]; then
        warn "Some containers exited immediately:"
        echo "$exited" | while read -r line; do warn " $line"; done
        dump_diagnostics
    fi
    info "Server is running migrations (alembic upgrade head)..."
}

# =========================================================
# Step 6: Health checks
# =========================================================
step_health() {
    info "Step 6: Health checks"
    # CPU service may take a while on first start (model download + load).
    # No host port exposed — check via docker exec.
    info "Waiting for CPU service (first start downloads ~1GB of models)..."
    local cpu_ok=false
    for i in $(seq 1 120); do
        if compose_cmd exec -T cpu curl -sf http://localhost:8000/docs > /dev/null 2>&1; then
            cpu_ok=true
            break
        fi
        echo -ne "\r Waiting for CPU service... ($i/120)"
        sleep 5
    done
    echo ""
    if [[ "$cpu_ok" == "true" ]]; then
        ok "CPU service healthy (transcription + diarization)"
    else
        warn "CPU service not ready yet — it will keep loading in the background"
        warn "Check with: docker compose logs cpu"
    fi
    # Server may take a long time on first run — alembic migrations run before uvicorn starts.
    # Use docker exec so this works regardless of network_mode or port mapping.
    info "Waiting for Server API (first run includes database migrations)..."
    local server_ok=false
    for i in $(seq 1 90); do
        # Check if container is still running
        local svc_status
        svc_status=$(compose_cmd ps server --format '{{.Status}}' 2>/dev/null || true)
        if [[ -z "$svc_status" ]] || echo "$svc_status" | grep -qi 'exit'; then
            echo ""
            err "Server container exited unexpectedly"
            dump_diagnostics server
            exit 1
        fi
        # Health check from inside container (avoids host networking issues)
        if compose_cmd exec -T server curl -sf http://localhost:1250/health > /dev/null 2>&1; then
            server_ok=true
            break
        fi
        echo -ne "\r Waiting for Server API... ($i/90)"
        sleep 5
    done
    echo ""
    if [[ "$server_ok" == "true" ]]; then
        ok "Server API healthy"
    else
        err "Server API not ready after ~7 minutes"
        dump_diagnostics server
        exit 1
    fi
    wait_for_url "http://localhost:3000" "Frontend" 90 3
    echo ""
    ok "Frontend responding"
    # Check LLM reachability from inside a container
    if compose_cmd exec -T server \
        curl -sf "$LLM_URL_VALUE/models" > /dev/null 2>&1; then
        ok "LLM reachable from containers"
    else
        warn "LLM not reachable from containers at $LLM_URL_VALUE"
        warn "Summaries/topics/titles won't work until LLM is accessible"
    fi
}

# =========================================================
# Main
# =========================================================
main() {
    echo ""
    echo "=========================================="
    echo " Reflector — Standalone Local Setup"
    echo "=========================================="
    echo ""
    # Ensure we're in the repo root
    if [[ ! -f "$ROOT_DIR/docker-compose.yml" ]]; then
        err "docker-compose.yml not found in $ROOT_DIR"
        err "Run this script from the repo root: ./scripts/setup-standalone.sh"
        exit 1
    fi
    # Ensure Docker Compose V2 plugin is available.
    # Check output for "Compose" — without the plugin, `docker compose version`
    # may still exit 0 (falling through to `docker version`).
    if ! docker compose version 2>/dev/null | grep -qi compose; then
        err "Docker Compose plugin not found."
        err "Install Docker Desktop, OrbStack, or: brew install docker-compose"
        exit 1
    fi
    # Dockerfiles use RUN --mount which requires BuildKit.
    # Docker Desktop/OrbStack bundle it; Colima/bare engine need docker-buildx.
    if ! docker buildx version &>/dev/null; then
        err "Docker BuildKit (buildx) not found."
        err "Install Docker Desktop, OrbStack, or: brew install docker-buildx"
        exit 1
    fi
    # LLM_URL_VALUE is set by step_llm, used by later steps
    LLM_URL_VALUE=""
    OLLAMA_PROFILE=""
    # docker-compose.yml may reference env_files that don't exist yet;
    # touch them so compose_cmd works before the steps that populate them.
    touch "$SERVER_ENV" "$WWW_ENV"
    step_llm
    echo ""
    step_server_env
    echo ""
    step_storage
    echo ""
    step_www_env
    echo ""
    step_services
    echo ""
    step_health
    echo ""
    echo "=========================================="
    echo -e " ${GREEN}Reflector is running!${NC}"
    echo "=========================================="
    echo ""
    echo " Frontend: http://localhost:3000"
    echo " API: http://localhost:1250"
    echo ""
    echo " To stop: docker compose down"
    echo " To re-run: ./scripts/setup-standalone.sh"
    echo ""
}

main "$@"

View File

@@ -66,22 +66,15 @@ TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
## LLM backend (Required)
##
## Responsible for generating titles, summaries, and topic detection
## Supports any OpenAI-compatible endpoint.
## Requires OpenAI API key
## =======================================================
## --- Option A: Local LLM via Ollama (recommended for dev) ---
## Setup: ./scripts/setup-standalone.sh
## Mac: Ollama runs natively (Metal GPU). Containers reach it via host.docker.internal.
## Linux: docker compose --profile ollama-gpu up -d (or ollama-cpu for no GPU)
LLM_URL=http://host.docker.internal:11434/v1
LLM_MODEL=qwen2.5:14b
LLM_API_KEY=not-needed
## Linux with containerized Ollama: LLM_URL=http://ollama:11434/v1
## OpenAI API key - get from https://platform.openai.com/account/api-keys
LLM_API_KEY=sk-your-openai-api-key
LLM_MODEL=gpt-4o-mini
## --- Option B: Remote/cloud LLM ---
#LLM_API_KEY=sk-your-openai-api-key
#LLM_MODEL=gpt-4o-mini
## LLM_URL defaults to OpenAI when unset
## Optional: Custom endpoint (defaults to OpenAI)
# LLM_URL=https://api.openai.com/v1
## Context size for summary generation (tokens)
LLM_CONTEXT_WINDOW=16000

View File

@@ -68,6 +68,7 @@ evaluation = [
"pydantic>=2.1.1",
]
local = [
"pyannote-audio>=3.3.2",
"faster-whisper>=0.10.0",
]
silero-vad = [

View File

@@ -22,8 +22,6 @@ def asynctask(f):
        await database.disconnect()

    coro = run_with_db()
    if current_task:
        return asyncio.run(coro)
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:

View File

@@ -12,5 +12,3 @@ AccessTokenInfo = auth_module.AccessTokenInfo
authenticated = auth_module.authenticated
current_user = auth_module.current_user
current_user_optional = auth_module.current_user_optional
parse_ws_bearer_token = auth_module.parse_ws_bearer_token
current_user_ws_optional = auth_module.current_user_ws_optional

View File

@@ -1,9 +1,6 @@
from typing import TYPE_CHECKING, Annotated, List, Optional
from typing import Annotated, List, Optional

from fastapi import Depends, HTTPException

if TYPE_CHECKING:
    from fastapi import WebSocket

from fastapi.security import APIKeyHeader, OAuth2PasswordBearer
from jose import JWTError, jwt
from pydantic import BaseModel
@@ -127,20 +124,3 @@ async def current_user_optional(
    jwtauth: JWTAuth = Depends(),
):
    return await _authenticate_user(jwt_token, api_key, jwtauth)


def parse_ws_bearer_token(
    websocket: "WebSocket",
) -> tuple[Optional[str], Optional[str]]:
    raw = websocket.headers.get("sec-websocket-protocol") or ""
    parts = [p.strip() for p in raw.split(",") if p.strip()]
    if len(parts) >= 2 and parts[0].lower() == "bearer":
        return parts[1], "bearer"
    return None, None


async def current_user_ws_optional(websocket: "WebSocket") -> Optional[UserInfo]:
    token, _ = parse_ws_bearer_token(websocket)
    if not token:
        return None
    return await _authenticate_user(token, None, JWTAuth())

View File

@@ -1,5 +1,11 @@
from typing import Annotated

from fastapi import Depends
from fastapi.security import OAuth2PasswordBearer
from pydantic import BaseModel

oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)


class UserInfo(BaseModel):
    sub: str
@@ -9,21 +15,13 @@ class AccessTokenInfo(BaseModel):
    pass


def authenticated():
def authenticated(token: Annotated[str, Depends(oauth2_scheme)]):
    return None


def current_user():
def current_user(token: Annotated[str, Depends(oauth2_scheme)]):
    return None


def current_user_optional():
    return None


def parse_ws_bearer_token(websocket):
    return None, None


async def current_user_ws_optional(websocket):
def current_user_optional(token: Annotated[str, Depends(oauth2_scheme)]):
    return None

View File

@@ -146,8 +146,6 @@ class DailyApiClient:
            )
            raise DailyApiError(operation, response)
        if not response.content:
            return {}
        return response.json()


# ============================================================================

View File

@@ -99,7 +99,7 @@ def extract_room_name(event: DailyWebhookEvent) -> str | None:
    >>> event = DailyWebhookEvent(**webhook_payload)
    >>> room_name = extract_room_name(event)
    """
    room = event.payload.get("room_name") or event.payload.get("room")
    room = event.payload.get("room_name")
    # Ensure we return a string, not any falsy value that might be in payload
    return room if isinstance(room, str) else None

View File

@@ -6,7 +6,7 @@ Reference: https://docs.daily.co/reference/rest-api/webhooks
from typing import Annotated, Any, Dict, Literal, Union

from pydantic import AliasChoices, BaseModel, ConfigDict, Field, field_validator
from pydantic import BaseModel, Field, field_validator

from reflector.utils.string import NonEmptyString
@@ -41,8 +41,6 @@ class DailyTrack(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/recordings
    """

    model_config = ConfigDict(extra="ignore")

    type: Literal["audio", "video"]
    s3Key: NonEmptyString = Field(description="S3 object key for the track file")
    size: int = Field(description="File size in bytes")
@@ -56,8 +54,6 @@ class DailyWebhookEvent(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks
    """

    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString = Field(
        description="Represents the version of the event. This uses semantic versioning to inform a consumer if the payload has introduced any breaking changes"
    )
@@ -86,13 +82,7 @@ class ParticipantJoinedPayload(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/participant-joined
    """

    model_config = ConfigDict(extra="ignore")

    room_name: NonEmptyString | None = Field(
        None,
        description="Daily.co room name",
        validation_alias=AliasChoices("room_name", "room"),
    )
    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    session_id: NonEmptyString = Field(description="Daily.co session identifier")
    user_id: NonEmptyString = Field(description="User identifier (may be encoded)")
    user_name: NonEmptyString | None = Field(None, description="User display name")
@@ -110,13 +100,7 @@ class ParticipantLeftPayload(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/participant-left
    """

    model_config = ConfigDict(extra="ignore")

    room_name: NonEmptyString | None = Field(
        None,
        description="Daily.co room name",
        validation_alias=AliasChoices("room_name", "room"),
    )
    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    session_id: NonEmptyString = Field(description="Daily.co session identifier")
    user_id: NonEmptyString = Field(description="User identifier (may be encoded)")
    user_name: NonEmptyString | None = Field(None, description="User display name")
@@ -128,9 +112,6 @@ class ParticipantLeftPayload(BaseModel):
    _normalize_joined_at = field_validator("joined_at", mode="before")(
        normalize_timestamp_to_int
    )
    _normalize_duration = field_validator("duration", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingStartedPayload(BaseModel):
@@ -140,8 +121,6 @@ class RecordingStartedPayload(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-started
    """

    model_config = ConfigDict(extra="ignore")

    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    recording_id: NonEmptyString = Field(description="Recording identifier")
    start_ts: int | None = Field(None, description="Recording start timestamp")
@@ -159,9 +138,7 @@ class RecordingReadyToDownloadPayload(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-ready-to-download
    """

    model_config = ConfigDict(extra="ignore")

    type: Literal["cloud", "cloud-audio-only", "raw-tracks"] = Field(
    type: Literal["cloud", "raw-tracks"] = Field(
        description="The type of recording that was generated"
    )
    recording_id: NonEmptyString = Field(
@@ -176,9 +153,8 @@ class RecordingReadyToDownloadPayload(BaseModel):
    status: Literal["finished"] = Field(
        description="The status of the given recording (always 'finished' in ready-to-download webhook, see RecordingStatus in responses.py for full API statuses)"
    )
    max_participants: int | None = Field(
        None,
        description="The number of participants on the call that were recorded (optional; Daily may omit it in some webhook versions)",
    max_participants: int = Field(
        description="The number of participants on the call that were recorded"
    )
    duration: int = Field(description="The duration in seconds of the call")
    s3_key: NonEmptyString = Field(
@@ -204,8 +180,6 @@ class RecordingErrorPayload(BaseModel):
    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-error
    """

    model_config = ConfigDict(extra="ignore")

    action: Literal["clourd-recording-err", "cloud-recording-error"] = Field(
        description="A string describing the event that was emitted (both variants are documented)"
    )
@@ -226,8 +200,6 @@ class RecordingErrorPayload(BaseModel):
class ParticipantJoinedEvent(BaseModel):
    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString
    type: Literal["participant.joined"]
    id: NonEmptyString
@@ -240,8 +212,6 @@ class ParticipantJoinedEvent(BaseModel):
class ParticipantLeftEvent(BaseModel):
    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString
    type: Literal["participant.left"]
    id: NonEmptyString
@@ -254,8 +224,6 @@ class ParticipantLeftEvent(BaseModel):
class RecordingStartedEvent(BaseModel):
    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString
    type: Literal["recording.started"]
    id: NonEmptyString
@@ -268,8 +236,6 @@ class RecordingStartedEvent(BaseModel):
class RecordingReadyEvent(BaseModel):
    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString
    type: Literal["recording.ready-to-download"]
    id: NonEmptyString
@@ -282,8 +248,6 @@ class RecordingErrorEvent(BaseModel):
class RecordingErrorEvent(BaseModel):
    model_config = ConfigDict(extra="ignore")

    version: NonEmptyString
    type: Literal["recording.error"]
    id: NonEmptyString

View File

@@ -26,7 +26,6 @@ from reflector.db.rooms import rooms
from reflector.db.transcripts import SourceKind, TranscriptStatus, transcripts
from reflector.db.utils import is_postgresql
from reflector.logger import logger
from reflector.settings import settings
from reflector.utils.string import NonEmptyString, try_parse_non_empty_string

DEFAULT_SEARCH_LIMIT = 20
@@ -397,7 +396,7 @@ class SearchController:
                    transcripts.c.user_id == params.user_id, rooms.c.is_shared
                )
            )
        elif not settings.PUBLIC_MODE:
        else:
            base_query = base_query.where(rooms.c.is_shared)
        if params.room_id:
            base_query = base_query.where(transcripts.c.room_id == params.room_id)

View File

@@ -5,10 +5,7 @@ import shutil
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal, Sequence

if TYPE_CHECKING:
    from reflector.ws_events import TranscriptEventName
from typing import Any, Literal, Sequence

import sqlalchemy
from fastapi import HTTPException
@@ -187,7 +184,7 @@ class TranscriptWaveform(BaseModel):
class TranscriptEvent(BaseModel):
    event: str  # Typed at call sites via ws_events.TranscriptEventName; str here for DB compat
    event: str
    data: dict
@@ -236,9 +233,7 @@ class Transcript(BaseModel):
            dt = dt.replace(tzinfo=timezone.utc)
        return dt.isoformat()

    def add_event(
        self, event: "TranscriptEventName", data: BaseModel
    ) -> TranscriptEvent:
    def add_event(self, event: str, data: BaseModel) -> TranscriptEvent:
        ev = TranscriptEvent(event=event, data=data.model_dump())
        self.events.append(ev)
        return ev
@@ -411,7 +406,7 @@ class TranscriptController:
                query = query.where(
                    or_(transcripts.c.user_id == user_id, rooms.c.is_shared)
                )
            elif not settings.PUBLIC_MODE:
            else:
                query = query.where(rooms.c.is_shared)
        if source_kind:
@@ -693,7 +688,7 @@ class TranscriptController:
    async def append_event(
        self,
        transcript: Transcript,
        event: "TranscriptEventName",
        event: str,
        data: Any,
    ) -> TranscriptEvent:
        """

View File

@@ -12,11 +12,10 @@ import structlog
from reflector.db.transcripts import Transcript, TranscriptEvent, transcripts_controller
from reflector.utils.string import NonEmptyString
from reflector.ws_events import TranscriptEventName
from reflector.ws_manager import get_ws_manager

# Events that should also be sent to user room (matches Celery behavior)
USER_ROOM_EVENTS: set[TranscriptEventName] = {"STATUS", "FINAL_TITLE", "DURATION"}
USER_ROOM_EVENTS = {"STATUS", "FINAL_TITLE", "DURATION"}


async def broadcast_event(
@@ -82,7 +81,8 @@ async def set_status_and_broadcast(
async def append_event_and_broadcast(
    transcript_id: NonEmptyString,
    transcript: Transcript,
    event_name: TranscriptEventName,
    event_name: NonEmptyString,
    # TODO proper dictionary event => type
    data: Any,
    logger: structlog.BoundLogger,
) -> TranscriptEvent:

View File

@@ -12,9 +12,7 @@ import threading
from hatchet_sdk import ClientConfig, Hatchet
from hatchet_sdk.clients.rest.models import V1TaskStatus
from hatchet_sdk.rate_limit import RateLimitDuration

from reflector.hatchet.constants import LLM_RATE_LIMIT_KEY, LLM_RATE_LIMIT_PER_SECOND
from reflector.logger import logger
from reflector.settings import settings
@@ -115,26 +113,3 @@ class HatchetClientManager:
        """Reset the client instance (for testing)."""
        with cls._lock:
            cls._instance = None

    @classmethod
    async def ensure_rate_limit(cls) -> None:
        """Ensure the LLM rate limit exists in Hatchet.

        Uses the Hatchet SDK rate_limits client (aio_put). See:
        https://docs.hatchet.run/sdks/python/feature-clients/rate_limits
        """
        logger.info(
            "[Hatchet] Ensuring rate limit exists",
            rate_limit_key=LLM_RATE_LIMIT_KEY,
            limit=LLM_RATE_LIMIT_PER_SECOND,
        )
        client = cls.get_client()
        await client.rate_limits.aio_put(
            key=LLM_RATE_LIMIT_KEY,
            limit=LLM_RATE_LIMIT_PER_SECOND,
            duration=RateLimitDuration.SECOND,
        )
        logger.info(
            "[Hatchet] Rate limit put successfully",
            rate_limit_key=LLM_RATE_LIMIT_KEY,
        )

View File

@@ -0,0 +1,144 @@
"""
Hatchet DAG Status -> Zulip Live Updates.
Posts/updates/deletes a Zulip message showing the Hatchet workflow DAG status.
All functions are fire-and-forget (catch + warning log on failure).
Note: Uses deferred imports throughout for fork-safety,
consistent with the pipeline pattern in daily_multitrack_pipeline.py.
"""
from reflector.logger import logger
from reflector.settings import settings
def _dag_zulip_enabled() -> bool:
return bool(
settings.ZULIP_REALM and settings.ZULIP_DAG_STREAM and settings.ZULIP_DAG_TOPIC
)
async def create_dag_zulip_message(transcript_id: str, workflow_run_id: str) -> None:
"""Post initial DAG status to Zulip. Called at dispatch time (normal DB context)."""
if not _dag_zulip_enabled():
return
try:
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
from reflector.hatchet.client import HatchetClientManager # noqa: PLC0415
from reflector.tools.render_hatchet_run import ( # noqa: PLC0415
render_run_detail,
)
from reflector.zulip import send_message_to_zulip # noqa: PLC0415
client = HatchetClientManager.get_client()
details = await client.runs.aio_get(workflow_run_id)
content = render_run_detail(details)
response = await send_message_to_zulip(
settings.ZULIP_DAG_STREAM, settings.ZULIP_DAG_TOPIC, content
)
message_id = response.get("id")
if message_id:
transcript = await transcripts_controller.get_by_id(transcript_id)
if transcript:
await transcripts_controller.update(
transcript, {"zulip_message_id": message_id}
)
except Exception:
logger.warning(
"[DAG Zulip] Failed to create DAG message",
transcript_id=transcript_id,
workflow_run_id=workflow_run_id,
exc_info=True,
)
async def update_dag_zulip_message(
transcript_id: str,
workflow_run_id: str,
error_message: str | None = None,
) -> None:
"""Update existing DAG status in Zulip. Called from Hatchet worker (forked).
Args:
error_message: If set, appended as an error banner to the rendered DAG.
"""
if not _dag_zulip_enabled():
return
try:
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
from reflector.hatchet.client import HatchetClientManager # noqa: PLC0415
from reflector.hatchet.workflows.daily_multitrack_pipeline import ( # noqa: PLC0415
fresh_db_connection,
)
from reflector.tools.render_hatchet_run import ( # noqa: PLC0415
render_run_detail,
)
from reflector.zulip import update_zulip_message # noqa: PLC0415
async with fresh_db_connection():
transcript = await transcripts_controller.get_by_id(transcript_id)
if not transcript or not transcript.zulip_message_id:
return
client = HatchetClientManager.get_client()
details = await client.runs.aio_get(workflow_run_id)
content = render_run_detail(details)
if error_message:
content += f"\n\n:cross_mark: **{error_message}**"
await update_zulip_message(
transcript.zulip_message_id,
settings.ZULIP_DAG_STREAM,
settings.ZULIP_DAG_TOPIC,
content,
)
except Exception:
logger.warning(
"[DAG Zulip] Failed to update DAG message",
transcript_id=transcript_id,
workflow_run_id=workflow_run_id,
exc_info=True,
)
async def delete_dag_zulip_message(transcript_id: str) -> None:
"""Delete DAG Zulip message and clear zulip_message_id.
Called from post_zulip task (already inside fresh_db_connection).
Swallows InvalidMessageError (message already deleted).
"""
if not _dag_zulip_enabled():
return
try:
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
from reflector.zulip import ( # noqa: PLC0415
InvalidMessageError,
delete_zulip_message,
)
transcript = await transcripts_controller.get_by_id(transcript_id)
if not transcript or not transcript.zulip_message_id:
return
try:
await delete_zulip_message(transcript.zulip_message_id)
except InvalidMessageError:
logger.warning(
"[DAG Zulip] Message already deleted",
transcript_id=transcript_id,
zulip_message_id=transcript.zulip_message_id,
)
await transcripts_controller.update(transcript, {"zulip_message_id": None})
except Exception:
logger.warning(
"[DAG Zulip] Failed to delete DAG message",
transcript_id=transcript_id,
exc_info=True,
)

View File

@@ -3,8 +3,6 @@ LLM/I/O worker pool for all non-CPU tasks.
Handles: all tasks except mixdown_tracks (transcription, LLM inference, orchestration)
"""
import asyncio
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
daily_multitrack_pipeline,
@@ -22,15 +20,6 @@ POOL = "llm-io"
def main():
hatchet = HatchetClientManager.get_client()
try:
asyncio.run(HatchetClientManager.ensure_rate_limit())
except Exception as e:
logger.warning(
"[Hatchet] Rate limit initialization failed, but continuing. "
"If workflows fail to register, rate limits may need to be created manually.",
error=str(e),
)
logger.info(
"Starting Hatchet LLM worker pool (all tasks except mixdown)",
worker_name=WORKER_NAME,

View File

@@ -45,6 +45,7 @@ from reflector.hatchet.constants import (
TIMEOUT_SHORT,
TaskName,
)
from reflector.hatchet.dag_zulip import update_dag_zulip_message
from reflector.hatchet.workflows.models import (
ActionItemsResult,
ConsentResult,
@@ -171,13 +172,11 @@ async def set_workflow_error_status(transcript_id: NonEmptyString) -> bool:
def _spawn_storage():
"""Create fresh storage instance."""
# TODO: replace direct AwsStorage construction with get_transcripts_storage() factory
return AwsStorage(
aws_bucket_name=settings.TRANSCRIPT_STORAGE_AWS_BUCKET_NAME,
aws_region=settings.TRANSCRIPT_STORAGE_AWS_REGION,
aws_access_key_id=settings.TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY,
aws_endpoint_url=settings.TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL,
)
@@ -240,7 +239,14 @@ def with_error_handling(
@functools.wraps(func)
async def wrapper(input: PipelineInput, ctx: Context) -> R:
try:
return await func(input, ctx)
result = await func(input, ctx)
try:
await update_dag_zulip_message(
input.transcript_id, ctx.workflow_run_id
)
except Exception:
pass
return result
except Exception as e:
logger.error(
f"[Hatchet] {step_name} failed",
@@ -248,6 +254,14 @@ def with_error_handling(
error=str(e),
exc_info=True,
)
try:
await update_dag_zulip_message(
input.transcript_id,
ctx.workflow_run_id,
error_message=f"{step_name} failed: {e}",
)
except Exception:
pass
if set_error_status:
await set_workflow_error_status(input.transcript_id)
raise
@@ -1296,6 +1310,11 @@ async def post_zulip(input: PipelineInput, ctx: Context) -> ZulipResult:
async with fresh_db_connection():
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
from reflector.hatchet.dag_zulip import ( # noqa: PLC0415
delete_dag_zulip_message,
)
await delete_dag_zulip_message(input.transcript_id)
transcript = await transcripts_controller.get_by_id(input.transcript_id)
if transcript:
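The wrapper change above follows a wrap-and-notify pattern: a notification fires after success and after failure, but is never allowed to fail the task itself. A generic sketch of that pattern with hypothetical names (with_notification and notify are illustrations, not project APIs):

import functools

def with_notification(step_name, notify):
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            try:
                result = await func(*args, **kwargs)
            except Exception as e:
                try:
                    await notify(error_message=f"{step_name} failed: {e}")
                except Exception:
                    pass  # notification is best-effort; never mask the real error
                raise
            try:
                await notify()
            except Exception:
                pass  # a failed status update must not fail a succeeded task
            return result
        return wrapper
    return decorator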

View File

@@ -49,13 +49,11 @@ async def pad_track(input: PaddingInput, ctx: Context) -> PadTrackResult:
from reflector.settings import settings # noqa: PLC0415
from reflector.storage.storage_aws import AwsStorage # noqa: PLC0415
# TODO: replace direct AwsStorage construction with get_transcripts_storage() factory
storage = AwsStorage(
aws_bucket_name=settings.TRANSCRIPT_STORAGE_AWS_BUCKET_NAME,
aws_region=settings.TRANSCRIPT_STORAGE_AWS_REGION,
aws_access_key_id=settings.TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY,
aws_endpoint_url=settings.TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL,
)
source_url = await storage.get_file_url(

View File

@@ -60,7 +60,6 @@ async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
try:
# Create fresh storage instance to avoid aioboto3 fork issues
# TODO: replace direct AwsStorage construction with get_transcripts_storage() factory
from reflector.settings import settings # noqa: PLC0415
from reflector.storage.storage_aws import AwsStorage # noqa: PLC0415
@@ -69,7 +68,6 @@ async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
aws_region=settings.TRANSCRIPT_STORAGE_AWS_REGION,
aws_access_key_id=settings.TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY,
aws_endpoint_url=settings.TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL,
)
source_url = await storage.get_file_url(
@@ -161,7 +159,6 @@ async def transcribe_track(input: TrackInput, ctx: Context) -> TranscribeTrackRe
raise ValueError("Missing padded_key from pad_track")
# Presign URL on demand (avoids stale URLs on workflow replay)
# TODO: replace direct AwsStorage construction with get_transcripts_storage() factory
from reflector.settings import settings # noqa: PLC0415
from reflector.storage.storage_aws import AwsStorage # noqa: PLC0415
@@ -170,7 +167,6 @@ async def transcribe_track(input: TrackInput, ctx: Context) -> TranscribeTrackRe
aws_region=settings.TRANSCRIPT_STORAGE_AWS_REGION,
aws_access_key_id=settings.TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY,
aws_endpoint_url=settings.TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL,
)
audio_url = await storage.get_file_url(

View File

@@ -144,18 +144,7 @@ class StructuredOutputWorkflow(Workflow, Generic[OutputT]):
)
# Network retries handled by OpenAILike (max_retries=3)
# response_format enables grammar-based constrained decoding on backends
# that support it (DMR/llama.cpp, vLLM, Ollama, OpenAI).
response = await Settings.llm.acomplete(
json_prompt,
response_format={
"type": "json_schema",
"json_schema": {
"name": self.output_cls.__name__,
"schema": self.output_cls.model_json_schema(),
},
},
)
response = await Settings.llm.acomplete(json_prompt)
return ExtractionDone(output=response.text)
@step

View File

@@ -62,8 +62,6 @@ from reflector.processors.types import (
from reflector.processors.types import Transcript as TranscriptProcessorType
from reflector.settings import settings
from reflector.storage import get_transcripts_storage
from reflector.views.transcripts import GetTranscriptTopic
from reflector.ws_events import TranscriptEventName
from reflector.ws_manager import WebsocketManager, get_ws_manager
from reflector.zulip import (
get_zulip_message,
@@ -91,11 +89,7 @@ def broadcast_to_sockets(func):
if transcript and transcript.user_id:
# Emit only relevant events to the user room to avoid noisy updates.
# Allowed: STATUS, FINAL_TITLE, DURATION. All are prefixed with TRANSCRIPT_
allowed_user_events: set[TranscriptEventName] = {
"STATUS",
"FINAL_TITLE",
"DURATION",
}
allowed_user_events = {"STATUS", "FINAL_TITLE", "DURATION"}
if resp.event in allowed_user_events:
await self.ws_manager.send_json(
room_id=f"user:{transcript.user_id}",
@@ -250,14 +244,13 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
)
if isinstance(data, TitleSummaryWithIdProcessorType):
topic.id = data.id
get_topic = GetTranscriptTopic.from_transcript_topic(topic)
async with self.transaction():
transcript = await self.get_transcript()
await transcripts_controller.upsert_topic(transcript, topic)
return await transcripts_controller.append_event(
transcript=transcript,
event="TOPIC",
data=get_topic,
data=topic,
)
@broadcast_to_sockets

View File

@@ -0,0 +1,74 @@
import os
import torch
import torchaudio
from pyannote.audio import Pipeline
from reflector.processors.audio_diarization import AudioDiarizationProcessor
from reflector.processors.audio_diarization_auto import AudioDiarizationAutoProcessor
from reflector.processors.types import AudioDiarizationInput, DiarizationSegment
class AudioDiarizationPyannoteProcessor(AudioDiarizationProcessor):
"""Local diarization processor using pyannote.audio library"""
def __init__(
self,
model_name: str = "pyannote/speaker-diarization-3.1",
pyannote_auth_token: str | None = None,
device: str | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.model_name = model_name
self.auth_token = pyannote_auth_token or os.environ.get("HF_TOKEN")
self.device = device
if device is None:
self.device = "cuda" if torch.cuda.is_available() else "cpu"
self.logger.info(f"Loading pyannote diarization model: {self.model_name}")
self.diarization_pipeline = Pipeline.from_pretrained(
self.model_name, use_auth_token=self.auth_token
)
self.diarization_pipeline.to(torch.device(self.device))
self.logger.info(f"Diarization model loaded on device: {self.device}")
async def _diarize(self, data: AudioDiarizationInput) -> list[DiarizationSegment]:
try:
# Load audio file (audio_url is assumed to be a local file path)
self.logger.info(f"Loading local audio file: {data.audio_url}")
waveform, sample_rate = torchaudio.load(data.audio_url)
audio_input = {"waveform": waveform, "sample_rate": sample_rate}
self.logger.info("Running speaker diarization")
diarization = self.diarization_pipeline(audio_input)
# Convert pyannote diarization output to our format
segments = []
for segment, _, speaker in diarization.itertracks(yield_label=True):
# Extract speaker number from label (e.g., "SPEAKER_00" -> 0)
speaker_id = 0
if speaker.startswith("SPEAKER_"):
try:
speaker_id = int(speaker.split("_")[-1])
except (ValueError, IndexError):
# Fallback to hash-based ID if parsing fails
speaker_id = hash(speaker) % 1000
segments.append(
{
"start": round(segment.start, 3),
"end": round(segment.end, 3),
"speaker": speaker_id,
}
)
self.logger.info(f"Diarization completed with {len(segments)} segments")
return segments
except Exception as e:
self.logger.exception(f"Diarization failed: {e}")
raise
AudioDiarizationAutoProcessor.register("pyannote", AudioDiarizationPyannoteProcessor)
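A direct-use sketch of the new processor, assuming the module lands at reflector.processors.audio_diarization_pyannote and that AudioDiarizationInput needs only audio_url (both are assumptions; the compare view does not show the new file's name, and in the app the processor is normally selected via DIARIZATION_BACKEND=pyannote). An HF_TOKEN in the environment is assumed for model download:

import asyncio

# Assumed module path; the diff does not show the new file's name.
from reflector.processors.audio_diarization_pyannote import (
    AudioDiarizationPyannoteProcessor,
)
from reflector.processors.types import AudioDiarizationInput

async def demo() -> None:
    # Model loads in __init__; device="cpu" avoids requiring CUDA.
    processor = AudioDiarizationPyannoteProcessor(device="cpu")
    # audio_url is treated as a local file path by _diarize.
    segments = await processor._diarize(
        AudioDiarizationInput(audio_url="/tmp/meeting.wav")
    )
    for seg in segments:
        print(seg["start"], seg["end"], seg["speaker"])

asyncio.run(demo())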

View File

@@ -17,6 +17,7 @@ from hatchet_sdk.clients.rest.models import V1TaskStatus
from reflector.db.recordings import recordings_controller
from reflector.db.transcripts import Transcript, transcripts_controller
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.dag_zulip import create_dag_zulip_message
from reflector.logger import logger
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
from reflector.utils.string import NonEmptyString
@@ -97,11 +98,8 @@ async def validate_transcript_for_processing(
if transcript.locked:
return ValidationLocked(detail="Recording is locked")
if (
transcript.status == "idle"
and not transcript.workflow_run_id
and not transcript.recording_id
):
# Check if recording is ready for processing
if transcript.status == "idle" and not transcript.workflow_run_id:
return ValidationNotReady(detail="Recording is not ready for processing")
# Check Celery tasks
@@ -269,6 +267,16 @@ async def dispatch_transcript_processing(
transcript, {"workflow_run_id": workflow_id}
)
try:
await create_dag_zulip_message(config.transcript_id, workflow_id)
except Exception:
logger.warning(
"[DAG Zulip] Failed to create DAG message at dispatch",
transcript_id=config.transcript_id,
workflow_id=workflow_id,
exc_info=True,
)
logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
return None

View File

@@ -49,7 +49,6 @@ class Settings(BaseSettings):
TRANSCRIPT_STORAGE_AWS_REGION: str = "us-east-1"
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID: str | None = None
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None
TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL: str | None = None
# Platform-specific recording storage (follows {PREFIX}_STORAGE_AWS_{CREDENTIAL} pattern)
# Whereby storage configuration
@@ -85,7 +84,9 @@ class Settings(BaseSettings):
)
# Diarization
# backend: modal — HTTP API client (works with Modal.com OR self-hosted gpu/self_hosted/)
# backends:
# - pyannote: in-process model loading (no HTTP, runs in same process)
# - modal: HTTP API client (works with Modal.com OR self-hosted gpu/self_hosted/)
DIARIZATION_ENABLED: bool = True
DIARIZATION_BACKEND: str = "modal"
DIARIZATION_URL: str | None = None
@@ -94,6 +95,9 @@ class Settings(BaseSettings):
# Diarization: modal backend
DIARIZATION_MODAL_API_KEY: str | None = None
# Diarization: local pyannote.audio
DIARIZATION_PYANNOTE_AUTH_TOKEN: str | None = None
# Audio Padding (Modal.com backend)
PADDING_URL: str | None = None
PADDING_MODAL_API_KEY: str | None = None
@@ -157,6 +161,9 @@ class Settings(BaseSettings):
ZULIP_REALM: str | None = None
ZULIP_API_KEY: str | None = None
ZULIP_BOT_EMAIL: str | None = None
ZULIP_DAG_STREAM: str | None = None
ZULIP_DAG_TOPIC: str | None = None
ZULIP_HOST_HEADER: str | None = None
# Hatchet workflow orchestration (always enabled for multitrack processing)
HATCHET_CLIENT_TOKEN: str | None = None
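An illustrative .env fragment wiring the new settings together (all values are placeholders):

# Zulip DAG live-status target
ZULIP_DAG_STREAM=reflector-pipelines
ZULIP_DAG_TOPIC=dag-status
# Host header override for when the realm is reached via an internal address
ZULIP_HOST_HEADER=zulip.example.com
# In-process diarization instead of the HTTP backend
DIARIZATION_BACKEND=pyannote
DIARIZATION_PYANNOTE_AUTH_TOKEN=hf_placeholder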

View File

@@ -53,7 +53,6 @@ class AwsStorage(Storage):
aws_access_key_id: str | None = None,
aws_secret_access_key: str | None = None,
aws_role_arn: str | None = None,
aws_endpoint_url: str | None = None,
):
if not aws_bucket_name:
raise ValueError("Storage `aws_storage` require `aws_bucket_name`")
@@ -74,26 +73,17 @@ class AwsStorage(Storage):
self._access_key_id = aws_access_key_id
self._secret_access_key = aws_secret_access_key
self._role_arn = aws_role_arn
self._endpoint_url = aws_endpoint_url
self.aws_folder = ""
if "/" in aws_bucket_name:
self._bucket_name, self.aws_folder = aws_bucket_name.split("/", 1)
config_kwargs: dict = {"retries": {"max_attempts": 3, "mode": "adaptive"}}
if aws_endpoint_url:
config_kwargs["s3"] = {"addressing_style": "path"}
self.boto_config = Config(**config_kwargs)
self.boto_config = Config(retries={"max_attempts": 3, "mode": "adaptive"})
self.session = aioboto3.Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=aws_region,
)
if aws_endpoint_url:
self.base_url = f"{aws_endpoint_url}/{self._bucket_name}/"
else:
self.base_url = f"https://{self._bucket_name}.s3.amazonaws.com/"
self.base_url = f"https://{self._bucket_name}.s3.amazonaws.com/"
# Implement credential properties
@property
@@ -149,9 +139,7 @@ class AwsStorage(Storage):
s3filename = f"{folder}/{filename}" if folder else filename
logger.info(f"Uploading {filename} to S3 {actual_bucket}/{folder}")
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
if isinstance(data, bytes):
await client.put_object(Bucket=actual_bucket, Key=s3filename, Body=data)
else:
@@ -174,9 +162,7 @@ class AwsStorage(Storage):
actual_bucket = bucket or self._bucket_name
folder = self.aws_folder
s3filename = f"{folder}/{filename}" if folder else filename
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
presigned_url = await client.generate_presigned_url(
operation,
Params={"Bucket": actual_bucket, "Key": s3filename},
@@ -191,9 +177,7 @@ class AwsStorage(Storage):
folder = self.aws_folder
logger.info(f"Deleting {filename} from S3 {actual_bucket}/{folder}")
s3filename = f"{folder}/{filename}" if folder else filename
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
await client.delete_object(Bucket=actual_bucket, Key=s3filename)
@handle_s3_client_errors("download")
@@ -202,9 +186,7 @@ class AwsStorage(Storage):
folder = self.aws_folder
logger.info(f"Downloading {filename} from S3 {actual_bucket}/{folder}")
s3filename = f"{folder}/{filename}" if folder else filename
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
response = await client.get_object(Bucket=actual_bucket, Key=s3filename)
return await response["Body"].read()
@@ -219,9 +201,7 @@ class AwsStorage(Storage):
logger.info(f"Listing objects from S3 {actual_bucket} with prefix '{s3prefix}'")
keys = []
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
paginator = client.get_paginator("list_objects_v2")
async for page in paginator.paginate(Bucket=actual_bucket, Prefix=s3prefix):
if "Contents" in page:
@@ -247,9 +227,7 @@ class AwsStorage(Storage):
folder = self.aws_folder
logger.info(f"Streaming {filename} from S3 {actual_bucket}/{folder}")
s3filename = f"{folder}/{filename}" if folder else filename
async with self.session.client(
"s3", config=self.boto_config, endpoint_url=self._endpoint_url
) as client:
async with self.session.client("s3", config=self.boto_config) as client:
await client.download_fileobj(
Bucket=actual_bucket, Key=s3filename, Fileobj=fileobj
)

View File

@@ -0,0 +1,412 @@
"""
Render Hatchet workflow runs as text DAG.
Usage:
# Show latest 5 runs (summary table)
uv run -m reflector.tools.render_hatchet_run
# Show specific run with full DAG + task details
uv run -m reflector.tools.render_hatchet_run <workflow_run_id>
# Drill into Nth run from the list (1-indexed)
uv run -m reflector.tools.render_hatchet_run --show 1
# Show latest N runs
uv run -m reflector.tools.render_hatchet_run --last 10
# Filter by status
uv run -m reflector.tools.render_hatchet_run --status FAILED
uv run -m reflector.tools.render_hatchet_run --status RUNNING
"""
import argparse
import asyncio
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from hatchet_sdk.clients.rest.models import (
V1TaskEvent,
V1TaskStatus,
V1TaskSummary,
V1WorkflowRunDetails,
WorkflowRunShapeItemForWorkflowRunDetails,
)
from reflector.hatchet.client import HatchetClientManager
STATUS_ICON = {
V1TaskStatus.COMPLETED: "\u2705",
V1TaskStatus.RUNNING: "\u23f3",
V1TaskStatus.FAILED: "\u274c",
V1TaskStatus.QUEUED: "\u23f8\ufe0f",
V1TaskStatus.CANCELLED: "\u26a0\ufe0f",
}
STATUS_LABEL = {
V1TaskStatus.COMPLETED: "Complete",
V1TaskStatus.RUNNING: "Running",
V1TaskStatus.FAILED: "FAILED",
V1TaskStatus.QUEUED: "Queued",
V1TaskStatus.CANCELLED: "Cancelled",
}
def _fmt_time(dt: datetime | None) -> str:
if dt is None:
return "-"
return dt.strftime("%H:%M:%S")
def _fmt_duration(ms: int | None) -> str:
if ms is None:
return "-"
secs = ms / 1000
if secs < 60:
return f"{secs:.1f}s"
mins = secs / 60
return f"{mins:.1f}m"
def _fmt_status_line(task: V1TaskSummary) -> str:
"""Format a status line like: Complete (finished 20:31:44)"""
label = STATUS_LABEL.get(task.status, task.status.value)
icon = STATUS_ICON.get(task.status, "?")
if task.status == V1TaskStatus.COMPLETED and task.finished_at:
return f"{icon} {label} (finished {_fmt_time(task.finished_at)})"
elif task.status == V1TaskStatus.RUNNING and task.started_at:
parts = [f"started {_fmt_time(task.started_at)}"]
if task.duration:
parts.append(f"{_fmt_duration(task.duration)} elapsed")
return f"{icon} {label} ({', '.join(parts)})"
elif task.status == V1TaskStatus.FAILED and task.finished_at:
return f"{icon} {label} (failed {_fmt_time(task.finished_at)})"
elif task.status == V1TaskStatus.CANCELLED:
return f"{icon} {label}"
elif task.status == V1TaskStatus.QUEUED:
return f"{icon} {label}"
return f"{icon} {label}"
def _topo_sort(
shape: list[WorkflowRunShapeItemForWorkflowRunDetails],
) -> list[str]:
"""Topological sort of step_ids from shape DAG."""
step_ids = {s.step_id for s in shape}
children_map: dict[str, list[str]] = {}
in_degree: dict[str, int] = {sid: 0 for sid in step_ids}
for s in shape:
children = [c for c in (s.children_step_ids or []) if c in step_ids]
children_map[s.step_id] = children
for c in children:
in_degree[c] += 1
queue = sorted(sid for sid, deg in in_degree.items() if deg == 0)
result: list[str] = []
while queue:
node = queue.pop(0)
result.append(node)
for c in children_map.get(node, []):
in_degree[c] -= 1
if in_degree[c] == 0:
queue.append(c)
queue.sort()
return result
def render_run_detail(details: V1WorkflowRunDetails) -> str:
"""Render a single workflow run as markdown DAG with task details."""
shape = details.shape or []
tasks = details.tasks or []
events = details.task_events or []
run = details.run
if not shape:
return f"Run {run.metadata.id}: {run.status.value} (no shape data)"
# Build lookups
step_to_shape: dict[str, WorkflowRunShapeItemForWorkflowRunDetails] = {
s.step_id: s for s in shape
}
step_to_name: dict[str, str] = {s.step_id: s.task_name for s in shape}
# Reverse edges (parents)
parents: dict[str, list[str]] = {s.step_id: [] for s in shape}
for s in shape:
for child_id in s.children_step_ids or []:
if child_id in parents:
parents[child_id].append(s.step_id)
# Join tasks by step_id
task_by_step: dict[str, V1TaskSummary] = {}
for t in tasks:
if t.step_id and t.step_id in step_to_name:
task_by_step[t.step_id] = t
# Events indexed by task_external_id
events_by_task: dict[str, list[V1TaskEvent]] = defaultdict(list)
for ev in events:
events_by_task[ev.task_id].append(ev)
ordered = _topo_sort(shape)
lines: list[str] = []
# Run header
run_icon = STATUS_ICON.get(run.status, "?")
run_name = run.display_name or run.workflow_id
dur = _fmt_duration(run.duration)
lines.append(f"**{run_name}** {run_icon} {dur}")
lines.append(f"ID: `{run.metadata.id}`")
if run.additional_metadata:
meta_parts = [f"{k}=`{v}`" for k, v in run.additional_metadata.items()]
lines.append(f"Meta: {', '.join(meta_parts)}")
if run.error_message:
# Take first line of error only for header
first_line = run.error_message.split("\n")[0]
lines.append(f"Error: {first_line}")
lines.append("")
# DAG Status Overview table
lines.append("**DAG Status Overview**")
lines.append("")
lines.append("| Node | Status | Duration | Dependencies |")
lines.append("|------|--------|----------|--------------|")
for step_id in ordered:
s = step_to_shape[step_id]
t = task_by_step.get(step_id)
name = step_to_name[step_id]
icon = STATUS_ICON.get(t.status, "?") if t else "?"
dur = _fmt_duration(t.duration) if t else "-"
parent_names = [step_to_name[p] for p in parents[step_id]]
child_names = [
step_to_name[c] for c in (s.children_step_ids or []) if c in step_to_name
]
deps_left = ", ".join(parent_names) if parent_names else ""
deps_right = ", ".join(child_names) if child_names else ""
if deps_left and deps_right:
deps = f"{deps_left} \u2192 {deps_right}"
elif deps_right:
deps = f"\u2192 {deps_right}"
elif deps_left:
deps = f"{deps_left} \u2192"
else:
deps = "-"
lines.append(f"| {name} | {icon} | {dur} | {deps} |")
lines.append("")
lines.append("---")
lines.append("")
# Node details
for step_id in ordered:
t = task_by_step.get(step_id)
name = step_to_name[step_id]
if not t:
lines.append(f"**\U0001f4e6 {name}**")
lines.append("Status: no task data")
lines.append("")
continue
lines.append(f"**\U0001f4e6 {name}**")
lines.append(f"Status: {_fmt_status_line(t)}")
if t.duration:
lines.append(f"Duration: {_fmt_duration(t.duration)}")
if t.retry_count and t.retry_count > 0:
lines.append(f"Retries: {t.retry_count}")
# Fan-out children
if t.num_spawned_children and t.num_spawned_children > 0:
children = t.children or []
completed = sum(1 for c in children if c.status == V1TaskStatus.COMPLETED)
failed = sum(1 for c in children if c.status == V1TaskStatus.FAILED)
running = sum(1 for c in children if c.status == V1TaskStatus.RUNNING)
lines.append(
f"Spawned children: {completed}/{t.num_spawned_children} done"
f"{f', {running} running' if running else ''}"
f"{f', {failed} failed' if failed else ''}"
)
# Error message (first meaningful line only, full trace in events)
if t.error_message:
err_lines = t.error_message.strip().split("\n")
# Find first non-empty, non-traceback line
err_summary = err_lines[0]
for line in err_lines:
stripped = line.strip()
if stripped and not stripped.startswith(
("Traceback", "File ", "{", ")")
):
err_summary = stripped
break
lines.append(f"Error: `{err_summary}`")
# Events log
task_events = sorted(
events_by_task.get(t.task_external_id, []),
key=lambda e: e.timestamp,
)
if task_events:
lines.append("Events:")
for ev in task_events:
ts = ev.timestamp.strftime("%H:%M:%S")
ev_icon = ""
if ev.event_type.value == "FINISHED":
ev_icon = "\u2705 "
elif ev.event_type.value in ("FAILED", "TIMED_OUT"):
ev_icon = "\u274c "
elif ev.event_type.value == "STARTED":
ev_icon = "\u25b6\ufe0f "
elif ev.event_type.value == "RETRYING":
ev_icon = "\U0001f504 "
elif ev.event_type.value == "CANCELLED":
ev_icon = "\u26a0\ufe0f "
msg = ev.message.strip()
if ev.error_message:
# Just first line of error in event log
err_first = ev.error_message.strip().split("\n")[0]
if msg:
msg += f" | {err_first}"
else:
msg = err_first
if msg:
lines.append(f" `{ts}` {ev_icon}{ev.event_type.value}: {msg}")
else:
lines.append(f" `{ts}` {ev_icon}{ev.event_type.value}")
lines.append("")
return "\n".join(lines)
def render_run_summary(idx: int, run: V1TaskSummary) -> str:
"""One-line summary for a run in the list view."""
icon = STATUS_ICON.get(run.status, "?")
name = run.display_name or run.workflow_name or "?"
run_id = run.workflow_run_external_id or "?"
dur = _fmt_duration(run.duration)
started = _fmt_time(run.started_at)
meta = ""
if run.additional_metadata:
meta_parts = [f"{k}=`{v}`" for k, v in run.additional_metadata.items()]
meta = f" ({', '.join(meta_parts)})"
return (
f" {idx}. {icon} **{name}** started={started} dur={dur}{meta}\n"
f" `{run_id}`"
)
async def _fetch_run_list(
count: int = 5,
statuses: list[V1TaskStatus] | None = None,
) -> list[V1TaskSummary]:
client = HatchetClientManager.get_client()
since = datetime.now(timezone.utc) - timedelta(days=7)
runs = await client.runs.aio_list(
since=since,
statuses=statuses,
limit=count,
)
return runs.rows or []
async def list_recent_runs(
count: int = 5,
statuses: list[V1TaskStatus] | None = None,
) -> str:
"""List recent workflow runs as text."""
rows = await _fetch_run_list(count, statuses)
if not rows:
return "No runs found in the last 7 days."
lines = [f"Recent runs ({len(rows)}):", ""]
for i, run in enumerate(rows, 1):
lines.append(render_run_summary(i, run))
lines.append("")
lines.append("Use `--show N` to see full DAG for run N")
return "\n".join(lines)
async def show_run(workflow_run_id: str) -> str:
"""Fetch and render a single run."""
client = HatchetClientManager.get_client()
details = await client.runs.aio_get(workflow_run_id)
return render_run_detail(details)
async def show_nth_run(
n: int,
count: int = 5,
statuses: list[V1TaskStatus] | None = None,
) -> str:
"""Fetch list, then drill into Nth run."""
rows = await _fetch_run_list(count, statuses)
if not rows:
return "No runs found in the last 7 days."
if n < 1 or n > len(rows):
return f"Invalid index {n}. Have {len(rows)} runs (1-{len(rows)})."
run = rows[n - 1]
return await show_run(run.workflow_run_external_id)
async def main_async(args: argparse.Namespace) -> None:
statuses = [V1TaskStatus(args.status)] if args.status else None
if args.run_id:
output = await show_run(args.run_id)
elif args.show is not None:
output = await show_nth_run(args.show, count=args.last, statuses=statuses)
else:
output = await list_recent_runs(count=args.last, statuses=statuses)
print(output)
def main() -> None:
parser = argparse.ArgumentParser(
description="Render Hatchet workflow runs as text DAG"
)
parser.add_argument(
"run_id",
nargs="?",
default=None,
help="Workflow run ID to show in detail. If omitted, lists recent runs.",
)
parser.add_argument(
"--show",
type=int,
default=None,
metavar="N",
help="Show full DAG for the Nth run in the list (1-indexed)",
)
parser.add_argument(
"--last",
type=int,
default=5,
help="Number of recent runs to list (default: 5)",
)
parser.add_argument(
"--status",
choices=["QUEUED", "RUNNING", "COMPLETED", "FAILED", "CANCELLED"],
help="Filter by status",
)
args = parser.parse_args()
asyncio.run(main_async(args))
if __name__ == "__main__":
main()
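The renderer orders nodes with Kahn's algorithm (_topo_sort), breaking ties alphabetically so the output is deterministic. A toy check on a diamond DAG, using SimpleNamespace stand-ins for the SDK shape items (illustration only):

from types import SimpleNamespace

from reflector.tools.render_hatchet_run import _topo_sort

# Diamond: a -> b, a -> c, b -> d, c -> d
shape = [
    SimpleNamespace(step_id="a", children_step_ids=["b", "c"]),
    SimpleNamespace(step_id="b", children_step_ids=["d"]),
    SimpleNamespace(step_id="c", children_step_ids=["d"]),
    SimpleNamespace(step_id="d", children_step_ids=[]),
]
print(_topo_sort(shape))  # ['a', 'b', 'c', 'd'] (ties broken alphabetically)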

View File

@@ -80,14 +80,7 @@ async def webhook(request: Request):
try:
event = event_adapter.validate_python(body_json)
except Exception as e:
err_detail = str(e)
if hasattr(e, "errors"):
err_detail = f"{err_detail}; errors={e.errors()!r}"
logger.error(
"Failed to parse webhook event",
error=err_detail,
body=body.decode(),
)
logger.error("Failed to parse webhook event", error=str(e), body=body.decode())
raise HTTPException(status_code=422, detail="Invalid event format")
match event:

View File

@@ -5,7 +5,7 @@ from fastapi import APIRouter, Depends, HTTPException, UploadFile
from pydantic import BaseModel
import reflector.auth as auth
from reflector.db.transcripts import SourceKind, transcripts_controller
from reflector.db.transcripts import transcripts_controller
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
router = APIRouter()
@@ -88,10 +88,8 @@ async def transcript_record_upload(
finally:
container.close()
# set the status to "uploaded" and mark as file source
await transcripts_controller.update(
transcript, {"status": "uploaded", "source_kind": SourceKind.FILE}
)
# set the status to "uploaded"
await transcripts_controller.update(transcript, {"status": "uploaded"})
# launch a background task to process the file
task_pipeline_file_process.delay(transcript_id=transcript_id)

View File

@@ -4,22 +4,18 @@ Transcripts websocket API
"""
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketDisconnect
import reflector.auth as auth
from reflector.db.transcripts import transcripts_controller
from reflector.ws_events import TranscriptWsEvent
from reflector.ws_manager import get_ws_manager
router = APIRouter()
@router.get(
"/transcripts/{transcript_id}/events",
response_model=TranscriptWsEvent,
summary="Transcript WebSocket event schema",
description="Stub exposing the discriminated union of all transcript-level WS events for OpenAPI type generation. Real events are delivered over the WebSocket at the same path.",
)
@router.get("/transcripts/{transcript_id}/events")
async def transcript_get_websocket_events(transcript_id: str):
pass
@@ -28,9 +24,8 @@ async def transcript_get_websocket_events(transcript_id: str):
async def transcript_events_websocket(
transcript_id: str,
websocket: WebSocket,
user: Optional[auth.UserInfo] = Depends(auth.current_user_optional),
):
_, negotiated_subprotocol = auth.parse_ws_bearer_token(websocket)
user = await auth.current_user_ws_optional(websocket)
user_id = user["sub"] if user else None
transcript = await transcripts_controller.get_by_id_for_http(
transcript_id, user_id=user_id
@@ -42,9 +37,7 @@ async def transcript_events_websocket(
# use ts:transcript_id as room id
room_id = f"ts:{transcript_id}"
ws_manager = get_ws_manager()
await ws_manager.add_user_to_room(
room_id, websocket, subprotocol=negotiated_subprotocol
)
await ws_manager.add_user_to_room(room_id, websocket)
try:
# on first connection, send all events only to the current user

View File

@@ -1,25 +1,13 @@
from typing import Optional
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from fastapi import APIRouter, WebSocket
from reflector.auth.auth_jwt import JWTAuth # type: ignore
from reflector.db.users import user_controller
from reflector.ws_events import UserWsEvent
from reflector.ws_manager import get_ws_manager
router = APIRouter()
@router.get(
"/events",
response_model=UserWsEvent,
summary="User WebSocket event schema",
description="Stub exposing the discriminated union of all user-level WS events for OpenAPI type generation. Real events are delivered over the WebSocket at the same path.",
)
async def user_get_websocket_events():
pass
# Close code for unauthorized WebSocket connections
UNAUTHORISED = 4401
@@ -72,8 +60,6 @@ async def user_events_websocket(websocket: WebSocket):
try:
while True:
await websocket.receive()
except (RuntimeError, WebSocketDisconnect):
pass
finally:
if room_id:
await ws_manager.remove_user_from_room(room_id, websocket)

View File

@@ -25,6 +25,7 @@ from reflector.db.transcripts import (
transcripts_controller,
)
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.dag_zulip import create_dag_zulip_message
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
from reflector.pipelines.main_live_pipeline import asynctask
from reflector.pipelines.topic_processing import EmptyPipeline
@@ -372,6 +373,16 @@ async def _process_multitrack_recording_inner(
await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})
try:
await create_dag_zulip_message(transcript.id, workflow_id)
except Exception:
logger.warning(
"[DAG Zulip] Failed to create DAG message at dispatch",
transcript_id=transcript.id,
workflow_id=workflow_id,
exc_info=True,
)
@shared_task
@asynctask
@@ -1079,6 +1090,16 @@ async def reprocess_failed_daily_recordings():
transcript, {"workflow_run_id": workflow_id}
)
try:
await create_dag_zulip_message(transcript.id, workflow_id)
except Exception:
logger.warning(
"[DAG Zulip] Failed to create DAG message at reprocess dispatch",
transcript_id=transcript.id,
workflow_id=workflow_id,
exc_info=True,
)
logger.info(
"Queued Daily recording for Hatchet reprocessing",
recording_id=recording.id,

View File

@@ -1,188 +0,0 @@
"""Typed WebSocket event models.
Defines Pydantic models with Literal discriminators for all WS events.
Exposed via stub GET endpoints so ``pnpm openapi`` generates TS discriminated unions.
"""
from typing import Annotated, Literal, Union
from pydantic import BaseModel, Discriminator
from reflector.db.transcripts import (
TranscriptActionItems,
TranscriptDuration,
TranscriptFinalLongSummary,
TranscriptFinalShortSummary,
TranscriptFinalTitle,
TranscriptStatus,
TranscriptText,
TranscriptWaveform,
)
from reflector.utils.string import NonEmptyString
from reflector.views.transcripts import GetTranscriptTopic
# ---------------------------------------------------------------------------
# Transcript-level event name literal
# ---------------------------------------------------------------------------
TranscriptEventName = Literal[
"TRANSCRIPT",
"TOPIC",
"STATUS",
"FINAL_TITLE",
"FINAL_LONG_SUMMARY",
"FINAL_SHORT_SUMMARY",
"ACTION_ITEMS",
"DURATION",
"WAVEFORM",
]
# ---------------------------------------------------------------------------
# Transcript-level WS event wrappers
# ---------------------------------------------------------------------------
class TranscriptWsTranscript(BaseModel):
event: Literal["TRANSCRIPT"] = "TRANSCRIPT"
data: TranscriptText
class TranscriptWsTopic(BaseModel):
event: Literal["TOPIC"] = "TOPIC"
data: GetTranscriptTopic
class TranscriptWsStatusData(BaseModel):
value: TranscriptStatus
class TranscriptWsStatus(BaseModel):
event: Literal["STATUS"] = "STATUS"
data: TranscriptWsStatusData
class TranscriptWsFinalTitle(BaseModel):
event: Literal["FINAL_TITLE"] = "FINAL_TITLE"
data: TranscriptFinalTitle
class TranscriptWsFinalLongSummary(BaseModel):
event: Literal["FINAL_LONG_SUMMARY"] = "FINAL_LONG_SUMMARY"
data: TranscriptFinalLongSummary
class TranscriptWsFinalShortSummary(BaseModel):
event: Literal["FINAL_SHORT_SUMMARY"] = "FINAL_SHORT_SUMMARY"
data: TranscriptFinalShortSummary
class TranscriptWsActionItems(BaseModel):
event: Literal["ACTION_ITEMS"] = "ACTION_ITEMS"
data: TranscriptActionItems
class TranscriptWsDuration(BaseModel):
event: Literal["DURATION"] = "DURATION"
data: TranscriptDuration
class TranscriptWsWaveform(BaseModel):
event: Literal["WAVEFORM"] = "WAVEFORM"
data: TranscriptWaveform
TranscriptWsEvent = Annotated[
Union[
TranscriptWsTranscript,
TranscriptWsTopic,
TranscriptWsStatus,
TranscriptWsFinalTitle,
TranscriptWsFinalLongSummary,
TranscriptWsFinalShortSummary,
TranscriptWsActionItems,
TranscriptWsDuration,
TranscriptWsWaveform,
],
Discriminator("event"),
]
# ---------------------------------------------------------------------------
# User-level event name literal
# ---------------------------------------------------------------------------
UserEventName = Literal[
"TRANSCRIPT_CREATED",
"TRANSCRIPT_DELETED",
"TRANSCRIPT_STATUS",
"TRANSCRIPT_FINAL_TITLE",
"TRANSCRIPT_DURATION",
]
# ---------------------------------------------------------------------------
# User-level WS event data models
# ---------------------------------------------------------------------------
class UserTranscriptCreatedData(BaseModel):
id: NonEmptyString
class UserTranscriptDeletedData(BaseModel):
id: NonEmptyString
class UserTranscriptStatusData(BaseModel):
id: NonEmptyString
value: TranscriptStatus
class UserTranscriptFinalTitleData(BaseModel):
id: NonEmptyString
title: NonEmptyString
class UserTranscriptDurationData(BaseModel):
id: NonEmptyString
duration: float
# ---------------------------------------------------------------------------
# User-level WS event wrappers
# ---------------------------------------------------------------------------
class UserWsTranscriptCreated(BaseModel):
event: Literal["TRANSCRIPT_CREATED"] = "TRANSCRIPT_CREATED"
data: UserTranscriptCreatedData
class UserWsTranscriptDeleted(BaseModel):
event: Literal["TRANSCRIPT_DELETED"] = "TRANSCRIPT_DELETED"
data: UserTranscriptDeletedData
class UserWsTranscriptStatus(BaseModel):
event: Literal["TRANSCRIPT_STATUS"] = "TRANSCRIPT_STATUS"
data: UserTranscriptStatusData
class UserWsTranscriptFinalTitle(BaseModel):
event: Literal["TRANSCRIPT_FINAL_TITLE"] = "TRANSCRIPT_FINAL_TITLE"
data: UserTranscriptFinalTitleData
class UserWsTranscriptDuration(BaseModel):
event: Literal["TRANSCRIPT_DURATION"] = "TRANSCRIPT_DURATION"
data: UserTranscriptDurationData
UserWsEvent = Annotated[
Union[
UserWsTranscriptCreated,
UserWsTranscriptDeleted,
UserWsTranscriptStatus,
UserWsTranscriptFinalTitle,
UserWsTranscriptDuration,
],
Discriminator("event"),
]
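For reference, the discriminated union removed here dispatches on the event field; with pydantic v2 a payload parses like this (the DURATION payload shape is an assumption inferred from UserTranscriptDurationData):

from pydantic import TypeAdapter

from reflector.ws_events import TranscriptWsEvent  # module as it existed pre-removal

adapter = TypeAdapter(TranscriptWsEvent)
event = adapter.validate_python({"event": "DURATION", "data": {"duration": 12.5}})
print(type(event).__name__)  # TranscriptWsDuration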

View File

@@ -48,15 +48,7 @@ class RedisPubSubManager:
if not self.redis_connection:
await self.connect()
message = json.dumps(message)
try:
await self.redis_connection.publish(room_id, message)
except RuntimeError:
# Celery workers run each task in a new event loop (asyncio.run),
# which closes the previous loop. Cached Redis connection is dead.
# Reconnect on the current loop and retry.
self.redis_connection = None
await self.connect()
await self.redis_connection.publish(room_id, message)
await self.redis_connection.publish(room_id, message)
async def subscribe(self, room_id: str) -> redis.Redis:
await self.pubsub.subscribe(room_id)

View File

@@ -12,9 +12,16 @@ class InvalidMessageError(Exception):
pass
def _zulip_client() -> httpx.AsyncClient:
headers = {}
if settings.ZULIP_HOST_HEADER:
headers["Host"] = settings.ZULIP_HOST_HEADER
return httpx.AsyncClient(verify=False, headers=headers)
async def get_zulip_topics(stream_id: int) -> list[dict]:
try:
async with httpx.AsyncClient() as client:
async with _zulip_client() as client:
response = await client.get(
f"https://{settings.ZULIP_REALM}/api/v1/users/me/{stream_id}/topics",
auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
@@ -29,7 +36,7 @@ async def get_zulip_topics(stream_id: int) -> list[dict]:
async def get_zulip_streams() -> list[dict]:
try:
async with httpx.AsyncClient() as client:
async with _zulip_client() as client:
response = await client.get(
f"https://{settings.ZULIP_REALM}/api/v1/streams",
auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
@@ -44,7 +51,7 @@ async def get_zulip_streams() -> list[dict]:
async def send_message_to_zulip(stream: str, topic: str, content: str):
try:
async with httpx.AsyncClient() as client:
async with _zulip_client() as client:
response = await client.post(
f"https://{settings.ZULIP_REALM}/api/v1/messages",
data={
@@ -66,7 +73,7 @@ async def send_message_to_zulip(stream: str, topic: str, content: str):
async def update_zulip_message(message_id: int, stream: str, topic: str, content: str):
try:
async with httpx.AsyncClient() as client:
async with _zulip_client() as client:
response = await client.patch(
f"https://{settings.ZULIP_REALM}/api/v1/messages/{message_id}",
data={
@@ -90,6 +97,27 @@ async def update_zulip_message(message_id: int, stream: str, topic: str, content
raise Exception(f"Failed to update Zulip message: {error}")
async def delete_zulip_message(message_id: int):
try:
async with _zulip_client() as client:
response = await client.delete(
f"https://{settings.ZULIP_REALM}/api/v1/messages/{message_id}",
auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
)
if (
response.status_code == 400
and response.json()["msg"] == "Invalid message(s)"
):
raise InvalidMessageError(f"There is no message with id: {message_id}")
response.raise_for_status()
return response.json()
except httpx.RequestError as error:
raise Exception(f"Failed to delete Zulip message: {error}")
def get_zulip_message(transcript: Transcript, include_topics: bool):
transcript_url = f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
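The _zulip_client() helper exists so every call carries an explicit Host header (and, as written, skips TLS verification). A standalone sketch of the same idea, assuming Zulip sits behind an internal address while the Host header selects the virtual host (address and credentials are placeholders):

import asyncio

import httpx

async def demo() -> None:
    async with httpx.AsyncClient(
        verify=False, headers={"Host": "zulip.example.com"}
    ) as client:
        response = await client.get(
            "https://10.0.0.5/api/v1/streams",
            auth=("bot@example.com", "api-key"),
        )
        print(response.status_code)

asyncio.run(demo())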

View File

@@ -15,7 +15,8 @@ from reflector.settings import settings
async def setup_webhook(webhook_url: str):
"""
Create Daily.co webhook. Deletes any existing webhooks first, then creates the new one.
Create or update Daily.co webhook for this environment using dailyco_api module.
Uses DAILY_WEBHOOK_UUID to identify existing webhook.
"""
if not settings.DAILY_API_KEY:
print("Error: DAILY_API_KEY not set")
@@ -34,37 +35,79 @@ async def setup_webhook(webhook_url: str):
]
async with DailyApiClient(api_key=settings.DAILY_API_KEY) as client:
webhooks = await client.list_webhooks()
for wh in webhooks:
await client.delete_webhook(wh.uuid)
print(f"Deleted webhook {wh.uuid}")
webhook_uuid = settings.DAILY_WEBHOOK_UUID
request = CreateWebhookRequest(
url=webhook_url,
eventTypes=event_types,
hmac=settings.DAILY_WEBHOOK_SECRET,
)
result = await client.create_webhook(request)
webhook_uuid = result.uuid
if webhook_uuid:
print(f"Updating existing webhook {webhook_uuid}...")
try:
# Note: Daily.co doesn't support PATCH well, so we delete + recreate
await client.delete_webhook(webhook_uuid)
print(f"Deleted old webhook {webhook_uuid}")
print(f"Created webhook {webhook_uuid} (state: {result.state})")
print(f" URL: {result.url}")
request = CreateWebhookRequest(
url=webhook_url,
eventTypes=event_types,
hmac=settings.DAILY_WEBHOOK_SECRET,
)
result = await client.create_webhook(request)
env_file = Path(__file__).parent.parent / ".env"
if env_file.exists():
lines = env_file.read_text().splitlines()
updated = False
for i, line in enumerate(lines):
if line.startswith("DAILY_WEBHOOK_UUID="):
lines[i] = f"DAILY_WEBHOOK_UUID={webhook_uuid}"
updated = True
break
if not updated:
lines.append(f"DAILY_WEBHOOK_UUID={webhook_uuid}")
env_file.write_text("\n".join(lines) + "\n")
print("✓ Saved DAILY_WEBHOOK_UUID to .env")
print(
f"✓ Created replacement webhook {result.uuid} (state: {result.state})"
)
print(f" URL: {result.url}")
return 0
webhook_uuid = result.uuid
except Exception as e:
if hasattr(e, "response") and e.response.status_code == 404:
print(f"Webhook {webhook_uuid} not found, creating new one...")
webhook_uuid = None # Fall through to creation
else:
print(f"Error updating webhook: {e}")
return 1
if not webhook_uuid:
print("Creating new webhook...")
request = CreateWebhookRequest(
url=webhook_url,
eventTypes=event_types,
hmac=settings.DAILY_WEBHOOK_SECRET,
)
result = await client.create_webhook(request)
webhook_uuid = result.uuid
print(f"✓ Created webhook {webhook_uuid} (state: {result.state})")
print(f" URL: {result.url}")
print()
print("=" * 60)
print("IMPORTANT: Add this to your environment variables:")
print("=" * 60)
print(f"DAILY_WEBHOOK_UUID: {webhook_uuid}")
print("=" * 60)
print()
# Try to write UUID to .env file
env_file = Path(__file__).parent.parent / ".env"
if env_file.exists():
lines = env_file.read_text().splitlines()
updated = False
# Update existing DAILY_WEBHOOK_UUID line or add it
for i, line in enumerate(lines):
if line.startswith("DAILY_WEBHOOK_UUID="):
lines[i] = f"DAILY_WEBHOOK_UUID={webhook_uuid}"
updated = True
break
if not updated:
lines.append(f"DAILY_WEBHOOK_UUID={webhook_uuid}")
env_file.write_text("\n".join(lines) + "\n")
print("✓ Also saved to local .env file")
else:
print("⚠ Local .env file not found - please add manually")
return 0
if __name__ == "__main__":
@@ -74,7 +117,11 @@ if __name__ == "__main__":
"Example: python recreate_daily_webhook.py https://example.com/v1/daily/webhook"
)
print()
print("Deletes all existing webhooks, then creates a new one.")
print("Behavior:")
print(" - If DAILY_WEBHOOK_UUID set: Deletes old webhook, creates new one")
print(
" - If DAILY_WEBHOOK_UUID empty: Creates new webhook, saves UUID to .env"
)
sys.exit(1)
sys.exit(asyncio.run(setup_webhook(sys.argv[1])))

View File

@@ -0,0 +1,536 @@
"""
Tests for Hatchet DAG Status -> Zulip Live Updates.
Tests cover:
- _dag_zulip_enabled() guard logic
- create_dag_zulip_message: sends + stores message ID
- update_dag_zulip_message: updates existing; noop when no message_id
- delete_dag_zulip_message: deletes + clears; handles InvalidMessageError
- delete_zulip_message: sends HTTP DELETE; raises on 400
- with_error_handling integration: calls update after success + failure
"""
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from reflector.db.transcripts import Transcript
@pytest.fixture
def dag_settings():
"""Patch settings for DAG Zulip tests."""
with patch("reflector.hatchet.dag_zulip.settings") as mock_settings:
mock_settings.ZULIP_REALM = "zulip.example.com"
mock_settings.ZULIP_DAG_STREAM = "dag-stream"
mock_settings.ZULIP_DAG_TOPIC = "dag-topic"
mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
mock_settings.ZULIP_API_KEY = "fake-key"
yield mock_settings
@pytest.fixture
def dag_settings_disabled():
"""Patch settings with DAG Zulip disabled."""
with patch("reflector.hatchet.dag_zulip.settings") as mock_settings:
mock_settings.ZULIP_REALM = "zulip.example.com"
mock_settings.ZULIP_DAG_STREAM = None
mock_settings.ZULIP_DAG_TOPIC = None
yield mock_settings
@pytest.fixture
def mock_transcript():
return Transcript(
id="test-transcript-id",
name="Test",
status="processing",
source_kind="room",
zulip_message_id=None,
)
@pytest.fixture
def mock_transcript_with_zulip_id():
return Transcript(
id="test-transcript-id",
name="Test",
status="processing",
source_kind="room",
zulip_message_id=42,
)
class TestDagZulipEnabled:
def test_enabled_when_all_set(self, dag_settings):
from reflector.hatchet.dag_zulip import _dag_zulip_enabled
assert _dag_zulip_enabled() is True
def test_disabled_when_realm_missing(self, dag_settings):
dag_settings.ZULIP_REALM = None
from reflector.hatchet.dag_zulip import _dag_zulip_enabled
assert _dag_zulip_enabled() is False
def test_disabled_when_stream_missing(self, dag_settings):
dag_settings.ZULIP_DAG_STREAM = None
from reflector.hatchet.dag_zulip import _dag_zulip_enabled
assert _dag_zulip_enabled() is False
def test_disabled_when_topic_missing(self, dag_settings):
dag_settings.ZULIP_DAG_TOPIC = None
from reflector.hatchet.dag_zulip import _dag_zulip_enabled
assert _dag_zulip_enabled() is False
@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestCreateDagZulipMessage:
async def test_sends_and_stores_message_id(self, dag_settings, mock_transcript):
mock_run_details = MagicMock()
rendered_md = "**DAG** rendered"
with (
patch(
"reflector.hatchet.client.HatchetClientManager.get_client"
) as mock_get_client,
patch(
"reflector.tools.render_hatchet_run.render_run_detail",
return_value=rendered_md,
),
patch(
"reflector.zulip.send_message_to_zulip",
new_callable=AsyncMock,
return_value={"id": 99},
) as mock_send,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript,
),
patch(
"reflector.db.transcripts.transcripts_controller.update",
new_callable=AsyncMock,
) as mock_update,
):
mock_client = MagicMock()
mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
mock_get_client.return_value = mock_client
from reflector.hatchet.dag_zulip import create_dag_zulip_message
await create_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_send.assert_called_once_with("dag-stream", "dag-topic", rendered_md)
mock_update.assert_called_once_with(
mock_transcript, {"zulip_message_id": 99}
)
async def test_noop_when_disabled(self, dag_settings_disabled):
with patch(
"reflector.zulip.send_message_to_zulip",
new_callable=AsyncMock,
) as mock_send:
from reflector.hatchet.dag_zulip import create_dag_zulip_message
await create_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_send.assert_not_called()
async def test_logs_warning_on_failure(self, dag_settings, mock_transcript):
with (
patch(
"reflector.hatchet.client.HatchetClientManager.get_client"
) as mock_get_client,
patch(
"reflector.tools.render_hatchet_run.render_run_detail",
return_value="rendered",
),
patch(
"reflector.zulip.send_message_to_zulip",
new_callable=AsyncMock,
side_effect=Exception("Zulip down"),
),
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript,
),
patch("reflector.hatchet.dag_zulip.logger") as mock_logger,
):
mock_client = MagicMock()
mock_client.runs.aio_get = AsyncMock(return_value=MagicMock())
mock_get_client.return_value = mock_client
from reflector.hatchet.dag_zulip import create_dag_zulip_message
# Should not raise
await create_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_logger.warning.assert_called()
@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestUpdateDagZulipMessage:
async def test_updates_existing_message(
self, dag_settings, mock_transcript_with_zulip_id
):
mock_run_details = MagicMock()
rendered_md = "**DAG** updated"
with (
patch(
"reflector.hatchet.client.HatchetClientManager.get_client"
) as mock_get_client,
patch(
"reflector.tools.render_hatchet_run.render_run_detail",
return_value=rendered_md,
),
patch(
"reflector.zulip.update_zulip_message",
new_callable=AsyncMock,
) as mock_update,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript_with_zulip_id,
),
patch(
"reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
) as mock_fresh_db,
):
mock_client = MagicMock()
mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
mock_get_client.return_value = mock_client
mock_fresh_db.return_value.__aenter__ = AsyncMock()
mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)
from reflector.hatchet.dag_zulip import update_dag_zulip_message
await update_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_update.assert_called_once_with(
42, "dag-stream", "dag-topic", rendered_md
)
async def test_appends_error_banner(
self, dag_settings, mock_transcript_with_zulip_id
):
mock_run_details = MagicMock()
rendered_md = "**DAG** updated"
with (
patch(
"reflector.hatchet.client.HatchetClientManager.get_client"
) as mock_get_client,
patch(
"reflector.tools.render_hatchet_run.render_run_detail",
return_value=rendered_md,
),
patch(
"reflector.zulip.update_zulip_message",
new_callable=AsyncMock,
) as mock_update,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript_with_zulip_id,
),
patch(
"reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
) as mock_fresh_db,
):
mock_client = MagicMock()
mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
mock_get_client.return_value = mock_client
mock_fresh_db.return_value.__aenter__ = AsyncMock()
mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)
from reflector.hatchet.dag_zulip import update_dag_zulip_message
await update_dag_zulip_message(
"test-transcript-id",
"workflow-run-123",
error_message="get_recording failed: connection timeout",
)
call_args = mock_update.call_args
content = call_args[0][3]
assert rendered_md in content
assert "get_recording failed: connection timeout" in content
async def test_noop_when_no_message_id(self, dag_settings, mock_transcript):
with (
patch(
"reflector.zulip.update_zulip_message",
new_callable=AsyncMock,
) as mock_update,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript,
),
patch(
"reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
) as mock_fresh_db,
):
mock_fresh_db.return_value.__aenter__ = AsyncMock()
mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)
from reflector.hatchet.dag_zulip import update_dag_zulip_message
await update_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_update.assert_not_called()
async def test_noop_when_disabled(self, dag_settings_disabled):
with patch(
"reflector.zulip.update_zulip_message",
new_callable=AsyncMock,
) as mock_update:
from reflector.hatchet.dag_zulip import update_dag_zulip_message
await update_dag_zulip_message("test-transcript-id", "workflow-run-123")
mock_update.assert_not_called()
@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestDeleteDagZulipMessage:
async def test_deletes_and_clears(
self, dag_settings, mock_transcript_with_zulip_id
):
with (
patch(
"reflector.zulip.delete_zulip_message",
new_callable=AsyncMock,
) as mock_delete,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript_with_zulip_id,
),
patch(
"reflector.db.transcripts.transcripts_controller.update",
new_callable=AsyncMock,
) as mock_tc_update,
):
from reflector.hatchet.dag_zulip import delete_dag_zulip_message
await delete_dag_zulip_message("test-transcript-id")
mock_delete.assert_called_once_with(42)
mock_tc_update.assert_called_once_with(
mock_transcript_with_zulip_id, {"zulip_message_id": None}
)
async def test_noop_when_no_message_id(self, dag_settings, mock_transcript):
with (
patch(
"reflector.zulip.delete_zulip_message",
new_callable=AsyncMock,
) as mock_delete,
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript,
),
):
from reflector.hatchet.dag_zulip import delete_dag_zulip_message
await delete_dag_zulip_message("test-transcript-id")
mock_delete.assert_not_called()
async def test_handles_invalid_message_error(
self, dag_settings, mock_transcript_with_zulip_id
):
from reflector.zulip import InvalidMessageError
with (
patch(
"reflector.zulip.delete_zulip_message",
new_callable=AsyncMock,
side_effect=InvalidMessageError("gone"),
),
patch(
"reflector.db.transcripts.transcripts_controller.get_by_id",
new_callable=AsyncMock,
return_value=mock_transcript_with_zulip_id,
),
patch(
"reflector.db.transcripts.transcripts_controller.update",
new_callable=AsyncMock,
) as mock_tc_update,
patch("reflector.hatchet.dag_zulip.logger"),
):
from reflector.hatchet.dag_zulip import delete_dag_zulip_message
# Should not raise; should still clear the message_id
await delete_dag_zulip_message("test-transcript-id")
mock_tc_update.assert_called_once_with(
mock_transcript_with_zulip_id, {"zulip_message_id": None}
)
async def test_noop_when_disabled(self, dag_settings_disabled):
with patch(
"reflector.zulip.delete_zulip_message",
new_callable=AsyncMock,
) as mock_delete:
from reflector.hatchet.dag_zulip import delete_dag_zulip_message
await delete_dag_zulip_message("test-transcript-id")
mock_delete.assert_not_called()
@pytest.mark.asyncio
class TestDeleteZulipMessage:
async def test_sends_delete_request(self):
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.raise_for_status = MagicMock()
mock_response.json.return_value = {"result": "success"}
mock_client = AsyncMock()
mock_client.delete = AsyncMock(return_value=mock_response)
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
mock_client.__aexit__ = AsyncMock(return_value=False)
with patch("reflector.zulip.httpx.AsyncClient", return_value=mock_client):
with patch("reflector.zulip.settings") as mock_settings:
mock_settings.ZULIP_REALM = "zulip.example.com"
mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
mock_settings.ZULIP_API_KEY = "fake-key"
from reflector.zulip import delete_zulip_message
result = await delete_zulip_message(123)
assert result == {"result": "success"}
mock_client.delete.assert_called_once()
call_args = mock_client.delete.call_args
assert "123" in call_args.args[0]
async def test_raises_invalid_message_on_400(self):
from reflector.zulip import InvalidMessageError
mock_response = MagicMock()
mock_response.status_code = 400
mock_response.json.return_value = {"msg": "Invalid message(s)"}
mock_client = AsyncMock()
mock_client.delete = AsyncMock(return_value=mock_response)
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
mock_client.__aexit__ = AsyncMock(return_value=False)
with patch("reflector.zulip.httpx.AsyncClient", return_value=mock_client):
with patch("reflector.zulip.settings") as mock_settings:
mock_settings.ZULIP_REALM = "zulip.example.com"
mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
mock_settings.ZULIP_API_KEY = "fake-key"
from reflector.zulip import delete_zulip_message
with pytest.raises(InvalidMessageError):
await delete_zulip_message(999)
@pytest.mark.asyncio
class TestWithErrorHandlingDagUpdate:
    """Test that with_error_handling calls update_dag_zulip_message."""

    async def test_calls_update_on_success(self):
        from reflector.hatchet.constants import TaskName
        from reflector.hatchet.workflows.daily_multitrack_pipeline import (
            PipelineInput,
            with_error_handling,
        )

        mock_ctx = MagicMock()
        mock_ctx.workflow_run_id = "wfr-123"
        input_data = PipelineInput(
            recording_id="rec-1",
            tracks=[{"s3_key": "k"}],
            bucket_name="bucket",
            transcript_id="tid-1",
        )

        @with_error_handling(TaskName.GET_RECORDING)
        async def fake_task(input: PipelineInput, ctx) -> str:
            return "ok"

        with patch(
            "reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
            new_callable=AsyncMock,
        ) as mock_update:
            result = await fake_task(input_data, mock_ctx)

        assert result == "ok"
        mock_update.assert_called_once_with("tid-1", "wfr-123")

    async def test_calls_update_on_failure_with_error_message(self):
        from reflector.hatchet.constants import TaskName
        from reflector.hatchet.workflows.daily_multitrack_pipeline import (
            PipelineInput,
            with_error_handling,
        )

        mock_ctx = MagicMock()
        mock_ctx.workflow_run_id = "wfr-123"
        input_data = PipelineInput(
            recording_id="rec-1",
            tracks=[{"s3_key": "k"}],
            bucket_name="bucket",
            transcript_id="tid-1",
        )

        @with_error_handling(TaskName.GET_RECORDING)
        async def failing_task(input: PipelineInput, ctx) -> str:
            raise ValueError("boom")

        with (
            patch(
                "reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
                new_callable=AsyncMock,
            ) as mock_update,
            patch(
                "reflector.hatchet.workflows.daily_multitrack_pipeline.set_workflow_error_status",
                new_callable=AsyncMock,
            ),
        ):
            with pytest.raises(ValueError, match="boom"):
                await failing_task(input_data, mock_ctx)

        mock_update.assert_called_once_with(
            "tid-1", "wfr-123", error_message="get_recording failed: boom"
        )

    async def test_dag_failure_doesnt_affect_task(self):
        """DAG update failure should not prevent task from succeeding."""
        from reflector.hatchet.constants import TaskName
        from reflector.hatchet.workflows.daily_multitrack_pipeline import (
            PipelineInput,
            with_error_handling,
        )

        mock_ctx = MagicMock()
        mock_ctx.workflow_run_id = "wfr-123"
        input_data = PipelineInput(
            recording_id="rec-1",
            tracks=[{"s3_key": "k"}],
            bucket_name="bucket",
            transcript_id="tid-1",
        )

        @with_error_handling(TaskName.GET_RECORDING)
        async def ok_task(input: PipelineInput, ctx) -> str:
            return "ok"

        with patch(
            "reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
            new_callable=AsyncMock,
            side_effect=Exception("zulip exploded"),
        ):
            result = await ok_task(input_data, mock_ctx)

        assert result == "ok"
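
# Finally, a sketch of with_error_handling as pinned down by this class: report
# success or failure to the DAG Zulip message, and never let that report break
# the task itself. Hypothetical reconstruction; the real decorator in
# daily_multitrack_pipeline.py also records workflow error status (signature
# unknown, left as a comment), the import source of update_dag_zulip_message is
# assumed, and TaskName is assumed to carry its wire name ("get_recording") in
# .value.
import functools

from reflector.hatchet.dag_zulip import update_dag_zulip_message  # assumed source


def with_error_handling(task_name):
    def decorator(task):
        @functools.wraps(task)
        async def wrapper(input, ctx):
            label = getattr(task_name, "value", task_name)
            try:
                result = await task(input, ctx)
            except Exception as exc:
                # await set_workflow_error_status(...)  # also happens here in the real module
                try:
                    await update_dag_zulip_message(
                        input.transcript_id,
                        ctx.workflow_run_id,
                        error_message=f"{label} failed: {exc}",
                    )
                except Exception:
                    pass  # never mask the original task error
                raise
            try:
                await update_dag_zulip_message(input.transcript_id, ctx.workflow_run_id)
            except Exception:
                pass  # a Zulip hiccup must not fail the task itself
            return result

        return wrapper

    return decorator
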

View File

@@ -255,7 +255,7 @@ async def test_validation_locked_transcript():
@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
async def test_validation_idle_transcript():
    """Test that validation rejects idle transcripts without recording (file upload not ready)."""
    """Test that validation rejects idle transcripts (not ready)."""
    from reflector.services.transcript_process import (
        ValidationNotReady,
        validate_transcript_for_processing,
    )
@@ -274,34 +274,6 @@ async def test_validation_idle_transcript():
    assert "not ready" in result.detail.lower()


@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
async def test_validation_idle_transcript_with_recording_allowed():
    """Test that validation allows idle transcripts with recording_id (multitrack ready/retry)."""
    from reflector.services.transcript_process import (
        ValidationOk,
        validate_transcript_for_processing,
    )

    mock_transcript = Transcript(
        id="test-transcript-id",
        name="Test",
        status="idle",
        source_kind="room",
        recording_id="test-recording-id",
    )
    with patch(
        "reflector.services.transcript_process.task_is_scheduled_or_active"
    ) as mock_celery_check:
        mock_celery_check.return_value = False
        result = await validate_transcript_for_processing(mock_transcript)

    assert isinstance(result, ValidationOk)
    assert result.recording_id == "test-recording-id"


@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
async def test_prepare_multitrack_config():

View File

@@ -319,51 +319,3 @@ def test_aws_storage_constructor_rejects_mixed_auth():
        aws_secret_access_key="test-secret",
        aws_role_arn="arn:aws:iam::123456789012:role/test-role",
    )


@pytest.mark.asyncio
async def test_aws_storage_custom_endpoint_url():
    """Test that custom endpoint_url configures path-style addressing and passes endpoint to client."""
    storage = AwsStorage(
        aws_bucket_name="reflector-media",
        aws_region="garage",
        aws_access_key_id="GKtest",
        aws_secret_access_key="secret",
        aws_endpoint_url="http://garage:3900",
    )
    assert storage._endpoint_url == "http://garage:3900"
    assert storage.boto_config.s3["addressing_style"] == "path"
    assert storage.base_url == "http://garage:3900/reflector-media/"
    # retries config preserved (merge, not replace)
    assert storage.boto_config.retries["max_attempts"] == 3

    mock_client = AsyncMock()
    mock_client.put_object = AsyncMock()
    mock_client.__aenter__ = AsyncMock(return_value=mock_client)
    mock_client.__aexit__ = AsyncMock(return_value=None)
    mock_client.generate_presigned_url = AsyncMock(
        return_value="http://garage:3900/reflector-media/test.txt"
    )
    with patch.object(
        storage.session, "client", return_value=mock_client
    ) as mock_session_client:
        await storage.put_file("test.txt", b"data")

    mock_session_client.assert_called_with(
        "s3", config=storage.boto_config, endpoint_url="http://garage:3900"
    )


@pytest.mark.asyncio
async def test_aws_storage_none_endpoint_url():
    """Test that None endpoint preserves current AWS behavior."""
    storage = AwsStorage(
        aws_bucket_name="reflector-bucket",
        aws_region="us-east-1",
        aws_access_key_id="AKIAtest",
        aws_secret_access_key="secret",
    )
    assert storage._endpoint_url is None
    assert storage.base_url == "https://reflector-bucket.s3.amazonaws.com/"
    # No s3 addressing_style override — boto_config should only have retries
    assert not hasattr(storage.boto_config, "s3") or storage.boto_config.s3 is None

676 server/uv.lock (generated)
View File

@@ -235,6 +235,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
]
[[package]]
name = "antlr4-python3-runtime"
version = "4.9.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034 }
[[package]]
name = "anyio"
version = "4.9.0"
@@ -261,6 +267,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/f5/c36551e93acba41a59939ae6a0fb77ddb3f2e8e8caa716410c65f7341f72/asgi_lifespan-2.1.0-py3-none-any.whl", hash = "sha256:ed840706680e28428c01e14afb3875d7d76d3206f3d5b2f2294e059b5c23804f", size = 10895 },
]
[[package]]
name = "asteroid-filterbanks"
version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/fa/5c2be1f96dc179f83cdd3bb267edbd1f47d08f756785c016d5c2163901a7/asteroid-filterbanks-0.4.0.tar.gz", hash = "sha256:415f89d1dcf2b13b35f03f7a9370968ac4e6fa6800633c522dac992b283409b9", size = 24599 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/7c/83ff6046176a675e6a1e8aeefed8892cd97fe7c46af93cc540d1b24b8323/asteroid_filterbanks-0.4.0-py3-none-any.whl", hash = "sha256:4932ac8b6acc6e08fb87cbe8ece84215b5a74eee284fe83acf3540a72a02eaf5", size = 29912 },
]
[[package]]
name = "async-timeout"
version = "5.0.1"
@@ -582,6 +603,56 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 },
]
[[package]]
name = "colorlog"
version = "6.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424 },
]
[[package]]
name = "contourpy"
version = "1.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773 },
{ url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149 },
{ url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222 },
{ url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234 },
{ url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555 },
{ url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238 },
{ url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218 },
{ url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867 },
{ url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677 },
{ url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234 },
{ url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123 },
{ url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419 },
{ url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979 },
{ url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653 },
{ url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536 },
{ url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397 },
{ url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601 },
{ url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288 },
{ url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386 },
{ url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018 },
{ url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567 },
{ url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655 },
{ url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809 },
{ url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593 },
{ url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202 },
{ url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207 },
{ url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315 },
]
[[package]]
name = "coverage"
version = "7.9.2"
@@ -682,6 +753,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/4c/0ecd260233290bee4b2facec4d8e755e57d8781d68f276e1248433993c9f/ctranslate2-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:511cdf810a5bf6a2cec735799e5cd47966e63f8f7688fdee1b97fed621abda00", size = 19470040 },
]
[[package]]
name = "cycler"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 },
]
[[package]]
name = "databases"
version = "0.8.0"
@@ -794,6 +874,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 },
]
[[package]]
name = "docopt"
version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901 }
[[package]]
name = "ecdsa"
version = "0.19.1"
@@ -806,6 +892,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607 },
]
[[package]]
name = "einops"
version = "0.8.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/81/df4fbe24dff8ba3934af99044188e20a98ed441ad17a274539b74e82e126/einops-0.8.1.tar.gz", hash = "sha256:de5d960a7a761225532e0f1959e5315ebeafc0cd43394732f103ca44b9837e84", size = 54805 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/62/9773de14fe6c45c23649e98b83231fffd7b9892b6cf863251dc2afa73643/einops-0.8.1-py3-none-any.whl", hash = "sha256:919387eb55330f5757c6bea9165c5ff5cfe63a642682ea788a6d472576d81737", size = 64359 },
]
[[package]]
name = "email-validator"
version = "2.2.0"
@@ -939,6 +1034,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 },
]
[[package]]
name = "fonttools"
version = "4.59.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0d/a5/fba25f9fbdab96e26dedcaeeba125e5f05a09043bf888e0305326e55685b/fonttools-4.59.2.tar.gz", hash = "sha256:e72c0749b06113f50bcb80332364c6be83a9582d6e3db3fe0b280f996dc2ef22", size = 3540889 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/53/742fcd750ae0bdc74de4c0ff923111199cc2f90a4ee87aaddad505b6f477/fonttools-4.59.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:511946e8d7ea5c0d6c7a53c4cb3ee48eda9ab9797cd9bf5d95829a398400354f", size = 2774961 },
{ url = "https://files.pythonhosted.org/packages/57/2a/976f5f9fa3b4dd911dc58d07358467bec20e813d933bc5d3db1a955dd456/fonttools-4.59.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5e2682cf7be766d84f462ba8828d01e00c8751a8e8e7ce12d7784ccb69a30d", size = 2344690 },
{ url = "https://files.pythonhosted.org/packages/c1/8f/b7eefc274fcf370911e292e95565c8253b0b87c82a53919ab3c795a4f50e/fonttools-4.59.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5729e12a982dba3eeae650de48b06f3b9ddb51e9aee2fcaf195b7d09a96250e2", size = 5026910 },
{ url = "https://files.pythonhosted.org/packages/69/95/864726eaa8f9d4e053d0c462e64d5830ec7c599cbdf1db9e40f25ca3972e/fonttools-4.59.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c52694eae5d652361d59ecdb5a2246bff7cff13b6367a12da8499e9df56d148d", size = 4971031 },
{ url = "https://files.pythonhosted.org/packages/24/4c/b8c4735ebdea20696277c70c79e0de615dbe477834e5a7c2569aa1db4033/fonttools-4.59.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1f1bbc23ba1312bd8959896f46f667753b90216852d2a8cfa2d07e0cb234144", size = 5006112 },
{ url = "https://files.pythonhosted.org/packages/3b/23/f9ea29c292aa2fc1ea381b2e5621ac436d5e3e0a5dee24ffe5404e58eae8/fonttools-4.59.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a1bfe5378962825dabe741720885e8b9ae9745ec7ecc4a5ec1f1ce59a6062bf", size = 5117671 },
{ url = "https://files.pythonhosted.org/packages/ba/07/cfea304c555bf06e86071ff2a3916bc90f7c07ec85b23bab758d4908c33d/fonttools-4.59.2-cp311-cp311-win32.whl", hash = "sha256:e937790f3c2c18a1cbc7da101550a84319eb48023a715914477d2e7faeaba570", size = 2218157 },
{ url = "https://files.pythonhosted.org/packages/d7/de/35d839aa69db737a3f9f3a45000ca24721834d40118652a5775d5eca8ebb/fonttools-4.59.2-cp311-cp311-win_amd64.whl", hash = "sha256:9836394e2f4ce5f9c0a7690ee93bd90aa1adc6b054f1a57b562c5d242c903104", size = 2265846 },
{ url = "https://files.pythonhosted.org/packages/ba/3d/1f45db2df51e7bfa55492e8f23f383d372200be3a0ded4bf56a92753dd1f/fonttools-4.59.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82906d002c349cad647a7634b004825a7335f8159d0d035ae89253b4abf6f3ea", size = 2769711 },
{ url = "https://files.pythonhosted.org/packages/29/df/cd236ab32a8abfd11558f296e064424258db5edefd1279ffdbcfd4fd8b76/fonttools-4.59.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a10c1bd7644dc58f8862d8ba0cf9fb7fef0af01ea184ba6ce3f50ab7dfe74d5a", size = 2340225 },
{ url = "https://files.pythonhosted.org/packages/98/12/b6f9f964fe6d4b4dd4406bcbd3328821c3de1f909ffc3ffa558fe72af48c/fonttools-4.59.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:738f31f23e0339785fd67652a94bc69ea49e413dfdb14dcb8c8ff383d249464e", size = 4912766 },
{ url = "https://files.pythonhosted.org/packages/73/78/82bde2f2d2c306ef3909b927363170b83df96171f74e0ccb47ad344563cd/fonttools-4.59.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ec99f9bdfee9cdb4a9172f9e8fd578cce5feb231f598909e0aecf5418da4f25", size = 4955178 },
{ url = "https://files.pythonhosted.org/packages/92/77/7de766afe2d31dda8ee46d7e479f35c7d48747e558961489a2d6e3a02bd4/fonttools-4.59.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0476ea74161322e08c7a982f83558a2b81b491509984523a1a540baf8611cc31", size = 4897898 },
{ url = "https://files.pythonhosted.org/packages/c5/77/ce0e0b905d62a06415fda9f2b2e109a24a5db54a59502b769e9e297d2242/fonttools-4.59.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:95922a922daa1f77cc72611747c156cfb38030ead72436a2c551d30ecef519b9", size = 5049144 },
{ url = "https://files.pythonhosted.org/packages/d9/ea/870d93aefd23fff2e07cbeebdc332527868422a433c64062c09d4d5e7fe6/fonttools-4.59.2-cp312-cp312-win32.whl", hash = "sha256:39ad9612c6a622726a6a130e8ab15794558591f999673f1ee7d2f3d30f6a3e1c", size = 2206473 },
{ url = "https://files.pythonhosted.org/packages/61/c4/e44bad000c4a4bb2e9ca11491d266e857df98ab6d7428441b173f0fe2517/fonttools-4.59.2-cp312-cp312-win_amd64.whl", hash = "sha256:980fd7388e461b19a881d35013fec32c713ffea1fc37aef2f77d11f332dfd7da", size = 2254706 },
{ url = "https://files.pythonhosted.org/packages/65/a4/d2f7be3c86708912c02571db0b550121caab8cd88a3c0aacb9cfa15ea66e/fonttools-4.59.2-py3-none-any.whl", hash = "sha256:8bd0f759020e87bb5d323e6283914d9bf4ae35a7307dafb2cbd1e379e720ad37", size = 1132315 },
]
[[package]]
name = "frozenlist"
version = "1.7.0"
@@ -991,6 +1111,11 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597 },
]
[package.optional-dependencies]
http = [
{ name = "aiohttp" },
]
[[package]]
name = "google-crc32c"
version = "1.7.1"
@@ -1255,6 +1380,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 },
]
[[package]]
name = "hyperpyyaml"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyyaml" },
{ name = "ruamel-yaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/52/e3/3ac46d9a662b037f699a6948b39c8d03bfcff0b592335d5953ba0c55d453/HyperPyYAML-1.2.2.tar.gz", hash = "sha256:bdb734210d18770a262f500fe5755c7a44a5d3b91521b06e24f7a00a36ee0f87", size = 17085 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/c9/751b6401887f4b50f9307cc1e53d287b3dc77c375c126aeb6335aff73ccb/HyperPyYAML-1.2.2-py3-none-any.whl", hash = "sha256:3c5864bdc8864b2f0fbd7bc495e7e8fdf2dfd5dd80116f72da27ca96a128bdeb", size = 16118 },
]
[[package]]
name = "icalendar"
version = "6.3.1"
@@ -1397,6 +1535,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437 },
]
[[package]]
name = "julius"
version = "0.2.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/19/c9e1596b5572c786b93428d0904280e964c930fae7e6c9368ed9e1b63922/julius-0.2.7.tar.gz", hash = "sha256:3c0f5f5306d7d6016fcc95196b274cae6f07e2c9596eed314e4e7641554fbb08", size = 59640 }
[[package]]
name = "kiwisolver"
version = "1.4.9"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167 },
{ url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579 },
{ url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309 },
{ url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596 },
{ url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548 },
{ url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618 },
{ url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437 },
{ url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742 },
{ url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810 },
{ url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579 },
{ url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071 },
{ url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840 },
{ url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159 },
{ url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686 },
{ url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460 },
{ url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952 },
{ url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756 },
{ url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404 },
{ url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410 },
{ url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631 },
{ url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963 },
{ url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295 },
{ url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987 },
{ url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817 },
{ url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895 },
{ url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992 },
{ url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104 },
{ url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592 },
{ url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281 },
{ url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009 },
{ url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929 },
]
[[package]]
name = "kombu"
version = "5.5.4"
@@ -1459,6 +1646,41 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/1e/408fd10217eac0e43aea0604be22b4851a09e03d761d44d4ea12089dd70e/levenshtein-0.27.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7987ef006a3cf56a4532bd4c90c2d3b7b4ca9ad3bf8ae1ee5713c4a3bdfda913", size = 98045 },
]
[[package]]
name = "lightning"
version = "2.5.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fsspec", extra = ["http"] },
{ name = "lightning-utilities" },
{ name = "packaging" },
{ name = "pytorch-lightning" },
{ name = "pyyaml" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torchmetrics" },
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0f/dd/86bb3bebadcdbc6e6e5a63657f0a03f74cd065b5ea965896679f76fec0b4/lightning-2.5.5.tar.gz", hash = "sha256:4d3d66c5b1481364a7e6a1ce8ddde1777a04fa740a3145ec218a9941aed7dd30", size = 640770 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/d0/4b4fbafc3b18df91207a6e46782d9fd1905f9f45cb2c3b8dfbb239aef781/lightning-2.5.5-py3-none-any.whl", hash = "sha256:69eb248beadd7b600bf48eff00a0ec8af171ec7a678d23787c4aedf12e225e8f", size = 828490 },
]
[[package]]
name = "lightning-utilities"
version = "0.15.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
{ name = "setuptools" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b8/39/6fc58ca81492db047149b4b8fd385aa1bfb8c28cd7cacb0c7eb0c44d842f/lightning_utilities-0.15.2.tar.gz", hash = "sha256:cdf12f530214a63dacefd713f180d1ecf5d165338101617b4742e8f22c032e24", size = 31090 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/73/3d757cb3fc16f0f9794dd289bcd0c4a031d9cf54d8137d6b984b2d02edf3/lightning_utilities-0.15.2-py3-none-any.whl", hash = "sha256:ad3ab1703775044bbf880dbf7ddaaac899396c96315f3aa1779cec9d618a9841", size = 29431 },
]
[[package]]
name = "llama-cloud"
version = "0.1.32"
@@ -1806,6 +2028,42 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878 },
]
[[package]]
name = "matplotlib"
version = "3.10.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "contourpy" },
{ name = "cycler" },
{ name = "fonttools" },
{ name = "kiwisolver" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "pillow" },
{ name = "pyparsing" },
{ name = "python-dateutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a0/59/c3e6453a9676ffba145309a73c462bb407f4400de7de3f2b41af70720a3c/matplotlib-3.10.6.tar.gz", hash = "sha256:ec01b645840dd1996df21ee37f208cd8ba57644779fa20464010638013d3203c", size = 34804264 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/80/d6/5d3665aa44c49005aaacaa68ddea6fcb27345961cd538a98bb0177934ede/matplotlib-3.10.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:905b60d1cb0ee604ce65b297b61cf8be9f4e6cfecf95a3fe1c388b5266bc8f4f", size = 8257527 },
{ url = "https://files.pythonhosted.org/packages/8c/af/30ddefe19ca67eebd70047dabf50f899eaff6f3c5e6a1a7edaecaf63f794/matplotlib-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bac38d816637343e53d7185d0c66677ff30ffb131044a81898b5792c956ba76", size = 8119583 },
{ url = "https://files.pythonhosted.org/packages/d3/29/4a8650a3dcae97fa4f375d46efcb25920d67b512186f8a6788b896062a81/matplotlib-3.10.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:942a8de2b5bfff1de31d95722f702e2966b8a7e31f4e68f7cd963c7cd8861cf6", size = 8692682 },
{ url = "https://files.pythonhosted.org/packages/aa/d3/b793b9cb061cfd5d42ff0f69d1822f8d5dbc94e004618e48a97a8373179a/matplotlib-3.10.6-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3276c85370bc0dfca051ec65c5817d1e0f8f5ce1b7787528ec8ed2d524bbc2f", size = 9521065 },
{ url = "https://files.pythonhosted.org/packages/f7/c5/53de5629f223c1c66668d46ac2621961970d21916a4bc3862b174eb2a88f/matplotlib-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9df5851b219225731f564e4b9e7f2ac1e13c9e6481f941b5631a0f8e2d9387ce", size = 9576888 },
{ url = "https://files.pythonhosted.org/packages/fc/8e/0a18d6d7d2d0a2e66585032a760d13662e5250c784d53ad50434e9560991/matplotlib-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:abb5d9478625dd9c9eb51a06d39aae71eda749ae9b3138afb23eb38824026c7e", size = 8115158 },
{ url = "https://files.pythonhosted.org/packages/07/b3/1a5107bb66c261e23b9338070702597a2d374e5aa7004b7adfc754fbed02/matplotlib-3.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:886f989ccfae63659183173bb3fced7fd65e9eb793c3cc21c273add368536951", size = 7992444 },
{ url = "https://files.pythonhosted.org/packages/ea/1a/7042f7430055d567cc3257ac409fcf608599ab27459457f13772c2d9778b/matplotlib-3.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31ca662df6a80bd426f871105fdd69db7543e28e73a9f2afe80de7e531eb2347", size = 8272404 },
{ url = "https://files.pythonhosted.org/packages/a9/5d/1d5f33f5b43f4f9e69e6a5fe1fb9090936ae7bc8e2ff6158e7a76542633b/matplotlib-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1678bb61d897bb4ac4757b5ecfb02bfb3fddf7f808000fb81e09c510712fda75", size = 8128262 },
{ url = "https://files.pythonhosted.org/packages/67/c3/135fdbbbf84e0979712df58e5e22b4f257b3f5e52a3c4aacf1b8abec0d09/matplotlib-3.10.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:56cd2d20842f58c03d2d6e6c1f1cf5548ad6f66b91e1e48f814e4fb5abd1cb95", size = 8697008 },
{ url = "https://files.pythonhosted.org/packages/9c/be/c443ea428fb2488a3ea7608714b1bd85a82738c45da21b447dc49e2f8e5d/matplotlib-3.10.6-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:662df55604a2f9a45435566d6e2660e41efe83cd94f4288dfbf1e6d1eae4b0bb", size = 9530166 },
{ url = "https://files.pythonhosted.org/packages/a9/35/48441422b044d74034aea2a3e0d1a49023f12150ebc58f16600132b9bbaf/matplotlib-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08f141d55148cd1fc870c3387d70ca4df16dee10e909b3b038782bd4bda6ea07", size = 9593105 },
{ url = "https://files.pythonhosted.org/packages/45/c3/994ef20eb4154ab84cc08d033834555319e4af970165e6c8894050af0b3c/matplotlib-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:590f5925c2d650b5c9d813c5b3b5fc53f2929c3f8ef463e4ecfa7e052044fb2b", size = 8122784 },
{ url = "https://files.pythonhosted.org/packages/57/b8/5c85d9ae0e40f04e71bedb053aada5d6bab1f9b5399a0937afb5d6b02d98/matplotlib-3.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:f44c8d264a71609c79a78d50349e724f5d5fc3684ead7c2a473665ee63d868aa", size = 7992823 },
{ url = "https://files.pythonhosted.org/packages/12/bb/02c35a51484aae5f49bd29f091286e7af5f3f677a9736c58a92b3c78baeb/matplotlib-3.10.6-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f2d684c3204fa62421bbf770ddfebc6b50130f9cad65531eeba19236d73bb488", size = 8252296 },
{ url = "https://files.pythonhosted.org/packages/7d/85/41701e3092005aee9a2445f5ee3904d9dbd4a7df7a45905ffef29b7ef098/matplotlib-3.10.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6f4a69196e663a41d12a728fab8751177215357906436804217d6d9cf0d4d6cf", size = 8116749 },
{ url = "https://files.pythonhosted.org/packages/16/53/8d8fa0ea32a8c8239e04d022f6c059ee5e1b77517769feccd50f1df43d6d/matplotlib-3.10.6-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d6ca6ef03dfd269f4ead566ec6f3fb9becf8dab146fb999022ed85ee9f6b3eb", size = 8693933 },
]
[[package]]
name = "mdurl"
version = "0.1.2"
@@ -1947,6 +2205,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/48/6b/1c6b515a83d5564b1698a61efa245727c8feecf308f4091f565988519d20/numpy-2.3.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e610832418a2bc09d974cc9fecebfa51e9532d6190223bc5ef6a7402ebf3b5cb", size = 12927246 },
]
[[package]]
name = "omegaconf"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "antlr4-python3-runtime" },
{ name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500 },
]
[[package]]
name = "onnxruntime"
version = "1.22.1"
@@ -1989,6 +2260,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/91/1f1cf577f745e956b276a8b1d3d76fa7a6ee0c2b05db3b001b900f2c71db/openai-1.97.0-py3-none-any.whl", hash = "sha256:a1c24d96f4609f3f7f51c9e1c2606d97cc6e334833438659cfd687e9c972c610", size = 764953 },
]
[[package]]
name = "optuna"
version = "4.5.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alembic" },
{ name = "colorlog" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "pyyaml" },
{ name = "sqlalchemy" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/53/a3/bcd1e5500de6ec794c085a277e5b624e60b4fac1790681d7cdbde25b93a2/optuna-4.5.0.tar.gz", hash = "sha256:264844da16dad744dea295057d8bc218646129c47567d52c35a201d9f99942ba", size = 472338 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/12/cba81286cbaf0f0c3f0473846cfd992cb240bdcea816bf2ef7de8ed0f744/optuna-4.5.0-py3-none-any.whl", hash = "sha256:5b8a783e84e448b0742501bc27195344a28d2c77bd2feef5b558544d954851b0", size = 400872 },
]
[[package]]
name = "packaging"
version = "25.0"
@@ -2090,6 +2379,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 },
]
[[package]]
name = "primepy"
version = "1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/35/77/0cfa1b4697cfb5336f3a96e8bc73327f64610be3a64c97275f1801afb395/primePy-1.3.tar.gz", hash = "sha256:25fd7e25344b0789a5984c75d89f054fcf1f180bef20c998e4befbac92de4669", size = 3914 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/74/c1/bb7e334135859c3a92ec399bc89293ea73f28e815e35b43929c8db6af030/primePy-1.3-py3-none-any.whl", hash = "sha256:5ed443718765be9bf7e2ff4c56cdff71b42140a15b39d054f9d99f0009e2317a", size = 4040 },
]
[[package]]
name = "prometheus-client"
version = "0.22.1"
@@ -2226,6 +2524,109 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 },
]
[[package]]
name = "pyannote-audio"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asteroid-filterbanks" },
{ name = "einops" },
{ name = "huggingface-hub" },
{ name = "lightning" },
{ name = "omegaconf" },
{ name = "pyannote-core" },
{ name = "pyannote-database" },
{ name = "pyannote-metrics" },
{ name = "pyannote-pipeline" },
{ name = "pytorch-metric-learning" },
{ name = "rich" },
{ name = "semver" },
{ name = "soundfile" },
{ name = "speechbrain" },
{ name = "tensorboardx" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torch-audiomentations" },
{ name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
{ name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
{ name = "torchmetrics" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e9/00/3b96ca7ad0641e4f64cfaa2af153dc7da0998ff972280e1c1681b1fcc243/pyannote_audio-3.3.2.tar.gz", hash = "sha256:b2115e86b0db5faedb9f36ee1a150cebd07f7758e65e815accdac1a12ca9c777", size = 13664309 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/e6/76049470d90217f9a15a34abf3e92d782cabc3fb4ab27515c9baaa5495d1/pyannote.audio-3.3.2-py2.py3-none-any.whl", hash = "sha256:599c694acd5d193215147ff82d0bf638bb191204ed502bd9fde8ff582e20aa1c", size = 898707 },
{ url = "https://files.pythonhosted.org/packages/b7/9a/98a8992727e762b031ed30451d5726ece46cf8bb7b872a9dba5cef011e5d/pyannote_audio-3.3.2-py2.py3-none-any.whl", hash = "sha256:23e0dcedda920cb2e154e146bcd9663289ee7942d0e012663dad76f2e571ebeb", size = 897827 },
]
[[package]]
name = "pyannote-core"
version = "5.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "scipy" },
{ name = "sortedcontainers" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/65/03/feaf7534206f02c75baf151ce4b8c322b402a6f477c2be82f69d9269cbe6/pyannote.core-5.0.0.tar.gz", hash = "sha256:1a55bcc8bd680ba6be5fa53efa3b6f3d2cdd67144c07b6b4d8d66d5cb0d2096f", size = 59247 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/84/c4/370bc8ba66815a5832ece753a1009388bb07ea353d21c83f2d5a1a436f2c/pyannote.core-5.0.0-py3-none-any.whl", hash = "sha256:04920a6754492242ce0dc6017545595ab643870fe69a994f20c1a5f2da0544d0", size = 58475 },
]
[[package]]
name = "pyannote-database"
version = "5.1.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pandas" },
{ name = "pyannote-core" },
{ name = "pyyaml" },
{ name = "typer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a9/ae/de36413d69a46be87cb612ebbcdc4eacbeebce3bc809124603e44a88fe26/pyannote.database-5.1.3.tar.gz", hash = "sha256:0eaf64c1cc506718de60d2d702f1359b1ae7ff252ee3e4799f1c5e378cd52c31", size = 49957 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/64/92d51a3a05615ba58be8ba62a43f9f9f952d9f3646f7e4fb7826e5a3a24e/pyannote.database-5.1.3-py3-none-any.whl", hash = "sha256:37887844c7dfbcc075cb591eddc00aff45fae1ed905344e1f43e0090e63bd40a", size = 48127 },
]
[[package]]
name = "pyannote-metrics"
version = "3.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docopt" },
{ name = "matplotlib" },
{ name = "numpy" },
{ name = "pandas" },
{ name = "pyannote-core" },
{ name = "pyannote-database" },
{ name = "scikit-learn" },
{ name = "scipy" },
{ name = "sympy" },
{ name = "tabulate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/2b/6c5f01d3c49aa1c160765946e23782ca6436ae8b9bc514b56319ff5f16e7/pyannote.metrics-3.2.1.tar.gz", hash = "sha256:08024255a3550e96a8e9da4f5f4af326886548480de891414567c8900920ee5c", size = 49086 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/7d/035b370ab834b30e849fe9cd092b7bd7f321fcc4a2c56b84e96476b7ede5/pyannote.metrics-3.2.1-py3-none-any.whl", hash = "sha256:46be797cdade26c82773e5018659ae610145260069c7c5bf3d3c8a029ade8e22", size = 51386 },
]
[[package]]
name = "pyannote-pipeline"
version = "3.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docopt" },
{ name = "filelock" },
{ name = "optuna" },
{ name = "pyannote-core" },
{ name = "pyannote-database" },
{ name = "pyyaml" },
{ name = "scikit-learn" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/35/04/4bcfe0dd588577a188328b806f3a7213d8cead0ce5fe5784d01fd57df93f/pyannote.pipeline-3.0.1.tar.gz", hash = "sha256:021794e26a2cf5d8fb5bb1835951e71f5fac33eb14e23dfb7468e16b1b805151", size = 34486 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/83/42/1bf7cbf061ed05c580bfb63bffdd3f3474cbd5c02bee4fac518eea9e9d9e/pyannote.pipeline-3.0.1-py3-none-any.whl", hash = "sha256:819bde4c4dd514f740f2373dfec794832b9fc8e346a35e43a7681625ee187393", size = 31517 },
]
[[package]]
name = "pyasn1"
version = "0.6.1"
@@ -2405,6 +2806,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/80/28/2659c02301b9500751f8d42f9a6632e1508aa5120de5e43042b8b30f8d5d/pyopenssl-25.1.0-py3-none-any.whl", hash = "sha256:2b11f239acc47ac2e5aca04fd7fa829800aeee22a2eb30d744572a157bd8a1ab", size = 56771 },
]
[[package]]
name = "pyparsing"
version = "3.2.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120 },
]
[[package]]
name = "pypdf"
version = "5.8.0"
@@ -2612,6 +3022,42 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
]
[[package]]
name = "pytorch-lightning"
version = "2.5.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fsspec", extra = ["http"] },
{ name = "lightning-utilities" },
{ name = "packaging" },
{ name = "pyyaml" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torchmetrics" },
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/16/78/bce84aab9a5b3b2e9d087d4f1a6be9b481adbfaac4903bc9daaaf09d49a3/pytorch_lightning-2.5.5.tar.gz", hash = "sha256:d6fc8173d1d6e49abfd16855ea05d2eb2415e68593f33d43e59028ecb4e64087", size = 643703 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/f6/99a5c66478f469598dee25b0e29b302b5bddd4e03ed0da79608ac964056e/pytorch_lightning-2.5.5-py3-none-any.whl", hash = "sha256:0b533991df2353c0c6ea9ca10a7d0728b73631fd61f5a15511b19bee2aef8af0", size = 832431 },
]
[[package]]
name = "pytorch-metric-learning"
version = "2.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "scikit-learn" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9b/80/6e61b1a91debf4c1b47d441f9a9d7fe2aabcdd9575ed70b2811474eb95c3/pytorch-metric-learning-2.9.0.tar.gz", hash = "sha256:27a626caf5e2876a0fd666605a78cb67ef7597e25d7a68c18053dd503830701f", size = 84530 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/46/7d/73ef5052f57b7720cad00e16598db3592a5ef4826745ffca67a2f085d4dc/pytorch_metric_learning-2.9.0-py3-none-any.whl", hash = "sha256:d51646006dc87168f00cf954785db133a4c5aac81253877248737aa42ef6432a", size = 127801 },
]
[[package]]
name = "pytz"
version = "2025.2"
@@ -2788,6 +3234,7 @@ evaluation = [
]
local = [
{ name = "faster-whisper" },
{ name = "pyannote-audio" },
]
silero-vad = [
{ name = "silero-vad" },
@@ -2860,7 +3307,10 @@ evaluation = [
{ name = "pydantic", specifier = ">=2.1.1" },
{ name = "tqdm", specifier = ">=4.66.0" },
]
local = [{ name = "faster-whisper", specifier = ">=0.10.0" }]
local = [
{ name = "faster-whisper", specifier = ">=0.10.0" },
{ name = "pyannote-audio", specifier = ">=3.3.2" },
]
silero-vad = [
{ name = "silero-vad", specifier = ">=5.1.2" },
{ name = "torch", specifier = ">=2.8.0", index = "https://download.pytorch.org/whl/cpu" },
@@ -3064,6 +3514,44 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 },
]
[[package]]
name = "ruamel-yaml"
version = "0.18.15"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ruamel-yaml-clib", marker = "platform_python_implementation == 'CPython'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3e/db/f3950f5e5031b618aae9f423a39bf81a55c148aecd15a34527898e752cf4/ruamel.yaml-0.18.15.tar.gz", hash = "sha256:dbfca74b018c4c3fba0b9cc9ee33e53c371194a9000e694995e620490fd40700", size = 146865 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/e5/f2a0621f1781b76a38194acae72f01e37b1941470407345b6e8653ad7640/ruamel.yaml-0.18.15-py3-none-any.whl", hash = "sha256:148f6488d698b7a5eded5ea793a025308b25eca97208181b6a026037f391f701", size = 119702 },
]
[[package]]
name = "ruamel-yaml-clib"
version = "0.2.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f", size = 225315 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6", size = 132224 },
{ url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e", size = 641480 },
{ url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e", size = 739068 },
{ url = "https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52", size = 703012 },
{ url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642", size = 704352 },
{ url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2", size = 737344 },
{ url = "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3", size = 714498 },
{ url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4", size = 100205 },
{ url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb", size = 118185 },
{ url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632", size = 133433 },
{ url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d", size = 647362 },
{ url = "https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c", size = 754118 },
{ url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497 },
{ url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042 },
{ url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831 },
{ url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692 },
{ url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777 },
{ url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523 },
]
[[package]]
name = "s3transfer"
version = "0.13.0"
@@ -3098,6 +3586,68 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878 },
]
[[package]]
name = "scikit-learn"
version = "1.7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "joblib" },
{ name = "numpy" },
{ name = "scipy" },
{ name = "threadpoolctl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/41/84/5f4af978fff619706b8961accac84780a6d298d82a8873446f72edb4ead0/scikit_learn-1.7.1.tar.gz", hash = "sha256:24b3f1e976a4665aa74ee0fcaac2b8fccc6ae77c8e07ab25da3ba6d3292b9802", size = 7190445 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/bd/a23177930abd81b96daffa30ef9c54ddbf544d3226b8788ce4c3ef1067b4/scikit_learn-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90c8494ea23e24c0fb371afc474618c1019dc152ce4a10e4607e62196113851b", size = 9334838 },
{ url = "https://files.pythonhosted.org/packages/8d/a1/d3a7628630a711e2ac0d1a482910da174b629f44e7dd8cfcd6924a4ef81a/scikit_learn-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:bb870c0daf3bf3be145ec51df8ac84720d9972170786601039f024bf6d61a518", size = 8651241 },
{ url = "https://files.pythonhosted.org/packages/26/92/85ec172418f39474c1cd0221d611345d4f433fc4ee2fc68e01f524ccc4e4/scikit_learn-1.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40daccd1b5623f39e8943ab39735cadf0bdce80e67cdca2adcb5426e987320a8", size = 9718677 },
{ url = "https://files.pythonhosted.org/packages/df/ce/abdb1dcbb1d2b66168ec43b23ee0cee356b4cc4100ddee3943934ebf1480/scikit_learn-1.7.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:30d1f413cfc0aa5a99132a554f1d80517563c34a9d3e7c118fde2d273c6fe0f7", size = 9511189 },
{ url = "https://files.pythonhosted.org/packages/b2/3b/47b5eaee01ef2b5a80ba3f7f6ecf79587cb458690857d4777bfd77371c6f/scikit_learn-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:c711d652829a1805a95d7fe96654604a8f16eab5a9e9ad87b3e60173415cb650", size = 8914794 },
{ url = "https://files.pythonhosted.org/packages/cb/16/57f176585b35ed865f51b04117947fe20f130f78940c6477b6d66279c9c2/scikit_learn-1.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3cee419b49b5bbae8796ecd690f97aa412ef1674410c23fc3257c6b8b85b8087", size = 9260431 },
{ url = "https://files.pythonhosted.org/packages/67/4e/899317092f5efcab0e9bc929e3391341cec8fb0e816c4789686770024580/scikit_learn-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2fd8b8d35817b0d9ebf0b576f7d5ffbbabdb55536b0655a8aaae629d7ffd2e1f", size = 8637191 },
{ url = "https://files.pythonhosted.org/packages/f3/1b/998312db6d361ded1dd56b457ada371a8d8d77ca2195a7d18fd8a1736f21/scikit_learn-1.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:588410fa19a96a69763202f1d6b7b91d5d7a5d73be36e189bc6396bfb355bd87", size = 9486346 },
{ url = "https://files.pythonhosted.org/packages/ad/09/a2aa0b4e644e5c4ede7006748f24e72863ba2ae71897fecfd832afea01b4/scikit_learn-1.7.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3142f0abe1ad1d1c31a2ae987621e41f6b578144a911ff4ac94781a583adad7", size = 9290988 },
{ url = "https://files.pythonhosted.org/packages/15/fa/c61a787e35f05f17fc10523f567677ec4eeee5f95aa4798dbbbcd9625617/scikit_learn-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3ddd9092c1bd469acab337d87930067c87eac6bd544f8d5027430983f1e1ae88", size = 8735568 },
]
[[package]]
name = "scipy"
version = "1.16.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/4a/b927028464795439faec8eaf0b03b011005c487bb2d07409f28bf30879c4/scipy-1.16.1.tar.gz", hash = "sha256:44c76f9e8b6e8e488a586190ab38016e4ed2f8a038af7cd3defa903c0a2238b3", size = 30580861 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/da/91/812adc6f74409b461e3a5fa97f4f74c769016919203138a3bf6fc24ba4c5/scipy-1.16.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c033fa32bab91dc98ca59d0cf23bb876454e2bb02cbe592d5023138778f70030", size = 36552519 },
{ url = "https://files.pythonhosted.org/packages/47/18/8e355edcf3b71418d9e9f9acd2708cc3a6c27e8f98fde0ac34b8a0b45407/scipy-1.16.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6e5c2f74e5df33479b5cd4e97a9104c511518fbd979aa9b8f6aec18b2e9ecae7", size = 28638010 },
{ url = "https://files.pythonhosted.org/packages/d9/eb/e931853058607bdfbc11b86df19ae7a08686121c203483f62f1ecae5989c/scipy-1.16.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0a55ffe0ba0f59666e90951971a884d1ff6f4ec3275a48f472cfb64175570f77", size = 20909790 },
{ url = "https://files.pythonhosted.org/packages/45/0c/be83a271d6e96750cd0be2e000f35ff18880a46f05ce8b5d3465dc0f7a2a/scipy-1.16.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f8a5d6cd147acecc2603fbd382fed6c46f474cccfcf69ea32582e033fb54dcfe", size = 23513352 },
{ url = "https://files.pythonhosted.org/packages/7c/bf/fe6eb47e74f762f933cca962db7f2c7183acfdc4483bd1c3813cfe83e538/scipy-1.16.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb18899127278058bcc09e7b9966d41a5a43740b5bb8dcba401bd983f82e885b", size = 33534643 },
{ url = "https://files.pythonhosted.org/packages/bb/ba/63f402e74875486b87ec6506a4f93f6d8a0d94d10467280f3d9d7837ce3a/scipy-1.16.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adccd93a2fa937a27aae826d33e3bfa5edf9aa672376a4852d23a7cd67a2e5b7", size = 35376776 },
{ url = "https://files.pythonhosted.org/packages/c3/b4/04eb9d39ec26a1b939689102da23d505ea16cdae3dbb18ffc53d1f831044/scipy-1.16.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18aca1646a29ee9a0625a1be5637fa798d4d81fdf426481f06d69af828f16958", size = 35698906 },
{ url = "https://files.pythonhosted.org/packages/04/d6/bb5468da53321baeb001f6e4e0d9049eadd175a4a497709939128556e3ec/scipy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d85495cef541729a70cdddbbf3e6b903421bc1af3e8e3a9a72a06751f33b7c39", size = 38129275 },
{ url = "https://files.pythonhosted.org/packages/c4/94/994369978509f227cba7dfb9e623254d0d5559506fe994aef4bea3ed469c/scipy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:226652fca853008119c03a8ce71ffe1b3f6d2844cc1686e8f9806edafae68596", size = 38644572 },
{ url = "https://files.pythonhosted.org/packages/f8/d9/ec4864f5896232133f51382b54a08de91a9d1af7a76dfa372894026dfee2/scipy-1.16.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81b433bbeaf35728dad619afc002db9b189e45eebe2cd676effe1fb93fef2b9c", size = 36575194 },
{ url = "https://files.pythonhosted.org/packages/5c/6d/40e81ecfb688e9d25d34a847dca361982a6addf8e31f0957b1a54fbfa994/scipy-1.16.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:886cc81fdb4c6903a3bb0464047c25a6d1016fef77bb97949817d0c0d79f9e04", size = 28594590 },
{ url = "https://files.pythonhosted.org/packages/0e/37/9f65178edfcc629377ce9a64fc09baebea18c80a9e57ae09a52edf84880b/scipy-1.16.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:15240c3aac087a522b4eaedb09f0ad061753c5eebf1ea430859e5bf8640d5919", size = 20866458 },
{ url = "https://files.pythonhosted.org/packages/2c/7b/749a66766871ea4cb1d1ea10f27004db63023074c22abed51f22f09770e0/scipy-1.16.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:65f81a25805f3659b48126b5053d9e823d3215e4a63730b5e1671852a1705921", size = 23539318 },
{ url = "https://files.pythonhosted.org/packages/c4/db/8d4afec60eb833a666434d4541a3151eedbf2494ea6d4d468cbe877f00cd/scipy-1.16.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c62eea7f607f122069b9bad3f99489ddca1a5173bef8a0c75555d7488b6f725", size = 33292899 },
{ url = "https://files.pythonhosted.org/packages/51/1e/79023ca3bbb13a015d7d2757ecca3b81293c663694c35d6541b4dca53e98/scipy-1.16.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f965bbf3235b01c776115ab18f092a95aa74c271a52577bcb0563e85738fd618", size = 35162637 },
{ url = "https://files.pythonhosted.org/packages/b6/49/0648665f9c29fdaca4c679182eb972935b3b4f5ace41d323c32352f29816/scipy-1.16.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f006e323874ffd0b0b816d8c6a8e7f9a73d55ab3b8c3f72b752b226d0e3ac83d", size = 35490507 },
{ url = "https://files.pythonhosted.org/packages/62/8f/66cbb9d6bbb18d8c658f774904f42a92078707a7c71e5347e8bf2f52bb89/scipy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8fd15fc5085ab4cca74cb91fe0a4263b1f32e4420761ddae531ad60934c2119", size = 37923998 },
{ url = "https://files.pythonhosted.org/packages/14/c3/61f273ae550fbf1667675701112e380881905e28448c080b23b5a181df7c/scipy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:f7b8013c6c066609577d910d1a2a077021727af07b6fab0ee22c2f901f22352a", size = 38508060 },
]
[[package]]
name = "semver"
version = "3.0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/d1/d3159231aec234a59dd7d601e9dd9fe96f3afff15efd33c1070019b26132/semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602", size = 269730 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/24/4d91e05817e92e3a61c8a21e08fd0f390f5301f1c448b137c57c4bc6e543/semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746", size = 17912 },
]
[[package]]
name = "sentencepiece"
version = "0.2.0"
@@ -3201,6 +3751,25 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 },
]
[[package]]
name = "soundfile"
version = "0.13.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi" },
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hash = "sha256:b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b", size = 46156 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/28/e2a36573ccbcf3d57c00626a21fe51989380636e821b341d36ccca0c1c3a/soundfile-0.13.1-py2.py3-none-any.whl", hash = "sha256:a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445", size = 25751 },
{ url = "https://files.pythonhosted.org/packages/ea/ab/73e97a5b3cc46bba7ff8650a1504348fa1863a6f9d57d7001c6b67c5f20e/soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33", size = 1142250 },
{ url = "https://files.pythonhosted.org/packages/a0/e5/58fd1a8d7b26fc113af244f966ee3aecf03cb9293cb935daaddc1e455e18/soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:743f12c12c4054921e15736c6be09ac26b3b3d603aef6fd69f9dde68748f2593", size = 1101406 },
{ url = "https://files.pythonhosted.org/packages/58/ae/c0e4a53d77cf6e9a04179535766b3321b0b9ced5f70522e4caf9329f0046/soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9c9e855f5a4d06ce4213f31918653ab7de0c5a8d8107cd2427e44b42df547deb", size = 1235729 },
{ url = "https://files.pythonhosted.org/packages/57/5e/70bdd9579b35003a489fc850b5047beeda26328053ebadc1fb60f320f7db/soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:03267c4e493315294834a0870f31dbb3b28a95561b80b134f0bd3cf2d5f0e618", size = 1313646 },
{ url = "https://files.pythonhosted.org/packages/fe/df/8c11dc4dfceda14e3003bb81a0d0edcaaf0796dd7b4f826ea3e532146bba/soundfile-0.13.1-py2.py3-none-win32.whl", hash = "sha256:c734564fab7c5ddf8e9be5bf70bab68042cd17e9c214c06e365e20d64f9a69d5", size = 899881 },
{ url = "https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl", hash = "sha256:1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9", size = 1019162 },
]
[[package]]
name = "soupsieve"
version = "2.7"
@@ -3210,6 +3779,29 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677 },
]
[[package]]
name = "speechbrain"
version = "1.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "huggingface-hub" },
{ name = "hyperpyyaml" },
{ name = "joblib" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "scipy" },
{ name = "sentencepiece" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
{ name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ab/10/87e666544a4e0cec7cbdc09f26948994831ae0f8bbc58de3bf53b68285ff/speechbrain-1.0.3.tar.gz", hash = "sha256:fcab3c6e90012cecb1eed40ea235733b550137e73da6bfa2340ba191ec714052", size = 747735 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/13/e61f1085aebee17d5fc2df19fcc5177c10379be52578afbecdd615a831c9/speechbrain-1.0.3-py3-none-any.whl", hash = "sha256:9859d4c1b1fb3af3b85523c0c89f52e45a04f305622ed55f31aa32dd2fba19e9", size = 864091 },
]
[[package]]
name = "sqlalchemy"
version = "1.4.54"
@@ -3291,6 +3883,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 },
]
[[package]]
name = "tabulate"
version = "0.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 },
]
[[package]]
name = "tenacity"
version = "9.1.2"
@@ -3300,6 +3901,29 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248 },
]
[[package]]
name = "tensorboardx"
version = "2.6.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "packaging" },
{ name = "protobuf" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2b/c5/d4cc6e293fb837aaf9f76dd7745476aeba8ef7ef5146c3b3f9ee375fe7a5/tensorboardx-2.6.4.tar.gz", hash = "sha256:b163ccb7798b31100b9f5fa4d6bc22dad362d7065c2f24b51e50731adde86828", size = 4769801 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/1d/b5d63f1a6b824282b57f7b581810d20b7a28ca951f2d5b59f1eb0782c12b/tensorboardx-2.6.4-py3-none-any.whl", hash = "sha256:5970cf3a1f0a6a6e8b180ccf46f3fe832b8a25a70b86e5a237048a7c0beb18e2", size = 87201 },
]
[[package]]
name = "threadpoolctl"
version = "3.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638 },
]
[[package]]
name = "tiktoken"
version = "0.9.0"
@@ -3440,6 +4064,40 @@ wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" },
]
[[package]]
name = "torch-audiomentations"
version = "0.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "julius" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torch-pitch-shift" },
{ name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
{ name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/31/8d/2f8fd7e34c75f5ee8de4310c3bd3f22270acd44d1f809e2fe7c12fbf35f8/torch_audiomentations-0.12.0.tar.gz", hash = "sha256:b02d4c5eb86376986a53eb405cca5e34f370ea9284411237508e720c529f7888", size = 52094 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/21/9d/1ee04f49c15d2d632f6f7102061d7c07652858e6d91b58a091531034e84f/torch_audiomentations-0.12.0-py3-none-any.whl", hash = "sha256:1b80b91d2016ccf83979622cac8f702072a79b7dcc4c2bee40f00b26433a786b", size = 48506 },
]
[[package]]
name = "torch-pitch-shift"
version = "1.2.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
{ name = "primepy" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
{ name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
{ name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/a6/722a832bca75d5079f6731e005b3d0c2eec7c6c6863d030620952d143d57/torch_pitch_shift-1.2.5.tar.gz", hash = "sha256:6e1c7531f08d0f407a4c55e5ff8385a41355c5c5d27ab7fa08632e51defbd0ed", size = 4725 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/27/4c/96ac2a09efb56cc3c41fb3ce9b6f4d8c0604499f7481d4a13a7b03e21382/torch_pitch_shift-1.2.5-py3-none-any.whl", hash = "sha256:6f8500cbc13f1c98b11cde1805ce5084f82cdd195c285f34287541f168a7c6a7", size = 5005 },
]
[[package]]
name = "torchaudio"
version = "2.8.0"
@@ -3487,6 +4145,22 @@ wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torchaudio-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:9b302192b570657c1cc787a4d487ae4bbb7f2aab1c01b1fcc46757e7f86f391e" },
]
[[package]]
name = "torchmetrics"
version = "1.8.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "lightning-utilities" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
{ name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/85/2e/48a887a59ecc4a10ce9e8b35b3e3c5cef29d902c4eac143378526e7485cb/torchmetrics-1.8.2.tar.gz", hash = "sha256:cf64a901036bf107f17a524009eea7781c9c5315d130713aeca5747a686fe7a5", size = 580679 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/02/21/aa0f434434c48490f91b65962b1ce863fdcce63febc166ca9fe9d706c2b6/torchmetrics-1.8.2-py3-none-any.whl", hash = "sha256:08382fd96b923e39e904c4d570f3d49e2cc71ccabd2a94e0f895d1f0dac86242", size = 983161 },
]
[[package]]
name = "tqdm"
version = "4.67.1"

View File

@@ -11,7 +11,6 @@ import {
import { useRouter } from "next/navigation";
import { useTranscriptGet } from "../../../../lib/apiHooks";
import { parseNonEmptyString } from "../../../../lib/utils";
import { useWebSockets } from "../../useWebSockets";
type TranscriptProcessing = {
params: Promise<{
@@ -25,7 +24,6 @@ export default function TranscriptProcessing(details: TranscriptProcessing) {
const router = useRouter();
const transcript = useTranscriptGet(transcriptId);
useWebSockets(transcriptId);
useEffect(() => {
const status = transcript.data?.status;

View File

@@ -23,16 +23,7 @@ const useWebRTC = (
let p: Peer;
try {
p = new Peer({
initiator: true,
stream: stream,
// Disable trickle ICE: single SDP exchange (offer + answer) with all candidates.
// Required for HTTP-based signaling; trickle needs WebSocket for candidate exchange.
trickle: false,
config: {
iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
},
});
p = new Peer({ initiator: true, stream: stream });
} catch (error) {
setError(error as Error, "Error creating WebRTC");
return;
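
For context on the configuration this hunk removes: with simple-peer, turning trickle off makes the peer emit a single "signal" event whose SDP already contains every gathered ICE candidate, so one HTTP offer/answer round-trip suffices. A minimal sketch, assuming the simple-peer API as used above (the helper name is illustrative, not the project's):

import Peer from "simple-peer";

// Hypothetical helper mirroring the removed configuration.
function createBatchedPeer(stream: MediaStream): Peer.Instance {
  return new Peer({
    initiator: true,
    stream,
    // trickle: false batches all ICE candidates into the one offer SDP,
    // which is what an HTTP-based signaling exchange needs.
    trickle: false,
    config: { iceServers: [{ urls: "stun:stun.l.google.com:19302" }] },
  });
}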

View File

@@ -1,22 +1,18 @@
import { useEffect, useState } from "react";
import { Topic, FinalSummary, Status } from "./webSocketTypes";
import { useError } from "../../(errors)/errorContext";
import type { components, operations } from "../../reflector-api";
import type { components } from "../../reflector-api";
type AudioWaveform = components["schemas"]["AudioWaveform"];
type GetTranscriptSegmentTopic =
components["schemas"]["GetTranscriptSegmentTopic"];
import { useQueryClient } from "@tanstack/react-query";
import { WEBSOCKET_URL } from "../../lib/apiClient";
import { $api, WEBSOCKET_URL } from "../../lib/apiClient";
import {
invalidateTranscript,
invalidateTranscriptTopics,
invalidateTranscriptWaveform,
} from "../../lib/apiHooks";
import { useAuth } from "../../lib/AuthProvider";
import { parseNonEmptyString } from "../../lib/utils";
type TranscriptWsEvent =
operations["v1_transcript_get_websocket_events"]["responses"][200]["content"]["application/json"];
import { NonEmptyString } from "../../lib/utils";
export type UseWebSockets = {
transcriptTextLive: string;
@@ -31,7 +27,6 @@ export type UseWebSockets = {
};
export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
const auth = useAuth();
const [transcriptTextLive, setTranscriptTextLive] = useState<string>("");
const [translateText, setTranslateText] = useState<string>("");
const [title, setTitle] = useState<string>("");
@@ -336,168 +331,155 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
};
if (!transcriptId) return;
const tsId = parseNonEmptyString(transcriptId);
const MAX_RETRIES = 10;
const url = `${WEBSOCKET_URL}/v1/transcripts/${transcriptId}/events`;
let ws: WebSocket | null = null;
let retryCount = 0;
let retryTimeout: ReturnType<typeof setTimeout> | null = null;
let intentionalClose = false;
let ws = new WebSocket(url);
const connect = () => {
const subprotocols = auth.accessToken
? ["bearer", auth.accessToken]
: undefined;
ws = new WebSocket(url, subprotocols);
ws.onopen = () => {
console.debug("WebSocket connection opened");
};
ws.onopen = () => {
console.debug("WebSocket connection opened");
retryCount = 0;
};
ws.onmessage = (event) => {
const message = JSON.parse(event.data);
ws.onmessage = (event) => {
const message: TranscriptWsEvent = JSON.parse(event.data);
try {
switch (message.event) {
case "TRANSCRIPT":
const newText = (message.data.text ?? "").trim();
const newTranslation = (message.data.translation ?? "").trim();
try {
switch (message.event) {
case "TRANSCRIPT": {
const newText = (message.data.text ?? "").trim();
const newTranslation = (message.data.translation ?? "").trim();
if (!newText) break;
if (!newText) break;
console.debug("TRANSCRIPT event:", newText);
setTextQueue((prevQueue) => [...prevQueue, newText]);
setTranslationQueue((prevQueue) => [...prevQueue, newTranslation]);
console.debug("TRANSCRIPT event:", newText);
setTextQueue((prevQueue) => [...prevQueue, newText]);
setTranslationQueue((prevQueue) => [
...prevQueue,
newTranslation,
]);
setAccumulatedText((prevText) => prevText + " " + newText);
break;
setAccumulatedText((prevText) => prevText + " " + newText);
break;
}
case "TOPIC":
setTopics((prevTopics) => {
const topic = message.data;
const index = prevTopics.findIndex(
(prevTopic) => prevTopic.id === topic.id,
);
if (index >= 0) {
prevTopics[index] = topic;
return prevTopics;
}
setAccumulatedText((prevText) =>
prevText.slice(topic.transcript?.length ?? 0),
);
return [...prevTopics, topic];
});
console.debug("TOPIC event:", message.data);
invalidateTranscriptTopics(queryClient, tsId);
break;
case "FINAL_SHORT_SUMMARY":
console.debug("FINAL_SHORT_SUMMARY event:", message.data);
break;
case "FINAL_LONG_SUMMARY":
setFinalSummary({ summary: message.data.long_summary });
invalidateTranscript(queryClient, tsId);
break;
case "FINAL_TITLE":
console.debug("FINAL_TITLE event:", message.data);
setTitle(message.data.title);
invalidateTranscript(queryClient, tsId);
break;
case "WAVEFORM":
console.debug(
"WAVEFORM event length:",
message.data.waveform.length,
case "TOPIC":
setTopics((prevTopics) => {
const topic = message.data as Topic;
const index = prevTopics.findIndex(
(prevTopic) => prevTopic.id === topic.id,
);
setWaveForm({ data: message.data.waveform });
invalidateTranscriptWaveform(queryClient, tsId);
break;
case "DURATION":
console.debug("DURATION event:", message.data);
setDuration(message.data.duration);
break;
case "STATUS":
console.log("STATUS event:", message.data);
if (message.data.value === "error") {
setError(
Error("Websocket error status"),
"There was an error processing this meeting.",
);
if (index >= 0) {
prevTopics[index] = topic;
return prevTopics;
}
setStatus(message.data);
invalidateTranscript(queryClient, tsId);
if (message.data.value === "ended") {
intentionalClose = true;
ws?.close();
}
break;
case "ACTION_ITEMS":
console.debug("ACTION_ITEMS event:", message.data);
invalidateTranscript(queryClient, tsId);
break;
default: {
const _exhaustive: never = message;
console.warn(
`Received unknown WebSocket event: ${(_exhaustive as TranscriptWsEvent).event}`,
setAccumulatedText((prevText) =>
prevText.slice(topic.transcript.length),
);
}
}
} catch (error) {
setError(error);
}
};
ws.onerror = (error) => {
console.error("WebSocket error:", error);
};
ws.onclose = (event) => {
console.debug("WebSocket connection closed, code:", event.code);
if (intentionalClose) return;
const normalCodes = [1000, 1001, 1005];
if (normalCodes.includes(event.code)) return;
if (retryCount < MAX_RETRIES) {
const delay = Math.min(1000 * Math.pow(2, retryCount), 30000);
console.log(
`WebSocket reconnecting in ${delay}ms (attempt ${retryCount + 1}/${MAX_RETRIES})`,
);
if (retryCount === 0) {
setError(
new Error("WebSocket connection lost"),
"Connection lost. Reconnecting...",
return [...prevTopics, topic];
});
console.debug("TOPIC event:", message.data);
// Invalidate topics query to sync with WebSocket data
invalidateTranscriptTopics(
queryClient,
transcriptId as NonEmptyString,
);
}
retryCount++;
retryTimeout = setTimeout(connect, delay);
} else {
break;
case "FINAL_SHORT_SUMMARY":
console.debug("FINAL_SHORT_SUMMARY event:", message.data);
break;
case "FINAL_LONG_SUMMARY":
if (message.data) {
setFinalSummary(message.data);
// Invalidate transcript query to sync summary
invalidateTranscript(queryClient, transcriptId as NonEmptyString);
}
break;
case "FINAL_TITLE":
console.debug("FINAL_TITLE event:", message.data);
if (message.data) {
setTitle(message.data.title);
// Invalidate transcript query to sync title
invalidateTranscript(queryClient, transcriptId as NonEmptyString);
}
break;
case "WAVEFORM":
console.debug(
"WAVEFORM event length:",
message.data.waveform.length,
);
if (message.data) {
setWaveForm(message.data.waveform);
invalidateTranscriptWaveform(
queryClient,
transcriptId as NonEmptyString,
);
}
break;
case "DURATION":
console.debug("DURATION event:", message.data);
if (message.data) {
setDuration(message.data.duration);
}
break;
case "STATUS":
console.log("STATUS event:", message.data);
if (message.data.value === "error") {
setError(
Error("Websocket error status"),
"There was an error processing this meeting.",
);
}
setStatus(message.data);
if (message.data.value === "ended") {
ws.close();
}
break;
default:
setError(
new Error(`Received unknown WebSocket event: ${message.event}`),
);
}
} catch (error) {
setError(error);
}
};
ws.onerror = (error) => {
console.error("WebSocket error:", error);
setError(new Error("A WebSocket error occurred."));
};
ws.onclose = (event) => {
console.debug("WebSocket connection closed");
switch (event.code) {
case 1000: // Normal Closure:
break;
case 1005: // Closure by client FF
break;
case 1001: // Navigate away
break;
case 1006: // Closed by client Chrome
console.warn(
"WebSocket closed by client, likely duplicated connection in react dev mode",
);
break;
default:
setError(
new Error(`WebSocket closed unexpectedly with code: ${event.code}`),
"Disconnected from the server. Please refresh the page.",
);
}
};
console.log(
"Socket is closed. Reconnect will be attempted in 1 second.",
event.reason,
);
// todo handle reconnect with socket.io
}
};
connect();
return () => {
intentionalClose = true;
if (retryTimeout) clearTimeout(retryTimeout);
ws?.close();
ws.close();
};
}, [transcriptId]);
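
The hunk above trades the retry-aware connection for a single WebSocket. As a reference for the pattern on the removed side, here is a self-contained sketch (names are illustrative, not the hook's): exponential backoff capped at 30 s, a finite retry budget, and an intentional-close flag so teardown and the "ended" status never trigger a reconnect.

const MAX_RETRIES = 10;

function connectWithBackoff(
  url: string,
  onMessage: (ev: MessageEvent) => void,
): () => void {
  let ws: WebSocket | null = null;
  let retryCount = 0;
  let timer: ReturnType<typeof setTimeout> | null = null;
  let intentional = false;

  const open = () => {
    ws = new WebSocket(url);
    ws.onopen = () => {
      retryCount = 0; // a successful connect resets the retry budget
    };
    ws.onmessage = onMessage;
    ws.onclose = (event) => {
      // 1000/1001/1005 are normal closures; everything else retries.
      if (intentional || [1000, 1001, 1005].includes(event.code)) return;
      if (retryCount >= MAX_RETRIES) return;
      const delay = Math.min(1000 * 2 ** retryCount, 30_000); // 1s, 2s, ... capped at 30s
      retryCount += 1;
      timer = setTimeout(open, delay);
    };
  };

  open();
  return () => {
    intentional = true; // mark teardown so onclose stays quiet
    if (timer) clearTimeout(timer);
    ws?.close();
  };
}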

View File

@@ -4,12 +4,14 @@ import React, { useEffect, useRef } from "react";
import { useQueryClient } from "@tanstack/react-query";
import { WEBSOCKET_URL } from "./apiClient";
import { useAuth } from "./AuthProvider";
import { invalidateTranscript, invalidateTranscriptLists } from "./apiHooks";
import { parseNonEmptyString } from "./utils";
import type { operations } from "../reflector-api";
import { z } from "zod";
import { invalidateTranscriptLists, TRANSCRIPT_SEARCH_URL } from "./apiHooks";
type UserWsEvent =
operations["v1_user_get_websocket_events"]["responses"][200]["content"]["application/json"];
const UserEvent = z.object({
event: z.string(),
});
type UserEvent = z.TypeOf<typeof UserEvent>;
class UserEventsStore {
private socket: WebSocket | null = null;
@@ -131,26 +133,23 @@ export function UserEventsProvider({
if (!detachRef.current) {
const onMessage = (event: MessageEvent) => {
try {
const msg: UserWsEvent = JSON.parse(event.data);
const msg = UserEvent.parse(JSON.parse(event.data));
const eventName = msg.event;
switch (msg.event) {
const invalidateList = () => invalidateTranscriptLists(queryClient);
switch (eventName) {
case "TRANSCRIPT_CREATED":
case "TRANSCRIPT_DELETED":
case "TRANSCRIPT_STATUS":
case "TRANSCRIPT_FINAL_TITLE":
case "TRANSCRIPT_DURATION":
invalidateTranscriptLists(queryClient).then(() => {});
invalidateTranscript(
queryClient,
parseNonEmptyString(msg.data.id),
).then(() => {});
invalidateList().then(() => {});
break;
default:
// Ignore other content events for list updates
break;
default: {
const _exhaustive: never = msg;
console.warn(
`Unknown user event: ${(_exhaustive as UserWsEvent).event}`,
);
}
}
} catch (err) {
console.warn("Invalid user event message", event.data);
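
The two sides of this hunk validate incoming user events differently: the generated UserWsEvent union narrows on the event discriminator at compile time, while the Zod schema only asserts shape at runtime. A hedged sketch of the runtime-checked style (schema and handler names are illustrative):

import { z } from "zod";

const UserEvent = z.object({ event: z.string() });
type UserEvent = z.infer<typeof UserEvent>;

function handleUserEvent(
  raw: string,
  refreshLists: () => Promise<void>,
): void {
  const msg: UserEvent = UserEvent.parse(JSON.parse(raw)); // throws on malformed payloads
  switch (msg.event) {
    case "TRANSCRIPT_CREATED":
    case "TRANSCRIPT_DELETED":
    case "TRANSCRIPT_STATUS":
    case "TRANSCRIPT_FINAL_TITLE":
    case "TRANSCRIPT_DURATION":
      void refreshLists();
      break;
    default:
      // event is typed as plain string here, so unlike the discriminated
      // union there is no compile-time exhaustiveness check.
      break;
  }
}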

View File

@@ -7,7 +7,6 @@ import type { components } from "../reflector-api";
import { useAuth } from "./AuthProvider";
import { MeetingId } from "./types";
import { NonEmptyString } from "./utils";
import type { TranscriptStatus } from "./transcript";
/*
* XXX error types returned from the hooks are not always correct; declared types are ValidationError but real type could be string or any other
@@ -105,12 +104,6 @@ export function useTranscriptProcess() {
});
}
const ACTIVE_TRANSCRIPT_STATUSES = new Set<TranscriptStatus>([
"processing",
"uploaded",
"recording",
]);
export function useTranscriptGet(transcriptId: NonEmptyString | null) {
return $api.useQuery(
"get",
@@ -124,10 +117,6 @@ export function useTranscriptGet(transcriptId: NonEmptyString | null) {
},
{
enabled: !!transcriptId,
refetchInterval: (query) => {
const status = query.state.data?.status;
return status && ACTIVE_TRANSCRIPT_STATUSES.has(status) ? 5000 : false;
},
},
);
}
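
The refetchInterval removed here implemented conditional polling: re-fetch every 5 seconds while the transcript is still in an active state, stop once it settles. A sketch of that pattern, assuming TanStack Query v5's function-form refetchInterval (the hook and fetcher names are illustrative):

import { useQuery } from "@tanstack/react-query";

type TranscriptStatus =
  | "idle" | "uploaded" | "recording" | "processing" | "error" | "ended";
type Transcript = { id: string; status: TranscriptStatus };

const ACTIVE = new Set<TranscriptStatus>(["processing", "uploaded", "recording"]);

function useTranscriptPolling(
  id: string,
  fetchTranscript: (id: string) => Promise<Transcript>,
) {
  return useQuery({
    queryKey: ["transcript", id],
    queryFn: () => fetchTranscript(id),
    // Poll only while the backend may still change the row; false stops polling.
    refetchInterval: (query) =>
      query.state.data?.status && ACTIVE.has(query.state.data.status)
        ? 5000
        : false,
  });
}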

View File

@@ -568,10 +568,7 @@ export interface paths {
path?: never;
cookie?: never;
};
/**
* Transcript WebSocket event schema
* @description Stub exposing the discriminated union of all transcript-level WS events for OpenAPI type generation. Real events are delivered over the WebSocket at the same path.
*/
/** Transcript Get Websocket Events */
get: operations["v1_transcript_get_websocket_events"];
put?: never;
post?: never;
@@ -667,26 +664,6 @@ export interface paths {
patch?: never;
trace?: never;
};
"/v1/events": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
/**
* User WebSocket event schema
* @description Stub exposing the discriminated union of all user-level WS events for OpenAPI type generation. Real events are delivered over the WebSocket at the same path.
*/
get: operations["v1_user_get_websocket_events"];
put?: never;
post?: never;
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/v1/zulip/streams": {
parameters: {
query?: never;
@@ -1900,33 +1877,6 @@ export interface components {
/** Name */
name: string;
};
/** TranscriptActionItems */
TranscriptActionItems: {
/** Action Items */
action_items: {
[key: string]: unknown;
};
};
/** TranscriptDuration */
TranscriptDuration: {
/** Duration */
duration: number;
};
/** TranscriptFinalLongSummary */
TranscriptFinalLongSummary: {
/** Long Summary */
long_summary: string;
};
/** TranscriptFinalShortSummary */
TranscriptFinalShortSummary: {
/** Short Summary */
short_summary: string;
};
/** TranscriptFinalTitle */
TranscriptFinalTitle: {
/** Title */
title: string;
};
/** TranscriptParticipant */
TranscriptParticipant: {
/** Id */
@@ -1967,113 +1917,6 @@ export interface components {
/** End */
end: number;
};
/** TranscriptText */
TranscriptText: {
/** Text */
text: string;
/** Translation */
translation: string | null;
};
/** TranscriptWaveform */
TranscriptWaveform: {
/** Waveform */
waveform: number[];
};
/** TranscriptWsActionItems */
TranscriptWsActionItems: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "ACTION_ITEMS";
data: components["schemas"]["TranscriptActionItems"];
};
/** TranscriptWsDuration */
TranscriptWsDuration: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "DURATION";
data: components["schemas"]["TranscriptDuration"];
};
/** TranscriptWsFinalLongSummary */
TranscriptWsFinalLongSummary: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "FINAL_LONG_SUMMARY";
data: components["schemas"]["TranscriptFinalLongSummary"];
};
/** TranscriptWsFinalShortSummary */
TranscriptWsFinalShortSummary: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "FINAL_SHORT_SUMMARY";
data: components["schemas"]["TranscriptFinalShortSummary"];
};
/** TranscriptWsFinalTitle */
TranscriptWsFinalTitle: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "FINAL_TITLE";
data: components["schemas"]["TranscriptFinalTitle"];
};
/** TranscriptWsStatus */
TranscriptWsStatus: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "STATUS";
data: components["schemas"]["TranscriptWsStatusData"];
};
/** TranscriptWsStatusData */
TranscriptWsStatusData: {
/**
* Value
* @enum {string}
*/
value:
| "idle"
| "uploaded"
| "recording"
| "processing"
| "error"
| "ended";
};
/** TranscriptWsTopic */
TranscriptWsTopic: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TOPIC";
data: components["schemas"]["GetTranscriptTopic"];
};
/** TranscriptWsTranscript */
TranscriptWsTranscript: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT";
data: components["schemas"]["TranscriptText"];
};
/** TranscriptWsWaveform */
TranscriptWsWaveform: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "WAVEFORM";
data: components["schemas"]["TranscriptWaveform"];
};
/** UpdateParticipant */
UpdateParticipant: {
/** Speaker */
@@ -2144,82 +1987,6 @@ export interface components {
/** Email */
email: string | null;
};
/** UserTranscriptCreatedData */
UserTranscriptCreatedData: {
/** Id */
id: string;
};
/** UserTranscriptDeletedData */
UserTranscriptDeletedData: {
/** Id */
id: string;
};
/** UserTranscriptDurationData */
UserTranscriptDurationData: {
/** Id */
id: string;
/** Duration */
duration: number;
};
/** UserTranscriptFinalTitleData */
UserTranscriptFinalTitleData: {
/** Id */
id: string;
/** Title */
title: string;
};
/** UserTranscriptStatusData */
UserTranscriptStatusData: {
/** Id */
id: string;
/** Value */
value: string;
};
/** UserWsTranscriptCreated */
UserWsTranscriptCreated: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT_CREATED";
data: components["schemas"]["UserTranscriptCreatedData"];
};
/** UserWsTranscriptDeleted */
UserWsTranscriptDeleted: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT_DELETED";
data: components["schemas"]["UserTranscriptDeletedData"];
};
/** UserWsTranscriptDuration */
UserWsTranscriptDuration: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT_DURATION";
data: components["schemas"]["UserTranscriptDurationData"];
};
/** UserWsTranscriptFinalTitle */
UserWsTranscriptFinalTitle: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT_FINAL_TITLE";
data: components["schemas"]["UserTranscriptFinalTitleData"];
};
/** UserWsTranscriptStatus */
UserWsTranscriptStatus: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
event: "TRANSCRIPT_STATUS";
data: components["schemas"]["UserTranscriptStatusData"];
};
/** ValidationError */
ValidationError: {
/** Location */
@@ -3656,16 +3423,7 @@ export interface operations {
[name: string]: unknown;
};
content: {
"application/json":
| components["schemas"]["TranscriptWsTranscript"]
| components["schemas"]["TranscriptWsTopic"]
| components["schemas"]["TranscriptWsStatus"]
| components["schemas"]["TranscriptWsFinalTitle"]
| components["schemas"]["TranscriptWsFinalLongSummary"]
| components["schemas"]["TranscriptWsFinalShortSummary"]
| components["schemas"]["TranscriptWsActionItems"]
| components["schemas"]["TranscriptWsDuration"]
| components["schemas"]["TranscriptWsWaveform"];
"application/json": unknown;
};
};
/** @description Validation Error */
@@ -3849,31 +3607,6 @@ export interface operations {
};
};
};
v1_user_get_websocket_events: {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody?: never;
responses: {
/** @description Successful Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json":
| components["schemas"]["UserWsTranscriptCreated"]
| components["schemas"]["UserWsTranscriptDeleted"]
| components["schemas"]["UserWsTranscriptStatus"]
| components["schemas"]["UserWsTranscriptFinalTitle"]
| components["schemas"]["UserWsTranscriptDuration"];
};
};
};
};
v1_zulip_get_streams: {
parameters: {
query?: never;