From a2ed7d60d557b551a5b64e4dfd909b63a791d9fc Mon Sep 17 00:00:00 2001 From: Sergey Mankovsky Date: Tue, 3 Feb 2026 00:18:47 +0100 Subject: [PATCH 1/3] fix: make caddy optional (#841) --- Caddyfile.example | 8 ++-- docker-compose.prod.yml | 9 +++- docs/docs/installation/docker-setup.md | 57 ++++++++++++++++++-------- docs/docs/installation/overview.md | 44 ++++++++++++++++---- 4 files changed, 90 insertions(+), 28 deletions(-) diff --git a/Caddyfile.example b/Caddyfile.example index ebbaabdf..f99f6336 100644 --- a/Caddyfile.example +++ b/Caddyfile.example @@ -1,6 +1,8 @@ -# Reflector Caddyfile -# Replace example.com with your actual domains -# CORS is handled by the backend - Caddy just proxies +# Reflector Caddyfile (optional reverse proxy) +# Use this only when you run Caddy via: docker compose -f docker-compose.prod.yml --profile caddy up -d +# If Coolify, Traefik, or nginx already use ports 80/443, do NOT start Caddy; point your proxy at web:3000 and server:1250. +# +# Replace example.com with your actual domains. CORS is handled by the backend - Caddy just proxies. # # For environment variable substitution, set: # FRONTEND_DOMAIN=app.example.com diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index f897a624..db87264b 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -1,9 +1,14 @@ # Production Docker Compose configuration # Usage: docker compose -f docker-compose.prod.yml up -d # +# Caddy (reverse proxy on ports 80/443) is OPTIONAL and behind the "caddy" profile: +# - With Caddy (self-hosted, you manage SSL): docker compose -f docker-compose.prod.yml --profile caddy up -d +# - Without Caddy (Coolify/Traefik/nginx already on 80/443): docker compose -f docker-compose.prod.yml up -d +# Then point your proxy at web:3000 (frontend) and server:1250 (API). +# # Prerequisites: # 1. Copy .env.example to .env and configure for both server/ and www/ -# 2. Copy Caddyfile.example to Caddyfile and edit with your domains +# 2. If using Caddy: copy Caddyfile.example to Caddyfile and edit your domains # 3. Deploy Modal GPU functions (see gpu/modal_deployments/deploy-all.sh) services: @@ -84,6 +89,8 @@ services: retries: 3 caddy: + profiles: + - caddy image: caddy:2-alpine restart: unless-stopped ports: diff --git a/docs/docs/installation/docker-setup.md b/docs/docs/installation/docker-setup.md index 701ad15e..499ce92d 100644 --- a/docs/docs/installation/docker-setup.md +++ b/docs/docs/installation/docker-setup.md @@ -11,15 +11,15 @@ This page documents the Docker Compose configuration for Reflector. 
For the comp The `docker-compose.prod.yml` includes these services: -| Service | Image | Purpose | -|---------|-------|---------| -| `web` | `monadicalsas/reflector-frontend` | Next.js frontend | -| `server` | `monadicalsas/reflector-backend` | FastAPI backend | -| `worker` | `monadicalsas/reflector-backend` | Celery worker for background tasks | -| `beat` | `monadicalsas/reflector-backend` | Celery beat scheduler | -| `redis` | `redis:7.2-alpine` | Message broker and cache | -| `postgres` | `postgres:17-alpine` | Primary database | -| `caddy` | `caddy:2-alpine` | Reverse proxy with auto-SSL | +| Service | Image | Purpose | +| ---------- | --------------------------------- | --------------------------------------------------------------------------- | +| `web` | `monadicalsas/reflector-frontend` | Next.js frontend | +| `server` | `monadicalsas/reflector-backend` | FastAPI backend | +| `worker` | `monadicalsas/reflector-backend` | Celery worker for background tasks | +| `beat` | `monadicalsas/reflector-backend` | Celery beat scheduler | +| `redis` | `redis:7.2-alpine` | Message broker and cache | +| `postgres` | `postgres:17-alpine` | Primary database | +| `caddy` | `caddy:2-alpine` | Reverse proxy with auto-SSL (optional; see [Caddy profile](#caddy-profile)) | ## Environment Files @@ -30,6 +30,7 @@ Reflector uses two separate environment files: Used by: `server`, `worker`, `beat` Key variables: + ```env # Database connection DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector @@ -54,6 +55,7 @@ TRANSCRIPT_MODAL_API_KEY=... Used by: `web` Key variables: + ```env # Domain configuration SITE_URL=https://app.example.com @@ -70,26 +72,42 @@ Note: `API_URL` is used client-side (browser), `SERVER_API_URL` is used server-s ## Volumes -| Volume | Purpose | -|--------|---------| -| `redis_data` | Redis persistence | -| `postgres_data` | PostgreSQL data | -| `server_data` | Uploaded files, local storage | -| `caddy_data` | SSL certificates | -| `caddy_config` | Caddy configuration | +| Volume | Purpose | +| --------------- | ----------------------------- | +| `redis_data` | Redis persistence | +| `postgres_data` | PostgreSQL data | +| `server_data` | Uploaded files, local storage | +| `caddy_data` | SSL certificates | +| `caddy_config` | Caddy configuration | ## Network All services share the default network. The network is marked `attachable: true` to allow external containers (like Authentik) to join. +## Caddy profile + +Caddy (ports 80 and 443) is **optional** and behind the `caddy` profile so it does not conflict with an existing reverse proxy (e.g. Coolify, Traefik, nginx). + +- **With Caddy** (you want Reflector to handle SSL): + `docker compose -f docker-compose.prod.yml --profile caddy up -d` +- **Without Caddy** (Coolify or another proxy already on 80/443): + `docker compose -f docker-compose.prod.yml up -d` + Then configure your proxy to send traffic to `web:3000` (frontend) and `server:1250` (API). + ## Common Commands ### Start all services + ```bash +# Without Caddy (e.g. 
when using Coolify) docker compose -f docker-compose.prod.yml up -d + +# With Caddy as reverse proxy +docker compose -f docker-compose.prod.yml --profile caddy up -d ``` ### View logs + ```bash # All services docker compose -f docker-compose.prod.yml logs -f @@ -99,6 +117,7 @@ docker compose -f docker-compose.prod.yml logs server --tail 50 ``` ### Restart a service + ```bash # Quick restart (doesn't reload .env changes) docker compose -f docker-compose.prod.yml restart server @@ -108,27 +127,32 @@ docker compose -f docker-compose.prod.yml up -d server ``` ### Run database migrations + ```bash docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head ``` ### Access database + ```bash docker compose -f docker-compose.prod.yml exec postgres psql -U reflector ``` ### Pull latest images + ```bash docker compose -f docker-compose.prod.yml pull docker compose -f docker-compose.prod.yml up -d ``` ### Stop all services + ```bash docker compose -f docker-compose.prod.yml down ``` ### Full reset (WARNING: deletes data) + ```bash docker compose -f docker-compose.prod.yml down -v ``` @@ -187,6 +211,7 @@ The Caddyfile supports environment variable substitution: Set `FRONTEND_DOMAIN` and `API_DOMAIN` environment variables, or edit the file directly. ### Reload Caddy after changes + ```bash docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile ``` diff --git a/docs/docs/installation/overview.md b/docs/docs/installation/overview.md index f6218d64..9dca5ed7 100644 --- a/docs/docs/installation/overview.md +++ b/docs/docs/installation/overview.md @@ -26,7 +26,7 @@ flowchart LR Before starting, you need: -- **Production server** - 4+ cores, 8GB+ RAM, public IP +- **Production server** - 4+ cores, 8GB+ RAM, public IP - **Two domain names** - e.g., `app.example.com` (frontend) and `api.example.com` (backend) - **GPU processing** - Choose one: - Modal.com account, OR @@ -60,16 +60,17 @@ Type: A Name: api Value: Reflector requires GPU processing for transcription and speaker diarization. Choose one option: -| | **Modal.com (Cloud)** | **Self-Hosted GPU** | -|---|---|---| +| | **Modal.com (Cloud)** | **Self-Hosted GPU** | +| ------------ | --------------------------------- | ---------------------------- | | **Best for** | No GPU hardware, zero maintenance | Own GPU server, full control | -| **Pricing** | Pay-per-use | Fixed infrastructure cost | +| **Pricing** | Pay-per-use | Fixed infrastructure cost | ### Option A: Modal.com (Serverless Cloud GPU) #### Accept HuggingFace Licenses Visit both pages and click "Accept": + - https://huggingface.co/pyannote/speaker-diarization-3.1 - https://huggingface.co/pyannote/segmentation-3.0 @@ -179,6 +180,7 @@ Save these credentials - you'll need them in the next step. ## Configure Environment Reflector has two env files: + - `server/.env` - Backend configuration - `www/.env` - Frontend configuration @@ -190,6 +192,7 @@ nano server/.env ``` **Required settings:** + ```env # Database (defaults work with docker-compose.prod.yml) DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector @@ -249,6 +252,7 @@ nano www/.env ``` **Required settings:** + ```env # Your domains SITE_URL=https://app.example.com @@ -266,7 +270,11 @@ FEATURE_REQUIRE_LOGIN=false --- -## Configure Caddy +## Reverse proxy (Caddy or existing) + +**If Coolify, Traefik, or nginx already use ports 80/443** (e.g. Coolify on your host): skip Caddy. 
Start the stack without the Caddy profile (see [Start Services](#start-services) below), then point your proxy at `web:3000` (frontend) and `server:1250` (API). + +**If you want Reflector to provide the reverse proxy and SSL:** ```bash cp Caddyfile.example Caddyfile @@ -289,10 +297,18 @@ Replace `example.com` with your domains. The `{$VAR:default}` syntax uses Caddy' ## Start Services +**Without Caddy** (e.g. Coolify already on 80/443): + ```bash docker compose -f docker-compose.prod.yml up -d ``` +**With Caddy** (Reflector handles SSL): + +```bash +docker compose -f docker-compose.prod.yml --profile caddy up -d +``` + Wait for containers to start (first run may take 1-2 minutes to pull images and initialize). --- @@ -300,18 +316,21 @@ Wait for containers to start (first run may take 1-2 minutes to pull images and ## Verify Deployment ### Check services + ```bash docker compose -f docker-compose.prod.yml ps # All should show "Up" ``` ### Test API + ```bash curl https://api.example.com/health # Should return: {"status":"healthy"} ``` ### Test Frontend + - Visit https://app.example.com - You should see the Reflector interface - Try uploading an audio file to test transcription @@ -327,6 +346,7 @@ By default, Reflector is open (no login required). **Authentication is required See [Authentication Setup](./auth-setup) for full Authentik OAuth configuration. Quick summary: + 1. Deploy Authentik on your server 2. Create OAuth provider in Authentik 3. Extract public key for JWT verification @@ -358,6 +378,7 @@ DAILYCO_STORAGE_AWS_ROLE_ARN= ``` Reload env and restart: + ```bash docker compose -f docker-compose.prod.yml up -d server worker ``` @@ -367,35 +388,43 @@ docker compose -f docker-compose.prod.yml up -d server worker ## Troubleshooting ### Check logs for errors + ```bash docker compose -f docker-compose.prod.yml logs server --tail 20 docker compose -f docker-compose.prod.yml logs worker --tail 20 ``` ### Services won't start + ```bash docker compose -f docker-compose.prod.yml logs ``` ### CORS errors in browser + - Verify `CORS_ORIGIN` in `server/.env` matches your frontend domain exactly (including `https://`) - Reload env: `docker compose -f docker-compose.prod.yml up -d server` -### SSL certificate errors +### SSL certificate errors (when using Caddy) + - Caddy auto-provisions Let's Encrypt certificates -- Ensure ports 80 and 443 are open +- Ensure ports 80 and 443 are open and not used by another proxy - Check: `docker compose -f docker-compose.prod.yml logs caddy` +- If port 80 is already in use (e.g. by Coolify), run without Caddy: `docker compose -f docker-compose.prod.yml up -d` and use your existing proxy ### Transcription not working + - Check Modal dashboard: https://modal.com/apps - Verify URLs in `server/.env` match deployed functions - Check worker logs: `docker compose -f docker-compose.prod.yml logs worker` ### "Login required" but auth not configured + - Set `FEATURE_REQUIRE_LOGIN=false` in `www/.env` - Rebuild frontend: `docker compose -f docker-compose.prod.yml up -d --force-recreate web` ### Database migrations or connectivity issues + Migrations run automatically on server startup. 
To check database connectivity or debug migration failures: ```bash @@ -408,4 +437,3 @@ docker compose -f docker-compose.prod.yml exec server uv run python -c "from ref # Manually run migrations (if needed) docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head ``` - From 4acde4b7fdef88cc02ca12cf38c9020b05ed96ac Mon Sep 17 00:00:00 2001 From: Igor Monadical Date: Tue, 3 Feb 2026 16:05:16 -0500 Subject: [PATCH 2/3] fix: increase TIMEOUT_MEDIUM from 2m to 5m for LLM tasks (#843) Topic detection was timing out on longer transcripts when LLM responses are slow. This affects detect_chunk_topic and other LLM-calling tasks that use TIMEOUT_MEDIUM. Co-authored-by: Igor Loskutov --- server/reflector/hatchet/constants.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/reflector/hatchet/constants.py b/server/reflector/hatchet/constants.py index 209d1bd1..b3810ad6 100644 --- a/server/reflector/hatchet/constants.py +++ b/server/reflector/hatchet/constants.py @@ -35,7 +35,9 @@ LLM_RATE_LIMIT_PER_SECOND = 10 # Task execution timeouts (seconds) TIMEOUT_SHORT = 60 # Quick operations: API calls, DB updates -TIMEOUT_MEDIUM = 120 # Single LLM calls, waveform generation +TIMEOUT_MEDIUM = ( + 300 # Single LLM calls, waveform generation (5m for slow LLM responses) +) TIMEOUT_LONG = 180 # Action items (larger context LLM) TIMEOUT_AUDIO = 720 # Audio processing: padding, mixdown TIMEOUT_HEAVY = 600 # Transcription, fan-out LLM tasks From 8707c6694a80c939b6214bbc13331741f192e082 Mon Sep 17 00:00:00 2001 From: Igor Monadical Date: Tue, 3 Feb 2026 17:15:03 -0500 Subject: [PATCH 3/3] fix: use Daily API recording.duration as master source for transcript duration (#844) Set duration early in get_participants from Daily API (seconds -> ms), ensuring post_zulip has the value before mixdown_tracks completes. Removes redundant duration update from mixdown_tracks. 
Co-authored-by: Igor Loskutov --- .../workflows/daily_multitrack_pipeline.py | 30 +++++++++---------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py b/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py index 32c45fdb..188133c7 100644 --- a/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py +++ b/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py @@ -322,6 +322,7 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe mtg_session_id = recording.mtg_session_id async with fresh_db_connection(): from reflector.db.transcripts import ( # noqa: PLC0415 + TranscriptDuration, TranscriptParticipant, transcripts_controller, ) @@ -330,15 +331,26 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe if not transcript: raise ValueError(f"Transcript {input.transcript_id} not found") # Note: title NOT cleared - preserves existing titles + # Duration from Daily API (seconds -> milliseconds) - master source + duration_ms = recording.duration * 1000 if recording.duration else 0 await transcripts_controller.update( transcript, { "events": [], "topics": [], "participants": [], + "duration": duration_ms, }, ) + await append_event_and_broadcast( + input.transcript_id, + transcript, + "DURATION", + TranscriptDuration(duration=duration_ms), + logger=logger, + ) + mtg_session_id = assert_non_none_and_non_empty( mtg_session_id, "mtg_session_id is required" ) @@ -561,27 +573,13 @@ async def mixdown_tracks(input: PipelineInput, ctx: Context) -> MixdownResult: Path(output_path).unlink(missing_ok=True) - duration = duration_ms_callback_capture_container[0] - async with fresh_db_connection(): - from reflector.db.transcripts import ( # noqa: PLC0415 - TranscriptDuration, - transcripts_controller, - ) + from reflector.db.transcripts import transcripts_controller # noqa: PLC0415 transcript = await transcripts_controller.get_by_id(input.transcript_id) if transcript: await transcripts_controller.update( - transcript, {"audio_location": "storage", "duration": duration} - ) - - duration_data = TranscriptDuration(duration=duration) - await append_event_and_broadcast( - input.transcript_id, - transcript, - "DURATION", - duration_data, - logger=logger, + transcript, {"audio_location": "storage"} ) ctx.log(f"mixdown_tracks complete: uploaded {file_size} bytes to {storage_path}")
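
For reference, a minimal standalone sketch of the seconds-to-milliseconds conversion this patch moves into `get_participants` (the `to_duration_ms` helper name is illustrative and not part of the codebase; the patch simply inlines the expression `recording.duration * 1000 if recording.duration else 0`):

```python
def to_duration_ms(duration_s):
    # Mirrors the guard in the patch: a missing or zero duration from the
    # Daily API yields 0 rather than raising, so downstream steps such as
    # post_zulip always receive a value before mixdown_tracks completes.
    return duration_s * 1000 if duration_s else 0


# Edge cases the guard covers, plus a normal recording length.
assert to_duration_ms(None) == 0
assert to_duration_ms(0) == 0
assert to_duration_ms(62) == 62000
```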