feat: Add Single User authentication to Selfhosted (#870)

* Single user/password for selfhosted

* fix revision id latest migration
This commit is contained in:
Juan Diego García
2026-02-23 11:10:27 -05:00
committed by GitHub
parent 2ba0d965e8
commit c8db37362b
31 changed files with 1333 additions and 163 deletions

View File

@@ -6,7 +6,7 @@ repos:
- id: format - id: format
name: run format name: run format
language: system language: system
entry: bash -c 'cd www && pnpm format' entry: bash -c 'source "$HOME/.nvm/nvm.sh" && cd www && pnpm format'
pass_filenames: false pass_filenames: false
files: ^www/ files: ^www/

View File

@@ -125,11 +125,11 @@ services:
- ./www/.env - ./www/.env
environment: environment:
NODE_ENV: production NODE_ENV: production
NODE_TLS_REJECT_UNAUTHORIZED: "0"
SERVER_API_URL: http://server:1250 SERVER_API_URL: http://server:1250
KV_URL: redis://redis:6379 KV_URL: redis://redis:6379
KV_USE_TLS: "false" KV_USE_TLS: "false"
AUTHENTIK_ISSUER: "" NEXTAUTH_URL_INTERNAL: http://localhost:3000
AUTHENTIK_REFRESH_TOKEN_URL: ""
depends_on: depends_on:
- redis - redis
@@ -227,9 +227,12 @@ services:
profiles: [ollama-gpu] profiles: [ollama-gpu]
restart: unless-stopped restart: unless-stopped
ports: ports:
- "127.0.0.1:11434:11434" - "127.0.0.1:11435:11435"
volumes: volumes:
- ollama_data:/root/.ollama - ollama_data:/root/.ollama
environment:
OLLAMA_HOST: "0.0.0.0:11435"
OLLAMA_KEEP_ALIVE: "24h"
deploy: deploy:
resources: resources:
reservations: reservations:
@@ -238,7 +241,7 @@ services:
count: all count: all
capabilities: [gpu] capabilities: [gpu]
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"] test: ["CMD", "curl", "-f", "http://localhost:11435/api/tags"]
interval: 10s interval: 10s
timeout: 5s timeout: 5s
retries: 5 retries: 5
@@ -248,11 +251,14 @@ services:
profiles: [ollama-cpu] profiles: [ollama-cpu]
restart: unless-stopped restart: unless-stopped
ports: ports:
- "127.0.0.1:11434:11434" - "127.0.0.1:11435:11435"
volumes: volumes:
- ollama_data:/root/.ollama - ollama_data:/root/.ollama
environment:
OLLAMA_HOST: "0.0.0.0:11435"
OLLAMA_KEEP_ALIVE: "24h" # keep model loaded to avoid reload delays
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"] test: ["CMD", "curl", "-f", "http://localhost:11435/api/tags"]
interval: 10s interval: 10s
timeout: 5s timeout: 5s
retries: 5 retries: 5

View File

@@ -38,7 +38,7 @@ The script is idempotent — safe to re-run at any time. It detects what's alrea
### 1. LLM inference via Ollama ### 1. LLM inference via Ollama
**Mac**: starts Ollama natively (Metal GPU acceleration). Pulls the LLM model. Docker containers reach it via `host.docker.internal:11434`. **Mac**: starts Ollama natively (Metal GPU acceleration). Pulls the LLM model. Docker containers reach it via `host.docker.internal:11435`.
**Linux**: starts containerized Ollama via `docker-compose.standalone.yml` profile (`ollama-gpu` with NVIDIA, `ollama-cpu` without). Pulls model inside the container. **Linux**: starts containerized Ollama via `docker-compose.standalone.yml` profile (`ollama-gpu` with NVIDIA, `ollama-cpu` without). Pulls model inside the container.
@@ -59,7 +59,7 @@ Generates `server/.env` and `www/.env.local` with standalone defaults:
| `DIARIZATION_BACKEND` | `modal` | HTTP API to self-hosted CPU service | | `DIARIZATION_BACKEND` | `modal` | HTTP API to self-hosted CPU service |
| `DIARIZATION_URL` | `http://cpu:8000` | Docker-internal CPU service | | `DIARIZATION_URL` | `http://cpu:8000` | Docker-internal CPU service |
| `TRANSLATION_BACKEND` | `passthrough` | No Modal | | `TRANSLATION_BACKEND` | `passthrough` | No Modal |
| `LLM_URL` | `http://host.docker.internal:11434/v1` (Mac) | Ollama endpoint | | `LLM_URL` | `http://host.docker.internal:11435/v1` (Mac) | Ollama endpoint |
**`www/.env.local`** — key settings: **`www/.env.local`** — key settings:

View File

@@ -52,6 +52,8 @@ Creates cryptographic secrets needed by the backend and frontend:
Secrets are only generated if they don't already exist or are still set to the placeholder value `changeme`. This is what makes the script idempotent for secrets. Secrets are only generated if they don't already exist or are still set to the placeholder value `changeme`. This is what makes the script idempotent for secrets.
If `--password` is passed, this step also generates a PBKDF2-SHA256 password hash from the provided password. The hash is computed using Python's stdlib (`hashlib.pbkdf2_hmac`) with 100,000 iterations and a random 16-byte salt, producing a hash in the format `pbkdf2:sha256:100000$<salt_hex>$<hash_hex>`.
### Step 2: Generate `server/.env` ### Step 2: Generate `server/.env`
Creates or updates the backend environment file from `server/.env.selfhosted.example`. Sets: Creates or updates the backend environment file from `server/.env.selfhosted.example`. Sets:
@@ -63,6 +65,7 @@ Creates or updates the backend environment file from `server/.env.selfhosted.exa
- **HuggingFace token** — prompts interactively for pyannote model access; writes to root `.env` so Docker Compose can inject it into GPU/CPU containers - **HuggingFace token** — prompts interactively for pyannote model access; writes to root `.env` so Docker Compose can inject it into GPU/CPU containers
- **LLM** — if `--ollama-*` is used, configures `LLM_URL` pointing to the Ollama container. Otherwise, warns that the user needs to configure an external LLM - **LLM** — if `--ollama-*` is used, configures `LLM_URL` pointing to the Ollama container. Otherwise, warns that the user needs to configure an external LLM
- **Public mode** — sets `PUBLIC_MODE=true` so the app is accessible without authentication by default - **Public mode** — sets `PUBLIC_MODE=true` so the app is accessible without authentication by default
- **Password auth** — if `--password` is passed, sets `AUTH_BACKEND=password`, `PUBLIC_MODE=false`, `ADMIN_EMAIL=admin@localhost`, and `ADMIN_PASSWORD_HASH` (the hash generated in Step 1). The admin user is provisioned in the database on container startup via `runserver.sh`
The script uses `env_set` for each variable, which either updates an existing line or appends a new one. This means re-running the script updates values in-place without duplicating keys. The script uses `env_set` for each variable, which either updates an existing line or appends a new one. This means re-running the script updates values in-place without duplicating keys.
@@ -75,6 +78,7 @@ Creates or updates the frontend environment file from `www/.env.selfhosted.examp
- **`SERVER_API_URL`** — always `http://server:1250` (Docker-internal, used for server-side rendering) - **`SERVER_API_URL`** — always `http://server:1250` (Docker-internal, used for server-side rendering)
- **`KV_URL`** — Redis URL for Next.js caching - **`KV_URL`** — Redis URL for Next.js caching
- **`FEATURE_REQUIRE_LOGIN`** — `false` by default (matches `PUBLIC_MODE=true` on the backend) - **`FEATURE_REQUIRE_LOGIN`** — `false` by default (matches `PUBLIC_MODE=true` on the backend)
- **Password auth** — if `--password` is passed, sets `FEATURE_REQUIRE_LOGIN=true` and `AUTH_PROVIDER=credentials`, which tells the frontend to use a local email/password login form instead of Authentik OAuth
### Step 4: Storage Setup ### Step 4: Storage Setup
@@ -125,7 +129,7 @@ Waits for each service in order, with generous timeouts:
| Service | Check | Timeout | Notes | | Service | Check | Timeout | Notes |
|---------|-------|---------|-------| |---------|-------|---------|-------|
| GPU/CPU models | `curl http://localhost:8000/docs` | 10 min (120 x 5s) | First start downloads ~1GB of models | | GPU/CPU models | `curl http://localhost:8000/docs` | 10 min (120 x 5s) | First start downloads ~1GB of models |
| Ollama | `curl http://localhost:11434/api/tags` | 3 min (60 x 3s) | Then pulls the selected model | | Ollama | `curl http://localhost:11435/api/tags` | 3 min (60 x 3s) | Then pulls the selected model |
| Server API | `curl http://localhost:1250/health` | 7.5 min (90 x 5s) | First start runs database migrations | | Server API | `curl http://localhost:1250/health` | 7.5 min (90 x 5s) | First start runs database migrations |
| Frontend | `curl http://localhost:3000` | 1.5 min (30 x 3s) | Next.js build on first start | | Frontend | `curl http://localhost:3000` | 1.5 min (30 x 3s) | Next.js build on first start |
| Caddy | `curl -k https://localhost` | Quick check | After other services are up | | Caddy | `curl -k https://localhost` | Quick check | After other services are up |
@@ -202,7 +206,7 @@ Both the `gpu` and `cpu` services define a Docker network alias of `transcriptio
┌─────┴─────┐ ┌─────────┐ ┌─────┴─────┐ ┌─────────┐
│ ollama │ │ garage │ │ ollama │ │ garage │
│(optional) │ │(optional│ │(optional) │ │(optional│
│ :11434 │ │ S3) │ │ :11435 │ │ S3) │
└───────────┘ └─────────┘ └───────────┘ └─────────┘
``` ```
@@ -410,7 +414,7 @@ All services communicate over Docker's default bridge network. Only specific por
| 3900 | Garage | `0.0.0.0:3900` | S3 API (for admin/debug access) | | 3900 | Garage | `0.0.0.0:3900` | S3 API (for admin/debug access) |
| 3903 | Garage | `0.0.0.0:3903` | Garage admin API | | 3903 | Garage | `0.0.0.0:3903` | Garage admin API |
| 8000 | GPU/CPU | `127.0.0.1:8000` | ML model API (localhost only) | | 8000 | GPU/CPU | `127.0.0.1:8000` | ML model API (localhost only) |
| 11434 | Ollama | `127.0.0.1:11434` | Ollama API (localhost only) | | 11435 | Ollama | `127.0.0.1:11435` | Ollama API (localhost only) |
| 50000-50100/udp | Server | `0.0.0.0:50000-50100` | WebRTC ICE candidates | | 50000-50100/udp | Server | `0.0.0.0:50000-50100` | WebRTC ICE candidates |
Services bound to `127.0.0.1` are only accessible from the host itself (not from the network). Caddy is the only service exposed to the internet on standard HTTP/HTTPS ports. Services bound to `127.0.0.1` are only accessible from the host itself (not from the network). Caddy is the only service exposed to the internet on standard HTTP/HTTPS ports.

View File

@@ -57,6 +57,9 @@ cd reflector
# CPU-only (same, but slower): # CPU-only (same, but slower):
./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy
# With password authentication (single admin user):
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --password mysecretpass
# Build from source instead of pulling prebuilt images: # Build from source instead of pulling prebuilt images:
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --build ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --build
``` ```
@@ -124,6 +127,7 @@ Browse all available models at https://ollama.com/library.
| `--garage` | Starts Garage (local S3-compatible storage). Auto-configures bucket, keys, and env vars. | | `--garage` | Starts Garage (local S3-compatible storage). Auto-configures bucket, keys, and env vars. |
| `--caddy` | Starts Caddy reverse proxy on ports 80/443 with self-signed cert. | | `--caddy` | Starts Caddy reverse proxy on ports 80/443 with self-signed cert. |
| `--domain DOMAIN` | Use a real domain with Let's Encrypt auto-HTTPS (implies `--caddy`). Requires DNS A record pointing to this server and ports 80/443 open. | | `--domain DOMAIN` | Use a real domain with Let's Encrypt auto-HTTPS (implies `--caddy`). Requires DNS A record pointing to this server and ports 80/443 open. |
| `--password PASS` | Enable password authentication with an `admin@localhost` user. Sets `AUTH_BACKEND=password`, `PUBLIC_MODE=false`. See [Enabling Password Authentication](#enabling-password-authentication). |
| `--build` | Build backend (server, worker, beat) and frontend (web) Docker images from source instead of pulling prebuilt images from the registry. Useful for development or when running a version with local changes. | | `--build` | Build backend (server, worker, beat) and frontend (web) Docker images from source instead of pulling prebuilt images from the registry. Useful for development or when running a version with local changes. |
Without `--garage`, you **must** provide S3-compatible credentials (the script will prompt interactively or you can pre-fill `server/.env`). Without `--garage`, you **must** provide S3-compatible credentials (the script will prompt interactively or you can pre-fill `server/.env`).
@@ -156,13 +160,16 @@ Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse pr
| `DATABASE_URL` | PostgreSQL connection | Auto-set (Docker internal) | | `DATABASE_URL` | PostgreSQL connection | Auto-set (Docker internal) |
| `REDIS_HOST` | Redis hostname | Auto-set (`redis`) | | `REDIS_HOST` | Redis hostname | Auto-set (`redis`) |
| `SECRET_KEY` | App secret | Auto-generated | | `SECRET_KEY` | App secret | Auto-generated |
| `AUTH_BACKEND` | Authentication method | `none` | | `AUTH_BACKEND` | Authentication method (`none`, `password`, `jwt`) | `none` |
| `PUBLIC_MODE` | Allow unauthenticated access | `true` | | `PUBLIC_MODE` | Allow unauthenticated access | `true` |
| `ADMIN_EMAIL` | Admin email for password auth | *(unset)* |
| `ADMIN_PASSWORD_HASH` | PBKDF2 hash for password auth | *(unset)* |
| `WEBRTC_HOST` | IP advertised in WebRTC ICE candidates | Auto-detected (server IP) | | `WEBRTC_HOST` | IP advertised in WebRTC ICE candidates | Auto-detected (server IP) |
| `TRANSCRIPT_URL` | Specialized model endpoint | `http://transcription:8000` | | `TRANSCRIPT_URL` | Specialized model endpoint | `http://transcription:8000` |
| `LLM_URL` | OpenAI-compatible LLM endpoint | Auto-set for Ollama modes | | `LLM_URL` | OpenAI-compatible LLM endpoint | Auto-set for Ollama modes |
| `LLM_API_KEY` | LLM API key | `not-needed` for Ollama | | `LLM_API_KEY` | LLM API key | `not-needed` for Ollama |
| `LLM_MODEL` | LLM model name | `qwen2.5:14b` for Ollama (override with `--llm-model`) | | `LLM_MODEL` | LLM model name | `qwen2.5:14b` for Ollama (override with `--llm-model`) |
| `CELERY_BEAT_POLL_INTERVAL` | Override all worker polling intervals (seconds). `0` = use individual defaults | `300` (selfhosted), `0` (other) |
| `TRANSCRIPT_STORAGE_BACKEND` | Storage backend | `aws` | | `TRANSCRIPT_STORAGE_BACKEND` | Storage backend | `aws` |
| `TRANSCRIPT_STORAGE_AWS_*` | S3 credentials | Auto-set for Garage | | `TRANSCRIPT_STORAGE_AWS_*` | S3 credentials | Auto-set for Garage |
@@ -175,6 +182,7 @@ Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse pr
| `SERVER_API_URL` | API URL (server-side) | `http://server:1250` | | `SERVER_API_URL` | API URL (server-side) | `http://server:1250` |
| `NEXTAUTH_SECRET` | Auth secret | Auto-generated | | `NEXTAUTH_SECRET` | Auth secret | Auto-generated |
| `FEATURE_REQUIRE_LOGIN` | Require authentication | `false` | | `FEATURE_REQUIRE_LOGIN` | Require authentication | `false` |
| `AUTH_PROVIDER` | Auth provider (`authentik` or `credentials`) | *(unset)* |
## Storage Options ## Storage Options
@@ -207,8 +215,110 @@ TRANSCRIPT_STORAGE_AWS_REGION=us-east-1
TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL=http://minio:9000 TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL=http://minio:9000
``` ```
## What Authentication Enables
By default, Reflector runs in **public mode** (`AUTH_BACKEND=none`, `PUBLIC_MODE=true`) — anyone can create and view transcripts without logging in. Transcripts are anonymous (not linked to any user) and cannot be edited or deleted after creation.
Enabling authentication (either password or Authentik) unlocks:
| Feature | Public mode (no auth) | With authentication |
|---------|----------------------|---------------------|
| Create transcripts (record/upload) | Yes (anonymous, unowned) | Yes (owned by user) |
| View transcripts | All transcripts visible | Own transcripts + shared rooms |
| Edit/delete transcripts | No | Yes (owner only) |
| Privacy controls (private/semi-private/public) | No (everything public) | Yes (owner can set share mode) |
| Speaker reassignment and merging | No | Yes (owner only) |
| Participant management (add/edit/delete) | Read-only | Full CRUD (owner only) |
| Create rooms | No | Yes |
| Edit/delete rooms | No | Yes (owner only) |
| Room calendar (ICS) sync | No | Yes (owner only) |
| API key management | No | Yes |
| Post to Zulip | No | Yes (owner only) |
| Real-time WebSocket notifications | No (connection closed) | Yes (transcript create/delete events) |
| Meeting host access (Daily.co token) | No | Yes (room owner) |
In short: public mode is "demo-friendly" — great for trying Reflector out. Authentication adds **ownership, privacy, and management** of your data.
## Authentication Options
Reflector supports three authentication backends:
| Backend | `AUTH_BACKEND` | Use case |
|---------|---------------|----------|
| `none` | `none` | Public/demo mode, no login required |
| `password` | `password` | Single-user self-hosted, simple email/password login |
| `jwt` | `jwt` | Multi-user via Authentik (OAuth2/OIDC) |
## Enabling Password Authentication
The simplest way to add authentication. Creates a single admin user with email/password login — no external identity provider needed.
### Quick setup (recommended)
Pass `--password` to the setup script:
```bash
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --password mysecretpass
```
This automatically:
- Sets `AUTH_BACKEND=password` and `PUBLIC_MODE=false` in `server/.env`
- Creates an `admin@localhost` user with the given password
- Sets `FEATURE_REQUIRE_LOGIN=true` and `AUTH_PROVIDER=credentials` in `www/.env`
- Provisions the admin user in the database on container startup
### Manual setup
If you prefer to configure manually or want to change the admin email:
1. Generate a password hash:
```bash
cd server
uv run python -m reflector.tools.create_admin --hash-only --password yourpassword
```
2. Update `server/.env`:
```env
AUTH_BACKEND=password
PUBLIC_MODE=false
ADMIN_EMAIL=admin@yourdomain.com
ADMIN_PASSWORD_HASH=pbkdf2:sha256:100000$<salt>$<hash>
```
3. Update `www/.env`:
```env
FEATURE_REQUIRE_LOGIN=true
AUTH_PROVIDER=credentials
```
4. Restart:
```bash
docker compose -f docker-compose.selfhosted.yml down
./scripts/setup-selfhosted.sh <same-flags>
```
### How it works
- The backend issues HS256 JWTs (signed with `SECRET_KEY`) on successful login via `POST /v1/auth/login`
- Tokens expire after 24 hours; the user must log in again after expiry
- The frontend shows a login page at `/login` with email and password fields
- A rate limiter blocks IPs after 10 failed login attempts within 5 minutes
- The admin user is provisioned automatically on container startup from `ADMIN_EMAIL` and `ADMIN_PASSWORD_HASH` environment variables
- Passwords are hashed with PBKDF2-SHA256 (100,000 iterations) — no additional dependencies required
### Changing the admin password
```bash
cd server
uv run python -m reflector.tools.create_admin --email admin@localhost --password newpassword
```
Or update `ADMIN_PASSWORD_HASH` in `server/.env` and restart the containers.
## Enabling Authentication (Authentik) ## Enabling Authentication (Authentik)
For multi-user deployments with SSO. Requires an external Authentik instance.
By default, authentication is disabled (`AUTH_BACKEND=none`, `FEATURE_REQUIRE_LOGIN=false`). To enable: By default, authentication is disabled (`AUTH_BACKEND=none`, `FEATURE_REQUIRE_LOGIN=false`). To enable:
1. Deploy an Authentik instance (see [Authentik docs](https://goauthentik.io/docs/installation)) 1. Deploy an Authentik instance (see [Authentik docs](https://goauthentik.io/docs/installation))
@@ -221,6 +331,7 @@ By default, authentication is disabled (`AUTH_BACKEND=none`, `FEATURE_REQUIRE_LO
4. Update `www/.env`: 4. Update `www/.env`:
```env ```env
FEATURE_REQUIRE_LOGIN=true FEATURE_REQUIRE_LOGIN=true
AUTH_PROVIDER=authentik
AUTHENTIK_ISSUER=https://authentik.example.com/application/o/reflector AUTHENTIK_ISSUER=https://authentik.example.com/application/o/reflector
AUTHENTIK_REFRESH_TOKEN_URL=https://authentik.example.com/application/o/token/ AUTHENTIK_REFRESH_TOKEN_URL=https://authentik.example.com/application/o/token/
AUTHENTIK_CLIENT_ID=your-client-id AUTHENTIK_CLIENT_ID=your-client-id
@@ -273,6 +384,41 @@ By default, Caddy uses self-signed certificates. For a real domain:
``` ```
5. Restart Caddy: `docker compose -f docker-compose.selfhosted.yml restart caddy web` 5. Restart Caddy: `docker compose -f docker-compose.selfhosted.yml restart caddy web`
## Worker Polling Frequency
The selfhosted setup defaults all background worker polling intervals to **300 seconds (5 minutes)** to reduce CPU and memory usage. This controls how often the beat scheduler triggers tasks like recording discovery, meeting reconciliation, and calendar sync.
To change the interval, edit `server/.env`:
```env
# Poll every 60 seconds (more responsive, uses more resources)
CELERY_BEAT_POLL_INTERVAL=60
# Poll every 5 minutes (default for selfhosted)
CELERY_BEAT_POLL_INTERVAL=300
# Use individual per-task defaults (production SaaS behavior)
CELERY_BEAT_POLL_INTERVAL=0
```
After changing, restart the beat and worker containers:
```bash
docker compose -f docker-compose.selfhosted.yml restart beat worker
```
**Affected tasks when `CELERY_BEAT_POLL_INTERVAL` is set:**
| Task | Default (no override) | With override |
|------|-----------------------|---------------|
| SQS message polling | 60s | Override value |
| Daily.co recording discovery | 15s (no webhook) / 180s (webhook) | Override value |
| Meeting reconciliation | 30s | Override value |
| ICS calendar sync | 60s | Override value |
| Upcoming meeting creation | 30s | Override value |
> **Note:** Daily crontab tasks (failed recording reprocessing at 05:00 UTC, public data cleanup at 03:00 UTC) and healthcheck pings (10 min) are **not** affected by this setting.
## Troubleshooting ## Troubleshooting
### Check service status ### Check service status
@@ -366,7 +512,7 @@ The setup script is idempotent — it won't overwrite existing secrets or env va
┌─────┴─────┐ ┌─────────┐ ┌─────┴─────┐ ┌─────────┐
│ ollama │ │ garage │ │ ollama │ │ garage │
│ (optional)│ │(optional│ │ (optional)│ │(optional│
│ :11434 │ │ S3) │ │ :11435 │ │ S3) │
└───────────┘ └─────────┘ └───────────┘ └─────────┘
``` ```

View File

@@ -4,7 +4,7 @@
# Single script to configure and launch everything on one server. # Single script to configure and launch everything on one server.
# #
# Usage: # Usage:
# ./scripts/setup-selfhosted.sh <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--build] # ./scripts/setup-selfhosted.sh <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASSWORD] [--build]
# #
# Specialized models (pick ONE — required): # Specialized models (pick ONE — required):
# --gpu NVIDIA GPU for transcription/diarization/translation # --gpu NVIDIA GPU for transcription/diarization/translation
@@ -22,6 +22,7 @@
# --domain DOMAIN Use a real domain for Caddy (enables Let's Encrypt auto-HTTPS) # --domain DOMAIN Use a real domain for Caddy (enables Let's Encrypt auto-HTTPS)
# Requires: DNS pointing to this server + ports 80/443 open # Requires: DNS pointing to this server + ports 80/443 open
# Without --domain: Caddy uses self-signed cert for IP access # Without --domain: Caddy uses self-signed cert for IP access
# --password PASS Enable password auth with admin@localhost user
# --build Build backend and frontend images from source instead of pulling # --build Build backend and frontend images from source instead of pulling
# #
# Examples: # Examples:
@@ -29,6 +30,7 @@
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --domain reflector.example.com # ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --domain reflector.example.com
# ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy # ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --llm-model mistral --garage --caddy # ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --llm-model mistral --garage --caddy
# ./scripts/setup-selfhosted.sh --gpu --garage --caddy --password mysecretpass
# ./scripts/setup-selfhosted.sh --gpu --garage --caddy # ./scripts/setup-selfhosted.sh --gpu --garage --caddy
# ./scripts/setup-selfhosted.sh --cpu # ./scripts/setup-selfhosted.sh --cpu
# #
@@ -165,6 +167,7 @@ USE_GARAGE=false
USE_CADDY=false USE_CADDY=false
CUSTOM_DOMAIN="" # optional domain for Let's Encrypt HTTPS CUSTOM_DOMAIN="" # optional domain for Let's Encrypt HTTPS
BUILD_IMAGES=false # build backend/frontend from source BUILD_IMAGES=false # build backend/frontend from source
ADMIN_PASSWORD="" # optional admin password for password auth
SKIP_NEXT=false SKIP_NEXT=false
ARGS=("$@") ARGS=("$@")
@@ -198,6 +201,14 @@ for i in "${!ARGS[@]}"; do
--garage) USE_GARAGE=true ;; --garage) USE_GARAGE=true ;;
--caddy) USE_CADDY=true ;; --caddy) USE_CADDY=true ;;
--build) BUILD_IMAGES=true ;; --build) BUILD_IMAGES=true ;;
--password)
next_i=$((i + 1))
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
err "--password requires a password value (e.g. --password mysecretpass)"
exit 1
fi
ADMIN_PASSWORD="${ARGS[$next_i]}"
SKIP_NEXT=true ;;
--domain) --domain)
next_i=$((i + 1)) next_i=$((i + 1))
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
@@ -209,7 +220,7 @@ for i in "${!ARGS[@]}"; do
SKIP_NEXT=true ;; SKIP_NEXT=true ;;
*) *)
err "Unknown argument: $arg" err "Unknown argument: $arg"
err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--build]" err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]"
exit 1 exit 1
;; ;;
esac esac
@@ -218,7 +229,7 @@ done
if [[ -z "$MODEL_MODE" ]]; then if [[ -z "$MODEL_MODE" ]]; then
err "No model mode specified. You must choose --gpu or --cpu." err "No model mode specified. You must choose --gpu or --cpu."
err "" err ""
err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--build]" err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]"
err "" err ""
err "Specialized models (required):" err "Specialized models (required):"
err " --gpu NVIDIA GPU for transcription/diarization/translation" err " --gpu NVIDIA GPU for transcription/diarization/translation"
@@ -234,6 +245,7 @@ if [[ -z "$MODEL_MODE" ]]; then
err " --garage Local S3-compatible storage (Garage)" err " --garage Local S3-compatible storage (Garage)"
err " --caddy Caddy reverse proxy with self-signed cert" err " --caddy Caddy reverse proxy with self-signed cert"
err " --domain DOMAIN Use a real domain with Let's Encrypt HTTPS (implies --caddy)" err " --domain DOMAIN Use a real domain with Let's Encrypt HTTPS (implies --caddy)"
err " --password PASS Enable password auth (admin@localhost) instead of public mode"
err " --build Build backend/frontend images from source instead of pulling" err " --build Build backend/frontend images from source instead of pulling"
exit 1 exit 1
fi fi
@@ -325,6 +337,18 @@ step_secrets() {
NEXTAUTH_SECRET=$(openssl rand -hex 32) NEXTAUTH_SECRET=$(openssl rand -hex 32)
fi fi
# Generate admin password hash if --password was provided
if [[ -n "$ADMIN_PASSWORD" ]]; then
# Note: $$ escapes are required because docker-compose interprets $ in .env files
ADMIN_PASSWORD_HASH=$(python3 -c "
import hashlib, os
salt = os.urandom(16).hex()
dk = hashlib.pbkdf2_hmac('sha256', '''${ADMIN_PASSWORD}'''.encode('utf-8'), salt.encode('utf-8'), 100000)
print(f'pbkdf2:sha256:100000\$\$' + salt + '\$\$' + dk.hex())
")
ok "Admin password hash generated"
fi
ok "Secrets ready" ok "Secrets ready"
} }
@@ -346,9 +370,28 @@ step_server_env() {
env_set "$SERVER_ENV" "REDIS_HOST" "redis" env_set "$SERVER_ENV" "REDIS_HOST" "redis"
env_set "$SERVER_ENV" "CELERY_BROKER_URL" "redis://redis:6379/1" env_set "$SERVER_ENV" "CELERY_BROKER_URL" "redis://redis:6379/1"
env_set "$SERVER_ENV" "CELERY_RESULT_BACKEND" "redis://redis:6379/1" env_set "$SERVER_ENV" "CELERY_RESULT_BACKEND" "redis://redis:6379/1"
env_set "$SERVER_ENV" "CELERY_BEAT_POLL_INTERVAL" "300"
env_set "$SERVER_ENV" "SECRET_KEY" "$SECRET_KEY" env_set "$SERVER_ENV" "SECRET_KEY" "$SECRET_KEY"
env_set "$SERVER_ENV" "AUTH_BACKEND" "none"
env_set "$SERVER_ENV" "PUBLIC_MODE" "true" # Auth configuration
if [[ -n "$ADMIN_PASSWORD" ]]; then
env_set "$SERVER_ENV" "AUTH_BACKEND" "password"
env_set "$SERVER_ENV" "PUBLIC_MODE" "false"
env_set "$SERVER_ENV" "ADMIN_EMAIL" "admin@localhost"
env_set "$SERVER_ENV" "ADMIN_PASSWORD_HASH" "$ADMIN_PASSWORD_HASH"
ok "Password auth configured (admin@localhost)"
else
local current_auth_backend=""
if env_has_key "$SERVER_ENV" "AUTH_BACKEND"; then
current_auth_backend=$(env_get "$SERVER_ENV" "AUTH_BACKEND")
fi
if [[ "$current_auth_backend" != "jwt" ]]; then
env_set "$SERVER_ENV" "AUTH_BACKEND" "none"
env_set "$SERVER_ENV" "PUBLIC_MODE" "true"
else
ok "Keeping existing auth backend: $current_auth_backend"
fi
fi
# Public-facing URLs # Public-facing URLs
local server_base_url local server_base_url
@@ -413,7 +456,7 @@ step_server_env() {
# LLM configuration # LLM configuration
if [[ "$USES_OLLAMA" == "true" ]]; then if [[ "$USES_OLLAMA" == "true" ]]; then
local llm_host="$OLLAMA_SVC" local llm_host="$OLLAMA_SVC"
env_set "$SERVER_ENV" "LLM_URL" "http://${llm_host}:11434/v1" env_set "$SERVER_ENV" "LLM_URL" "http://${llm_host}:11435/v1"
env_set "$SERVER_ENV" "LLM_MODEL" "$OLLAMA_MODEL" env_set "$SERVER_ENV" "LLM_MODEL" "$OLLAMA_MODEL"
env_set "$SERVER_ENV" "LLM_API_KEY" "not-needed" env_set "$SERVER_ENV" "LLM_API_KEY" "not-needed"
ok "LLM configured for local Ollama ($llm_host, model=$OLLAMA_MODEL)" ok "LLM configured for local Ollama ($llm_host, model=$OLLAMA_MODEL)"
@@ -474,7 +517,23 @@ step_www_env() {
env_set "$WWW_ENV" "WEBSOCKET_URL" "auto" env_set "$WWW_ENV" "WEBSOCKET_URL" "auto"
env_set "$WWW_ENV" "SERVER_API_URL" "http://server:1250" env_set "$WWW_ENV" "SERVER_API_URL" "http://server:1250"
env_set "$WWW_ENV" "KV_URL" "redis://redis:6379" env_set "$WWW_ENV" "KV_URL" "redis://redis:6379"
env_set "$WWW_ENV" "FEATURE_REQUIRE_LOGIN" "false"
# Auth configuration
if [[ -n "$ADMIN_PASSWORD" ]]; then
env_set "$WWW_ENV" "FEATURE_REQUIRE_LOGIN" "true"
env_set "$WWW_ENV" "AUTH_PROVIDER" "credentials"
ok "Frontend configured for password auth"
else
local current_auth_provider=""
if env_has_key "$WWW_ENV" "AUTH_PROVIDER"; then
current_auth_provider=$(env_get "$WWW_ENV" "AUTH_PROVIDER")
fi
if [[ "$current_auth_provider" != "authentik" ]]; then
env_set "$WWW_ENV" "FEATURE_REQUIRE_LOGIN" "false"
else
ok "Keeping existing auth provider: $current_auth_provider"
fi
fi
ok "www/.env ready (URL=$base_url)" ok "www/.env ready (URL=$base_url)"
} }
@@ -755,7 +814,7 @@ step_health() {
info "Waiting for Ollama service..." info "Waiting for Ollama service..."
local ollama_ok=false local ollama_ok=false
for i in $(seq 1 60); do for i in $(seq 1 60); do
if curl -sf http://localhost:11434/api/tags > /dev/null 2>&1; then if curl -sf http://localhost:11435/api/tags > /dev/null 2>&1; then
ollama_ok=true ollama_ok=true
break break
fi fi

View File

@@ -17,7 +17,7 @@ SERVER_ENV="$ROOT_DIR/server/.env"
WWW_ENV="$ROOT_DIR/www/.env.local" WWW_ENV="$ROOT_DIR/www/.env.local"
MODEL="${LLM_MODEL:-qwen2.5:14b}" MODEL="${LLM_MODEL:-qwen2.5:14b}"
OLLAMA_PORT="${OLLAMA_PORT:-11434}" OLLAMA_PORT="${OLLAMA_PORT:-11435}"
OS="$(uname -s)" OS="$(uname -s)"

View File

@@ -73,10 +73,10 @@ TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
## Setup: ./scripts/setup-standalone.sh ## Setup: ./scripts/setup-standalone.sh
## Mac: Ollama runs natively (Metal GPU). Containers reach it via host.docker.internal. ## Mac: Ollama runs natively (Metal GPU). Containers reach it via host.docker.internal.
## Linux: docker compose --profile ollama-gpu up -d (or ollama-cpu for no GPU) ## Linux: docker compose --profile ollama-gpu up -d (or ollama-cpu for no GPU)
LLM_URL=http://host.docker.internal:11434/v1 LLM_URL=http://host.docker.internal:11435/v1
LLM_MODEL=qwen2.5:14b LLM_MODEL=qwen2.5:14b
LLM_API_KEY=not-needed LLM_API_KEY=not-needed
## Linux with containerized Ollama: LLM_URL=http://ollama:11434/v1 ## Linux with containerized Ollama: LLM_URL=http://ollama:11435/v1
## --- Option B: Remote/cloud LLM --- ## --- Option B: Remote/cloud LLM ---
#LLM_API_KEY=sk-your-openai-api-key #LLM_API_KEY=sk-your-openai-api-key

View File

@@ -26,6 +26,9 @@ SECRET_KEY=changeme-generate-a-secure-random-string
AUTH_BACKEND=none AUTH_BACKEND=none
# AUTH_BACKEND=jwt # AUTH_BACKEND=jwt
# AUTH_JWT_AUDIENCE= # AUTH_JWT_AUDIENCE=
# AUTH_BACKEND=password
# ADMIN_EMAIL=admin@localhost
# ADMIN_PASSWORD_HASH=pbkdf2:sha256:100000$<salt>$<hash>
# ======================================================= # =======================================================
# Specialized Models (Transcription, Diarization, Translation) # Specialized Models (Transcription, Diarization, Translation)
@@ -64,7 +67,7 @@ TRANSLATE_URL=http://transcription:8000
# LLM_MODEL=gpt-4o-mini # LLM_MODEL=gpt-4o-mini
# --- Option B: Local Ollama (auto-set by --ollama-gpu/--ollama-cpu) --- # --- Option B: Local Ollama (auto-set by --ollama-gpu/--ollama-cpu) ---
# LLM_URL=http://ollama:11434/v1 # LLM_URL=http://ollama:11435/v1
# LLM_API_KEY=not-needed # LLM_API_KEY=not-needed
# LLM_MODEL=llama3.1 # LLM_MODEL=llama3.1

View File

@@ -0,0 +1,25 @@
"""add password_hash to user table
Revision ID: e1f093f7f124
Revises: 623af934249a
Create Date: 2026-02-19 00:00:00.000000
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
revision: str = "e1f093f7f124"
down_revision: Union[str, None] = "623af934249a"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column("user", sa.Column("password_hash", sa.String(), nullable=True))
def downgrade() -> None:
op.drop_column("user", "password_hash")

View File

@@ -8,6 +8,7 @@ from prometheus_fastapi_instrumentator import Instrumentator
import reflector.auth # noqa import reflector.auth # noqa
import reflector.db # noqa import reflector.db # noqa
from reflector.auth import router as auth_router
from reflector.events import subscribers_shutdown, subscribers_startup from reflector.events import subscribers_shutdown, subscribers_startup
from reflector.logger import logger from reflector.logger import logger
from reflector.metrics import metrics_init from reflector.metrics import metrics_init
@@ -105,6 +106,8 @@ app.include_router(user_ws_router, prefix="/v1")
app.include_router(zulip_router, prefix="/v1") app.include_router(zulip_router, prefix="/v1")
app.include_router(whereby_router, prefix="/v1") app.include_router(whereby_router, prefix="/v1")
app.include_router(daily_router, prefix="/v1/daily") app.include_router(daily_router, prefix="/v1/daily")
if auth_router:
app.include_router(auth_router, prefix="/v1")
add_pagination(app) add_pagination(app)
# prepare celery # prepare celery

View File

@@ -14,3 +14,6 @@ current_user = auth_module.current_user
current_user_optional = auth_module.current_user_optional current_user_optional = auth_module.current_user_optional
parse_ws_bearer_token = auth_module.parse_ws_bearer_token parse_ws_bearer_token = auth_module.parse_ws_bearer_token
current_user_ws_optional = auth_module.current_user_ws_optional current_user_ws_optional = auth_module.current_user_ws_optional
# Optional router (e.g. for /auth/login in password backend)
router = getattr(auth_module, "router", None)

View File

@@ -0,0 +1,198 @@
"""Password-based authentication backend for selfhosted deployments.
Issues HS256 JWTs signed with settings.SECRET_KEY. Provides a POST /auth/login
endpoint for email/password authentication.
"""
import time
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, Annotated, Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.security import APIKeyHeader, OAuth2PasswordBearer
from jose import JWTError, jwt
from pydantic import BaseModel
from reflector.auth.password_utils import verify_password
from reflector.db.user_api_keys import user_api_keys_controller
from reflector.db.users import user_controller
from reflector.logger import logger
from reflector.settings import settings
if TYPE_CHECKING:
from fastapi import WebSocket
# --- FastAPI security schemes (same pattern as auth_jwt.py) ---
# auto_error=False: missing credentials yield None instead of an immediate
# error response, so current_user_optional can treat the request as anonymous.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/v1/auth/login", auto_error=False)
api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
# --- JWT configuration ---
# Tokens are symmetric (HS256) and signed with settings.SECRET_KEY.
JWT_ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24  # 24 hours
# --- Rate limiting (in-memory) ---
_login_attempts: dict[str, list[float]] = defaultdict(list)
RATE_LIMIT_WINDOW = 300 # 5 minutes
RATE_LIMIT_MAX = 10 # max attempts per window
def _check_rate_limit(key: str) -> bool:
"""Return True if request is allowed, False if rate-limited."""
now = time.monotonic()
attempts = _login_attempts[key]
_login_attempts[key] = [t for t in attempts if now - t < RATE_LIMIT_WINDOW]
if len(_login_attempts[key]) >= RATE_LIMIT_MAX:
return False
_login_attempts[key].append(now)
return True
# --- Pydantic models ---
class UserInfo(BaseModel):
    """Authenticated principal returned by the auth dependencies."""
    # `sub` is the internal user id (users.id), not an Authentik uid.
    sub: str
    email: Optional[str] = None
    def __getitem__(self, key):
        # Dict-style access (user["sub"]) kept for callers that treat the
        # user object as a mapping.
        return getattr(self, key)
class AccessTokenInfo(BaseModel):
    """Decoded JWT claims of interest (expiry and subject)."""
    exp: Optional[int] = None
    sub: Optional[str] = None
class LoginRequest(BaseModel):
    """POST /auth/login request body."""
    email: str
    password: str
class LoginResponse(BaseModel):
    """POST /auth/login response: bearer token plus its lifetime in seconds."""
    access_token: str
    token_type: str = "bearer"
    expires_in: int
# --- JWT token creation and verification ---
def _create_access_token(user_id: str, email: str) -> tuple[str, int]:
    """Issue an HS256 access token for the given user.

    The token is signed with settings.SECRET_KEY and carries the user id as
    `sub`, the email, and an absolute expiry. Returns (token, lifetime_seconds).
    """
    lifetime = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    claims = {
        "sub": user_id,
        "email": email,
        "exp": datetime.now(timezone.utc) + lifetime,
    }
    signed = jwt.encode(claims, settings.SECRET_KEY, algorithm=JWT_ALGORITHM)
    return signed, int(lifetime.total_seconds())
def _verify_token(token: str) -> dict:
    """Decode and validate an HS256 JWT; raises jose.JWTError when the
    signature is invalid or the token is expired."""
    decoded = jwt.decode(token, settings.SECRET_KEY, algorithms=[JWT_ALGORITHM])
    return decoded
# --- Authentication logic (mirrors auth_jwt._authenticate_user) ---
async def _authenticate_user(
    jwt_token: Optional[str],
    api_key: Optional[str],
) -> UserInfo | None:
    """Resolve a user from an API key and/or a bearer JWT.

    Either credential alone is sufficient. When both are supplied they must
    resolve to the same user, otherwise 401 is raised. Returns None when no
    credential was provided at all (anonymous request).
    """
    candidates: list[UserInfo] = []
    if api_key:
        key_record = await user_api_keys_controller.verify_key(api_key)
        if key_record:
            candidates.append(UserInfo(sub=key_record.user_id, email=None))
    if jwt_token:
        try:
            claims = _verify_token(jwt_token)
        except JWTError as e:
            logger.error(f"JWT error: {e}")
            raise HTTPException(status_code=401, detail="Invalid authentication")
        candidates.append(UserInfo(sub=claims["sub"], email=claims.get("email")))
    if not candidates:
        return None
    # API key and JWT pointing at different users is a hard error.
    if len({c.sub for c in candidates}) > 1:
        raise HTTPException(
            status_code=401,
            detail="Invalid authentication: more than one user provided",
        )
    return candidates[0]
# --- FastAPI dependencies (exported, required by auth/__init__.py) ---
def authenticated(token: Annotated[str, Depends(oauth2_scheme)]):
    """Dependency that only checks a bearer token was presented (no decoding)."""
    if token is not None:
        return None
    raise HTTPException(status_code=401, detail="Not authenticated")
async def current_user(
    jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
    api_key: Annotated[Optional[str], Depends(api_key_header)],
):
    """Dependency returning the authenticated UserInfo, raising 401 otherwise."""
    resolved = await _authenticate_user(jwt_token, api_key)
    if resolved is None:
        raise HTTPException(status_code=401, detail="Not authenticated")
    return resolved
async def current_user_optional(
    jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
    api_key: Annotated[Optional[str], Depends(api_key_header)],
):
    """Like current_user, but yields None for anonymous requests instead of 401."""
    resolved = await _authenticate_user(jwt_token, api_key)
    return resolved
# --- WebSocket auth (same pattern as auth_jwt.py) ---
def parse_ws_bearer_token(
    websocket: "WebSocket",
) -> tuple[Optional[str], Optional[str]]:
    """Extract a bearer token from the WebSocket subprotocol list.

    Browsers cannot set an Authorization header on WebSocket connections, so
    clients send ["bearer", <token>] via Sec-WebSocket-Protocol instead.
    Returns (token, "bearer") on success, (None, None) otherwise.
    """
    header_value = websocket.headers.get("sec-websocket-protocol") or ""
    entries = [item.strip() for item in header_value.split(",") if item.strip()]
    if len(entries) < 2 or entries[0].lower() != "bearer":
        return None, None
    return entries[1], "bearer"
async def current_user_ws_optional(websocket: "WebSocket") -> Optional[UserInfo]:
    """Resolve the user for a WebSocket connection, or None when no token sent."""
    token, _proto = parse_ws_bearer_token(websocket)
    if token:
        return await _authenticate_user(token, None)
    return None
# --- Login router ---
router = APIRouter(prefix="/auth", tags=["auth"])


@router.post("/login", response_model=LoginResponse)
async def login(request: Request, body: LoginRequest):
    """Authenticate with email/password and return a bearer JWT.

    Responses:
      200 LoginResponse on success.
      401 on unknown email, passwordless (OIDC-only) user, or wrong password —
          one generic message so the endpoint is not a user-enumeration oracle.
      429 when the per-IP rate limit is exceeded.
    """
    client_ip = request.client.host if request.client else "unknown"
    if not _check_rate_limit(client_ip):
        raise HTTPException(
            status_code=429,
            detail="Too many login attempts. Try again later.",
        )
    user = await user_controller.get_by_email(body.email)
    # Fix: the original had leftover debug prints ("invalid email" /
    # "invalid pass") that leaked which check failed to stdout. Checks are
    # merged into one guard with a single generic 401.
    if (
        not user
        or not user.password_hash
        or not verify_password(body.password, user.password_hash)
    ):
        raise HTTPException(status_code=401, detail="Invalid email or password")
    access_token, expires_in = _create_access_token(user.id, user.email)
    return LoginResponse(
        access_token=access_token,
        token_type="bearer",
        expires_in=expires_in,
    )

View File

@@ -0,0 +1,41 @@
"""Password hashing utilities using PBKDF2-SHA256 (stdlib only)."""
import hashlib
import hmac
import os
PBKDF2_ITERATIONS = 100_000
SALT_LENGTH = 16  # bytes, hex-encoded to 32 chars


def hash_password(password: str) -> str:
    """Derive a PBKDF2-SHA256 hash for `password` with a fresh random salt.

    Output format: pbkdf2:sha256:<iterations>$<salt_hex>$<hash_hex>
    Note the hex form of the salt (not the raw bytes) feeds the KDF, which is
    what verify_password() reconstructs on the other side.
    """
    salt_hex = os.urandom(SALT_LENGTH).hex()
    derived = hashlib.pbkdf2_hmac(
        "sha256",
        password.encode("utf-8"),
        salt_hex.encode("utf-8"),
        PBKDF2_ITERATIONS,
    )
    return "$".join([f"pbkdf2:sha256:{PBKDF2_ITERATIONS}", salt_hex, derived.hex()])
def verify_password(password: str, password_hash: str) -> bool:
    """Check `password` against a stored pbkdf2:sha256:<iter>$<salt>$<hash> value.

    Recomputes the derivation using the salt, algorithm and iteration count
    embedded in the stored string, then compares digests in constant time.
    Any malformed stored hash simply yields False.
    """
    try:
        header, salt_hex, expected_hex = password_hash.split("$", 2)
        _scheme, algorithm, iteration_count = header.split(":")
        derived = hashlib.pbkdf2_hmac(
            algorithm,
            password.encode("utf-8"),
            salt_hex.encode("utf-8"),
            int(iteration_count),
        )
        return hmac.compare_digest(derived.hex(), expected_hex)
    except (ValueError, AttributeError):
        return False

View File

@@ -1,4 +1,4 @@
"""User table for storing Authentik user information.""" """User table for storing user information."""
from datetime import datetime, timezone from datetime import datetime, timezone
@@ -15,6 +15,7 @@ users = sqlalchemy.Table(
sqlalchemy.Column("id", sqlalchemy.String, primary_key=True), sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
sqlalchemy.Column("email", sqlalchemy.String, nullable=False), sqlalchemy.Column("email", sqlalchemy.String, nullable=False),
sqlalchemy.Column("authentik_uid", sqlalchemy.String, nullable=False), sqlalchemy.Column("authentik_uid", sqlalchemy.String, nullable=False),
sqlalchemy.Column("password_hash", sqlalchemy.String, nullable=True),
sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True), nullable=False), sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True), nullable=False),
sqlalchemy.Column("updated_at", sqlalchemy.DateTime(timezone=True), nullable=False), sqlalchemy.Column("updated_at", sqlalchemy.DateTime(timezone=True), nullable=False),
sqlalchemy.Index("idx_user_authentik_uid", "authentik_uid", unique=True), sqlalchemy.Index("idx_user_authentik_uid", "authentik_uid", unique=True),
@@ -26,6 +27,7 @@ class User(BaseModel):
id: NonEmptyString = Field(default_factory=generate_uuid4) id: NonEmptyString = Field(default_factory=generate_uuid4)
email: NonEmptyString email: NonEmptyString
authentik_uid: NonEmptyString authentik_uid: NonEmptyString
password_hash: str | None = None
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
@@ -51,22 +53,29 @@ class UserController:
@staticmethod @staticmethod
async def create_or_update( async def create_or_update(
id: NonEmptyString, authentik_uid: NonEmptyString, email: NonEmptyString id: NonEmptyString,
authentik_uid: NonEmptyString,
email: NonEmptyString,
password_hash: str | None = None,
) -> User: ) -> User:
existing = await UserController.get_by_authentik_uid(authentik_uid) existing = await UserController.get_by_authentik_uid(authentik_uid)
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
if existing: if existing:
update_values: dict = {"email": email, "updated_at": now}
if password_hash is not None:
update_values["password_hash"] = password_hash
query = ( query = (
users.update() users.update()
.where(users.c.authentik_uid == authentik_uid) .where(users.c.authentik_uid == authentik_uid)
.values(email=email, updated_at=now) .values(**update_values)
) )
await get_database().execute(query) await get_database().execute(query)
return User( return User(
id=existing.id, id=existing.id,
authentik_uid=authentik_uid, authentik_uid=authentik_uid,
email=email, email=email,
password_hash=password_hash or existing.password_hash,
created_at=existing.created_at, created_at=existing.created_at,
updated_at=now, updated_at=now,
) )
@@ -75,6 +84,7 @@ class UserController:
id=id, id=id,
authentik_uid=authentik_uid, authentik_uid=authentik_uid,
email=email, email=email,
password_hash=password_hash,
created_at=now, created_at=now,
updated_at=now, updated_at=now,
) )
@@ -82,6 +92,16 @@ class UserController:
await get_database().execute(query) await get_database().execute(query)
return user return user
@staticmethod
async def set_password_hash(user_id: NonEmptyString, password_hash: str) -> None:
now = datetime.now(timezone.utc)
query = (
users.update()
.where(users.c.id == user_id)
.values(password_hash=password_hash, updated_at=now)
)
await get_database().execute(query)
@staticmethod @staticmethod
async def list_all() -> list[User]: async def list_all() -> list[User]:
query = users.select().order_by(users.c.created_at.desc()) query = users.select().order_by(users.c.created_at.desc())

View File

@@ -228,6 +228,7 @@ class LLM:
is_function_calling_model=False, is_function_calling_model=False,
temperature=self.temperature, temperature=self.temperature,
max_tokens=self.max_tokens, max_tokens=self.max_tokens,
timeout=self.settings_obj.LLM_REQUEST_TIMEOUT,
additional_kwargs={"extra_body": {"litellm_session_id": session_id}}, additional_kwargs={"extra_body": {"litellm_session_id": session_id}},
) )

View File

@@ -87,6 +87,7 @@ class Settings(BaseSettings):
LLM_URL: str | None = None LLM_URL: str | None = None
LLM_API_KEY: str | None = None LLM_API_KEY: str | None = None
LLM_CONTEXT_WINDOW: int = 16000 LLM_CONTEXT_WINDOW: int = 16000
LLM_REQUEST_TIMEOUT: float = 300.0 # HTTP request timeout for LLM calls (seconds)
LLM_PARSE_MAX_RETRIES: int = ( LLM_PARSE_MAX_RETRIES: int = (
3 # Max retries for JSON/validation errors (total attempts = retries + 1) 3 # Max retries for JSON/validation errors (total attempts = retries + 1)
@@ -112,7 +113,7 @@ class Settings(BaseSettings):
# Sentry # Sentry
SENTRY_DSN: str | None = None SENTRY_DSN: str | None = None
# User authentication (none, jwt) # User authentication (none, jwt, password)
AUTH_BACKEND: str = "none" AUTH_BACKEND: str = "none"
# User authentication using JWT # User authentication using JWT
@@ -120,6 +121,10 @@ class Settings(BaseSettings):
AUTH_JWT_PUBLIC_KEY: str | None = "authentik.monadical.com_public.pem" AUTH_JWT_PUBLIC_KEY: str | None = "authentik.monadical.com_public.pem"
AUTH_JWT_AUDIENCE: str | None = None AUTH_JWT_AUDIENCE: str | None = None
# User authentication using password (selfhosted)
ADMIN_EMAIL: str | None = None
ADMIN_PASSWORD_HASH: str | None = None
PUBLIC_MODE: bool = False PUBLIC_MODE: bool = False
PUBLIC_DATA_RETENTION_DAYS: PositiveInt = 7 PUBLIC_DATA_RETENTION_DAYS: PositiveInt = 7
@@ -153,6 +158,9 @@ class Settings(BaseSettings):
WHEREBY_WEBHOOK_SECRET: str | None = None WHEREBY_WEBHOOK_SECRET: str | None = None
AWS_PROCESS_RECORDING_QUEUE_URL: str | None = None AWS_PROCESS_RECORDING_QUEUE_URL: str | None = None
SQS_POLLING_TIMEOUT_SECONDS: int = 60 SQS_POLLING_TIMEOUT_SECONDS: int = 60
CELERY_BEAT_POLL_INTERVAL: int = (
0 # 0 = use individual defaults; set e.g. 300 for 5-min polling
)
# Daily.co integration # Daily.co integration
DAILY_API_KEY: str | None = None DAILY_API_KEY: str | None = None

View File

@@ -0,0 +1,80 @@
"""Create or update an admin user with password authentication.
Usage:
uv run python -m reflector.tools.create_admin --email admin@localhost --password <pass>
uv run python -m reflector.tools.create_admin --email admin@localhost # prompts for password
uv run python -m reflector.tools.create_admin --hash-only --password <pass> # print hash only
"""
import argparse
import asyncio
import getpass
import sys
from reflector.auth.password_utils import hash_password
from reflector.db.users import user_controller
from reflector.utils import generate_uuid4
async def create_admin(email: str, password: str) -> None:
    """Create the admin user, or reset its password if the email already exists.

    Opens its own database connection and always closes it, even on error.
    """
    from reflector.db import get_database

    db = get_database()
    await db.connect()
    try:
        pw_hash = hash_password(password)
        found = await user_controller.get_by_email(email)
        if found is None:
            # "local:" prefix marks password-auth users (vs real Authentik uids).
            created = await user_controller.create_or_update(
                id=generate_uuid4(),
                authentik_uid=f"local:{email}",
                email=email,
                password_hash=pw_hash,
            )
            print(f"Created admin user: {email} (id={created.id})")
        else:
            await user_controller.set_password_hash(found.id, pw_hash)
            print(f"Updated password for existing user: {email} (id={found.id})")
    finally:
        await db.disconnect()
def main():
    """CLI entry point: parse args, collect a password, then create/update admin."""
    parser = argparse.ArgumentParser(description="Create or update an admin user")
    parser.add_argument(
        "--email", default="admin@localhost", help="Admin email address"
    )
    parser.add_argument(
        "--password",
        help="Admin password (will prompt if not provided)",
    )
    parser.add_argument(
        "--hash-only",
        action="store_true",
        help="Print the password hash and exit (for ADMIN_PASSWORD_HASH env var)",
    )
    args = parser.parse_args()

    password = args.password
    if not password:
        # Interactive double prompt when the password was not given on the CLI.
        password = getpass.getpass("Password: ")
        if password != getpass.getpass("Confirm password: "):
            print("Passwords do not match", file=sys.stderr)
            sys.exit(1)
        if not password:
            print("Password cannot be empty", file=sys.stderr)
            sys.exit(1)

    if args.hash_only:
        # Emit only the hash so it can be pasted into an env file.
        print(hash_password(password))
        sys.exit(0)

    asyncio.run(create_admin(args.email, password))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,43 @@
"""Provision admin user on server startup using environment variables.
Reads ADMIN_EMAIL and ADMIN_PASSWORD_HASH from settings and creates or updates
the admin user. Intended to be called from runserver.sh on container startup.
"""
import asyncio
from reflector.db.users import user_controller
from reflector.settings import settings
from reflector.utils import generate_uuid4
async def provision() -> None:
    """Idempotently create/update the admin user from ADMIN_EMAIL/ADMIN_PASSWORD_HASH.

    No-op when either setting is missing, so this is safe to run on every
    container start regardless of the configured auth backend.
    """
    email = settings.ADMIN_EMAIL
    pw_hash = settings.ADMIN_PASSWORD_HASH
    if not email or not pw_hash:
        return

    from reflector.db import get_database

    db = get_database()
    await db.connect()
    try:
        admin = await user_controller.get_by_email(email)
        if admin is None:
            # "local:" prefix marks password-auth users (vs real Authentik uids).
            await user_controller.create_or_update(
                id=generate_uuid4(),
                authentik_uid=f"local:{email}",
                email=email,
                password_hash=pw_hash,
            )
            print(f"Created admin user: {email}")
        else:
            await user_controller.set_password_hash(admin.id, pw_hash)
            print(f"Updated admin user: {email}")
    finally:
        await db.disconnect()


if __name__ == "__main__":
    asyncio.run(provision())

View File

@@ -2,8 +2,7 @@ from typing import Optional
from fastapi import APIRouter, WebSocket, WebSocketDisconnect from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from reflector.auth.auth_jwt import JWTAuth # type: ignore import reflector.auth as auth
from reflector.db.users import user_controller
from reflector.ws_events import UserWsEvent from reflector.ws_events import UserWsEvent
from reflector.ws_manager import get_ws_manager from reflector.ws_manager import get_ws_manager
@@ -26,42 +25,24 @@ UNAUTHORISED = 4401
@router.websocket("/events") @router.websocket("/events")
async def user_events_websocket(websocket: WebSocket): async def user_events_websocket(websocket: WebSocket):
# Browser can't send Authorization header for WS; use subprotocol: ["bearer", token] token, negotiated_subprotocol = auth.parse_ws_bearer_token(websocket)
raw_subprotocol = websocket.headers.get("sec-websocket-protocol") or ""
parts = [p.strip() for p in raw_subprotocol.split(",") if p.strip()]
token: Optional[str] = None
negotiated_subprotocol: Optional[str] = None
if len(parts) >= 2 and parts[0].lower() == "bearer":
negotiated_subprotocol = "bearer"
token = parts[1]
user_id: Optional[str] = None
if not token: if not token:
await websocket.close(code=UNAUTHORISED) await websocket.close(code=UNAUTHORISED)
return return
try: try:
payload = JWTAuth().verify_token(token) user = await auth.current_user_ws_optional(websocket)
authentik_uid = payload.get("sub")
if authentik_uid:
user = await user_controller.get_by_authentik_uid(authentik_uid)
if user:
user_id = user.id
else:
await websocket.close(code=UNAUTHORISED)
return
else:
await websocket.close(code=UNAUTHORISED)
return
except Exception: except Exception:
await websocket.close(code=UNAUTHORISED) await websocket.close(code=UNAUTHORISED)
return return
if not user_id: if not user:
await websocket.close(code=UNAUTHORISED) await websocket.close(code=UNAUTHORISED)
return return
user_id: Optional[str] = user.sub if hasattr(user, "sub") else user["sub"]
room_id = f"user:{user_id}" room_id = f"user:{user_id}"
ws_manager = get_ws_manager() ws_manager = get_ws_manager()

View File

@@ -8,8 +8,21 @@ from reflector.settings import settings
logger = structlog.get_logger(__name__) logger = structlog.get_logger(__name__)
# Polling intervals (seconds) # Polling intervals (seconds)
# CELERY_BEAT_POLL_INTERVAL overrides all sub-5-min intervals (e.g. 300 for selfhosted)
_override = (
float(settings.CELERY_BEAT_POLL_INTERVAL)
if settings.CELERY_BEAT_POLL_INTERVAL > 0
else 0
)
# Webhook-aware: 180s when webhook configured (backup mode), 15s when no webhook (primary discovery) # Webhook-aware: 180s when webhook configured (backup mode), 15s when no webhook (primary discovery)
POLL_DAILY_RECORDINGS_INTERVAL_SEC = 180.0 if settings.DAILY_WEBHOOK_SECRET else 15.0 POLL_DAILY_RECORDINGS_INTERVAL_SEC = _override or (
180.0 if settings.DAILY_WEBHOOK_SECRET else 15.0
)
SQS_POLL_INTERVAL = _override or float(settings.SQS_POLLING_TIMEOUT_SECONDS)
RECONCILIATION_INTERVAL = _override or 30.0
ICS_SYNC_INTERVAL = _override or 60.0
UPCOMING_MEETINGS_INTERVAL = _override or 30.0
if celery.current_app.main != "default": if celery.current_app.main != "default":
logger.info(f"Celery already configured ({celery.current_app})") logger.info(f"Celery already configured ({celery.current_app})")
@@ -33,11 +46,11 @@ else:
app.conf.beat_schedule = { app.conf.beat_schedule = {
"process_messages": { "process_messages": {
"task": "reflector.worker.process.process_messages", "task": "reflector.worker.process.process_messages",
"schedule": float(settings.SQS_POLLING_TIMEOUT_SECONDS), "schedule": SQS_POLL_INTERVAL,
}, },
"process_meetings": { "process_meetings": {
"task": "reflector.worker.process.process_meetings", "task": "reflector.worker.process.process_meetings",
"schedule": float(settings.SQS_POLLING_TIMEOUT_SECONDS), "schedule": SQS_POLL_INTERVAL,
}, },
"reprocess_failed_recordings": { "reprocess_failed_recordings": {
"task": "reflector.worker.process.reprocess_failed_recordings", "task": "reflector.worker.process.reprocess_failed_recordings",
@@ -53,15 +66,15 @@ else:
}, },
"trigger_daily_reconciliation": { "trigger_daily_reconciliation": {
"task": "reflector.worker.process.trigger_daily_reconciliation", "task": "reflector.worker.process.trigger_daily_reconciliation",
"schedule": 30.0, # Every 30 seconds (queues poll tasks for all active meetings) "schedule": RECONCILIATION_INTERVAL,
}, },
"sync_all_ics_calendars": { "sync_all_ics_calendars": {
"task": "reflector.worker.ics_sync.sync_all_ics_calendars", "task": "reflector.worker.ics_sync.sync_all_ics_calendars",
"schedule": 60.0, # Run every minute to check which rooms need sync "schedule": ICS_SYNC_INTERVAL,
}, },
"create_upcoming_meetings": { "create_upcoming_meetings": {
"task": "reflector.worker.ics_sync.create_upcoming_meetings", "task": "reflector.worker.ics_sync.create_upcoming_meetings",
"schedule": 30.0, # Run every 30 seconds to create upcoming meetings "schedule": UPCOMING_MEETINGS_INTERVAL,
}, },
} }

View File

@@ -2,6 +2,10 @@
if [ "${ENTRYPOINT}" = "server" ]; then if [ "${ENTRYPOINT}" = "server" ]; then
uv run alembic upgrade head uv run alembic upgrade head
# Provision admin user if password auth is configured
if [ -n "${ADMIN_EMAIL:-}" ] && [ -n "${ADMIN_PASSWORD_HASH:-}" ]; then
uv run python -m reflector.tools.provision_admin
fi
uv run uvicorn reflector.app:app --host 0.0.0.0 --port 1250 uv run uvicorn reflector.app:app --host 0.0.0.0 --port 1250
elif [ "${ENTRYPOINT}" = "worker" ]; then elif [ "${ENTRYPOINT}" = "worker" ]; then
uv run celery -A reflector.worker.app worker --loglevel=info uv run celery -A reflector.worker.app worker --loglevel=info

View File

@@ -0,0 +1,201 @@
"""Tests for the password auth backend."""
import pytest
from httpx import AsyncClient
from jose import jwt
from reflector.auth.password_utils import hash_password
from reflector.settings import settings
@pytest.fixture
async def password_app():
    """Create a minimal FastAPI app with the password auth router."""
    from fastapi import FastAPI
    from reflector.auth import auth_password
    app = FastAPI()
    app.include_router(auth_password.router, prefix="/v1")
    # Reset rate limiter between tests (state is module-global in auth_password)
    auth_password._login_attempts.clear()
    return app
@pytest.fixture
async def password_client(password_app):
    """Create a test client for the password auth app."""
    async with AsyncClient(app=password_app, base_url="http://test/v1") as client:
        yield client
async def _create_user_with_password(email: str, password: str):
    """Helper to create a user with a password hash in the DB."""
    from reflector.db.users import user_controller
    from reflector.utils import generate_uuid4
    pw_hash = hash_password(password)
    # "local:" prefix is the convention for password-auth users (vs Authentik uids)
    return await user_controller.create_or_update(
        id=generate_uuid4(),
        authentik_uid=f"local:{email}",
        email=email,
        password_hash=pw_hash,
    )
@pytest.mark.asyncio
async def test_login_success(password_client, setup_database):
    """Valid credentials return a decodable HS256 token with expected claims."""
    await _create_user_with_password("admin@test.com", "testpass123")
    response = await password_client.post(
        "/auth/login",
        json={"email": "admin@test.com", "password": "testpass123"},
    )
    assert response.status_code == 200
    data = response.json()
    assert "access_token" in data
    assert data["token_type"] == "bearer"
    assert data["expires_in"] > 0
    # Verify the JWT is valid
    payload = jwt.decode(
        data["access_token"],
        settings.SECRET_KEY,
        algorithms=["HS256"],
    )
    assert payload["email"] == "admin@test.com"
    assert "sub" in payload
    assert "exp" in payload
@pytest.mark.asyncio
async def test_login_wrong_password(password_client, setup_database):
    """A wrong password yields 401 with the same generic message as unknown email."""
    await _create_user_with_password("user@test.com", "correctpassword")
    response = await password_client.post(
        "/auth/login",
        json={"email": "user@test.com", "password": "wrongpassword"},
    )
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_login_nonexistent_user(password_client, setup_database):
    """An unknown email yields the same 401 as a wrong password (no enumeration)."""
    response = await password_client.post(
        "/auth/login",
        json={"email": "nobody@test.com", "password": "anything"},
    )
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_login_user_without_password_hash(password_client, setup_database):
    """User exists but has no password_hash (e.g. Authentik user)."""
    from reflector.db.users import user_controller
    from reflector.utils import generate_uuid4
    await user_controller.create_or_update(
        id=generate_uuid4(),
        authentik_uid="authentik:abc123",
        email="oidc@test.com",
    )
    response = await password_client.post(
        "/auth/login",
        json={"email": "oidc@test.com", "password": "anything"},
    )
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_login_rate_limiting(password_client, setup_database):
    """The 11th attempt inside the window is rejected with 429."""
    from reflector.auth import auth_password
    # Reset rate limiter
    auth_password._login_attempts.clear()
    for _ in range(10):
        await password_client.post(
            "/auth/login",
            json={"email": "fake@test.com", "password": "wrong"},
        )
    # 11th attempt should be rate-limited
    response = await password_client.post(
        "/auth/login",
        json={"email": "fake@test.com", "password": "wrong"},
    )
    assert response.status_code == 429
@pytest.mark.asyncio
async def test_jwt_create_and_verify():
    """Round-trip: _create_access_token output decodes via _verify_token."""
    from reflector.auth.auth_password import _create_access_token, _verify_token
    token, expires_in = _create_access_token("user-123", "test@example.com")
    assert expires_in > 0
    payload = _verify_token(token)
    assert payload["sub"] == "user-123"
    assert payload["email"] == "test@example.com"
    assert "exp" in payload
@pytest.mark.asyncio
async def test_authenticate_user_with_jwt():
    """A freshly issued token authenticates as the embedded user."""
    from reflector.auth.auth_password import (
        _authenticate_user,
        _create_access_token,
    )
    token, _ = _create_access_token("user-abc", "abc@test.com")
    user = await _authenticate_user(token, None)
    assert user is not None
    assert user.sub == "user-abc"
    assert user.email == "abc@test.com"
@pytest.mark.asyncio
async def test_authenticate_user_invalid_jwt():
    """A malformed token raises 401 rather than returning None."""
    from fastapi import HTTPException
    from reflector.auth.auth_password import _authenticate_user
    with pytest.raises(HTTPException) as exc_info:
        await _authenticate_user("invalid.jwt.token", None)
    assert exc_info.value.status_code == 401
@pytest.mark.asyncio
async def test_authenticate_user_no_credentials():
    """No token and no API key resolves to anonymous (None), not an error."""
    from reflector.auth.auth_password import _authenticate_user
    user = await _authenticate_user(None, None)
    assert user is None
@pytest.mark.asyncio
async def test_current_user_raises_without_token():
    """Verify that current_user dependency raises 401 without token."""
    from fastapi import Depends, FastAPI
    from fastapi.testclient import TestClient
    from reflector.auth import auth_password
    app = FastAPI()
    @app.get("/test")
    async def test_endpoint(user=Depends(auth_password.current_user)):
        return {"user": user.sub}
    # Use sync TestClient for simplicity
    client = TestClient(app)
    response = client.get("/test")
    # OAuth2PasswordBearer with auto_error=False returns None, then current_user raises 401
    assert response.status_code == 401

View File

@@ -0,0 +1,97 @@
"""Tests for admin user creation logic (used by create_admin CLI tool)."""
import pytest
from reflector.auth.password_utils import hash_password, verify_password
from reflector.db.users import user_controller
from reflector.utils import generate_uuid4
async def _provision_admin(email: str, password: str):
"""Mirrors the logic in create_admin.create_admin() without managing DB connections."""
password_hash = hash_password(password)
existing = await user_controller.get_by_email(email)
if existing:
await user_controller.set_password_hash(existing.id, password_hash)
else:
await user_controller.create_or_update(
id=generate_uuid4(),
authentik_uid=f"local:{email}",
email=email,
password_hash=password_hash,
)
@pytest.mark.asyncio
async def test_create_admin_new_user(setup_database):
    """Provisioning an unknown email creates a fresh admin user with a password."""
    await _provision_admin("newadmin@test.com", "password123")

    created = await user_controller.get_by_email("newadmin@test.com")
    assert created is not None
    assert created.email == "newadmin@test.com"
    assert created.authentik_uid == "local:newadmin@test.com"
    assert created.password_hash is not None
    assert verify_password("password123", created.password_hash)
@pytest.mark.asyncio
async def test_create_admin_updates_existing(setup_database):
    """Re-provisioning an existing email rotates the password in place."""
    await _provision_admin("admin@test.com", "oldpassword")
    before = await user_controller.get_by_email("admin@test.com")

    await _provision_admin("admin@test.com", "newpassword")
    after = await user_controller.get_by_email("admin@test.com")

    # The same row was updated — no duplicate account was created.
    assert before.id == after.id
    assert verify_password("newpassword", after.password_hash)
    assert not verify_password("oldpassword", after.password_hash)
@pytest.mark.asyncio
async def test_create_admin_idempotent(setup_database):
    """Provisioning twice with identical input leaves exactly one user."""
    for _ in range(2):
        await _provision_admin("admin@test.com", "samepassword")

    matches = [
        u for u in await user_controller.list_all() if u.email == "admin@test.com"
    ]
    assert len(matches) == 1
@pytest.mark.asyncio
async def test_create_or_update_with_password_hash(setup_database):
    """Test the extended create_or_update method with password_hash parameter."""
    stored_hash = hash_password("test123")
    created = await user_controller.create_or_update(
        id=generate_uuid4(),
        authentik_uid="local:test@example.com",
        email="test@example.com",
        password_hash=stored_hash,
    )
    assert created.password_hash == stored_hash

    # The hash must survive a round trip through the database.
    reloaded = await user_controller.get_by_email("test@example.com")
    assert reloaded is not None
    assert verify_password("test123", reloaded.password_hash)
@pytest.mark.asyncio
async def test_set_password_hash(setup_database):
    """set_password_hash attaches a password to a user created without one."""
    account = await user_controller.create_or_update(
        id=generate_uuid4(),
        authentik_uid="local:pw@test.com",
        email="pw@test.com",
    )
    assert account.password_hash is None

    await user_controller.set_password_hash(account.id, hash_password("newpass"))

    refreshed = await user_controller.get_by_email("pw@test.com")
    assert refreshed is not None
    assert verify_password("newpass", refreshed.password_hash)

View File

@@ -0,0 +1,58 @@
"""Tests for password hashing utilities."""
from reflector.auth.password_utils import hash_password, verify_password
def test_hash_and_verify():
    """Round trip: a hashed password verifies against the original secret."""
    secret = "my-secret-password"
    assert verify_password(secret, hash_password(secret)) is True
def test_wrong_password():
    """Verification fails for any password other than the hashed one."""
    digest = hash_password("correct")
    assert verify_password("wrong", digest) is False
def test_hash_format():
    """Stored hashes follow 'pbkdf2:sha256:100000$<salt-hex>$<digest-hex>'."""
    parts = hash_password("test").split("$")
    assert len(parts) == 3
    scheme, salt, digest = parts
    assert scheme == "pbkdf2:sha256:100000"
    assert len(salt) == 32  # 16-byte random salt, hex encoded
    assert len(digest) == 64  # sha256 digest, hex encoded
def test_different_salts():
    """Hashing the same password twice yields distinct hashes, both valid."""
    first, second = hash_password("same"), hash_password("same")
    assert first != second  # random salts make each hash unique
    assert verify_password("same", first) is True
    assert verify_password("same", second) is True
def test_malformed_hash():
    """Garbage or truncated stored hashes never verify and never raise."""
    for bad in ("garbage", "", "pbkdf2:sha256:100000$short"):
        assert verify_password("test", bad) is False
def test_empty_password():
    """The empty string is a hashable password like any other."""
    digest = hash_password("")
    assert verify_password("", digest) is True
    assert verify_password("notempty", digest) is False
def test_unicode_password():
    """Non-ASCII passwords round-trip through hashing unchanged."""
    secret = "p\u00e4ssw\u00f6rd\U0001f512"
    digest = hash_password(secret)
    assert verify_password(secret, digest) is True
    assert verify_password("password", digest) is False
def test_constant_time_comparison():
    """Verify that hmac.compare_digest is used (structural test)."""
    import inspect

    assert "hmac.compare_digest" in inspect.getsource(verify_password)

View File

@@ -19,9 +19,13 @@ SERVER_API_URL=http://server:1250
KV_URL=redis://redis:6379 KV_URL=redis://redis:6379
# Authentication # Authentication
# Set to true when Authentik is configured # Set to true when Authentik or password auth is configured
FEATURE_REQUIRE_LOGIN=false FEATURE_REQUIRE_LOGIN=false
# Auth provider: "authentik" or "credentials"
# Set to "credentials" when using password auth backend
# AUTH_PROVIDER=credentials
# Nullify auth vars when not using Authentik # Nullify auth vars when not using Authentik
AUTHENTIK_ISSUER= AUTHENTIK_ISSUER=
AUTHENTIK_REFRESH_TOKEN_URL= AUTHENTIK_REFRESH_TOKEN_URL=

View File

@@ -78,7 +78,10 @@ const useMp3 = (transcriptId: string, waiting?: boolean): Mp3Response => {
// Audio is not deleted, proceed to load it // Audio is not deleted, proceed to load it
audioElement = document.createElement("audio"); audioElement = document.createElement("audio");
audioElement.src = `${API_URL}/v1/transcripts/${transcriptId}/audio/mp3`; const audioUrl = `${API_URL}/v1/transcripts/${transcriptId}/audio/mp3`;
audioElement.src = accessTokenInfo
? `${audioUrl}?token=${encodeURIComponent(accessTokenInfo)}`
: audioUrl;
audioElement.crossOrigin = "anonymous"; audioElement.crossOrigin = "anonymous";
audioElement.preload = "auto"; audioElement.preload = "auto";

View File

@@ -23,7 +23,7 @@ export default function UserInfo() {
className="font-light px-2" className="font-light px-2"
onClick={(e) => { onClick={(e) => {
e.preventDefault(); e.preventDefault();
auth.signIn("authentik"); auth.signIn();
}} }}
> >
Log in Log in

View File

@@ -1,5 +1,6 @@
import { AuthOptions } from "next-auth"; import { AuthOptions } from "next-auth";
import AuthentikProvider from "next-auth/providers/authentik"; import AuthentikProvider from "next-auth/providers/authentik";
import CredentialsProvider from "next-auth/providers/credentials";
import type { JWT } from "next-auth/jwt"; import type { JWT } from "next-auth/jwt";
import { JWTWithAccessToken, CustomSession } from "./types"; import { JWTWithAccessToken, CustomSession } from "./types";
import { import {
@@ -52,7 +53,7 @@ const TOKEN_CACHE_TTL = REFRESH_ACCESS_TOKEN_BEFORE;
const getAuthentikClientId = () => getNextEnvVar("AUTHENTIK_CLIENT_ID"); const getAuthentikClientId = () => getNextEnvVar("AUTHENTIK_CLIENT_ID");
const getAuthentikClientSecret = () => getNextEnvVar("AUTHENTIK_CLIENT_SECRET"); const getAuthentikClientSecret = () => getNextEnvVar("AUTHENTIK_CLIENT_SECRET");
const getAuthentikRefreshTokenUrl = () => const getAuthentikRefreshTokenUrl = () =>
getNextEnvVar("AUTHENTIK_REFRESH_TOKEN_URL"); getNextEnvVar("AUTHENTIK_REFRESH_TOKEN_URL").replace(/\/+$/, "");
const getAuthentikIssuer = () => { const getAuthentikIssuer = () => {
const stringUrl = getNextEnvVar("AUTHENTIK_ISSUER"); const stringUrl = getNextEnvVar("AUTHENTIK_ISSUER");
@@ -61,113 +62,194 @@ const getAuthentikIssuer = () => {
} catch (e) { } catch (e) {
throw new Error("AUTHENTIK_ISSUER is not a valid URL: " + stringUrl); throw new Error("AUTHENTIK_ISSUER is not a valid URL: " + stringUrl);
} }
return stringUrl; return stringUrl.replace(/\/+$/, "");
}; };
export const authOptions = (): AuthOptions => export const authOptions = (): AuthOptions => {
featureEnabled("requireLogin") if (!featureEnabled("requireLogin")) {
? { return { providers: [] };
providers: [ }
AuthentikProvider({
...(() => { const authProvider = process.env.AUTH_PROVIDER;
const [clientId, clientSecret, issuer] = sequenceThrows(
getAuthentikClientId, if (authProvider === "credentials") {
getAuthentikClientSecret, return credentialsAuthOptions();
getAuthentikIssuer, }
);
return { return authentikAuthOptions();
clientId, };
clientSecret,
issuer, function credentialsAuthOptions(): AuthOptions {
}; return {
})(), providers: [
authorization: { CredentialsProvider({
params: { name: "Password",
scope: "openid email profile offline_access", credentials: {
}, email: { label: "Email", type: "email" },
}, password: { label: "Password", type: "password" },
}),
],
session: {
strategy: "jwt",
}, },
callbacks: { async authorize(credentials) {
async jwt({ token, account, user }) { if (!credentials?.email || !credentials?.password) return null;
if (account && !account.access_token) { const apiUrl = getNextEnvVar("SERVER_API_URL");
const response = await fetch(`${apiUrl}/v1/auth/login`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
email: credentials.email,
password: credentials.password,
}),
});
if (!response.ok) return null;
const data = await response.json();
return {
id: "pending",
email: credentials.email,
accessToken: data.access_token,
expiresIn: data.expires_in,
};
},
}),
],
session: { strategy: "jwt" },
pages: {
signIn: "/login",
},
callbacks: {
async jwt({ token, user }) {
if (user) {
// First login - user comes from authorize()
const typedUser = user as any;
token.accessToken = typedUser.accessToken;
token.accessTokenExpires = Date.now() + typedUser.expiresIn * 1000;
// Resolve actual user ID from backend
const userId = await getUserId(typedUser.accessToken);
if (userId) {
token.sub = userId;
}
token.email = typedUser.email;
}
return token;
},
async session({ session, token }) {
const extendedToken = token as JWTWithAccessToken;
return {
...session,
accessToken: extendedToken.accessToken,
accessTokenExpires: extendedToken.accessTokenExpires,
error: extendedToken.error,
user: {
id: assertExistsAndNonEmptyString(token.sub, "User ID required"),
name: extendedToken.name,
email: extendedToken.email,
},
} satisfies CustomSession;
},
},
};
}
function authentikAuthOptions(): AuthOptions {
return {
providers: [
AuthentikProvider({
...(() => {
const [clientId, clientSecret, issuer] = sequenceThrows(
getAuthentikClientId,
getAuthentikClientSecret,
getAuthentikIssuer,
);
return {
clientId,
clientSecret,
issuer,
};
})(),
authorization: {
params: {
scope: "openid email profile offline_access",
},
},
}),
],
session: {
strategy: "jwt",
},
callbacks: {
async jwt({ token, account, user }) {
if (account && !account.access_token) {
await deleteTokenCache(tokenCacheRedis, `token:${token.sub}`);
}
if (account && user) {
// called only on first login
// XXX account.expires_in used in example is not defined for authentik backend, but expires_at is
if (account.access_token) {
const expiresAtS = assertExists(account.expires_at);
const expiresAtMs = expiresAtS * 1000;
const jwtToken: JWTWithAccessToken = {
...token,
accessToken: account.access_token,
accessTokenExpires: expiresAtMs,
refreshToken: account.refresh_token,
};
if (jwtToken.error) {
await deleteTokenCache(tokenCacheRedis, `token:${token.sub}`); await deleteTokenCache(tokenCacheRedis, `token:${token.sub}`);
} else {
assertNotExists(
jwtToken.error,
`panic! trying to cache token with error in jwt: ${jwtToken.error}`,
);
await setTokenCache(tokenCacheRedis, `token:${token.sub}`, {
token: jwtToken,
timestamp: Date.now(),
});
return jwtToken;
} }
}
}
if (account && user) { const currentToken = await getTokenCache(
// called only on first login tokenCacheRedis,
// XXX account.expires_in used in example is not defined for authentik backend, but expires_at is `token:${token.sub}`,
if (account.access_token) { );
const expiresAtS = assertExists(account.expires_at); console.debug(
const expiresAtMs = expiresAtS * 1000; "currentToken from cache",
const jwtToken: JWTWithAccessToken = { JSON.stringify(currentToken, null, 2),
...token, "will be returned?",
accessToken: account.access_token, currentToken &&
accessTokenExpires: expiresAtMs, !shouldRefreshToken(currentToken.token.accessTokenExpires),
refreshToken: account.refresh_token, );
}; if (
if (jwtToken.error) { currentToken &&
await deleteTokenCache(tokenCacheRedis, `token:${token.sub}`); !shouldRefreshToken(currentToken.token.accessTokenExpires)
} else { ) {
assertNotExists( return currentToken.token;
jwtToken.error, }
`panic! trying to cache token with error in jwt: ${jwtToken.error}`,
);
await setTokenCache(tokenCacheRedis, `token:${token.sub}`, {
token: jwtToken,
timestamp: Date.now(),
});
return jwtToken;
}
}
}
const currentToken = await getTokenCache( // access token has expired, try to update it
tokenCacheRedis, return await lockedRefreshAccessToken(token);
`token:${token.sub}`, },
); async session({ session, token }) {
console.debug( const extendedToken = token as JWTWithAccessToken;
"currentToken from cache", console.log("extendedToken", extendedToken);
JSON.stringify(currentToken, null, 2), const userId = await getUserId(extendedToken.accessToken);
"will be returned?",
currentToken &&
!shouldRefreshToken(currentToken.token.accessTokenExpires),
);
if (
currentToken &&
!shouldRefreshToken(currentToken.token.accessTokenExpires)
) {
return currentToken.token;
}
// access token has expired, try to update it return {
return await lockedRefreshAccessToken(token); ...session,
accessToken: extendedToken.accessToken,
accessTokenExpires: extendedToken.accessTokenExpires,
error: extendedToken.error,
user: {
id: assertExistsAndNonEmptyString(userId, "User ID required"),
name: extendedToken.name,
email: extendedToken.email,
}, },
async session({ session, token }) { } satisfies CustomSession;
const extendedToken = token as JWTWithAccessToken; },
console.log("extendedToken", extendedToken); },
const userId = await getUserId(extendedToken.accessToken); };
}
return {
...session,
accessToken: extendedToken.accessToken,
accessTokenExpires: extendedToken.accessTokenExpires,
error: extendedToken.error,
user: {
id: assertExistsAndNonEmptyString(userId, "User ID required"),
name: extendedToken.name,
email: extendedToken.email,
},
} satisfies CustomSession;
},
},
}
: {
providers: [],
};
async function lockedRefreshAccessToken( async function lockedRefreshAccessToken(
token: JWT, token: JWT,

View File

@@ -28,10 +28,13 @@ export type EnvFeaturePartial = {
[key in FeatureEnvName]: boolean | null; [key in FeatureEnvName]: boolean | null;
}; };
export type AuthProviderType = "authentik" | "credentials" | null;
// CONTRACT: isomorphic with JSON.stringify // CONTRACT: isomorphic with JSON.stringify
export type ClientEnvCommon = EnvFeaturePartial & { export type ClientEnvCommon = EnvFeaturePartial & {
API_URL: NonEmptyString; API_URL: NonEmptyString;
WEBSOCKET_URL: NonEmptyString | null; WEBSOCKET_URL: NonEmptyString | null;
AUTH_PROVIDER: AuthProviderType;
}; };
let clientEnv: ClientEnvCommon | null = null; let clientEnv: ClientEnvCommon | null = null;
@@ -59,6 +62,12 @@ const parseBooleanString = (str: string | undefined): boolean | null => {
return str === "true"; return str === "true";
}; };
const parseAuthProvider = (): AuthProviderType => {
const val = process.env.AUTH_PROVIDER;
if (val === "authentik" || val === "credentials") return val;
return null;
};
export const getClientEnvServer = (): ClientEnvCommon => { export const getClientEnvServer = (): ClientEnvCommon => {
if (typeof window !== "undefined") { if (typeof window !== "undefined") {
throw new Error( throw new Error(
@@ -76,6 +85,7 @@ export const getClientEnvServer = (): ClientEnvCommon => {
return { return {
API_URL: getNextEnvVar("API_URL"), API_URL: getNextEnvVar("API_URL"),
WEBSOCKET_URL: parseMaybeNonEmptyString(process.env.WEBSOCKET_URL ?? ""), WEBSOCKET_URL: parseMaybeNonEmptyString(process.env.WEBSOCKET_URL ?? ""),
AUTH_PROVIDER: parseAuthProvider(),
...features, ...features,
}; };
} }
@@ -83,6 +93,7 @@ export const getClientEnvServer = (): ClientEnvCommon => {
clientEnv = { clientEnv = {
API_URL: getNextEnvVar("API_URL"), API_URL: getNextEnvVar("API_URL"),
WEBSOCKET_URL: parseMaybeNonEmptyString(process.env.WEBSOCKET_URL ?? ""), WEBSOCKET_URL: parseMaybeNonEmptyString(process.env.WEBSOCKET_URL ?? ""),
AUTH_PROVIDER: parseAuthProvider(),
...features, ...features,
}; };
return clientEnv; return clientEnv;

76
www/app/login/page.tsx Normal file
View File

@@ -0,0 +1,76 @@
"use client";
import { useState } from "react";
import { signIn } from "next-auth/react";
import { useRouter } from "next/navigation";
import {
Box,
Button,
Field,
Input,
VStack,
Text,
Heading,
} from "@chakra-ui/react";
/**
 * Email/password login page for the "credentials" auth provider.
 *
 * Submits to the NextAuth credentials provider (which in turn calls the
 * backend /v1/auth/login endpoint) and navigates home on success.
 */
export default function LoginPage() {
  const router = useRouter();
  const [email, setEmail] = useState("");
  const [password, setPassword] = useState("");
  const [error, setError] = useState<string | null>(null);
  const [loading, setLoading] = useState(false);

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    setError(null);
    setLoading(true);
    try {
      // redirect:false so auth failures are surfaced inline instead of
      // bouncing through NextAuth's error page.
      const result = await signIn("credentials", {
        email,
        password,
        redirect: false,
      });
      if (result?.error) {
        // Deliberately generic: do not reveal whether the email exists.
        setError("Invalid email or password");
      } else {
        router.push("/");
        // refresh() so server components re-read the new session cookie.
        router.refresh();
      }
    } finally {
      // Always clear the spinner, even if signIn throws (e.g. network error).
      setLoading(false);
    }
  };

  return (
    <Box maxW="400px" mx="auto" mt="100px" p={6}>
      <VStack gap={6} as="form" onSubmit={handleSubmit}>
        <Heading size="lg">Log in</Heading>
        {error && <Text color="red.500">{error}</Text>}
        <Field.Root required>
          <Field.Label>Email</Field.Label>
          <Input
            type="email"
            value={email}
            onChange={(e) => setEmail(e.target.value)}
          />
        </Field.Root>
        <Field.Root required>
          <Field.Label>Password</Field.Label>
          <Input
            type="password"
            value={password}
            onChange={(e) => setPassword(e.target.value)}
          />
        </Field.Root>
        <Button
          type="submit"
          colorPalette="blue"
          width="full"
          loading={loading}
        >
          Log in
        </Button>
      </VStack>
    </Box>
  );
}