mirror of
https://github.com/Monadical-SAS/reflector.git
synced 2026-04-11 08:06:54 +00:00
Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7ed3b781ee | ||
|
|
26239f05a3 | ||
|
|
08c276e4f4 | ||
|
|
f4f94a0d99 | ||
|
|
df782107d7 | ||
|
|
bc8338fa4f |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -3,6 +3,8 @@ server/.env
|
||||
server/.env.production
|
||||
.env
|
||||
Caddyfile
|
||||
livekit.yaml
|
||||
egress.yaml
|
||||
.env.hatchet
|
||||
server/exportdanswer
|
||||
.vercel
|
||||
|
||||
14
CHANGELOG.md
14
CHANGELOG.md
@@ -1,5 +1,19 @@
|
||||
# Changelog
|
||||
|
||||
## [0.44.0](https://github.com/GreyhavenHQ/reflector/compare/v0.43.0...v0.44.0) (2026-04-07)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* allow livekit to work with tunnels udp and tcp with quickguide ([#948](https://github.com/GreyhavenHQ/reflector/issues/948)) ([f4f94a0](https://github.com/GreyhavenHQ/reflector/commit/f4f94a0d9998030e5ef7f01935d99722045165ac))
|
||||
* Livekit - Selfhost video room solution ([#946](https://github.com/GreyhavenHQ/reflector/issues/946)) ([bc8338f](https://github.com/GreyhavenHQ/reflector/commit/bc8338fa4f136534f5f27784f5dd10d47cecf412))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* allow anonymous recording start and add light theme email icon ([#949](https://github.com/GreyhavenHQ/reflector/issues/949)) ([08c276e](https://github.com/GreyhavenHQ/reflector/commit/08c276e4f493c7960158a44e77062af1b30c3352))
|
||||
* deactivate meeting button and better deactivation heuristics ([#950](https://github.com/GreyhavenHQ/reflector/issues/950)) ([26239f0](https://github.com/GreyhavenHQ/reflector/commit/26239f05a34af07ebba764d669343c32e40e63bf))
|
||||
|
||||
## [0.43.0](https://github.com/GreyhavenHQ/reflector/compare/v0.42.0...v0.43.0) (2026-03-31)
|
||||
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ services:
|
||||
image: monadicalsas/reflector-backend:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:1250:1250"
|
||||
- "${BIND_HOST:-127.0.0.1}:1250:1250"
|
||||
- "40000-40100:40000-40100/udp"
|
||||
env_file:
|
||||
- ./server/.env
|
||||
@@ -116,7 +116,7 @@ services:
|
||||
image: monadicalsas/reflector-frontend:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:3000:3000"
|
||||
- "${BIND_HOST:-127.0.0.1}:3000:3000"
|
||||
env_file:
|
||||
- ./www/.env
|
||||
environment:
|
||||
@@ -339,7 +339,7 @@ services:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "127.0.0.1:8888:8888"
|
||||
- "0.0.0.0:8888:8888" # Hatchet dashboard (plain HTTP — no TLS)
|
||||
- "127.0.0.1:7078:7077"
|
||||
env_file:
|
||||
- ./.env.hatchet
|
||||
@@ -366,7 +366,7 @@ services:
|
||||
context: ./server
|
||||
dockerfile: Dockerfile
|
||||
image: monadicalsas/reflector-backend:latest
|
||||
profiles: [dailyco]
|
||||
profiles: [dailyco, livekit]
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- ./server/.env
|
||||
@@ -406,6 +406,40 @@ services:
|
||||
volumes:
|
||||
- server_data:/app/data
|
||||
|
||||
# ===========================================================
|
||||
# LiveKit — self-hosted open-source video platform
|
||||
# Activated via --profile livekit (auto-detected from LIVEKIT_API_KEY in server/.env)
|
||||
# ===========================================================
|
||||
|
||||
livekit-server:
|
||||
image: livekit/livekit-server:v1.10.1
|
||||
profiles: [livekit]
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "7880:7880" # Signaling (HTTP/WS)
|
||||
- "7881:7881" # WebRTC over TCP
|
||||
- "${LIVEKIT_UDP_PORTS:-44200-44300:44200-44300}/udp" # WebRTC ICE (range or single port for tunnels)
|
||||
volumes:
|
||||
- ./livekit.yaml:/etc/livekit.yaml:ro
|
||||
command: ["--config", "/etc/livekit.yaml"]
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_started
|
||||
|
||||
livekit-egress:
|
||||
image: livekit/egress:v1.12.0
|
||||
profiles: [livekit]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
EGRESS_CONFIG_FILE: /etc/egress.yaml
|
||||
volumes:
|
||||
- ./egress.yaml:/etc/egress.yaml:ro
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_started
|
||||
livekit-server:
|
||||
condition: service_started
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
redis_data:
|
||||
|
||||
97
docsv2/firewall-ports.md
Normal file
97
docsv2/firewall-ports.md
Normal file
@@ -0,0 +1,97 @@
|
||||
# Firewall & Port Requirements
|
||||
|
||||
Ports that need to be open on your server firewall, organized by deployment mode.
|
||||
|
||||
## With Caddy (--caddy or --ip or --domain)
|
||||
|
||||
Caddy acts as the reverse proxy. Most services are only accessible through Caddy on port 443.
|
||||
|
||||
| Port | Protocol | Direction | Service | Required? |
|
||||
|------|----------|-----------|---------|-----------|
|
||||
| 443 | TCP | Inbound | Caddy HTTPS — web app, API, LiveKit signaling (`/lk-ws`) | Yes |
|
||||
| 80 | TCP | Inbound | Caddy HTTP — redirects to HTTPS | Yes |
|
||||
| 44200-44300 | UDP | Inbound | LiveKit WebRTC media (audio/video) | Yes (if LiveKit enabled) |
|
||||
| 7881 | TCP | Inbound | LiveKit TCP media fallback (when UDP is blocked by client network) | Recommended |
|
||||
| 8888 | TCP | Inbound | Hatchet dashboard (plain HTTP, no TLS) | Optional (admin only) |
|
||||
|
||||
Ports that do NOT need to be open (proxied through Caddy):
|
||||
- 1250 (backend API)
|
||||
- 3000 (frontend)
|
||||
- 7880 (LiveKit signaling — proxied via `/lk-ws`)
|
||||
- 3900 (Garage S3)
|
||||
|
||||
## Without Caddy (direct access)
|
||||
|
||||
All services need direct port access. Use this only for local development or trusted networks.
|
||||
|
||||
| Port | Protocol | Direction | Service | Required? |
|
||||
|------|----------|-----------|---------|-----------|
|
||||
| 3000 | TCP | Inbound | Frontend (Next.js) | Yes |
|
||||
| 1250 | TCP | Inbound | Backend API (FastAPI) | Yes |
|
||||
| 7880 | TCP | Inbound | LiveKit signaling (WebSocket) | Yes (if LiveKit enabled) |
|
||||
| 7881 | TCP | Inbound | LiveKit TCP media fallback | Recommended |
|
||||
| 44200-44300 | UDP | Inbound | LiveKit WebRTC media | Yes (if LiveKit enabled) |
|
||||
| 40000-40100 | UDP | Inbound | Reflector WebRTC (browser recording) | Yes (if using browser WebRTC) |
|
||||
| 3900 | TCP | Inbound | Garage S3 (for presigned URLs in browser) | Yes (if using Garage) |
|
||||
| 8888 | TCP | Inbound | Hatchet dashboard | Optional |
|
||||
|
||||
> **Important:** Without Caddy, all traffic is plain HTTP. Browsers block microphone/camera access on non-HTTPS pages (except `localhost`). Use `--ip` (which implies Caddy) for any non-localhost deployment.
|
||||
|
||||
## Internal-Only Ports (never expose)
|
||||
|
||||
These ports are used between Docker containers and should NOT be open on the firewall:
|
||||
|
||||
| Port | Service | Purpose |
|
||||
|------|---------|---------|
|
||||
| 5432 | PostgreSQL | Database |
|
||||
| 6379 | Redis | Cache + message broker |
|
||||
| 7077 | Hatchet gRPC | Worker communication |
|
||||
|
||||
## Cloud Provider Firewall Examples
|
||||
|
||||
### DigitalOcean (with Caddy + LiveKit)
|
||||
|
||||
```bash
|
||||
# Create firewall
|
||||
doctl compute firewall create \
|
||||
--name reflector \
|
||||
--inbound-rules "protocol:tcp,ports:443,address:0.0.0.0/0 protocol:tcp,ports:80,address:0.0.0.0/0 protocol:udp,ports:44200-44300,address:0.0.0.0/0 protocol:tcp,ports:7881,address:0.0.0.0/0 protocol:tcp,ports:22,address:0.0.0.0/0" \
|
||||
--outbound-rules "protocol:tcp,ports:all,address:0.0.0.0/0 protocol:udp,ports:all,address:0.0.0.0/0" \
|
||||
--droplet-ids <DROPLET_ID>
|
||||
```
|
||||
|
||||
### AWS Security Group (with Caddy + LiveKit)
|
||||
|
||||
| Type | Port Range | Source | Description |
|
||||
|------|-----------|--------|-------------|
|
||||
| HTTPS | 443 | 0.0.0.0/0 | Web app + API + LiveKit signaling |
|
||||
| HTTP | 80 | 0.0.0.0/0 | Redirect to HTTPS |
|
||||
| Custom UDP | 44200-44300 | 0.0.0.0/0 | LiveKit WebRTC media |
|
||||
| Custom TCP | 7881 | 0.0.0.0/0 | LiveKit TCP fallback |
|
||||
| SSH | 22 | Your IP | Admin access |
|
||||
|
||||
### Ubuntu UFW (with Caddy + LiveKit)
|
||||
|
||||
```bash
|
||||
sudo ufw allow 443/tcp # Caddy HTTPS
|
||||
sudo ufw allow 80/tcp # HTTP redirect
|
||||
sudo ufw allow 7881/tcp # LiveKit TCP fallback
|
||||
sudo ufw allow 44200:44300/udp # LiveKit WebRTC media
|
||||
sudo ufw allow 22/tcp # SSH
|
||||
sudo ufw enable
|
||||
```
|
||||
|
||||
## Port Ranges Explained
|
||||
|
||||
### Why 44200-44300 for LiveKit?
|
||||
|
||||
LiveKit's WebRTC ICE candidates use UDP. The port range was chosen to avoid collisions:
|
||||
- **40000-40100** — Reflector's own WebRTC (browser recording)
|
||||
- **44200-44300** — LiveKit WebRTC
|
||||
- **49152-65535** — macOS ephemeral ports (reserved by OS)
|
||||
|
||||
The range is configurable in `livekit.yaml` under `rtc.port_range_start` / `rtc.port_range_end`. If changed, update `docker-compose.selfhosted.yml` port mapping to match.
|
||||
|
||||
### Why 101 ports?
|
||||
|
||||
101 UDP ports support ~100 concurrent WebRTC connections (roughly 50 participants with audio + video). For larger deployments, increase the range in both `livekit.yaml` and `docker-compose.selfhosted.yml`.
|
||||
297
docsv2/livekit-setup.md
Normal file
297
docsv2/livekit-setup.md
Normal file
@@ -0,0 +1,297 @@
|
||||
# LiveKit Setup (Self-Hosted Video Platform)
|
||||
|
||||
LiveKit is the recommended open-source, self-hosted video platform for Reflector. It replaces Daily.co for deployments that need free, fully self-hosted video rooms with per-participant audio recording.
|
||||
|
||||
> LiveKit runs alongside Daily.co and Whereby — you choose the platform per room. Existing Daily/Whereby setups are not affected.
|
||||
|
||||
## What LiveKit Provides
|
||||
|
||||
- **Video/audio rooms** — WebRTC-based conferencing via `livekit-server` (Go SFU)
|
||||
- **Per-participant audio recording** — Track Egress writes each participant's audio to S3 as a separate OGG/Opus file (no composite video, no Chrome dependency)
|
||||
- **S3-compatible storage** — works with Garage, MinIO, AWS S3, or any S3-compatible provider via `force_path_style`
|
||||
- **Webhook events** — participant join/leave, egress start/end, room lifecycle
|
||||
- **JWT access tokens** — per-participant tokens with granular permissions
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────┐
|
||||
Participants ────>│ livekit-server │ :7880 (WS signaling)
|
||||
(browser) │ (Go SFU) │ :7881 (TCP RTC)
|
||||
│ │ :44200-44300/udp (ICE)
|
||||
└────────┬────────┘
|
||||
│ media forwarding
|
||||
┌────────┴────────┐
|
||||
│ livekit-egress │ Track Egress
|
||||
│ (per-track OGG) │ writes to S3
|
||||
└────────┬────────┘
|
||||
│
|
||||
┌────────┴────────┐
|
||||
│ S3 Storage │ Garage / MinIO / AWS
|
||||
│ (audio tracks) │
|
||||
└─────────────────┘
|
||||
```
|
||||
|
||||
Both services share Redis with the existing Reflector stack (same instance, same db).
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Option 1: Via Setup Script (Recommended)
|
||||
|
||||
Pass `--livekit` to the setup script. It generates all credentials and config automatically:
|
||||
|
||||
```bash
|
||||
# First run — --livekit generates credentials and config files
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
|
||||
# Re-runs — LiveKit is auto-detected from existing LIVEKIT_API_KEY in server/.env
|
||||
./scripts/setup-selfhosted.sh
|
||||
```
|
||||
|
||||
The `--livekit` flag will:
|
||||
1. Generate `LIVEKIT_API_KEY` and `LIVEKIT_API_SECRET` (random credentials)
|
||||
2. Set `LIVEKIT_URL`, `LIVEKIT_PUBLIC_URL`, and storage credentials in `server/.env`
|
||||
3. Generate `livekit.yaml` and `egress.yaml` config files
|
||||
4. Set `DEFAULT_VIDEO_PLATFORM=livekit`
|
||||
5. Enable the `livekit` Docker Compose profile
|
||||
6. Start `livekit-server` and `livekit-egress` containers
|
||||
|
||||
On subsequent re-runs (without flags), the script detects the existing `LIVEKIT_API_KEY` in `server/.env` and re-enables the profile automatically.
|
||||
|
||||
### Option 2: Manual Setup
|
||||
|
||||
If you prefer manual configuration:
|
||||
|
||||
1. **Generate credentials:**
|
||||
|
||||
```bash
|
||||
export LK_KEY="reflector_$(openssl rand -hex 8)"
|
||||
export LK_SECRET="$(openssl rand -hex 32)"
|
||||
```
|
||||
|
||||
2. **Add to `server/.env`:**
|
||||
|
||||
```env
|
||||
# LiveKit connection
|
||||
LIVEKIT_URL=ws://livekit-server:7880
|
||||
LIVEKIT_API_KEY=$LK_KEY
|
||||
LIVEKIT_API_SECRET=$LK_SECRET
|
||||
LIVEKIT_PUBLIC_URL=wss://your-domain:7880 # or ws://your-ip:7880
|
||||
|
||||
# LiveKit egress S3 storage (reuse transcript storage or configure separately)
|
||||
LIVEKIT_STORAGE_AWS_BUCKET_NAME=reflector-bucket
|
||||
LIVEKIT_STORAGE_AWS_REGION=us-east-1
|
||||
LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID=your-key
|
||||
LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY=your-secret
|
||||
LIVEKIT_STORAGE_AWS_ENDPOINT_URL=http://garage:3900 # for Garage/MinIO
|
||||
|
||||
# Set LiveKit as default platform for new rooms
|
||||
DEFAULT_VIDEO_PLATFORM=livekit
|
||||
```
|
||||
|
||||
3. **Create `livekit.yaml`:**
|
||||
|
||||
```yaml
|
||||
port: 7880
|
||||
rtc:
|
||||
tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300
|
||||
redis:
|
||||
address: redis:6379
|
||||
keys:
|
||||
your_api_key: your_api_secret
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: your_api_key
|
||||
logging:
|
||||
level: info
|
||||
room:
|
||||
empty_timeout: 300
|
||||
max_participants: 0
|
||||
```
|
||||
|
||||
4. **Create `egress.yaml`:**
|
||||
|
||||
```yaml
|
||||
api_key: your_api_key
|
||||
api_secret: your_api_secret
|
||||
ws_url: ws://livekit-server:7880
|
||||
health_port: 7082
|
||||
log_level: info
|
||||
session_limits:
|
||||
file_output_max_duration: 4h
|
||||
```
|
||||
|
||||
5. **Start with the livekit profile:**
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.selfhosted.yml --profile livekit up -d livekit-server livekit-egress
|
||||
```
|
||||
|
||||
## Environment Variables Reference
|
||||
|
||||
### Required
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_URL` | Internal WebSocket URL (server -> LiveKit) | `ws://livekit-server:7880` |
|
||||
| `LIVEKIT_API_KEY` | API key for authentication | `reflector_a1b2c3d4e5f6` |
|
||||
| `LIVEKIT_API_SECRET` | API secret for token signing and webhooks | `64-char hex string` |
|
||||
|
||||
### Recommended
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_PUBLIC_URL` | Public WebSocket URL (browser -> LiveKit). **Must be reachable from participants' browsers**, not a Docker-internal address. Without `--domain`, set to `ws://<server-ip>:7880`. With `--domain`, set to `wss://<domain>:7880`. | `wss://reflector.example.com:7880` |
|
||||
| `LIVEKIT_WEBHOOK_SECRET` | Webhook verification secret. Defaults to `LIVEKIT_API_SECRET` if not set. Only needed if you want a separate secret for webhooks. | (same as API secret) |
|
||||
| `DEFAULT_VIDEO_PLATFORM` | Default platform for new rooms | `livekit` |
|
||||
|
||||
### Storage (for Track Egress)
|
||||
|
||||
Track Egress writes per-participant audio files to S3. If not configured, falls back to the transcript storage credentials.
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_STORAGE_AWS_BUCKET_NAME` | S3 bucket for egress output | `reflector-bucket` |
|
||||
| `LIVEKIT_STORAGE_AWS_REGION` | S3 region | `us-east-1` |
|
||||
| `LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID` | S3 access key | `GK...` |
|
||||
| `LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY` | S3 secret key | `...` |
|
||||
| `LIVEKIT_STORAGE_AWS_ENDPOINT_URL` | S3 endpoint (for Garage/MinIO) | `http://garage:3900` |
|
||||
|
||||
## Docker Compose Services
|
||||
|
||||
Two services are added under the `livekit` profile in `docker-compose.selfhosted.yml`:
|
||||
|
||||
### livekit-server
|
||||
|
||||
| Setting | Value |
|
||||
|---------|-------|
|
||||
| Image | `livekit/livekit-server:v1.10.1` |
|
||||
| Ports | 7880 (signaling), 7881 (TCP RTC), 44200-44300/udp (ICE) |
|
||||
| Config | `./livekit.yaml` mounted at `/etc/livekit.yaml` |
|
||||
| Depends on | Redis |
|
||||
|
||||
### livekit-egress
|
||||
|
||||
| Setting | Value |
|
||||
|---------|-------|
|
||||
| Image | `livekit/egress:v1.12.0` |
|
||||
| Config | `./egress.yaml` mounted at `/etc/egress.yaml` |
|
||||
| Depends on | Redis, livekit-server |
|
||||
|
||||
No `--cap-add=SYS_ADMIN` is needed because Track Egress does not use Chrome (that's only for Room Composite video recording, which we don't use).
|
||||
|
||||
## Port Ranges
|
||||
|
||||
| Range | Protocol | Service | Notes |
|
||||
|-------|----------|---------|-------|
|
||||
| 7880 | TCP | LiveKit signaling | WebSocket connections from browsers (direct, no Caddy) |
|
||||
| 7881 | TCP | LiveKit RTC over TCP | Fallback when UDP is blocked |
|
||||
| 44200-44300 | UDP | LiveKit ICE | WebRTC media. Avoids collision with Reflector WebRTC (40000-40100) and macOS ephemeral ports (49152-65535) |
|
||||
|
||||
### TLS / Caddy Integration
|
||||
|
||||
When `--caddy` is enabled (HTTPS), the setup script automatically:
|
||||
|
||||
1. Adds a `/lk-ws` reverse proxy route to the Caddyfile that proxies `wss://domain/lk-ws` → `ws://livekit-server:7880`
|
||||
2. Sets `LIVEKIT_PUBLIC_URL` to `wss://<domain>/lk-ws` (or `wss://<ip>/lk-ws`)
|
||||
|
||||
This avoids mixed-content blocking (browsers reject `ws://` connections on `https://` pages). Caddy handles TLS termination; LiveKit server itself runs plain WebSocket internally.
|
||||
|
||||
Without `--caddy`, browsers connect directly to LiveKit on port 7880 via `ws://`.
|
||||
|
||||
### Security Note: on_demand TLS
|
||||
|
||||
When using `--ip` (Caddy with self-signed certs), the Caddyfile uses `tls internal { on_demand }`. This generates certificates dynamically for any hostname/IP on first TLS request.
|
||||
|
||||
**Risk:** An attacker can trigger certificate generation for arbitrary hostnames by sending TLS requests with spoofed SNI values, causing disk and CPU usage. This is a low-severity resource exhaustion risk, not a data theft risk.
|
||||
|
||||
**Mitigations:**
|
||||
- For LAN/development use: not a concern (not internet-exposed)
|
||||
- For cloud VMs: restrict port 443 access via firewall to trusted IPs
|
||||
- For production: use `--domain` with a real domain name instead of `--ip` — Caddy uses Let's Encrypt (no `on_demand` needed)
|
||||
|
||||
| Deployment | `LIVEKIT_PUBLIC_URL` | How it works |
|
||||
|---|---|---|
|
||||
| localhost, no Caddy | `ws://localhost:7880` | Direct connection |
|
||||
| LAN IP, no Caddy | `ws://192.168.1.x:7880` | Direct connection |
|
||||
| IP + Caddy | `wss://192.168.1.x/lk-ws` | Caddy terminates TLS, proxies to LiveKit |
|
||||
| Domain + Caddy | `wss://example.com/lk-ws` | Caddy terminates TLS, proxies to LiveKit |
|
||||
|
||||
## Webhook Endpoint
|
||||
|
||||
LiveKit sends webhook events to `POST /v1/livekit/webhook`. Events handled:
|
||||
|
||||
| Event | Action |
|
||||
|-------|--------|
|
||||
| `participant_joined` | Logs participant join, updates meeting state |
|
||||
| `participant_left` | Logs participant leave |
|
||||
| `egress_started` | Logs recording start |
|
||||
| `egress_ended` | Logs recording completion with output file info |
|
||||
| `room_started` / `room_finished` | Logs room lifecycle |
|
||||
|
||||
Webhooks are authenticated via JWT in the `Authorization` header, verified using the API secret.
|
||||
|
||||
## Frontend
|
||||
|
||||
The LiveKit room component uses `@livekit/components-react` with the prebuilt `<VideoConference>` UI. It includes:
|
||||
|
||||
- Recording consent dialog (same as Daily/Whereby)
|
||||
- Email transcript button (feature-gated)
|
||||
- Extensible overlay buttons for custom actions
|
||||
|
||||
When a user joins a LiveKit room, the backend generates a JWT access token and returns it in the `room_url` query parameter. The frontend parses this and passes it to the LiveKit React SDK.
|
||||
|
||||
## Separate Server Deployment
|
||||
|
||||
For larger deployments (15+ participants, multiple simultaneous rooms), LiveKit can run on a dedicated server:
|
||||
|
||||
1. Run `livekit-server` and `livekit-egress` on a separate machine
|
||||
2. Point `LIVEKIT_URL` to the remote LiveKit server (e.g., `ws://livekit-host:7880`)
|
||||
3. Set `LIVEKIT_PUBLIC_URL` to the public-facing URL (e.g., `wss://livekit.example.com`)
|
||||
4. Configure the remote LiveKit's `webhook.urls` to point back to the Reflector server
|
||||
5. Both need access to the same Redis (or configure LiveKit's own Redis)
|
||||
6. Both need access to the same S3 storage
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### LiveKit server not starting
|
||||
|
||||
```bash
|
||||
# Check logs
|
||||
docker compose -f docker-compose.selfhosted.yml logs livekit-server --tail 30
|
||||
|
||||
# Verify config
|
||||
cat livekit.yaml
|
||||
|
||||
# Common issues:
|
||||
# - Redis not reachable (check redis service is running)
|
||||
# - Port 7880 already in use
|
||||
# - Invalid API key format in livekit.yaml
|
||||
```
|
||||
|
||||
### Participants can't connect
|
||||
|
||||
```bash
|
||||
# Check that LIVEKIT_PUBLIC_URL is accessible from the browser
|
||||
# It must be the URL the browser can reach, not the Docker-internal URL
|
||||
|
||||
# Check firewall allows ports 7880, 7881, and 44200-44300/udp
|
||||
sudo ufw status # or iptables -L
|
||||
|
||||
# Verify the access token is being generated
|
||||
docker compose -f docker-compose.selfhosted.yml logs server | grep livekit
|
||||
```
|
||||
|
||||
### Track Egress not writing files
|
||||
|
||||
```bash
|
||||
# Check egress logs
|
||||
docker compose -f docker-compose.selfhosted.yml logs livekit-egress --tail 30
|
||||
|
||||
# Verify S3 credentials
|
||||
# Egress receives S3 config per-request from the server, so check server/.env:
|
||||
grep LIVEKIT_STORAGE server/.env
|
||||
```
|
||||
73
docsv2/migrate-daily-to-livekit.md
Normal file
73
docsv2/migrate-daily-to-livekit.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# Migrating from Daily.co to LiveKit
|
||||
|
||||
This guide covers running LiveKit alongside Daily.co or fully replacing it.
|
||||
|
||||
## Both Platforms Run Simultaneously
|
||||
|
||||
LiveKit and Daily.co coexist — the platform is selected **per room**. You don't need to migrate all rooms at once.
|
||||
|
||||
- Existing Daily rooms continue to work as-is
|
||||
- New rooms can use LiveKit
|
||||
- Each room's `platform` field determines which video service is used
|
||||
- Transcripts, topics, summaries work identically regardless of platform
|
||||
|
||||
## Step 1: Enable LiveKit
|
||||
|
||||
Add `--livekit` to your setup command:
|
||||
|
||||
```bash
|
||||
# If currently running:
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy
|
||||
|
||||
# Add --livekit:
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
```
|
||||
|
||||
This starts `livekit-server` + `livekit-egress` containers alongside your existing stack.
|
||||
|
||||
## Step 2: Set Default Platform
|
||||
|
||||
The setup script automatically sets `DEFAULT_VIDEO_PLATFORM=livekit` in `server/.env`. This means **new rooms** default to LiveKit. Existing rooms keep their current platform.
|
||||
|
||||
To keep Daily as the default for new rooms:
|
||||
```bash
|
||||
# In server/.env, change:
|
||||
DEFAULT_VIDEO_PLATFORM=daily
|
||||
```
|
||||
|
||||
## Step 3: Switch Individual Rooms
|
||||
|
||||
In the Rooms admin page, edit any room and change the **Platform** dropdown from "Daily" to "LiveKit". The next meeting in that room will use LiveKit.
|
||||
|
||||
Previously recorded Daily transcripts for that room are unaffected.
|
||||
|
||||
## Step 4: (Optional) Remove Daily.co
|
||||
|
||||
Once all rooms use LiveKit and you no longer need Daily.co:
|
||||
|
||||
1. Remove `DAILY_API_KEY` and related Daily settings from `server/.env`
|
||||
2. Re-run the setup script — it won't activate the `dailyco` profile
|
||||
3. Hatchet workers are shared between Daily and LiveKit, so they continue running
|
||||
|
||||
Daily-specific services that stop:
|
||||
- `hatchet-worker-cpu` with `dailyco` profile (but continues if `livekit` profile is active)
|
||||
- Daily webhook polling tasks (`poll_daily_recordings`, etc.)
|
||||
|
||||
## What Changes for Users
|
||||
|
||||
| Feature | Daily.co | LiveKit |
|
||||
|---------|---------|---------|
|
||||
| Video/audio quality | Daily.co SFU | LiveKit SFU (comparable) |
|
||||
| Pre-join screen | Daily's built-in iframe | LiveKit PreJoin component (name + device selection) |
|
||||
| Recording | Starts via REST API from frontend | Auto Track Egress (automatic, no user action) |
|
||||
| Multitrack audio | Per-participant WebM tracks | Per-participant OGG tracks |
|
||||
| Transcript quality | Same pipeline | Same pipeline |
|
||||
| Self-hosted | No (SaaS only) | Yes (fully self-hosted) |
|
||||
|
||||
## Database Changes
|
||||
|
||||
None required. The `platform` field on rooms and meetings already supports `"livekit"`. LiveKit recordings use recording IDs prefixed with `lk-` to distinguish them from Daily recordings.
|
||||
|
||||
## Rollback
|
||||
|
||||
To revert a room back to Daily, just change the Platform dropdown back to "Daily" in the Rooms admin page. No data migration needed.
|
||||
@@ -170,6 +170,8 @@ These start regardless of which flags you pass:
|
||||
| `ollama-cpu` | `ollama-cpu` | Local Ollama LLM on CPU |
|
||||
| `garage` | `garage` | Local S3-compatible object storage |
|
||||
| `caddy` | `caddy` | Reverse proxy with SSL |
|
||||
| `dailyco` | `hatchet-worker-cpu` | Hatchet workflow workers for Daily.co multitrack processing |
|
||||
| `livekit` | `livekit-server`, `livekit-egress` | Self-hosted video platform + per-participant audio recording |
|
||||
|
||||
### The "transcription" Alias
|
||||
|
||||
@@ -206,11 +208,17 @@ Both the `gpu` and `cpu` services define a Docker network alias of `transcriptio
|
||||
│ :8000 │ └─────────┘ └─────────┘
|
||||
└───────────┘
|
||||
│
|
||||
┌─────┴─────┐ ┌─────────┐
|
||||
│ ollama │ │ garage │
|
||||
│(optional) │ │(optional│
|
||||
│ :11435 │ │ S3) │
|
||||
└───────────┘ └─────────┘
|
||||
┌─────┴─────┐ ┌─────────┐ ┌──────────────┐
|
||||
│ ollama │ │ garage │ │livekit-server│
|
||||
│(optional) │ │(optional│ │ (optional) │
|
||||
│ :11435 │ │ S3) │ │ :7880 │
|
||||
└───────────┘ └─────────┘ └──────┬───────┘
|
||||
│
|
||||
┌──────┴───────┐
|
||||
│livekit-egress│
|
||||
│ (Track Egress│
|
||||
│ to S3) │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
### How Services Interact
|
||||
@@ -320,7 +328,9 @@ You can point your own reverse proxy (nginx, Traefik, etc.) at these ports.
|
||||
|
||||
### WebRTC and UDP
|
||||
|
||||
The server exposes UDP ports 50000-50100 for WebRTC ICE candidates. The `WEBRTC_HOST` variable tells the server which IP to advertise in ICE candidates — this must be the server's actual IP address (not a domain), because WebRTC uses UDP which doesn't go through the HTTP reverse proxy.
|
||||
The server exposes UDP ports 40000-40100 for Reflector's own WebRTC ICE candidates. When LiveKit is enabled, it additionally uses ports 44200-44300/udp for its WebRTC ICE candidates. The `WEBRTC_HOST` variable tells the server which IP to advertise in ICE candidates — this must be the server's actual IP address (not a domain), because WebRTC uses UDP which doesn't go through the HTTP reverse proxy.
|
||||
|
||||
Port ranges are chosen to avoid collision with macOS ephemeral ports (49152-65535).
|
||||
|
||||
---
|
||||
|
||||
@@ -426,7 +436,10 @@ All services communicate over Docker's default bridge network. Only specific por
|
||||
| 3903 | Garage | `0.0.0.0:3903` | Garage admin API |
|
||||
| 8000 | GPU/CPU | `127.0.0.1:8000` | ML model API (localhost only) |
|
||||
| 11435 | Ollama | `127.0.0.1:11435` | Ollama API (localhost only) |
|
||||
| 50000-50100/udp | Server | `0.0.0.0:50000-50100` | WebRTC ICE candidates |
|
||||
| 40000-40100/udp | Server | `0.0.0.0:40000-40100` | Reflector WebRTC ICE candidates |
|
||||
| 7880 | LiveKit | `0.0.0.0:7880` | LiveKit signaling (WS) |
|
||||
| 7881 | LiveKit | `0.0.0.0:7881` | LiveKit RTC over TCP |
|
||||
| 44200-44300/udp | LiveKit | `0.0.0.0:44200-44300` | LiveKit WebRTC ICE candidates |
|
||||
|
||||
Services bound to `127.0.0.1` are only accessible from the host itself (not from the network). Caddy is the only service exposed to the internet on standard HTTP/HTTPS ports.
|
||||
|
||||
@@ -443,6 +456,8 @@ Inside the Docker network, services reach each other by their compose service na
|
||||
| `transcription` | GPU or CPU container (network alias) |
|
||||
| `ollama` / `ollama-cpu` | Ollama container |
|
||||
| `garage` | Garage S3 container |
|
||||
| `livekit-server` | LiveKit SFU server |
|
||||
| `livekit-egress` | LiveKit Track Egress service |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -144,6 +144,7 @@ Browse all available models at https://ollama.com/library.
|
||||
|
||||
| Flag | What it does |
|
||||
|------|-------------|
|
||||
| `--livekit` | Enables LiveKit self-hosted video platform. Generates API credentials, starts `livekit-server` + `livekit-egress`. See [LiveKit Setup](livekit-setup.md). |
|
||||
| `--garage` | Starts Garage (local S3-compatible storage). Auto-configures bucket, keys, and env vars. |
|
||||
| `--caddy` | Starts Caddy reverse proxy on ports 80/443 with self-signed cert. |
|
||||
| `--domain DOMAIN` | Use a real domain with Let's Encrypt auto-HTTPS (implies `--caddy`). Requires DNS A record pointing to this server and ports 80/443 open. |
|
||||
@@ -154,6 +155,20 @@ Without `--garage`, you **must** provide S3-compatible credentials (the script w
|
||||
|
||||
Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse proxy at `web:3000` (frontend) and `server:1250` (API).
|
||||
|
||||
## Video Platform (LiveKit)
|
||||
|
||||
For self-hosted video rooms with per-participant audio recording, add `--livekit` to your setup command:
|
||||
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
```
|
||||
|
||||
This generates LiveKit API credentials, creates config files (`livekit.yaml`, `egress.yaml`), and starts `livekit-server` (WebRTC SFU) + `livekit-egress` (per-participant audio recording to S3). LiveKit reuses the same Redis and S3 storage as the rest of the stack.
|
||||
|
||||
New rooms default to LiveKit when `DEFAULT_VIDEO_PLATFORM=livekit` is set (done automatically by the setup script). Existing Daily.co and Whereby rooms continue to work. On re-runs, the script detects the existing `LIVEKIT_API_KEY` in `server/.env` automatically.
|
||||
|
||||
> For detailed configuration, environment variables, ports, and troubleshooting, see [LiveKit Setup](livekit-setup.md).
|
||||
|
||||
**Using a domain (recommended for production):** Point a DNS A record at your server's IP, then pass `--domain your.domain.com`. Caddy will automatically obtain and renew a Let's Encrypt certificate. Ports 80 and 443 must be open.
|
||||
|
||||
**Without a domain:** `--caddy` alone uses a self-signed certificate. Browsers will show a security warning that must be accepted.
|
||||
|
||||
144
docsv2/tunnel-setup.md
Normal file
144
docsv2/tunnel-setup.md
Normal file
@@ -0,0 +1,144 @@
|
||||
# Tunnel Setup (Self-Hosting Behind NAT)
|
||||
|
||||
Expose your self-hosted Reflector + LiveKit stack to the internet without port forwarding, static IPs, or cloud VMs using tunneling services.
|
||||
|
||||
## Requirements
|
||||
|
||||
You need **two tunnels**:
|
||||
|
||||
| Tunnel | Protocol | What it carries | Local port | Examples |
|
||||
|--------|----------|----------------|------------|----------|
|
||||
| **TCP tunnel** | TCP | Web app, API, LiveKit signaling (WebSocket) | 443 (Caddy) | playit.gg, ngrok, Cloudflare Tunnel, bore, frp |
|
||||
| **UDP tunnel** | UDP | WebRTC audio/video media | Assigned by tunnel service | playit.gg, frp |
|
||||
|
||||
> **Important:** Most tunneling services only support TCP. WebRTC media requires UDP. Make sure your chosen service supports UDP tunnels. As of writing, [playit.gg](https://playit.gg) is one of the few that supports both TCP and UDP (premium $3/mo).
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
Internet participants
|
||||
│
|
||||
├── TCP tunnel (HTTPS)
|
||||
│ └── tunnel service → your machine port 443 (Caddy)
|
||||
│ ├── /v1/* → server:1250 (API)
|
||||
│ ├── /lk-ws/* → livekit-server:7880 (signaling)
|
||||
│ └── /* → web:3000 (frontend)
|
||||
│
|
||||
└── UDP tunnel
|
||||
└── tunnel service → your machine port N (LiveKit ICE)
|
||||
```
|
||||
|
||||
## Setup
|
||||
|
||||
### Step 1: Create tunnels with your chosen service
|
||||
|
||||
Create two tunnels and note the public addresses:
|
||||
|
||||
- **TCP tunnel**: Points to your local port `443`
|
||||
- You'll get an address like `your-tunnel.example.com:PORT`
|
||||
- **UDP tunnel**: Points to a local port (e.g., `14139`)
|
||||
- You'll get an address like `udp-host.example.com:PORT`
|
||||
- **The local port must match the public port** (or LiveKit ICE candidates won't match). Set the local port to the same number as the public port assigned by the tunnel service.
|
||||
|
||||
### Step 2: Run the setup script
|
||||
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh <mode> --livekit --garage \
|
||||
--tunnels <TCP_ADDRESS>,<UDP_ADDRESS>
|
||||
```
|
||||
|
||||
Example:
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --cpu --livekit --garage \
|
||||
--tunnels my-tunnel.example.com:9055,udp-host.example.com:14139
|
||||
```
|
||||
|
||||
Or use separate flags:
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --cpu --livekit --garage \
|
||||
--tunnel-tcp my-tunnel.example.com:9055 \
|
||||
--tunnel-udp udp-host.example.com:14139
|
||||
```
|
||||
|
||||
The script automatically:
|
||||
- Sets all URLs (API, frontend, LiveKit signaling) to the TCP tunnel address
|
||||
- Configures LiveKit with the UDP tunnel port and resolved IP for ICE candidates
|
||||
- Enables Caddy with self-signed TLS (catch-all on port 443)
|
||||
- Saves tunnel config for re-runs
|
||||
|
||||
### Step 3: Start the tunnel agent
|
||||
|
||||
Run your tunneling service's agent/client on the same machine. It must be running whenever you want external access.
|
||||
|
||||
### Step 4: Access
|
||||
|
||||
Share `https://<TCP_TUNNEL_ADDRESS>` with participants. They'll need to accept the self-signed certificate warning in their browser.
|
||||
|
||||
## Flag Reference
|
||||
|
||||
| Flag | Description |
|
||||
|------|-------------|
|
||||
| `--tunnels TCP,UDP` | Both tunnel addresses comma-separated (e.g., `host:9055,host:14139`) |
|
||||
| `--tunnel-tcp ADDR` | TCP tunnel address only (e.g., `host.example.com:9055`) |
|
||||
| `--tunnel-udp ADDR` | UDP tunnel address only (e.g., `host.example.com:14139`) |
|
||||
|
||||
Tunnel flags:
|
||||
- Imply `--caddy` (HTTPS required for browser mic/camera access)
|
||||
- Are mutually exclusive with `--ip` and `--domain`
|
||||
- Are saved to config memory (re-run without flags replays saved config)
|
||||
|
||||
## UDP Port Matching
|
||||
|
||||
LiveKit advertises ICE candidates with a specific IP and port. The browser connects to that exact address. If the tunnel's public port differs from the local port, ICE will fail.
|
||||
|
||||
**Correct setup:** Set the tunnel's local port to match its public port.
|
||||
|
||||
```
|
||||
Tunnel assigns public port 14139
|
||||
→ Set local port to 14139
|
||||
→ LiveKit listens on 14139 (udp_port in livekit.yaml)
|
||||
→ Docker maps 14139:14139/udp
|
||||
→ ICE candidates advertise tunnel_ip:14139
|
||||
→ Browser connects to tunnel_ip:14139 → tunnel → local:14139 → LiveKit
|
||||
```
|
||||
|
||||
If your tunneling service doesn't let you choose the local port, you'll need to update `livekit.yaml` manually with the assigned ports.
|
||||
|
||||
## TLS Certificate Warning
|
||||
|
||||
With tunnel services on non-standard ports (e.g., `:9055`), Let's Encrypt can't auto-provision certificates (it requires ports 80/443). Caddy uses `tls internal` which generates a self-signed certificate. Participants will see a browser warning they must accept.
|
||||
|
||||
**To avoid the warning:**
|
||||
- Use a tunnel service that provides port 443 for TCP
|
||||
- Or use a real domain with `--domain` on a server with a public IP
|
||||
|
||||
## Compatible Tunnel Services
|
||||
|
||||
| Service | TCP | UDP | Free tier | Notes |
|
||||
|---------|-----|-----|-----------|-------|
|
||||
| [playit.gg](https://playit.gg) | Yes (premium) | Yes (premium) | Limited | $3/mo premium. Supports both TCP + UDP. |
|
||||
| [ngrok](https://ngrok.com) | Yes | No | Limited | TCP only — needs a separate UDP tunnel for media |
|
||||
| [Cloudflare Tunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/) | Yes | No | Yes | TCP only — needs a separate UDP tunnel for media |
|
||||
| [bore](https://github.com/ekzhang/bore) | Yes | No | Self-hosted | TCP only |
|
||||
| [frp](https://github.com/fatedier/frp) | Yes | Yes | Self-hosted | Requires your own VPS to run the frp server |
|
||||
| [Tailscale Funnel](https://tailscale.com/kb/1223/funnel) | Yes | No | Free (3 nodes) | TCP only, requires Tailscale account |
|
||||
|
||||
For a full self-contained setup without a VPS, playit.gg (TCP + UDP) is currently the simplest option.
|
||||
|
||||
## Limitations
|
||||
|
||||
- **Latency**: Adds a hop through the tunnel service's relay servers
|
||||
- **Bandwidth**: Tunnel services may have bandwidth limits on free/cheap tiers
|
||||
- **Reliability**: Depends on the tunnel service's uptime
|
||||
- **Certificate warning**: Unavoidable with non-standard ports (see above)
|
||||
- **Single UDP port**: Tunnel mode uses a single UDP port instead of a range, which limits concurrent WebRTC connections (~50 participants max)
|
||||
- **Not production-grade**: Suitable for demos, small teams, development, and privacy-first setups. For production, use a server with a public IP.
|
||||
|
||||
## Comparison
|
||||
|
||||
| Approach | Cost | Setup | Data location | Port forwarding needed |
|
||||
|----------|------|-------|---------------|----------------------|
|
||||
| **Tunnel (this guide)** | $0-3/mo | Low | Your machine | No |
|
||||
| **Cloud VM** | $5-20/mo | Low | Cloud provider | No |
|
||||
| **Port forwarding** | $0 | Medium | Your machine | Yes (router config) |
|
||||
| **VPN mesh (Tailscale)** | $0 | Low | Your machine | No (VPN peers only) |
|
||||
26
egress.yaml.example
Normal file
26
egress.yaml.example
Normal file
@@ -0,0 +1,26 @@
|
||||
# LiveKit Egress configuration
|
||||
# Generated by setup-selfhosted.sh — do not edit manually.
|
||||
# See: https://docs.livekit.io/self-hosting/egress/
|
||||
|
||||
api_key: __LIVEKIT_API_KEY__
|
||||
api_secret: __LIVEKIT_API_SECRET__
|
||||
ws_url: ws://livekit-server:7880
|
||||
redis:
|
||||
address: redis:6379
|
||||
|
||||
# Health check
|
||||
health_port: 7082
|
||||
|
||||
# Logging
|
||||
log_level: info
|
||||
|
||||
# CPU cost limits (Track Egress only — no composite video)
|
||||
# Track Egress costs 1.0 CPU unit per track; hundreds can run on one instance.
|
||||
# Default max_cpu_utilization is 0.8 (80% of available cores).
|
||||
|
||||
# Session limits
|
||||
session_limits:
|
||||
file_output_max_duration: 4h # Max 4 hours per recording
|
||||
|
||||
# S3 storage is configured per-request via the API (not here).
|
||||
# The server passes S3 credentials when starting each Track Egress.
|
||||
140
gpu/self_hosted/uv.lock
generated
140
gpu/self_hosted/uv.lock
generated
@@ -13,7 +13,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -24,76 +24,76 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/ac/892f4162df9b115b4758d615f32ec63d00f3084c705ff5526630887b9b42/aiohttp-3.13.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:63dd5e5b1e43b8fb1e91b79b7ceba1feba588b317d1edff385084fcc7a0a4538", size = 745744, upload-time = "2026-03-28T17:16:44.67Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/a9/c5b87e4443a2f0ea88cb3000c93a8fdad1ee63bffc9ded8d8c8e0d66efc6/aiohttp-3.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:746ac3cc00b5baea424dacddea3ec2c2702f9590de27d837aa67004db1eebc6e", size = 498178, upload-time = "2026-03-28T17:16:46.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/42/07e1b543a61250783650df13da8ddcdc0d0a5538b2bd15cef6e042aefc61/aiohttp-3.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bda8f16ea99d6a6705e5946732e48487a448be874e54a4f73d514660ff7c05d3", size = 498331, upload-time = "2026-03-28T17:16:48.9Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/d6/492f46bf0328534124772d0cf58570acae5b286ea25006900650f69dae0e/aiohttp-3.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b061e7b5f840391e3f64d0ddf672973e45c4cfff7a0feea425ea24e51530fc2", size = 1744414, upload-time = "2026-03-28T17:16:50.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/4d/e02627b2683f68051246215d2d62b2d2f249ff7a285e7a858dc47d6b6a14/aiohttp-3.13.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b252e8d5cd66184b570d0d010de742736e8a4fab22c58299772b0c5a466d4b21", size = 1719226, upload-time = "2026-03-28T17:16:53.173Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/6c/5d0a3394dd2b9f9aeba6e1b6065d0439e4b75d41f1fb09a3ec010b43552b/aiohttp-3.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20af8aad61d1803ff11152a26146d8d81c266aa8c5aa9b4504432abb965c36a0", size = 1782110, upload-time = "2026-03-28T17:16:55.362Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/2d/c20791e3437700a7441a7edfb59731150322424f5aadf635602d1d326101/aiohttp-3.13.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:13a5cc924b59859ad2adb1478e31f410a7ed46e92a2a619d6d1dd1a63c1a855e", size = 1884809, upload-time = "2026-03-28T17:16:57.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/94/d99dbfbd1924a87ef643833932eb2a3d9e5eee87656efea7d78058539eff/aiohttp-3.13.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:534913dfb0a644d537aebb4123e7d466d94e3be5549205e6a31f72368980a81a", size = 1764938, upload-time = "2026-03-28T17:17:00.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/61/3ce326a1538781deb89f6cf5e094e2029cd308ed1e21b2ba2278b08426f6/aiohttp-3.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:320e40192a2dcc1cf4b5576936e9652981ab596bf81eb309535db7e2f5b5672f", size = 1570697, upload-time = "2026-03-28T17:17:02.985Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/77/4ab5a546857bb3028fbaf34d6eea180267bdab022ee8b1168b1fcde4bfdd/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9e587fcfce2bcf06526a43cb705bdee21ac089096f2e271d75de9c339db3100c", size = 1702258, upload-time = "2026-03-28T17:17:05.28Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/63/d8f29021e39bc5af8e5d5e9da1b07976fb9846487a784e11e4f4eeda4666/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9eb9c2eea7278206b5c6c1441fdd9dc420c278ead3f3b2cc87f9b693698cc500", size = 1740287, upload-time = "2026-03-28T17:17:07.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/3a/cbc6b3b124859a11bc8055d3682c26999b393531ef926754a3445b99dfef/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:29be00c51972b04bf9d5c8f2d7f7314f48f96070ca40a873a53056e652e805f7", size = 1753011, upload-time = "2026-03-28T17:17:10.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/30/836278675205d58c1368b21520eab9572457cf19afd23759216c04483048/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:90c06228a6c3a7c9f776fe4fc0b7ff647fffd3bed93779a6913c804ae00c1073", size = 1566359, upload-time = "2026-03-28T17:17:12.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/b4/8032cc9b82d17e4277704ba30509eaccb39329dc18d6a35f05e424439e32/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a533ec132f05fd9a1d959e7f34184cd7d5e8511584848dab85faefbaac573069", size = 1785537, upload-time = "2026-03-28T17:17:14.721Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/7d/5873e98230bde59f493bf1f7c3e327486a4b5653fa401144704df5d00211/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1c946f10f413836f82ea4cfb90200d2a59578c549f00857e03111cf45ad01ca5", size = 1740752, upload-time = "2026-03-28T17:17:17.387Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/f2/13e46e0df051494d7d3c68b7f72d071f48c384c12716fc294f75d5b1a064/aiohttp-3.13.4-cp313-cp313-win32.whl", hash = "sha256:48708e2706106da6967eff5908c78ca3943f005ed6bcb75da2a7e4da94ef8c70", size = 433187, upload-time = "2026-03-28T17:17:19.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/c0/649856ee655a843c8f8664592cfccb73ac80ede6a8c8db33a25d810c12db/aiohttp-3.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:74a2eb058da44fa3a877a49e2095b591d4913308bb424c418b77beb160c55ce3", size = 459778, upload-time = "2026-03-28T17:17:21.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/29/6657cc37ae04cacc2dbf53fb730a06b6091cc4cbe745028e047c53e6d840/aiohttp-3.13.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:e0a2c961fc92abeff61d6444f2ce6ad35bb982db9fc8ff8a47455beacf454a57", size = 749363, upload-time = "2026-03-28T17:17:24.044Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/7f/30ccdf67ca3d24b610067dc63d64dcb91e5d88e27667811640644aa4a85d/aiohttp-3.13.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:153274535985a0ff2bff1fb6c104ed547cec898a09213d21b0f791a44b14d933", size = 499317, upload-time = "2026-03-28T17:17:26.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/13/e372dd4e68ad04ee25dafb050c7f98b0d91ea643f7352757e87231102555/aiohttp-3.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:351f3171e2458da3d731ce83f9e6b9619e325c45cbd534c7759750cabf453ad7", size = 500477, upload-time = "2026-03-28T17:17:28.279Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/fe/ee6298e8e586096fb6f5eddd31393d8544f33ae0792c71ecbb4c2bef98ac/aiohttp-3.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f989ac8bc5595ff761a5ccd32bdb0768a117f36dd1504b1c2c074ed5d3f4df9c", size = 1737227, upload-time = "2026-03-28T17:17:30.587Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b9/a7a0463a09e1a3fe35100f74324f23644bfc3383ac5fd5effe0722a5f0b7/aiohttp-3.13.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d36fc1709110ec1e87a229b201dd3ddc32aa01e98e7868083a794609b081c349", size = 1694036, upload-time = "2026-03-28T17:17:33.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/7c/8972ae3fb7be00a91aee6b644b2a6a909aedb2c425269a3bfd90115e6f8f/aiohttp-3.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42adaeea83cbdf069ab94f5103ce0787c21fb1a0153270da76b59d5578302329", size = 1786814, upload-time = "2026-03-28T17:17:36.035Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/01/c81e97e85c774decbaf0d577de7d848934e8166a3a14ad9f8aa5be329d28/aiohttp-3.13.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:92deb95469928cc41fd4b42a95d8012fa6df93f6b1c0a83af0ffbc4a5e218cde", size = 1866676, upload-time = "2026-03-28T17:17:38.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/5f/5b46fe8694a639ddea2cd035bf5729e4677ea882cb251396637e2ef1590d/aiohttp-3.13.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0c7c07c4257ef3a1df355f840bc62d133bcdef5c1c5ba75add3c08553e2eed", size = 1740842, upload-time = "2026-03-28T17:17:40.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/a2/0d4b03d011cca6b6b0acba8433193c1e484efa8d705ea58295590fe24203/aiohttp-3.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f062c45de8a1098cb137a1898819796a2491aec4e637a06b03f149315dff4d8f", size = 1566508, upload-time = "2026-03-28T17:17:43.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/17/e689fd500da52488ec5f889effd6404dece6a59de301e380f3c64f167beb/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:76093107c531517001114f0ebdb4f46858ce818590363e3e99a4a2280334454a", size = 1700569, upload-time = "2026-03-28T17:17:46.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/0d/66402894dbcf470ef7db99449e436105ea862c24f7ea4c95c683e635af35/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6f6ec32162d293b82f8b63a16edc80769662fbd5ae6fbd4936d3206a2c2cc63b", size = 1707407, upload-time = "2026-03-28T17:17:48.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/eb/af0ab1a3650092cbd8e14ef29e4ab0209e1460e1c299996c3f8288b3f1ff/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5903e2db3d202a00ad9f0ec35a122c005e85d90c9836ab4cda628f01edf425e2", size = 1752214, upload-time = "2026-03-28T17:17:51.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/bf/72326f8a98e4c666f292f03c385545963cc65e358835d2a7375037a97b57/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2d5bea57be7aca98dbbac8da046d99b5557c5cf4e28538c4c786313078aca09e", size = 1562162, upload-time = "2026-03-28T17:17:53.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/9f/13b72435f99151dd9a5469c96b3b5f86aa29b7e785ca7f35cf5e538f74c0/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bcf0c9902085976edc0232b75006ef38f89686901249ce14226b6877f88464fb", size = 1768904, upload-time = "2026-03-28T17:17:55.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/bc/28d4970e7d5452ac7776cdb5431a1164a0d9cf8bd2fffd67b4fb463aa56d/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3295f98bfeed2e867cab588f2a146a9db37a85e3ae9062abf46ba062bd29165", size = 1723378, upload-time = "2026-03-28T17:17:58.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/74/b32458ca1a7f34d65bdee7aef2036adbe0438123d3d53e2b083c453c24dd/aiohttp-3.13.4-cp314-cp314-win32.whl", hash = "sha256:a598a5c5767e1369d8f5b08695cab1d8160040f796c4416af76fd773d229b3c9", size = 438711, upload-time = "2026-03-28T17:18:00.728Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/b2/54b487316c2df3e03a8f3435e9636f8a81a42a69d942164830d193beb56a/aiohttp-3.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:c555db4bc7a264bead5a7d63d92d41a1122fcd39cc62a4db815f45ad46f9c2c8", size = 464977, upload-time = "2026-03-28T17:18:03.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/fb/e41b63c6ce71b07a59243bb8f3b457ee0c3402a619acb9d2c0d21ef0e647/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45abbbf09a129825d13c18c7d3182fecd46d9da3cfc383756145394013604ac1", size = 781549, upload-time = "2026-03-28T17:18:05.779Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/53/532b8d28df1e17e44c4d9a9368b78dcb6bf0b51037522136eced13afa9e8/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:74c80b2bc2c2adb7b3d1941b2b60701ee2af8296fc8aad8b8bc48bc25767266c", size = 514383, upload-time = "2026-03-28T17:18:08.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/1f/62e5d400603e8468cd635812d99cb81cfdc08127a3dc474c647615f31339/aiohttp-3.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c97989ae40a9746650fa196894f317dafc12227c808c774929dda0ff873a5954", size = 518304, upload-time = "2026-03-28T17:18:10.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/57/2326b37b10896447e3c6e0cbef4fe2486d30913639a5cfd1332b5d870f82/aiohttp-3.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dae86be9811493f9990ef44fff1685f5c1a3192e9061a71a109d527944eed551", size = 1893433, upload-time = "2026-03-28T17:18:13.121Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/b4/a24d82112c304afdb650167ef2fe190957d81cbddac7460bedd245f765aa/aiohttp-3.13.4-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1db491abe852ca2fa6cc48a3341985b0174b3741838e1341b82ac82c8bd9e871", size = 1755901, upload-time = "2026-03-28T17:18:16.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/2d/0883ef9d878d7846287f036c162a951968f22aabeef3ac97b0bea6f76d5d/aiohttp-3.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e5d701c0aad02a7dce72eef6b93226cf3734330f1a31d69ebbf69f33b86666e", size = 1876093, upload-time = "2026-03-28T17:18:18.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/52/9204bb59c014869b71971addad6778f005daa72a96eed652c496789d7468/aiohttp-3.13.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8ac32a189081ae0a10ba18993f10f338ec94341f0d5df8fff348043962f3c6f8", size = 1970815, upload-time = "2026-03-28T17:18:21.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/b5/e4eb20275a866dde0f570f411b36c6b48f7b53edfe4f4071aa1b0728098a/aiohttp-3.13.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e968cdaba43e45c73c3f306fca418c8009a957733bac85937c9f9cf3f4de27", size = 1816223, upload-time = "2026-03-28T17:18:24.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/23/e98075c5bb146aa61a1239ee1ac7714c85e814838d6cebbe37d3fe19214a/aiohttp-3.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca114790c9144c335d538852612d3e43ea0f075288f4849cf4b05d6cd2238ce7", size = 1649145, upload-time = "2026-03-28T17:18:27.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/c1/7bad8be33bb06c2bb224b6468874346026092762cbec388c3bdb65a368ee/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ea2e071661ba9cfe11eabbc81ac5376eaeb3061f6e72ec4cc86d7cdd1ffbdbbb", size = 1816562, upload-time = "2026-03-28T17:18:29.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/10/c00323348695e9a5e316825969c88463dcc24c7e9d443244b8a2c9cf2eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:34e89912b6c20e0fd80e07fa401fd218a410aa1ce9f1c2f1dad6db1bd0ce0927", size = 1800333, upload-time = "2026-03-28T17:18:32.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/43/9b2147a1df3559f49bd723e22905b46a46c068a53adb54abdca32c4de180/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0e217cf9f6a42908c52b46e42c568bd57adc39c9286ced31aaace614b6087965", size = 1820617, upload-time = "2026-03-28T17:18:35.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/7f/b3481a81e7a586d02e99387b18c6dafff41285f6efd3daa2124c01f87eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0c296f1221e21ba979f5ac1964c3b78cfde15c5c5f855ffd2caab337e9cd9182", size = 1643417, upload-time = "2026-03-28T17:18:37.949Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/72/07181226bc99ce1124e0f89280f5221a82d3ae6a6d9d1973ce429d48e52b/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d99a9d168ebaffb74f36d011750e490085ac418f4db926cce3989c8fe6cb6b1b", size = 1849286, upload-time = "2026-03-28T17:18:40.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/e6/1b3566e103eca6da5be4ae6713e112a053725c584e96574caf117568ffef/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cb19177205d93b881f3f89e6081593676043a6828f59c78c17a0fd6c1fbed2ba", size = 1782635, upload-time = "2026-03-28T17:18:43.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/58/1b11c71904b8d079eb0c39fe664180dd1e14bebe5608e235d8bfbadc8929/aiohttp-3.13.4-cp314-cp314t-win32.whl", hash = "sha256:c606aa5656dab6552e52ca368e43869c916338346bfaf6304e15c58fb113ea30", size = 472537, upload-time = "2026-03-28T17:18:46.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/8f/87c56a1a1977d7dddea5b31e12189665a140fdb48a71e9038ff90bb564ec/aiohttp-3.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:014dcc10ec8ab8db681f0d68e939d1e9286a5aa2b993cbbdb0db130853e02144", size = 506381, upload-time = "2026-03-28T17:18:48.74Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
34
livekit.yaml.example
Normal file
34
livekit.yaml.example
Normal file
@@ -0,0 +1,34 @@
|
||||
# LiveKit server configuration
|
||||
# Generated by setup-selfhosted.sh — do not edit manually.
|
||||
# See: https://docs.livekit.io/self-hosting/deployment/
|
||||
|
||||
port: 7880
|
||||
rtc:
|
||||
tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300
|
||||
# use_external_ip: true # Uncomment for production with public IP
|
||||
|
||||
redis:
|
||||
address: redis:6379
|
||||
|
||||
keys:
|
||||
# API key : API secret (generated by setup script)
|
||||
# devkey: secret
|
||||
__LIVEKIT_API_KEY__: __LIVEKIT_API_SECRET__
|
||||
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: __LIVEKIT_API_KEY__
|
||||
|
||||
logging:
|
||||
level: info
|
||||
|
||||
# Room settings
|
||||
room:
|
||||
empty_timeout: 300 # 5 minutes after last participant leaves
|
||||
max_participants: 0 # 0 = unlimited
|
||||
|
||||
# Track Egress only (no composite video)
|
||||
# Egress is configured via egress.yaml on the egress service
|
||||
@@ -26,6 +26,17 @@
|
||||
# (If omitted, configure an external OpenAI-compatible LLM in server/.env)
|
||||
#
|
||||
# Optional flags:
|
||||
# --livekit Enable LiveKit self-hosted video platform (generates credentials,
|
||||
# starts livekit-server + livekit-egress containers)
|
||||
# --ip IP Set the server's IP address for all URLs. Implies --caddy
|
||||
# (self-signed HTTPS, required for browser mic/camera access).
|
||||
# Mutually exclusive with --domain. Use for LAN or cloud VM access.
|
||||
# On Linux, IP is auto-detected; on macOS, use --ip to specify it.
|
||||
# --tunnels TCP,UDP Configure tunnel addresses for NAT traversal (e.g. playit.gg).
|
||||
# TCP=host:port for web/API/signaling, UDP=host:port for WebRTC media.
|
||||
# Implies --caddy. Mutually exclusive with --ip and --domain.
|
||||
# --tunnel-tcp ADDR TCP tunnel address only (e.g. --tunnel-tcp host.playit.gg:9055)
|
||||
# --tunnel-udp ADDR UDP tunnel address only (e.g. --tunnel-udp host.ply.gg:14139)
|
||||
# --garage Use Garage for local S3-compatible storage
|
||||
# --caddy Enable Caddy reverse proxy with auto-SSL
|
||||
# --domain DOMAIN Use a real domain for Caddy (enables Let's Encrypt auto-HTTPS)
|
||||
@@ -42,10 +53,10 @@
|
||||
# --build Build backend and frontend images from source instead of pulling
|
||||
#
|
||||
# Examples:
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --domain reflector.example.com
|
||||
# ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --hosted --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy --domain reflector.example.com
|
||||
# ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --hosted --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --cpu --padding modal --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --translation passthrough --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --cpu --diarization modal --translation modal --garage
|
||||
@@ -58,9 +69,11 @@
|
||||
# Config memory: after a successful run, flags are saved to data/.selfhosted-last-args.
|
||||
# Re-running with no arguments replays the saved configuration automatically.
|
||||
#
|
||||
# The script auto-detects Daily.co (DAILY_API_KEY) and Whereby (WHEREBY_API_KEY)
|
||||
# from server/.env. If Daily.co is configured, Hatchet workflow services are
|
||||
# started automatically for multitrack recording processing.
|
||||
# The script auto-detects Daily.co (DAILY_API_KEY), Whereby (WHEREBY_API_KEY),
|
||||
# and LiveKit (LIVEKIT_API_KEY) from server/.env.
|
||||
# - Daily.co: enables Hatchet workflow services for multitrack recording processing.
|
||||
# - LiveKit: enables livekit-server + livekit-egress containers (self-hosted,
|
||||
# generates livekit.yaml and egress.yaml configs automatically).
|
||||
#
|
||||
# Idempotent — safe to re-run at any time.
|
||||
#
|
||||
@@ -207,9 +220,13 @@ fi
|
||||
MODEL_MODE="" # gpu or cpu (required, mutually exclusive)
|
||||
OLLAMA_MODE="" # ollama-gpu or ollama-cpu (optional)
|
||||
USE_GARAGE=false
|
||||
USE_LIVEKIT=false
|
||||
USE_CADDY=false
|
||||
CUSTOM_DOMAIN="" # optional domain for Let's Encrypt HTTPS
|
||||
CUSTOM_IP="" # optional --ip override (mutually exclusive with --caddy)
|
||||
BUILD_IMAGES=false # build backend/frontend from source
|
||||
TUNNEL_TCP="" # --tunnel-tcp: TCP tunnel address (e.g., host:port from playit.gg)
|
||||
TUNNEL_UDP="" # --tunnel-udp: UDP tunnel address (e.g., host:port from playit.gg)
|
||||
ADMIN_PASSWORD="" # optional admin password for password auth
|
||||
CUSTOM_CA="" # --custom-ca: path to dir or CA cert file
|
||||
USE_CUSTOM_CA=false # derived flag: true when --custom-ca is provided
|
||||
@@ -261,8 +278,44 @@ for i in "${!ARGS[@]}"; do
|
||||
OLLAMA_MODEL="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--garage) USE_GARAGE=true ;;
|
||||
--livekit) USE_LIVEKIT=true ;;
|
||||
--caddy) USE_CADDY=true ;;
|
||||
--ip)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--ip requires an IP address (e.g. --ip 192.168.0.100)"
|
||||
exit 1
|
||||
fi
|
||||
CUSTOM_IP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--build) BUILD_IMAGES=true ;;
|
||||
--tunnels)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnels requires TCP,UDP addresses (e.g. --tunnels host:9055,host:14139)"
|
||||
exit 1
|
||||
fi
|
||||
IFS=',' read -r TUNNEL_TCP TUNNEL_UDP <<< "${ARGS[$next_i]}"
|
||||
# Trim whitespace
|
||||
TUNNEL_TCP="${TUNNEL_TCP// /}"
|
||||
TUNNEL_UDP="${TUNNEL_UDP// /}"
|
||||
SKIP_NEXT=true ;;
|
||||
--tunnel-tcp)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnel-tcp requires a TCP tunnel address (e.g. --tunnel-tcp host:9055)"
|
||||
exit 1
|
||||
fi
|
||||
TUNNEL_TCP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--tunnel-udp)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnel-udp requires a UDP tunnel address (e.g. --tunnel-udp host:14139)"
|
||||
exit 1
|
||||
fi
|
||||
TUNNEL_UDP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--password)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
@@ -356,6 +409,45 @@ for i in "${!ARGS[@]}"; do
|
||||
esac
|
||||
done
|
||||
|
||||
# --- Validate flag combinations ---
|
||||
if [[ -n "$CUSTOM_IP" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
err "--ip and --domain are mutually exclusive. Use --ip for IP-based access, or --domain for domain-based access."
|
||||
exit 1
|
||||
fi
|
||||
# --ip implies --caddy (browsers require HTTPS for mic/camera access on non-localhost)
|
||||
if [[ -n "$CUSTOM_IP" ]]; then
|
||||
USE_CADDY=true
|
||||
fi
|
||||
# Validate tunnel address format (must be host:port with numeric port)
|
||||
_validate_tunnel_addr() {
|
||||
local label="$1" addr="$2"
|
||||
if [[ -z "$addr" ]]; then return; fi
|
||||
if [[ "$addr" != *:* ]]; then
|
||||
err "$label address must be host:port (got: $addr)"
|
||||
exit 1
|
||||
fi
|
||||
local port="${addr##*:}"
|
||||
if ! [[ "$port" =~ ^[0-9]+$ ]] || [[ "$port" -lt 1 ]] || [[ "$port" -gt 65535 ]]; then
|
||||
err "$label port must be 1-65535 (got: $port)"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
_validate_tunnel_addr "--tunnel-tcp" "$TUNNEL_TCP"
|
||||
_validate_tunnel_addr "--tunnel-udp" "$TUNNEL_UDP"
|
||||
|
||||
# --tunnels / --tunnel-tcp implies --caddy
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
USE_CADDY=true
|
||||
fi
|
||||
if [[ -n "$TUNNEL_TCP" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
err "--tunnel-tcp and --domain are mutually exclusive."
|
||||
exit 1
|
||||
fi
|
||||
if [[ -n "$TUNNEL_TCP" ]] && [[ -n "$CUSTOM_IP" ]]; then
|
||||
err "--tunnel-tcp and --ip are mutually exclusive."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# --- Save CLI args for config memory (re-run without flags) ---
|
||||
if [[ $# -gt 0 ]]; then
|
||||
mkdir -p "$ROOT_DIR/data"
|
||||
@@ -505,6 +597,138 @@ if [[ "$HAS_OVERRIDES" == "true" ]]; then
|
||||
MODE_DISPLAY="$MODE_DISPLAY (overrides: transcript=$EFF_TRANSCRIPT, diarization=$EFF_DIARIZATION, translation=$EFF_TRANSLATION, padding=$EFF_PADDING, mixdown=$EFF_MIXDOWN)"
|
||||
fi
|
||||
|
||||
# =========================================================
|
||||
# LiveKit config generation helper
|
||||
# =========================================================
|
||||
_generate_livekit_config() {
|
||||
local lk_key lk_secret lk_url
|
||||
lk_key=$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY" || true)
|
||||
lk_secret=$(env_get "$SERVER_ENV" "LIVEKIT_API_SECRET" || true)
|
||||
lk_url=$(env_get "$SERVER_ENV" "LIVEKIT_URL" || true)
|
||||
|
||||
if [[ -z "$lk_key" ]] || [[ -z "$lk_secret" ]]; then
|
||||
warn "LIVEKIT_API_KEY or LIVEKIT_API_SECRET not set — generating random credentials"
|
||||
lk_key="reflector_$(openssl rand -hex 8)"
|
||||
lk_secret="$(openssl rand -hex 32)"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_API_KEY" "$lk_key"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_API_SECRET" "$lk_secret"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_URL" "ws://livekit-server:7880"
|
||||
ok "Generated LiveKit API credentials"
|
||||
fi
|
||||
|
||||
# Set internal URL for server->livekit communication
|
||||
if ! env_has_key "$SERVER_ENV" "LIVEKIT_URL" || [[ -z "$(env_get "$SERVER_ENV" "LIVEKIT_URL" || true)" ]]; then
|
||||
env_set "$SERVER_ENV" "LIVEKIT_URL" "ws://livekit-server:7880"
|
||||
fi
|
||||
|
||||
# Set public URL based on deployment mode.
|
||||
# When Caddy is enabled (HTTPS), LiveKit WebSocket is proxied through Caddy
|
||||
# at /lk-ws to avoid mixed-content blocking (browsers block ws:// on https:// pages).
|
||||
# When no Caddy, browsers connect directly to LiveKit on port 7880.
|
||||
local public_lk_url
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
# Tunnel mode: LiveKit signaling proxied through Caddy on the tunnel address
|
||||
public_lk_url="wss://${TUNNEL_TCP}/lk-ws"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
public_lk_url="wss://${CUSTOM_DOMAIN}/lk-ws"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
public_lk_url="wss://${PRIMARY_IP}/lk-ws"
|
||||
else
|
||||
public_lk_url="wss://localhost/lk-ws"
|
||||
fi
|
||||
else
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
public_lk_url="ws://${PRIMARY_IP}:7880"
|
||||
else
|
||||
public_lk_url="ws://localhost:7880"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "LIVEKIT_PUBLIC_URL" "$public_lk_url"
|
||||
env_set "$SERVER_ENV" "DEFAULT_VIDEO_PLATFORM" "livekit"
|
||||
|
||||
# LiveKit storage: always sync from transcript storage config.
|
||||
# Endpoint URL must match (changes between Caddy/no-Caddy runs).
|
||||
local ts_bucket ts_region ts_key ts_secret ts_endpoint
|
||||
ts_bucket=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_BUCKET_NAME" 2>/dev/null || echo "reflector-bucket")
|
||||
ts_region=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_REGION" 2>/dev/null || echo "us-east-1")
|
||||
ts_key=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID" 2>/dev/null || true)
|
||||
ts_secret=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY" 2>/dev/null || true)
|
||||
ts_endpoint=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" 2>/dev/null || true)
|
||||
env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_BUCKET_NAME" "$ts_bucket"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_REGION" "$ts_region"
|
||||
[[ -n "$ts_key" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID" "$ts_key"
|
||||
[[ -n "$ts_secret" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY" "$ts_secret"
|
||||
[[ -n "$ts_endpoint" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_ENDPOINT_URL" "$ts_endpoint"
|
||||
if [[ -z "$ts_key" ]] || [[ -z "$ts_secret" ]]; then
|
||||
warn "LiveKit storage: S3 credentials not found — Track Egress recording will fail!"
|
||||
warn "Configure LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID and LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY in server/.env"
|
||||
warn "Or run with --garage to auto-configure local S3 storage"
|
||||
else
|
||||
ok "LiveKit storage: synced from transcript storage config"
|
||||
fi
|
||||
|
||||
# Generate livekit.yaml
|
||||
# UDP tunnel mode: use single port matching the tunnel's public port
|
||||
local rtc_config
|
||||
if [[ -n "$TUNNEL_UDP" ]]; then
|
||||
local tunnel_udp_host tunnel_udp_port
|
||||
tunnel_udp_host="${TUNNEL_UDP%:*}"
|
||||
tunnel_udp_port="${TUNNEL_UDP##*:}"
|
||||
# Resolve tunnel hostname to IP for node_ip
|
||||
local tunnel_udp_ip
|
||||
tunnel_udp_ip=$(dig +short "$tunnel_udp_host" 2>/dev/null | head -1 || nslookup "$tunnel_udp_host" 2>/dev/null | grep "Address:" | tail -1 | awk '{print $2}' || true)
|
||||
if [[ -z "$tunnel_udp_ip" ]]; then
|
||||
warn "Could not resolve UDP tunnel hostname: $tunnel_udp_host"
|
||||
warn "Set node_ip manually in livekit.yaml after setup"
|
||||
tunnel_udp_ip="0.0.0.0"
|
||||
fi
|
||||
rtc_config=" tcp_port: 7881
|
||||
udp_port: ${tunnel_udp_port}
|
||||
node_ip: ${tunnel_udp_ip}
|
||||
use_external_ip: false"
|
||||
ok "LiveKit UDP: single port ${tunnel_udp_port}, node_ip=${tunnel_udp_ip} (via tunnel)"
|
||||
else
|
||||
rtc_config=" tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300"
|
||||
fi
|
||||
|
||||
cat > "$ROOT_DIR/livekit.yaml" << LKEOF
|
||||
port: 7880
|
||||
rtc:
|
||||
${rtc_config}
|
||||
redis:
|
||||
address: redis:6379
|
||||
keys:
|
||||
${lk_key}: ${lk_secret}
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: ${lk_key}
|
||||
logging:
|
||||
level: info
|
||||
room:
|
||||
empty_timeout: 300
|
||||
max_participants: 0
|
||||
LKEOF
|
||||
ok "Generated livekit.yaml"
|
||||
|
||||
# Generate egress.yaml (Track Egress only — no composite video)
|
||||
cat > "$ROOT_DIR/egress.yaml" << EGEOF
|
||||
api_key: ${lk_key}
|
||||
api_secret: ${lk_secret}
|
||||
ws_url: ws://livekit-server:7880
|
||||
redis:
|
||||
address: redis:6379
|
||||
health_port: 7082
|
||||
log_level: info
|
||||
session_limits:
|
||||
file_output_max_duration: 4h
|
||||
EGEOF
|
||||
ok "Generated egress.yaml"
|
||||
}
|
||||
|
||||
# =========================================================
|
||||
# Step 0: Prerequisites
|
||||
# =========================================================
|
||||
@@ -727,7 +951,10 @@ step_server_env() {
|
||||
|
||||
# Public-facing URLs
|
||||
local server_base_url
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
# Tunnel mode: public URL is the tunnel address with HTTPS (Caddy terminates TLS)
|
||||
server_base_url="https://$TUNNEL_TCP"
|
||||
elif [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
server_base_url="https://$CUSTOM_DOMAIN"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -737,13 +964,23 @@ step_server_env() {
|
||||
fi
|
||||
else
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
server_base_url="http://$PRIMARY_IP"
|
||||
server_base_url="http://$PRIMARY_IP:1250"
|
||||
else
|
||||
server_base_url="http://localhost:1250"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "BASE_URL" "$server_base_url"
|
||||
env_set "$SERVER_ENV" "CORS_ORIGIN" "$server_base_url"
|
||||
# CORS: allow the frontend origin (port 3000, not the API port)
|
||||
local cors_origin="${server_base_url}"
|
||||
if [[ "$USE_CADDY" != "true" ]]; then
|
||||
# Without Caddy, frontend is on port 3000, API on 1250
|
||||
cors_origin="${server_base_url/:1250/:3000}"
|
||||
# Safety: if substitution didn't change anything, construct explicitly
|
||||
if [[ "$cors_origin" == "$server_base_url" ]] && [[ -n "$PRIMARY_IP" ]]; then
|
||||
cors_origin="http://${PRIMARY_IP}:3000"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "CORS_ORIGIN" "$cors_origin"
|
||||
|
||||
# WebRTC: advertise host IP in ICE candidates so browsers can reach the server
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -951,8 +1188,31 @@ step_server_env() {
|
||||
# Hatchet is always required (file, live, and multitrack pipelines all use it)
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_SERVER_URL" "http://hatchet:8888"
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_HOST_PORT" "hatchet:7077"
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_TLS_STRATEGY" "none"
|
||||
ok "Hatchet connectivity configured (workflow engine for processing pipelines)"
|
||||
|
||||
# BIND_HOST controls whether server/web ports are exposed on all interfaces
|
||||
local root_env="$ROOT_DIR/.env"
|
||||
touch "$root_env"
|
||||
if [[ "$USE_CADDY" == "true" ]]; then
|
||||
# With Caddy, services stay on localhost (Caddy is the public entry point)
|
||||
env_set "$root_env" "BIND_HOST" "127.0.0.1"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
# Without Caddy + detected IP, expose on all interfaces for direct access
|
||||
env_set "$root_env" "BIND_HOST" "0.0.0.0"
|
||||
ok "BIND_HOST=0.0.0.0 (ports exposed for direct access)"
|
||||
fi
|
||||
|
||||
# UDP ports for LiveKit (used by docker-compose for port mapping)
|
||||
if [[ -n "$TUNNEL_UDP" ]]; then
|
||||
local tunnel_udp_port="${TUNNEL_UDP##*:}"
|
||||
env_set "$root_env" "LIVEKIT_UDP_PORTS" "${tunnel_udp_port}:${tunnel_udp_port}"
|
||||
ok "LiveKit UDP: single port ${tunnel_udp_port} (via tunnel)"
|
||||
else
|
||||
# Default: full range for direct access
|
||||
env_set "$root_env" "LIVEKIT_UDP_PORTS" "44200-44300:44200-44300"
|
||||
fi
|
||||
|
||||
ok "server/.env ready"
|
||||
}
|
||||
|
||||
@@ -971,7 +1231,9 @@ step_www_env() {
|
||||
|
||||
# Public-facing URL for frontend
|
||||
local base_url
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
base_url="https://$TUNNEL_TCP"
|
||||
elif [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
base_url="https://$CUSTOM_DOMAIN"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -980,18 +1242,26 @@ step_www_env() {
|
||||
base_url="https://localhost"
|
||||
fi
|
||||
else
|
||||
# No Caddy — user's proxy handles SSL. Use http for now, they'll override.
|
||||
# No Caddy — clients connect directly to services on their ports.
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
base_url="http://$PRIMARY_IP"
|
||||
base_url="http://$PRIMARY_IP:3000"
|
||||
else
|
||||
base_url="http://localhost"
|
||||
base_url="http://localhost:3000"
|
||||
fi
|
||||
fi
|
||||
|
||||
# API_URL: with Caddy, same origin (443 proxies both); without Caddy, API is on port 1250
|
||||
local api_url="$base_url"
|
||||
if [[ "$USE_CADDY" != "true" ]]; then
|
||||
api_url="${base_url/:3000/:1250}"
|
||||
# fallback if no port substitution happened (e.g. localhost without port)
|
||||
[[ "$api_url" == "$base_url" ]] && api_url="${base_url}:1250"
|
||||
fi
|
||||
|
||||
env_set "$WWW_ENV" "SITE_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "NEXTAUTH_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "NEXTAUTH_SECRET" "$NEXTAUTH_SECRET"
|
||||
env_set "$WWW_ENV" "API_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "API_URL" "$api_url"
|
||||
env_set "$WWW_ENV" "WEBSOCKET_URL" "auto"
|
||||
env_set "$WWW_ENV" "SERVER_API_URL" "http://server:1250"
|
||||
env_set "$WWW_ENV" "KV_URL" "redis://redis:6379"
|
||||
@@ -1014,14 +1284,17 @@ step_www_env() {
|
||||
fi
|
||||
|
||||
# Enable rooms if any video platform is configured in server/.env
|
||||
local _daily_key="" _whereby_key=""
|
||||
local _daily_key="" _whereby_key="" _livekit_key=""
|
||||
if env_has_key "$SERVER_ENV" "DAILY_API_KEY"; then
|
||||
_daily_key=$(env_get "$SERVER_ENV" "DAILY_API_KEY")
|
||||
fi
|
||||
if env_has_key "$SERVER_ENV" "WHEREBY_API_KEY"; then
|
||||
_whereby_key=$(env_get "$SERVER_ENV" "WHEREBY_API_KEY")
|
||||
fi
|
||||
if [[ -n "$_daily_key" ]] || [[ -n "$_whereby_key" ]]; then
|
||||
if env_has_key "$SERVER_ENV" "LIVEKIT_API_KEY"; then
|
||||
_livekit_key=$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY")
|
||||
fi
|
||||
if [[ -n "$_daily_key" ]] || [[ -n "$_whereby_key" ]] || [[ -n "$_livekit_key" ]]; then
|
||||
env_set "$WWW_ENV" "FEATURE_ROOMS" "true"
|
||||
ok "Rooms feature enabled (video platform configured)"
|
||||
fi
|
||||
@@ -1110,7 +1383,13 @@ step_garage() {
|
||||
|
||||
# Write S3 credentials to server/.env
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_BACKEND" "aws"
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://garage:3900"
|
||||
# Endpoint URL: use public IP when no Caddy so presigned URLs work in the browser.
|
||||
# With Caddy, internal hostname is fine (Caddy proxies or browser never sees presigned URLs directly).
|
||||
if [[ "$USE_CADDY" != "true" ]] && [[ -n "$PRIMARY_IP" ]]; then
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://${PRIMARY_IP}:3900"
|
||||
else
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://garage:3900"
|
||||
fi
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_BUCKET_NAME" "reflector-media"
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_REGION" "garage"
|
||||
if [[ "$created_key" == "true" ]]; then
|
||||
@@ -1188,6 +1467,22 @@ step_caddyfile() {
|
||||
rm -rf "$caddyfile"
|
||||
fi
|
||||
|
||||
# LiveKit reverse proxy snippet (inserted into Caddyfile when --livekit is active)
|
||||
# LiveKit reverse proxy snippet (inserted into Caddyfile when --livekit is active).
|
||||
# Strips /lk-ws prefix so LiveKit server sees requests at its root /.
|
||||
local lk_proxy_block=""
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
lk_proxy_block="
|
||||
handle_path /lk-ws/* {
|
||||
reverse_proxy livekit-server:7880
|
||||
}
|
||||
handle_path /lk-ws {
|
||||
reverse_proxy livekit-server:7880
|
||||
}"
|
||||
fi
|
||||
|
||||
local hatchet_proxy_block=""
|
||||
|
||||
if [[ -n "$TLS_CERT_PATH" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
# Custom domain with user-provided TLS certificate (from --custom-ca directory)
|
||||
cat > "$caddyfile" << CADDYEOF
|
||||
@@ -1199,7 +1494,7 @@ $CUSTOM_DOMAIN {
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1216,7 +1511,7 @@ $CUSTOM_DOMAIN {
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1225,17 +1520,19 @@ CADDYEOF
|
||||
ok "Created Caddyfile for $CUSTOM_DOMAIN (Let's Encrypt auto-HTTPS)"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
# No domain, IP only: catch-all :443 with self-signed cert
|
||||
# (IP connections don't send SNI, so we can't match by address)
|
||||
# on_demand generates certs dynamically for any hostname/IP on first request
|
||||
cat > "$caddyfile" << CADDYEOF
|
||||
# Generated by setup-selfhosted.sh — self-signed cert for IP access
|
||||
:443 {
|
||||
tls internal
|
||||
tls internal {
|
||||
on_demand
|
||||
}
|
||||
handle /v1/* {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1249,21 +1546,8 @@ CADDYEOF
|
||||
ok "Caddyfile already exists"
|
||||
fi
|
||||
|
||||
# Add Hatchet dashboard route if Daily.co is detected
|
||||
if [[ "$DAILY_DETECTED" == "true" ]]; then
|
||||
if ! grep -q "hatchet" "$caddyfile" 2>/dev/null; then
|
||||
cat >> "$caddyfile" << CADDYEOF
|
||||
|
||||
# Hatchet workflow dashboard (Daily.co multitrack processing)
|
||||
:8888 {
|
||||
tls internal
|
||||
reverse_proxy hatchet:8888
|
||||
}
|
||||
CADDYEOF
|
||||
ok "Added Hatchet dashboard route to Caddyfile (port 8888)"
|
||||
else
|
||||
ok "Hatchet dashboard route already in Caddyfile"
|
||||
fi
|
||||
if [[ "$DAILY_DETECTED" == "true" ]] || [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
ok "Hatchet dashboard available at port 8888"
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -1467,7 +1751,7 @@ step_health() {
|
||||
info "Waiting for Hatchet workflow engine..."
|
||||
local hatchet_ok=false
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
if compose_cmd exec -T hatchet curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
hatchet_ok=true
|
||||
break
|
||||
fi
|
||||
@@ -1515,7 +1799,7 @@ step_hatchet_token() {
|
||||
# Wait for hatchet to be healthy
|
||||
local hatchet_ok=false
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
if compose_cmd exec -T hatchet curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
hatchet_ok=true
|
||||
break
|
||||
fi
|
||||
@@ -1586,12 +1870,19 @@ main() {
|
||||
[[ "$BUILD_IMAGES" == "true" ]] && echo " Build: from source"
|
||||
echo ""
|
||||
|
||||
# Detect primary IP
|
||||
PRIMARY_IP=""
|
||||
if [[ "$OS" == "Linux" ]]; then
|
||||
PRIMARY_IP=$(hostname -I 2>/dev/null | awk '{print $1}' || true)
|
||||
if [[ "$PRIMARY_IP" == "127."* ]] || [[ -z "$PRIMARY_IP" ]]; then
|
||||
PRIMARY_IP=$(ip -4 route get 1 2>/dev/null | sed -n 's/.*src \([0-9.]*\).*/\1/p' || true)
|
||||
# Detect primary IP (--ip overrides auto-detection)
|
||||
if [[ -n "$CUSTOM_IP" ]]; then
|
||||
PRIMARY_IP="$CUSTOM_IP"
|
||||
ok "Using provided IP: $PRIMARY_IP"
|
||||
else
|
||||
PRIMARY_IP=""
|
||||
if [[ "$OS" == "Linux" ]]; then
|
||||
PRIMARY_IP=$(hostname -I 2>/dev/null | awk '{print $1}' || true)
|
||||
if [[ "$PRIMARY_IP" == "127."* ]] || [[ -z "$PRIMARY_IP" ]]; then
|
||||
PRIMARY_IP=$(ip -4 route get 1 2>/dev/null | sed -n 's/.*src \([0-9.]*\).*/\1/p' || true)
|
||||
fi
|
||||
elif [[ "$OS" == "Darwin" ]]; then
|
||||
PRIMARY_IP=$(detect_lan_ip)
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1621,14 +1912,21 @@ main() {
|
||||
# Auto-detect video platforms from server/.env (after step_server_env so file exists)
|
||||
DAILY_DETECTED=false
|
||||
WHEREBY_DETECTED=false
|
||||
LIVEKIT_DETECTED=false
|
||||
if env_has_key "$SERVER_ENV" "DAILY_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "DAILY_API_KEY")" ]]; then
|
||||
DAILY_DETECTED=true
|
||||
fi
|
||||
if env_has_key "$SERVER_ENV" "WHEREBY_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "WHEREBY_API_KEY")" ]]; then
|
||||
WHEREBY_DETECTED=true
|
||||
fi
|
||||
# LiveKit: enabled via --livekit flag OR pre-existing LIVEKIT_API_KEY in env
|
||||
if [[ "$USE_LIVEKIT" == "true" ]]; then
|
||||
LIVEKIT_DETECTED=true
|
||||
elif env_has_key "$SERVER_ENV" "LIVEKIT_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY")" ]]; then
|
||||
LIVEKIT_DETECTED=true
|
||||
fi
|
||||
ANY_PLATFORM_DETECTED=false
|
||||
[[ "$DAILY_DETECTED" == "true" || "$WHEREBY_DETECTED" == "true" ]] && ANY_PLATFORM_DETECTED=true
|
||||
[[ "$DAILY_DETECTED" == "true" || "$WHEREBY_DETECTED" == "true" || "$LIVEKIT_DETECTED" == "true" ]] && ANY_PLATFORM_DETECTED=true
|
||||
|
||||
# Conditional profile activation for Daily.co
|
||||
if [[ "$DAILY_DETECTED" == "true" ]]; then
|
||||
@@ -1636,6 +1934,13 @@ main() {
|
||||
ok "Daily.co detected — enabling Hatchet workflow services"
|
||||
fi
|
||||
|
||||
# Conditional profile activation for LiveKit
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
COMPOSE_PROFILES+=("livekit")
|
||||
_generate_livekit_config
|
||||
ok "LiveKit enabled — livekit-server + livekit-egress"
|
||||
fi
|
||||
|
||||
# Generate .env.hatchet for hatchet dashboard config (always needed)
|
||||
local hatchet_server_url hatchet_cookie_domain
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
@@ -1683,10 +1988,12 @@ EOF
|
||||
echo " App: https://localhost (accept self-signed cert in browser)"
|
||||
echo " API: https://localhost/v1/"
|
||||
fi
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
echo " App: http://$PRIMARY_IP:3000"
|
||||
echo " API: http://$PRIMARY_IP:1250"
|
||||
else
|
||||
echo " No Caddy — point your reverse proxy at:"
|
||||
echo " Frontend: web:3000 (or localhost:3000 from host)"
|
||||
echo " API: server:1250 (or localhost:1250 from host)"
|
||||
echo " App: http://localhost:3000"
|
||||
echo " API: http://localhost:1250"
|
||||
fi
|
||||
echo ""
|
||||
if [[ "$HAS_OVERRIDES" == "true" ]]; then
|
||||
@@ -1702,6 +2009,11 @@ EOF
|
||||
[[ "$USES_OLLAMA" != "true" ]] && echo " LLM: External (configure in server/.env)"
|
||||
[[ "$DAILY_DETECTED" == "true" ]] && echo " Video: Daily.co (live rooms + multitrack processing via Hatchet)"
|
||||
[[ "$WHEREBY_DETECTED" == "true" ]] && echo " Video: Whereby (live rooms)"
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
echo " Video: LiveKit (self-hosted, live rooms + track egress)"
|
||||
[[ -n "$TUNNEL_TCP" ]] && echo " Tunnel: TCP=$TUNNEL_TCP"
|
||||
[[ -n "$TUNNEL_UDP" ]] && echo " UDP=$TUNNEL_UDP"
|
||||
fi
|
||||
[[ "$ANY_PLATFORM_DETECTED" != "true" ]] && echo " Video: None (rooms disabled)"
|
||||
if [[ "$USE_CUSTOM_CA" == "true" ]]; then
|
||||
echo " CA: Custom (certs/ca.crt)"
|
||||
|
||||
@@ -42,6 +42,7 @@ dependencies = [
|
||||
"pydantic>=2.12.5",
|
||||
"aiosmtplib>=3.0.0",
|
||||
"email-validator>=2.0.0",
|
||||
"livekit-api>=1.1.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
||||
@@ -15,6 +15,7 @@ from reflector.metrics import metrics_init
|
||||
from reflector.settings import settings
|
||||
from reflector.views.config import router as config_router
|
||||
from reflector.views.daily import router as daily_router
|
||||
from reflector.views.livekit import router as livekit_router
|
||||
from reflector.views.meetings import router as meetings_router
|
||||
from reflector.views.rooms import router as rooms_router
|
||||
from reflector.views.rtc_offer import router as rtc_offer_router
|
||||
@@ -112,6 +113,7 @@ app.include_router(config_router, prefix="/v1")
|
||||
app.include_router(zulip_router, prefix="/v1")
|
||||
app.include_router(whereby_router, prefix="/v1")
|
||||
app.include_router(daily_router, prefix="/v1/daily")
|
||||
app.include_router(livekit_router, prefix="/v1/livekit")
|
||||
if auth_router:
|
||||
app.include_router(auth_router, prefix="/v1")
|
||||
add_pagination(app)
|
||||
|
||||
@@ -165,6 +165,17 @@ class MeetingController:
|
||||
results = await get_database().fetch_all(query)
|
||||
return [Meeting(**result) for result in results]
|
||||
|
||||
async def get_all_inactive_livekit(self) -> list[Meeting]:
|
||||
"""Get inactive LiveKit meetings (for multitrack processing discovery)."""
|
||||
query = meetings.select().where(
|
||||
sa.and_(
|
||||
meetings.c.is_active == sa.false(),
|
||||
meetings.c.platform == "livekit",
|
||||
)
|
||||
)
|
||||
results = await get_database().fetch_all(query)
|
||||
return [Meeting(**result) for result in results]
|
||||
|
||||
async def get_by_room_name(
|
||||
self,
|
||||
room_name: str,
|
||||
|
||||
@@ -486,6 +486,14 @@ class TranscriptController:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
|
||||
async def get_by_meeting_id(self, meeting_id: str) -> Transcript | None:
|
||||
"""Get a transcript by meeting_id (first match)."""
|
||||
query = transcripts.select().where(transcripts.c.meeting_id == meeting_id)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
|
||||
async def get_by_recording_id(
|
||||
self, recording_id: str, **kwargs
|
||||
) -> Transcript | None:
|
||||
|
||||
@@ -273,8 +273,10 @@ def with_error_handling(
|
||||
)
|
||||
@with_error_handling(TaskName.GET_RECORDING)
|
||||
async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
"""Fetch recording metadata from Daily.co API."""
|
||||
ctx.log(f"get_recording: starting for recording_id={input.recording_id}")
|
||||
"""Fetch recording metadata. Platform-aware: Daily calls API, LiveKit skips."""
|
||||
ctx.log(
|
||||
f"get_recording: starting for recording_id={input.recording_id}, platform={input.source_platform}"
|
||||
)
|
||||
ctx.log(
|
||||
f"get_recording: transcript_id={input.transcript_id}, room_id={input.room_id}"
|
||||
)
|
||||
@@ -299,6 +301,18 @@ async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
)
|
||||
ctx.log(f"get_recording: status set to 'processing' and broadcasted")
|
||||
|
||||
# LiveKit: no external API call needed — metadata comes from S3 track listing
|
||||
if input.source_platform == "livekit":
|
||||
ctx.log(
|
||||
"get_recording: LiveKit platform — skipping API call (metadata from S3)"
|
||||
)
|
||||
return RecordingResult(
|
||||
id=input.recording_id,
|
||||
mtg_session_id=None,
|
||||
duration=0, # Duration calculated from tracks later
|
||||
)
|
||||
|
||||
# Daily.co: fetch recording metadata from API
|
||||
if not settings.DAILY_API_KEY:
|
||||
ctx.log("get_recording: ERROR - DAILY_API_KEY not configured")
|
||||
raise ValueError("DAILY_API_KEY not configured")
|
||||
@@ -332,11 +346,12 @@ async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
)
|
||||
@with_error_handling(TaskName.GET_PARTICIPANTS)
|
||||
async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsResult:
|
||||
"""Fetch participant list from Daily.co API and update transcript in database."""
|
||||
ctx.log(f"get_participants: transcript_id={input.transcript_id}")
|
||||
"""Fetch participant list and update transcript. Platform-aware."""
|
||||
ctx.log(
|
||||
f"get_participants: transcript_id={input.transcript_id}, platform={input.source_platform}"
|
||||
)
|
||||
|
||||
recording = ctx.task_output(get_recording)
|
||||
mtg_session_id = recording.mtg_session_id
|
||||
async with fresh_db_connection():
|
||||
from reflector.db.transcripts import ( # noqa: PLC0415
|
||||
TranscriptDuration,
|
||||
@@ -347,8 +362,8 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
transcript = await transcripts_controller.get_by_id(input.transcript_id)
|
||||
if not transcript:
|
||||
raise ValueError(f"Transcript {input.transcript_id} not found")
|
||||
# Note: title NOT cleared - preserves existing titles
|
||||
# Duration from Daily API (seconds -> milliseconds) - master source
|
||||
|
||||
# Duration from recording metadata (seconds -> milliseconds)
|
||||
duration_ms = recording.duration * 1000 if recording.duration else 0
|
||||
await transcripts_controller.update(
|
||||
transcript,
|
||||
@@ -360,65 +375,141 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
},
|
||||
)
|
||||
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
|
||||
mtg_session_id = assert_non_none_and_non_empty(
|
||||
mtg_session_id, "mtg_session_id is required"
|
||||
)
|
||||
daily_api_key = assert_non_none_and_non_empty(
|
||||
settings.DAILY_API_KEY, "DAILY_API_KEY is required"
|
||||
)
|
||||
|
||||
async with DailyApiClient(
|
||||
api_key=daily_api_key, base_url=settings.DAILY_API_URL
|
||||
) as client:
|
||||
participants = await client.get_meeting_participants(mtg_session_id)
|
||||
|
||||
id_to_name = {}
|
||||
id_to_user_id = {}
|
||||
for p in participants.data:
|
||||
if p.user_name:
|
||||
id_to_name[p.participant_id] = p.user_name
|
||||
if p.user_id:
|
||||
id_to_user_id[p.participant_id] = p.user_id
|
||||
|
||||
track_keys = [t["s3_key"] for t in input.tracks]
|
||||
cam_audio_keys = filter_cam_audio_tracks(track_keys)
|
||||
if duration_ms:
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
|
||||
participants_list: list[ParticipantInfo] = []
|
||||
for idx, key in enumerate(cam_audio_keys):
|
||||
|
||||
if input.source_platform == "livekit":
|
||||
# LiveKit: participant identity is in the track dict or can be parsed from filepath
|
||||
from reflector.utils.livekit import (
|
||||
parse_livekit_track_filepath, # noqa: PLC0415
|
||||
)
|
||||
|
||||
# Look up identity → Reflector user_id mapping from Redis
|
||||
# (stored at join time in rooms.py)
|
||||
identity_to_user_id: dict[str, str] = {}
|
||||
try:
|
||||
parsed = parse_daily_recording_filename(key)
|
||||
participant_id = parsed.participant_id
|
||||
except ValueError as e:
|
||||
logger.error(
|
||||
"Failed to parse Daily recording filename",
|
||||
error=str(e),
|
||||
key=key,
|
||||
from reflector.db.meetings import (
|
||||
meetings_controller as mc, # noqa: PLC0415
|
||||
)
|
||||
from reflector.redis_cache import (
|
||||
get_async_redis_client, # noqa: PLC0415
|
||||
)
|
||||
continue
|
||||
|
||||
default_name = f"Speaker {idx}"
|
||||
name = id_to_name.get(participant_id, default_name)
|
||||
user_id = id_to_user_id.get(participant_id)
|
||||
meeting = (
|
||||
await mc.get_by_id(transcript.meeting_id)
|
||||
if transcript.meeting_id
|
||||
else None
|
||||
)
|
||||
if meeting:
|
||||
redis_client = await get_async_redis_client()
|
||||
mapping_key = f"livekit:participant_map:{meeting.room_name}"
|
||||
raw_map = await redis_client.hgetall(mapping_key)
|
||||
identity_to_user_id = {
|
||||
k.decode() if isinstance(k, bytes) else k: v.decode()
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
for k, v in raw_map.items()
|
||||
}
|
||||
ctx.log(
|
||||
f"get_participants: loaded {len(identity_to_user_id)} identity→user_id mappings from Redis"
|
||||
)
|
||||
except Exception as e:
|
||||
ctx.log(
|
||||
f"get_participants: could not load identity map from Redis: {e}"
|
||||
)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=participant_id, speaker=idx, name=name, user_id=user_id
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=participant_id,
|
||||
user_name=name,
|
||||
for idx, track in enumerate(input.tracks):
|
||||
identity = track.get("participant_identity")
|
||||
if not identity:
|
||||
# Reprocess path: parse from S3 key
|
||||
try:
|
||||
parsed = parse_livekit_track_filepath(track["s3_key"])
|
||||
identity = parsed.participant_identity
|
||||
except (ValueError, KeyError):
|
||||
identity = f"speaker-{idx}"
|
||||
|
||||
# Strip the uuid suffix from identity for display name
|
||||
# e.g., "Juan-2bcea0" → "Juan"
|
||||
display_name = (
|
||||
identity.rsplit("-", 1)[0] if "-" in identity else identity
|
||||
)
|
||||
reflector_user_id = identity_to_user_id.get(identity)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=identity,
|
||||
speaker=idx,
|
||||
name=display_name,
|
||||
user_id=reflector_user_id,
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=identity,
|
||||
user_name=display_name,
|
||||
speaker=idx,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# Daily.co: fetch participant names from API
|
||||
mtg_session_id = recording.mtg_session_id
|
||||
mtg_session_id = assert_non_none_and_non_empty(
|
||||
mtg_session_id, "mtg_session_id is required"
|
||||
)
|
||||
daily_api_key = assert_non_none_and_non_empty(
|
||||
settings.DAILY_API_KEY, "DAILY_API_KEY is required"
|
||||
)
|
||||
|
||||
async with DailyApiClient(
|
||||
api_key=daily_api_key, base_url=settings.DAILY_API_URL
|
||||
) as client:
|
||||
participants = await client.get_meeting_participants(mtg_session_id)
|
||||
|
||||
id_to_name = {}
|
||||
id_to_user_id = {}
|
||||
for p in participants.data:
|
||||
if p.user_name:
|
||||
id_to_name[p.participant_id] = p.user_name
|
||||
if p.user_id:
|
||||
id_to_user_id[p.participant_id] = p.user_id
|
||||
|
||||
track_keys = [t["s3_key"] for t in input.tracks]
|
||||
cam_audio_keys = filter_cam_audio_tracks(track_keys)
|
||||
|
||||
for idx, key in enumerate(cam_audio_keys):
|
||||
try:
|
||||
parsed = parse_daily_recording_filename(key)
|
||||
participant_id = parsed.participant_id
|
||||
except ValueError as e:
|
||||
logger.error(
|
||||
"Failed to parse Daily recording filename",
|
||||
error=str(e),
|
||||
key=key,
|
||||
)
|
||||
continue
|
||||
|
||||
default_name = f"Speaker {idx}"
|
||||
name = id_to_name.get(participant_id, default_name)
|
||||
user_id = id_to_user_id.get(participant_id)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=participant_id, speaker=idx, name=name, user_id=user_id
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=participant_id,
|
||||
user_name=name,
|
||||
speaker=idx,
|
||||
)
|
||||
)
|
||||
|
||||
ctx.log(f"get_participants complete: {len(participants_list)} participants")
|
||||
|
||||
@@ -433,6 +524,7 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[get_participants],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=3,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=30,
|
||||
@@ -440,11 +532,66 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
@with_error_handling(TaskName.PROCESS_TRACKS)
|
||||
async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksResult:
|
||||
"""Spawn child workflows for each track (dynamic fan-out)."""
|
||||
ctx.log(f"process_tracks: spawning {len(input.tracks)} track workflows")
|
||||
ctx.log(
|
||||
f"process_tracks: spawning {len(input.tracks)} track workflows, platform={input.source_platform}"
|
||||
)
|
||||
|
||||
participants_result = ctx.task_output(get_participants)
|
||||
source_language = participants_result.source_language
|
||||
|
||||
# For LiveKit: calculate padding offsets from filename timestamps.
|
||||
# OGG files don't have embedded start_time metadata, so we pre-calculate.
|
||||
track_padding: dict[int, float] = {}
|
||||
if input.source_platform == "livekit":
|
||||
from datetime import datetime # noqa: PLC0415
|
||||
|
||||
from reflector.utils.livekit import (
|
||||
parse_livekit_track_filepath, # noqa: PLC0415
|
||||
)
|
||||
|
||||
timestamps = []
|
||||
for i, track in enumerate(input.tracks):
|
||||
ts_str = track.get("timestamp")
|
||||
if ts_str:
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_str)
|
||||
timestamps.append((i, ts))
|
||||
except (ValueError, TypeError):
|
||||
ctx.log(
|
||||
f"process_tracks: could not parse timestamp for track {i}: {ts_str}"
|
||||
)
|
||||
timestamps.append((i, None))
|
||||
else:
|
||||
# Reprocess path: parse timestamp from S3 key
|
||||
try:
|
||||
parsed = parse_livekit_track_filepath(track["s3_key"])
|
||||
timestamps.append((i, parsed.timestamp))
|
||||
ctx.log(
|
||||
f"process_tracks: parsed timestamp from S3 key for track {i}: {parsed.timestamp}"
|
||||
)
|
||||
except (ValueError, KeyError):
|
||||
timestamps.append((i, None))
|
||||
|
||||
valid_timestamps = [(i, ts) for i, ts in timestamps if ts is not None]
|
||||
if valid_timestamps:
|
||||
earliest = min(ts for _, ts in valid_timestamps)
|
||||
# LiveKit Track Egress outputs OGG/Opus files, but the transcription
|
||||
# service only accepts WebM. The padding step converts OGG→WebM as a
|
||||
# side effect of applying the adelay filter. For the earliest track
|
||||
# (offset=0), we use a minimal padding to force this conversion.
|
||||
LIVEKIT_MIN_PADDING_SECONDS = (
|
||||
0.001 # 1ms — inaudible, forces OGG→WebM conversion
|
||||
)
|
||||
|
||||
for i, ts in valid_timestamps:
|
||||
offset = (ts - earliest).total_seconds()
|
||||
if offset == 0.0:
|
||||
offset = LIVEKIT_MIN_PADDING_SECONDS
|
||||
track_padding[i] = offset
|
||||
ctx.log(
|
||||
f"process_tracks: track {i} padding={offset}s (from filename timestamp)"
|
||||
)
|
||||
|
||||
bulk_runs = [
|
||||
track_workflow.create_bulk_run_item(
|
||||
input=TrackInput(
|
||||
@@ -454,6 +601,7 @@ async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksRes
|
||||
transcript_id=input.transcript_id,
|
||||
language=source_language,
|
||||
source_platform=input.source_platform,
|
||||
padding_seconds=track_padding.get(i),
|
||||
)
|
||||
)
|
||||
for i, track in enumerate(input.tracks)
|
||||
@@ -516,6 +664,7 @@ async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksRes
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[process_tracks],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_AUDIO),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=2,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=15,
|
||||
@@ -605,13 +754,31 @@ async def mixdown_tracks(input: PipelineInput, ctx: Context) -> MixdownResult:
|
||||
# else: modal backend already uploaded to output_url
|
||||
|
||||
async with fresh_db_connection():
|
||||
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
|
||||
from reflector.db.transcripts import ( # noqa: PLC0415
|
||||
TranscriptDuration,
|
||||
transcripts_controller,
|
||||
)
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(input.transcript_id)
|
||||
if transcript:
|
||||
await transcripts_controller.update(
|
||||
transcript, {"audio_location": "storage"}
|
||||
)
|
||||
update_data = {"audio_location": "storage"}
|
||||
# Set duration from mixdown if not already set (LiveKit: duration starts at 0)
|
||||
if not transcript.duration or transcript.duration == 0:
|
||||
update_data["duration"] = result.duration_ms
|
||||
await transcripts_controller.update(transcript, update_data)
|
||||
|
||||
# Broadcast duration update if it was missing
|
||||
if not transcript.duration or transcript.duration == 0:
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=result.duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
ctx.log(
|
||||
f"mixdown_tracks: set duration={result.duration_ms}ms from mixdown"
|
||||
)
|
||||
|
||||
ctx.log(f"mixdown_tracks complete: {result.size} bytes to {storage_path}")
|
||||
|
||||
@@ -695,6 +862,7 @@ async def generate_waveform(input: PipelineInput, ctx: Context) -> WaveformResul
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[process_tracks],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_EXTRA_HEAVY),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=3,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=30,
|
||||
|
||||
@@ -37,6 +37,9 @@ class TrackInput(BaseModel):
|
||||
transcript_id: str
|
||||
language: str = "en"
|
||||
source_platform: str = "daily"
|
||||
# Pre-calculated padding in seconds (from filename timestamps for LiveKit).
|
||||
# When set, overrides container metadata extraction for start_time.
|
||||
padding_seconds: float | None = None
|
||||
|
||||
|
||||
hatchet = HatchetClientManager.get_client()
|
||||
@@ -53,15 +56,19 @@ track_workflow = hatchet.workflow(name="TrackProcessing", input_validator=TrackI
|
||||
async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
|
||||
"""Pad single audio track with silence for alignment.
|
||||
|
||||
Extracts stream.start_time from WebM container metadata and applies
|
||||
silence padding using PyAV filter graph (adelay).
|
||||
For Daily: extracts stream.start_time from WebM container metadata.
|
||||
For LiveKit: uses pre-calculated padding_seconds from filename timestamps
|
||||
(OGG files don't have embedded start_time metadata).
|
||||
"""
|
||||
ctx.log(f"pad_track: track {input.track_index}, s3_key={input.s3_key}")
|
||||
ctx.log(
|
||||
f"pad_track: track {input.track_index}, s3_key={input.s3_key}, padding_seconds={input.padding_seconds}"
|
||||
)
|
||||
logger.info(
|
||||
"[Hatchet] pad_track",
|
||||
track_index=input.track_index,
|
||||
s3_key=input.s3_key,
|
||||
transcript_id=input.transcript_id,
|
||||
padding_seconds=input.padding_seconds,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -79,10 +86,16 @@ async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
|
||||
bucket=input.bucket_name,
|
||||
)
|
||||
|
||||
with av.open(source_url) as in_container:
|
||||
start_time_seconds = extract_stream_start_time_from_container(
|
||||
in_container, input.track_index, logger=logger
|
||||
)
|
||||
if input.padding_seconds is not None:
|
||||
# Pre-calculated offset (LiveKit: from filename timestamps)
|
||||
start_time_seconds = input.padding_seconds
|
||||
ctx.log(f"pad_track: using pre-calculated padding={start_time_seconds}s")
|
||||
else:
|
||||
# Extract from container metadata (Daily: WebM start_time)
|
||||
with av.open(source_url) as in_container:
|
||||
start_time_seconds = extract_stream_start_time_from_container(
|
||||
in_container, input.track_index, logger=logger
|
||||
)
|
||||
|
||||
# If no padding needed, return original S3 key
|
||||
if start_time_seconds <= 0:
|
||||
|
||||
12
server/reflector/livekit_api/__init__.py
Normal file
12
server/reflector/livekit_api/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""
|
||||
LiveKit API Module — thin wrapper around the livekit-api SDK.
|
||||
"""
|
||||
|
||||
from .client import LiveKitApiClient
|
||||
from .webhooks import create_webhook_receiver, verify_webhook
|
||||
|
||||
__all__ = [
|
||||
"LiveKitApiClient",
|
||||
"create_webhook_receiver",
|
||||
"verify_webhook",
|
||||
]
|
||||
195
server/reflector/livekit_api/client.py
Normal file
195
server/reflector/livekit_api/client.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
LiveKit API client wrapping the official livekit-api Python SDK.
|
||||
|
||||
Handles room management, access tokens, and Track Egress for
|
||||
per-participant audio recording to S3-compatible storage.
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from livekit.api import (
|
||||
AccessToken,
|
||||
AutoTrackEgress,
|
||||
CreateRoomRequest,
|
||||
DeleteRoomRequest,
|
||||
DirectFileOutput,
|
||||
EgressInfo,
|
||||
ListEgressRequest,
|
||||
ListParticipantsRequest,
|
||||
LiveKitAPI,
|
||||
Room,
|
||||
RoomEgress,
|
||||
S3Upload,
|
||||
StopEgressRequest,
|
||||
TrackEgressRequest,
|
||||
VideoGrants,
|
||||
)
|
||||
|
||||
|
||||
class LiveKitApiClient:
    """Thin wrapper around LiveKitAPI for Reflector's needs.

    Bundles room management, access-token minting, and Track Egress
    (per-participant audio recording to S3-compatible storage) behind a
    single object that owns one LiveKitAPI connection.
    """

    def __init__(
        self,
        url: str,
        api_key: str,
        api_secret: str,
        s3_bucket: str | None = None,
        s3_region: str | None = None,
        s3_access_key: str | None = None,
        s3_secret_key: str | None = None,
        s3_endpoint: str | None = None,
    ):
        # S3 settings are optional — only required when egress recording is used;
        # _build_s3_upload() validates them lazily at egress time.
        self._url = url
        self._api_key = api_key
        self._api_secret = api_secret
        self._s3_bucket = s3_bucket
        # Default region when none is configured.
        self._s3_region = s3_region or "us-east-1"
        self._s3_access_key = s3_access_key
        self._s3_secret_key = s3_secret_key
        self._s3_endpoint = s3_endpoint
        self._api = LiveKitAPI(url=url, api_key=api_key, api_secret=api_secret)

    # ── Room management ──────────────────────────────────────────

    async def create_room(
        self,
        name: str,
        empty_timeout: int = 300,
        max_participants: int = 0,
        enable_auto_track_egress: bool = False,
        track_egress_filepath: str = "livekit/{room_name}/{publisher_identity}-{time}",
    ) -> Room:
        """Create a LiveKit room.

        Args:
            name: Room name (unique identifier).
            empty_timeout: Seconds to keep room alive after last participant leaves.
            max_participants: 0 = unlimited.
            enable_auto_track_egress: If True, automatically record each participant's
                audio track to S3 as a separate file (OGG/Opus).
            track_egress_filepath: S3 filepath template for auto track egress.
                Supports {room_name}, {publisher_identity}, {time}.

        Raises:
            ValueError: If auto track egress is requested but S3 credentials
                are not configured (via _build_s3_upload).
        """
        egress = None
        if enable_auto_track_egress:
            egress = RoomEgress(
                tracks=AutoTrackEgress(
                    filepath=track_egress_filepath,
                    s3=self._build_s3_upload(),
                ),
            )

        req = CreateRoomRequest(
            name=name,
            empty_timeout=empty_timeout,
            max_participants=max_participants,
            egress=egress,
        )
        return await self._api.room.create_room(req)

    async def delete_room(self, room_name: str) -> None:
        """Delete a LiveKit room by name."""
        await self._api.room.delete_room(DeleteRoomRequest(room=room_name))

    async def list_participants(self, room_name: str):
        """Return the participant list reported by the server for a room."""
        resp = await self._api.room.list_participants(
            ListParticipantsRequest(room=room_name)
        )
        return resp.participants

    # ── Access tokens ────────────────────────────────────────────

    def create_access_token(
        self,
        room_name: str,
        participant_identity: str,
        participant_name: str | None = None,
        can_publish: bool = True,
        can_subscribe: bool = True,
        room_admin: bool = False,
        ttl_seconds: int = 86400,
    ) -> str:
        """Generate a JWT access token for a participant.

        Args:
            room_name: Room the token grants access to.
            participant_identity: Unique identity embedded in the JWT.
            participant_name: Display name; defaults to the identity.
            can_publish: Whether the participant may publish tracks.
            can_subscribe: Whether the participant may subscribe to tracks.
            room_admin: Grants LiveKit room-admin permissions.
            ttl_seconds: Token lifetime (default 24h).
        """
        token = AccessToken(
            api_key=self._api_key,
            api_secret=self._api_secret,
        )
        # NOTE(review): sets identity/name/ttl via attribute assignment; the SDK
        # also exposes with_identity()/with_name()/with_ttl() builders — confirm
        # plain assignment is supported by the pinned livekit-api version.
        token.identity = participant_identity
        token.name = participant_name or participant_identity
        token.ttl = timedelta(seconds=ttl_seconds)
        token.with_grants(
            VideoGrants(
                room_join=True,
                room=room_name,
                can_publish=can_publish,
                can_subscribe=can_subscribe,
                room_admin=room_admin,
            )
        )
        return token.to_jwt()

    # ── Track Egress (per-participant audio recording) ───────────

    def _build_s3_upload(self) -> S3Upload:
        """Build S3Upload config for egress output.

        Raises:
            ValueError: If bucket or access/secret keys are not configured.
        """
        if not all([self._s3_bucket, self._s3_access_key, self._s3_secret_key]):
            raise ValueError(
                "S3 storage not configured for LiveKit egress. "
                "Set LIVEKIT_STORAGE_AWS_* environment variables."
            )
        kwargs = {
            "access_key": self._s3_access_key,
            "secret": self._s3_secret_key,
            "bucket": self._s3_bucket,
            "region": self._s3_region,
            "force_path_style": True,  # Required for Garage/MinIO
        }
        # Custom endpoint is only set for S3-compatible stores (Garage/MinIO);
        # omitted for plain AWS S3.
        if self._s3_endpoint:
            kwargs["endpoint"] = self._s3_endpoint
        return S3Upload(**kwargs)

    async def start_track_egress(
        self,
        room_name: str,
        track_sid: str,
        s3_filepath: str,
    ) -> EgressInfo:
        """Start Track Egress for a single audio track (writes OGG/Opus to S3).

        Args:
            room_name: LiveKit room name.
            track_sid: Track SID to record.
            s3_filepath: S3 key path for the output file.

        Raises:
            ValueError: If S3 credentials are not configured.
        """
        req = TrackEgressRequest(
            room_name=room_name,
            track_id=track_sid,
            file=DirectFileOutput(
                filepath=s3_filepath,
                s3=self._build_s3_upload(),
            ),
        )
        return await self._api.egress.start_track_egress(req)

    async def list_egress(self, room_name: str | None = None) -> list[EgressInfo]:
        """List egress operations, optionally filtered to a single room."""
        req = ListEgressRequest()
        # Protobuf field is only set when a filter is requested.
        if room_name:
            req.room_name = room_name
        resp = await self._api.egress.list_egress(req)
        return list(resp.items)

    async def stop_egress(self, egress_id: str) -> EgressInfo:
        """Stop a running egress operation by its ID."""
        return await self._api.egress.stop_egress(
            StopEgressRequest(egress_id=egress_id)
        )

    # ── Cleanup ──────────────────────────────────────────────────

    async def close(self):
        """Close the underlying LiveKitAPI client."""
        await self._api.aclose()

    async def __aenter__(self):
        # Supports `async with LiveKitApiClient(...) as client:` usage.
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()
|
||||
52
server/reflector/livekit_api/webhooks.py
Normal file
52
server/reflector/livekit_api/webhooks.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
LiveKit webhook verification and event parsing.
|
||||
|
||||
LiveKit signs webhooks using the API secret as a JWT.
|
||||
The WebhookReceiver from the SDK handles verification.
|
||||
"""
|
||||
|
||||
from livekit.api import TokenVerifier, WebhookEvent, WebhookReceiver
|
||||
|
||||
from reflector.logger import logger
|
||||
|
||||
|
||||
def create_webhook_receiver(api_key: str, api_secret: str) -> WebhookReceiver:
    """Build a WebhookReceiver that validates LiveKit webhook JWT signatures."""
    verifier = TokenVerifier(api_key=api_key, api_secret=api_secret)
    return WebhookReceiver(token_verifier=verifier)
|
||||
|
||||
|
||||
def verify_webhook(
    receiver: WebhookReceiver,
    body: str | bytes,
    auth_header: str,
) -> WebhookEvent | None:
    """Verify and parse a LiveKit webhook event.

    Returns the parsed WebhookEvent if valid, None if verification fails.
    Failure logging is split by severity:
    - WARNING: invalid signature, expired token, malformed JWT (expected rejections)
    - ERROR: unexpected exceptions (potential bugs or attacks)
    """
    payload = body.decode("utf-8") if isinstance(body, bytes) else body
    try:
        return receiver.receive(payload, auth_header)
    except (ValueError, KeyError) as exc:
        # Expected verification failures (bad JWT, wrong key, expired, malformed)
        logger.warning(
            "LiveKit webhook verification failed",
            error=str(exc),
            error_type=type(exc).__name__,
        )
    except Exception as exc:
        # Unexpected errors — log at ERROR for visibility (potential attack or SDK bug)
        logger.error(
            "Unexpected error during LiveKit webhook verification",
            error=str(exc),
            error_type=type(exc).__name__,
            exc_info=True,
        )
    return None
|
||||
@@ -1,5 +1,6 @@
|
||||
from typing import Literal
|
||||
|
||||
Platform = Literal["whereby", "daily"]
|
||||
Platform = Literal["whereby", "daily", "livekit"]
|
||||
WHEREBY_PLATFORM: Platform = "whereby"
|
||||
DAILY_PLATFORM: Platform = "daily"
|
||||
LIVEKIT_PLATFORM: Platform = "livekit"
|
||||
|
||||
@@ -155,12 +155,17 @@ async def prepare_transcript_processing(validation: ValidationOk) -> PrepareResu
|
||||
)
|
||||
|
||||
if track_keys:
|
||||
# Detect platform from recording ID prefix
|
||||
source_platform = (
|
||||
"livekit" if recording_id and recording_id.startswith("lk-") else "daily"
|
||||
)
|
||||
return MultitrackProcessingConfig(
|
||||
bucket_name=bucket_name, # type: ignore (validated above)
|
||||
track_keys=track_keys,
|
||||
transcript_id=validation.transcript_id,
|
||||
recording_id=recording_id,
|
||||
room_id=validation.room_id,
|
||||
source_platform=source_platform,
|
||||
)
|
||||
|
||||
return FileProcessingConfig(
|
||||
|
||||
@@ -195,6 +195,23 @@ class Settings(BaseSettings):
|
||||
DAILY_WEBHOOK_UUID: str | None = (
|
||||
None # Webhook UUID for this environment. Not used by production code
|
||||
)
|
||||
|
||||
# LiveKit integration (self-hosted open-source video platform)
|
||||
LIVEKIT_URL: str | None = (
|
||||
None # e.g. ws://livekit:7880 (internal) or wss://livekit.example.com
|
||||
)
|
||||
LIVEKIT_API_KEY: str | None = None
|
||||
LIVEKIT_API_SECRET: str | None = None
|
||||
LIVEKIT_WEBHOOK_SECRET: str | None = None # Defaults to API_SECRET if not set
|
||||
# LiveKit egress S3 storage (Track Egress writes per-participant audio here)
|
||||
LIVEKIT_STORAGE_AWS_BUCKET_NAME: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_REGION: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_ENDPOINT_URL: str | None = None # For Garage/MinIO
|
||||
# Public URL for LiveKit (used in frontend room_url, e.g. wss://livekit.example.com)
|
||||
LIVEKIT_PUBLIC_URL: str | None = None
|
||||
|
||||
# Platform Configuration
|
||||
DEFAULT_VIDEO_PLATFORM: Platform = DAILY_PLATFORM
|
||||
|
||||
|
||||
@@ -57,6 +57,22 @@ def get_source_storage(platform: str) -> Storage:
|
||||
aws_secret_access_key=settings.WHEREBY_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
)
|
||||
|
||||
elif platform == "livekit":
|
||||
if (
|
||||
settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID
|
||||
and settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY
|
||||
and settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME
|
||||
):
|
||||
from reflector.storage.storage_aws import AwsStorage
|
||||
|
||||
return AwsStorage(
|
||||
aws_bucket_name=settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME,
|
||||
aws_region=settings.LIVEKIT_STORAGE_AWS_REGION or "us-east-1",
|
||||
aws_access_key_id=settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
aws_endpoint_url=settings.LIVEKIT_STORAGE_AWS_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
return get_transcripts_storage()
|
||||
|
||||
|
||||
|
||||
112
server/reflector/utils/livekit.py
Normal file
112
server/reflector/utils/livekit.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""
|
||||
LiveKit track file utilities.
|
||||
|
||||
Parse participant identity and timing from Auto Track Egress S3 filepaths.
|
||||
|
||||
Actual filepath format from LiveKit Auto Track Egress:
|
||||
livekit/{room_name}/{publisher_identity}-{ISO_timestamp}-{track_id}.{ext}
|
||||
|
||||
Examples:
|
||||
livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg
|
||||
livekit/myroom-20260401172036/juan2-63abcf-2026-04-01T195847-TR_AMyoSbM7tAQbYj.ogg
|
||||
livekit/myroom-20260401172036/EG_K5sipvfB5fTM.json (manifest, skip)
|
||||
livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195727-TR_VC679dgMQBdfhT.webm (video, skip)
|
||||
"""
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from reflector.utils.string import NonEmptyString
|
||||
|
||||
|
||||
@dataclass
class LiveKitTrackFile:
    """Parsed info from a LiveKit track egress filepath."""

    s3_key: str  # Original S3 key the fields below were parsed from
    room_name: str  # Path segment after "livekit/"
    participant_identity: str  # Publisher identity as it appears in the filename
    timestamp: datetime  # Parsed from ISO timestamp in filename (UTC)
    track_id: str  # LiveKit track ID (e.g., TR_AMR3SWs74Divho)


# Pattern: livekit/{room_name}/{identity}-{ISO_date}T{time}-{track_id}.{ext}
# The identity can contain alphanumeric, hyphens, underscores
# ISO timestamp is like 2026-04-01T195758
# Track ID starts with TR_
_TRACK_FILENAME_PATTERN = re.compile(
    r"^livekit/(?P<room_name>[^/]+)/(?P<identity>.+?)-(?P<timestamp>\d{4}-\d{2}-\d{2}T\d{6})-(?P<track_id>TR_\w+)\.(?P<ext>\w+)$"
)


def parse_livekit_track_filepath(s3_key: str) -> LiveKitTrackFile:
    """Parse a LiveKit track egress filepath into components.

    Args:
        s3_key: S3 key like 'livekit/myroom-20260401/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg'

    Returns:
        LiveKitTrackFile with parsed components.

    Raises:
        ValueError: If the filepath doesn't match the expected format.
    """
    match = _TRACK_FILENAME_PATTERN.match(s3_key)
    if not match:
        raise ValueError(
            f"LiveKit track filepath doesn't match expected format: {s3_key}"
        )

    # Parse ISO-ish timestamp (e.g., 2026-04-01T195758 → datetime)
    ts_str = match.group("timestamp")
    try:
        ts = datetime.strptime(ts_str, "%Y-%m-%dT%H%M%S").replace(tzinfo=timezone.utc)
    except ValueError as exc:
        # Chain the original strptime error so tracebacks show the root cause
        # (a bare re-raise inside `except ValueError` would hide it — B904).
        raise ValueError(f"Cannot parse timestamp '{ts_str}' from: {s3_key}") from exc

    return LiveKitTrackFile(
        s3_key=s3_key,
        room_name=match.group("room_name"),
        participant_identity=match.group("identity"),
        timestamp=ts,
        track_id=match.group("track_id"),
    )
|
||||
|
||||
|
||||
def filter_audio_tracks(s3_keys: list[str]) -> list[str]:
    """Keep only audio-track keys (.ogg); drops JSON manifests and video files."""
    audio_keys = []
    for key in s3_keys:
        if key.endswith(".ogg"):
            audio_keys.append(key)
    return audio_keys
|
||||
|
||||
|
||||
def calculate_track_offsets(
    tracks: list[LiveKitTrackFile],
) -> list[tuple[LiveKitTrackFile, float]]:
    """Calculate silence padding offset for each track.

    The earliest track starts at time zero. Each subsequent track
    gets (track_timestamp - earliest_timestamp) seconds of silence prepended.

    Returns:
        List of (track, offset_seconds) tuples, in the input order.
    """
    if not tracks:
        return []

    start = min(track.timestamp for track in tracks)
    offsets = []
    for track in tracks:
        delta = track.timestamp - start
        offsets.append((track, delta.total_seconds()))
    return offsets
|
||||
|
||||
|
||||
def extract_livekit_base_room_name(livekit_room_name: str) -> NonEmptyString:
    """Extract base room name from LiveKit timestamped room name.

    LiveKit rooms use the same naming as Daily: {base_name}-YYYYMMDDHHMMSS
    """
    # rpartition splits on the last "-"; when no separator exists the head is
    # empty, so fall back to the full name (same result as rsplit("-", 1)[0]).
    head, sep, _suffix = livekit_room_name.rpartition("-")
    base_name = head if sep else livekit_room_name
    assert base_name, f"Extracted base name is empty from: {livekit_room_name}"
    return NonEmptyString(base_name)
|
||||
|
||||
|
||||
def recording_lock_key(room_name: str) -> str:
    """Redis lock key for preventing duplicate processing."""
    return "livekit:processing:" + room_name
|
||||
@@ -1,7 +1,7 @@
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_dailyco_storage, get_whereby_storage
|
||||
|
||||
from ..schemas.platform import WHEREBY_PLATFORM, Platform
|
||||
from ..schemas.platform import LIVEKIT_PLATFORM, WHEREBY_PLATFORM, Platform
|
||||
from .base import VideoPlatformClient, VideoPlatformConfig
|
||||
from .registry import get_platform_client
|
||||
|
||||
@@ -44,6 +44,27 @@ def get_platform_config(platform: Platform) -> VideoPlatformConfig:
|
||||
s3_region=daily_storage.region,
|
||||
aws_role_arn=daily_storage.role_credential,
|
||||
)
|
||||
elif platform == LIVEKIT_PLATFORM:
|
||||
if not settings.LIVEKIT_URL:
|
||||
raise ValueError(
|
||||
"LIVEKIT_URL is required when platform='livekit'. "
|
||||
"Set LIVEKIT_URL environment variable."
|
||||
)
|
||||
if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
|
||||
raise ValueError(
|
||||
"LIVEKIT_API_KEY and LIVEKIT_API_SECRET are required when platform='livekit'. "
|
||||
"Set LIVEKIT_API_KEY and LIVEKIT_API_SECRET environment variables."
|
||||
)
|
||||
return VideoPlatformConfig(
|
||||
api_key=settings.LIVEKIT_API_KEY,
|
||||
webhook_secret=settings.LIVEKIT_WEBHOOK_SECRET
|
||||
or settings.LIVEKIT_API_SECRET,
|
||||
api_url=settings.LIVEKIT_URL,
|
||||
s3_bucket=settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME,
|
||||
s3_region=settings.LIVEKIT_STORAGE_AWS_REGION,
|
||||
aws_access_key_id=settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID,
|
||||
aws_access_key_secret=settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown platform: {platform}")
|
||||
|
||||
|
||||
192
server/reflector/video_platforms/livekit.py
Normal file
192
server/reflector/video_platforms/livekit.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""
|
||||
LiveKit video platform client for Reflector.
|
||||
|
||||
Self-hosted, open-source alternative to Daily.co.
|
||||
Uses Track Egress for per-participant audio recording (no composite video).
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urlencode
|
||||
from uuid import uuid4
|
||||
|
||||
from reflector.db.rooms import Room
|
||||
from reflector.livekit_api.client import LiveKitApiClient
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
from reflector.logger import logger
|
||||
from reflector.settings import settings
|
||||
|
||||
from ..schemas.platform import Platform
|
||||
from ..utils.string import NonEmptyString
|
||||
from .base import ROOM_PREFIX_SEPARATOR, VideoPlatformClient
|
||||
from .models import MeetingData, SessionData, VideoPlatformConfig
|
||||
|
||||
|
||||
class LiveKitClient(VideoPlatformClient):
|
||||
PLATFORM_NAME: Platform = "livekit"
|
||||
TIMESTAMP_FORMAT = "%Y%m%d%H%M%S"
|
||||
|
||||
    def __init__(self, config: VideoPlatformConfig):
        """Wire up the LiveKit API client and webhook receiver from platform config.

        The egress S3 endpoint comes straight from settings (Garage/MinIO support).
        """
        super().__init__(config)
        self._api_client = LiveKitApiClient(
            url=config.api_url or "",
            api_key=config.api_key,
            api_secret=config.webhook_secret,  # LiveKit uses API secret for both auth and webhooks
            # NOTE(review): config.webhook_secret defaults to the API secret but can
            # be overridden via LIVEKIT_WEBHOOK_SECRET; if overridden to a different
            # value, admin API calls would be signed with the wrong secret — confirm.
            s3_bucket=config.s3_bucket,
            s3_region=config.s3_region,
            s3_access_key=config.aws_access_key_id,
            s3_secret_key=config.aws_access_key_secret,
            s3_endpoint=settings.LIVEKIT_STORAGE_AWS_ENDPOINT_URL,
        )
        # Receiver validates the JWT LiveKit sends in the Authorization header.
        self._webhook_receiver = create_webhook_receiver(
            api_key=config.api_key,
            api_secret=config.webhook_secret,
        )
|
||||
|
||||
    async def create_meeting(
        self, room_name_prefix: NonEmptyString, end_date: datetime, room: Room
    ) -> MeetingData:
        """Create a LiveKit room for this meeting.

        LiveKit rooms are created explicitly via API. A new room is created
        for each Reflector meeting (same pattern as Daily.co).

        Args:
            room_name_prefix: Base name; a timestamp suffix is appended.
            end_date: Intended meeting end; drives the room's empty_timeout.
            room: Reflector room record; recording_type == "cloud" enables egress.

        Returns:
            MeetingData with the joinable room_url and LiveKit room SID.
        """
        now = datetime.now(timezone.utc)
        timestamp = now.strftime(self.TIMESTAMP_FORMAT)
        room_name = f"{room_name_prefix}{ROOM_PREFIX_SEPARATOR}{timestamp}"

        # Calculate empty_timeout from end_date (seconds until expiry)
        # Ensure end_date is timezone-aware for subtraction
        end_date_aware = (
            end_date if end_date.tzinfo else end_date.replace(tzinfo=timezone.utc)
        )
        remaining = int((end_date_aware - now).total_seconds())
        empty_timeout = max(300, min(remaining, 86400))  # 5 min to 24 hours

        # Enable auto track egress for cloud recording (per-participant audio to S3).
        # Gracefully degrade if S3 credentials are missing — room still works, just no recording.
        enable_recording = room.recording_type == "cloud"
        egress_enabled = False
        if enable_recording:
            try:
                # NOTE(review): reaches into the API client's private
                # _build_s3_upload() purely as a credentials check — consider
                # exposing a public has_s3_config() on LiveKitApiClient.
                self._api_client._build_s3_upload()  # Validate credentials exist
                egress_enabled = True
            except ValueError:
                logger.warning(
                    "S3 credentials not configured — room created without auto track egress. "
                    "Set LIVEKIT_STORAGE_AWS_* to enable recording.",
                    room_name=room_name,
                )

        lk_room = await self._api_client.create_room(
            name=room_name,
            empty_timeout=empty_timeout,
            enable_auto_track_egress=egress_enabled,
        )

        logger.info(
            "LiveKit room created",
            room_name=lk_room.name,
            room_sid=lk_room.sid,
            empty_timeout=empty_timeout,
            auto_track_egress=egress_enabled,
        )

        # room_url includes the server URL + room name as query param.
        # The join endpoint in rooms.py appends the token as another query param.
        # Frontend parses: ws://host:7880?room=<name>&token=<jwt>
        public_url = settings.LIVEKIT_PUBLIC_URL or settings.LIVEKIT_URL or ""
        room_url = f"{public_url}?{urlencode({'room': lk_room.name})}"

        return MeetingData(
            # Fall back to a random UUID if the server returned no SID.
            meeting_id=lk_room.sid or str(uuid4()),
            room_name=lk_room.name,
            room_url=room_url,
            host_room_url=room_url,
            platform=self.PLATFORM_NAME,
            extra_data={"livekit_room_sid": lk_room.sid},
        )
|
||||
|
||||
async def get_room_sessions(self, room_name: str) -> list[SessionData]:
    """Get current participants in a LiveKit room.

    For historical sessions, we rely on webhook-stored data (same as Daily).
    This returns currently-connected participants.

    Args:
        room_name: LiveKit room name to query.

    Returns:
        One SessionData per currently-connected participant; empty list
        when the room does not exist or the listing call fails.
    """
    try:
        participants = await self._api_client.list_participants(room_name)
        return [
            SessionData(
                session_id=p.sid,
                # joined_at may be falsy/absent; fall back to epoch 0 (UTC)
                started_at=datetime.fromtimestamp(
                    p.joined_at if p.joined_at else 0, tz=timezone.utc
                ),
                ended_at=None,  # Still active
            )
            for p in participants
            if p.sid  # Skip empty entries
        ]
    except Exception as e:
        # Best-effort: a missing/already-closed room is expected, not an error.
        logger.debug(
            "Could not list LiveKit participants (room may not exist)",
            room_name=room_name,
            error=str(e),
        )
        return []
|
||||
|
||||
async def upload_logo(self, room_name: str, logo_path: str) -> bool:
    """No-op for LiveKit: there is no logo upload concept; branding is
    handled in frontend theming. Always reports success."""
    return True
|
||||
|
||||
def verify_webhook_signature(
    self, body: bytes, signature: str, timestamp: str | None = None
) -> bool:
    """Verify LiveKit webhook signature.

    LiveKit sends the JWT in the Authorization header. The `signature`
    param here receives the Authorization header value.

    Args:
        body: Raw request body bytes.
        signature: Authorization header value (a JWT).
        timestamp: Unused for LiveKit; kept for interface parity with
            other platform clients.

    Returns:
        True when verify_webhook returns a parsed event (valid signature).
    """
    event = verify_webhook(self._webhook_receiver, body, signature)
    return event is not None
|
||||
|
||||
def create_access_token(
    self,
    room_name: str,
    participant_identity: str,
    participant_name: str | None = None,
    is_admin: bool = False,
) -> str:
    """Generate a LiveKit access token for a participant.

    Args:
        room_name: Room the token grants access to.
        participant_identity: Unique identity for the participant.
        participant_name: Display name; None is passed through to the
            API client as-is.
        is_admin: Whether the token carries room-admin privileges
            (forwarded as ``room_admin``).

    Returns:
        Signed JWT string produced by the API client.
    """
    return self._api_client.create_access_token(
        room_name=room_name,
        participant_identity=participant_identity,
        participant_name=participant_name,
        room_admin=is_admin,
    )
|
||||
|
||||
async def start_track_egress(
    self,
    room_name: str,
    track_sid: str,
    s3_filepath: str,
):
    """Start Track Egress for a single audio track.

    Args:
        room_name: LiveKit room containing the track.
        track_sid: SID of the track to record.
        s3_filepath: Destination object key for the egress output.

    Returns:
        Whatever the underlying API client returns for the egress start.
    """
    return await self._api_client.start_track_egress(
        room_name=room_name,
        track_sid=track_sid,
        s3_filepath=s3_filepath,
    )
|
||||
|
||||
async def list_egress(self, room_name: str | None = None):
    """List egress operations, optionally filtered to a single room."""
    client = self._api_client
    listing = await client.list_egress(room_name=room_name)
    return listing
|
||||
|
||||
async def stop_egress(self, egress_id: str):
    """Stop a running egress operation identified by ``egress_id``."""
    client = self._api_client
    outcome = await client.stop_egress(egress_id=egress_id)
    return outcome
|
||||
|
||||
async def close(self):
    """Release the underlying LiveKit API client's resources."""
    await self._api_client.close()
|
||||
|
||||
async def __aenter__(self):
    """Support ``async with`` usage; returns the client itself."""
    return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
    """Close the API client when leaving an ``async with`` block."""
    await self.close()
|
||||
@@ -1,6 +1,11 @@
|
||||
from typing import Dict, Type
|
||||
|
||||
from ..schemas.platform import DAILY_PLATFORM, WHEREBY_PLATFORM, Platform
|
||||
from ..schemas.platform import (
|
||||
DAILY_PLATFORM,
|
||||
LIVEKIT_PLATFORM,
|
||||
WHEREBY_PLATFORM,
|
||||
Platform,
|
||||
)
|
||||
from .base import VideoPlatformClient, VideoPlatformConfig
|
||||
|
||||
_PLATFORMS: Dict[Platform, Type[VideoPlatformClient]] = {}
|
||||
@@ -26,10 +31,12 @@ def get_available_platforms() -> list[Platform]:
|
||||
|
||||
def _register_builtin_platforms():
    """Register the built-in video platform clients in the global registry.

    Imports are deferred to function scope (see the noqa markers) —
    presumably to avoid import cycles at module load time; confirm before
    hoisting them.
    """
    from .daily import DailyClient  # noqa: PLC0415
    from .livekit import LiveKitClient  # noqa: PLC0415
    from .whereby import WherebyClient  # noqa: PLC0415

    register_platform(WHEREBY_PLATFORM, WherebyClient)
    register_platform(DAILY_PLATFORM, DailyClient)
    register_platform(LIVEKIT_PLATFORM, LiveKitClient)
|
||||
|
||||
|
||||
_register_builtin_platforms()
|
||||
|
||||
246
server/reflector/views/livekit.py
Normal file
246
server/reflector/views/livekit.py
Normal file
@@ -0,0 +1,246 @@
|
||||
"""LiveKit webhook handler.
|
||||
|
||||
Processes LiveKit webhook events for participant tracking and
|
||||
Track Egress recording completion.
|
||||
|
||||
LiveKit sends webhooks as POST requests with JWT authentication
|
||||
in the Authorization header.
|
||||
|
||||
Webhooks are used as fast-path triggers and logging. Track discovery
|
||||
for the multitrack pipeline uses S3 listing (source of truth), not
|
||||
webhook data.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
from reflector.logger import logger as _logger
|
||||
from reflector.settings import settings
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
logger = _logger.bind(platform="livekit")
|
||||
|
||||
# Module-level receiver, lazily initialized on first webhook
|
||||
_webhook_receiver = None
|
||||
|
||||
|
||||
def _get_webhook_receiver():
    """Return the module-level webhook receiver, building it on first use.

    Raises:
        ValueError: when LiveKit API credentials are not configured.
    """
    global _webhook_receiver
    if _webhook_receiver is not None:
        return _webhook_receiver

    if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
        raise ValueError("LiveKit not configured")

    # Dedicated webhook secret wins; fall back to the API secret.
    secret = settings.LIVEKIT_WEBHOOK_SECRET or settings.LIVEKIT_API_SECRET
    _webhook_receiver = create_webhook_receiver(
        api_key=settings.LIVEKIT_API_KEY,
        api_secret=secret,
    )
    return _webhook_receiver
|
||||
|
||||
|
||||
@router.post("/webhook")
async def livekit_webhook(request: Request):
    """Handle LiveKit webhook events.

    LiveKit webhook events include:
    - participant_joined / participant_left
    - egress_started / egress_updated / egress_ended
    - room_started / room_finished
    - track_published / track_unpublished

    Raises:
        HTTPException: 500 when LiveKit credentials are not configured,
            401 when the webhook JWT fails verification.
    """
    if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
        raise HTTPException(status_code=500, detail="LiveKit not configured")

    body = await request.body()
    auth_header = request.headers.get("Authorization", "")

    # verify_webhook returns the parsed event on success, None on failure
    receiver = _get_webhook_receiver()
    event = verify_webhook(receiver, body, auth_header)
    if event is None:
        logger.warning(
            "Invalid LiveKit webhook signature",
            has_auth=bool(auth_header),
            has_body=bool(body),
        )
        raise HTTPException(status_code=401, detail="Invalid webhook signature")

    event_type = event.event

    # Dispatch by event type; unknown events are logged and acknowledged
    # so LiveKit does not retry them.
    match event_type:
        case "participant_joined":
            await _handle_participant_joined(event)
        case "participant_left":
            await _handle_participant_left(event)
        case "egress_started":
            await _handle_egress_started(event)
        case "egress_ended":
            await _handle_egress_ended(event)
        case "room_started":
            logger.info(
                "Room started",
                room_name=event.room.name if event.room else None,
            )
        case "room_finished":
            await _handle_room_finished(event)
        case "track_published" | "track_unpublished":
            logger.debug(
                f"Track event: {event_type}",
                room_name=event.room.name if event.room else None,
                participant=event.participant.identity if event.participant else None,
            )
        case _:
            logger.debug(
                "Unhandled LiveKit webhook event",
                event_type=event_type,
            )

    return {"status": "ok"}
|
||||
|
||||
|
||||
async def _handle_participant_joined(event):
    """Log a participant joining, resolving the meeting for context.

    Informational only — no state is mutated here.
    """
    room_name = event.room.name if event.room else None
    participant = event.participant

    if not room_name or not participant:
        logger.warning("participant_joined: missing room or participant data")
        return

    meeting = await meetings_controller.get_by_room_name(room_name)
    if not meeting:
        logger.warning("participant_joined: meeting not found", room_name=room_name)
        return

    logger.info(
        "Participant joined",
        meeting_id=meeting.id,
        room_name=room_name,
        participant_identity=participant.identity,
        participant_sid=participant.sid,
    )
|
||||
|
||||
|
||||
async def _handle_participant_left(event):
    """Log a participant leaving, resolving the meeting for context.

    Informational only — no state is mutated here.
    """
    room_name = event.room.name if event.room else None
    participant = event.participant

    if not room_name or not participant:
        logger.warning("participant_left: missing room or participant data")
        return

    meeting = await meetings_controller.get_by_room_name(room_name)
    if not meeting:
        logger.warning("participant_left: meeting not found", room_name=room_name)
        return

    logger.info(
        "Participant left",
        meeting_id=meeting.id,
        room_name=room_name,
        participant_identity=participant.identity,
        participant_sid=participant.sid,
    )
|
||||
|
||||
|
||||
async def _handle_egress_started(event):
    """Log the start of an egress operation (no state changes)."""
    egress = event.egress_info
    room = egress.room_name if egress else None
    egress_id = egress.egress_id if egress else None
    logger.info("Egress started", room_name=room, egress_id=egress_id)
|
||||
|
||||
|
||||
async def _handle_egress_ended(event):
    """Handle Track Egress completion. Delete video files immediately to save storage.

    AutoTrackEgress records ALL tracks (audio + video). Audio is kept for the
    transcription pipeline. Video files are unused and deleted on completion.
    This saves ~50x storage (video is 98% of egress output for HD cameras).
    """
    # Hoisted out of the per-file loop (was re-imported on every iteration).
    from reflector.storage import get_source_storage  # noqa: PLC0415

    egress = event.egress_info
    if not egress:
        logger.warning("egress_ended: no egress info in payload")
        return

    # EGRESS_FAILED = 4 — presumably the LiveKit EgressStatus enum value;
    # confirm against the SDK before changing.
    if egress.status == 4:
        logger.error(
            "Egress failed",
            room_name=egress.room_name,
            egress_id=egress.egress_id,
            error=egress.error,
        )
        return

    file_results = list(egress.file_results)
    logger.info(
        "Egress ended",
        room_name=egress.room_name,
        egress_id=egress.egress_id,
        status=egress.status,
        num_files=len(file_results),
        filenames=[f.filename for f in file_results] if file_results else [],
    )

    # Delete video files (.webm) immediately — only audio (.ogg) is needed for transcription.
    # Video tracks are 50-90x larger than audio and unused by the pipeline.
    # JSON manifests are kept (lightweight metadata, ~430 bytes each).
    # The storage client is created lazily on the first .webm hit and cached,
    # inside the try so a storage-config failure still degrades to a warning.
    storage = None
    for file_result in file_results:
        filename = file_result.filename
        if not (filename and filename.endswith(".webm")):
            continue
        try:
            if storage is None:
                storage = get_source_storage("livekit")
            await storage.delete_file(filename)
            logger.info(
                "Deleted video egress file",
                filename=filename,
                room_name=egress.room_name,
            )
        except Exception as e:
            # Non-critical — pipeline filters these out anyway
            logger.warning(
                "Failed to delete video egress file",
                filename=filename,
                error=str(e),
            )
|
||||
|
||||
|
||||
async def _handle_room_finished(event):
    """Fast-path: trigger multitrack processing when room closes.

    This is an optimization — if missed, the process_livekit_ended_meetings
    beat task catches it within ~2 minutes.
    """
    room_name = event.room.name if event.room else None
    if not room_name:
        logger.warning("room_finished: no room name in payload")
        return

    logger.info("Room finished", room_name=room_name)

    meeting = await meetings_controller.get_by_room_name(room_name)
    if not meeting:
        logger.warning("room_finished: meeting not found", room_name=room_name)
        return

    # Deactivate the meeting — LiveKit room is destroyed, so process_meetings
    # can't detect this via API (list_participants returns empty for deleted rooms).
    if meeting.is_active:
        await meetings_controller.update_meeting(meeting.id, is_active=False)
        logger.info("room_finished: meeting deactivated", meeting_id=meeting.id)

    # Import here to avoid circular imports (worker imports views)
    from reflector.worker.process import process_livekit_multitrack

    # Queue async processing; the worker holds a per-room lock, so a
    # duplicate enqueue from the beat task is harmless.
    process_livekit_multitrack.delay(
        room_name=room_name,
        meeting_id=meeting.id,
    )

    logger.info(
        "room_finished: queued multitrack processing",
        meeting_id=meeting.id,
        room_name=room_name,
    )
|
||||
@@ -91,9 +91,7 @@ class StartRecordingRequest(BaseModel):
|
||||
async def start_recording(
|
||||
meeting_id: NonEmptyString,
|
||||
body: StartRecordingRequest,
|
||||
user: Annotated[
|
||||
Optional[auth.UserInfo], Depends(auth.current_user_optional_if_public_mode)
|
||||
],
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
) -> dict[str, Any]:
|
||||
"""Start cloud or raw-tracks recording via Daily.co REST API.
|
||||
|
||||
|
||||
@@ -554,6 +554,7 @@ async def rooms_join_meeting(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
display_name: str | None = None,
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(room_name)
|
||||
@@ -598,4 +599,51 @@ async def rooms_join_meeting(
|
||||
meeting = meeting.model_copy()
|
||||
meeting.room_url = add_query_param(meeting.room_url, "t", token)
|
||||
|
||||
elif meeting.platform == "livekit":
|
||||
import re
|
||||
import uuid
|
||||
|
||||
client = create_platform_client(meeting.platform)
|
||||
# Identity must be unique per participant to avoid S3 key collisions.
|
||||
# Format: {readable_name}-{short_uuid} ensures uniqueness even for same names.
|
||||
uid_suffix = uuid.uuid4().hex[:6]
|
||||
if display_name:
|
||||
safe_name = re.sub(r"[^a-zA-Z0-9_-]", "_", display_name.strip())[:40]
|
||||
participant_identity = (
|
||||
f"{safe_name}-{uid_suffix}" if safe_name else f"anon-{uid_suffix}"
|
||||
)
|
||||
elif user_id:
|
||||
email = getattr(user, "email", None)
|
||||
if email and "@" in email:
|
||||
participant_identity = f"{email.split('@')[0]}-{uid_suffix}"
|
||||
else:
|
||||
participant_identity = f"{user_id[:12]}-{uid_suffix}"
|
||||
else:
|
||||
participant_identity = f"anon-{uid_suffix}"
|
||||
participant_name = display_name or participant_identity
|
||||
|
||||
# Store identity → Reflector user_id mapping for the pipeline
|
||||
# (so TranscriptParticipant.user_id can be set correctly)
|
||||
if user_id:
|
||||
from reflector.redis_cache import get_async_redis_client # noqa: PLC0415
|
||||
|
||||
redis_client = await get_async_redis_client()
|
||||
mapping_key = f"livekit:participant_map:{meeting.room_name}"
|
||||
await redis_client.hset(mapping_key, participant_identity, user_id)
|
||||
await redis_client.expire(mapping_key, 7 * 86400) # 7 day TTL
|
||||
|
||||
token = client.create_access_token(
|
||||
room_name=meeting.room_name,
|
||||
participant_identity=participant_identity,
|
||||
participant_name=participant_name,
|
||||
is_admin=user_id == room.user_id if user_id else False,
|
||||
)
|
||||
# Close the platform client to release aiohttp session
|
||||
if hasattr(client, "close"):
|
||||
await client.close()
|
||||
|
||||
meeting = meeting.model_copy()
|
||||
# For LiveKit, room_url is the WS URL; token goes as a query param
|
||||
meeting.room_url = add_query_param(meeting.room_url, "token", token)
|
||||
|
||||
return meeting
|
||||
|
||||
@@ -83,7 +83,25 @@ def build_beat_schedule(
|
||||
else:
|
||||
logger.info("Daily.co beat tasks disabled (no DAILY_API_KEY)")
|
||||
|
||||
_any_platform = _whereby_enabled or _daily_enabled
|
||||
_livekit_enabled = bool(settings.LIVEKIT_API_KEY and settings.LIVEKIT_URL)
|
||||
if _livekit_enabled:
|
||||
beat_schedule["process_livekit_ended_meetings"] = {
|
||||
"task": "reflector.worker.process.process_livekit_ended_meetings",
|
||||
"schedule": 120, # Every 2 minutes
|
||||
}
|
||||
beat_schedule["reprocess_failed_livekit_recordings"] = {
|
||||
"task": "reflector.worker.process.reprocess_failed_livekit_recordings",
|
||||
"schedule": crontab(hour=5, minute=0),
|
||||
}
|
||||
logger.info(
|
||||
"LiveKit beat tasks enabled",
|
||||
tasks=[
|
||||
"process_livekit_ended_meetings",
|
||||
"reprocess_failed_livekit_recordings",
|
||||
],
|
||||
)
|
||||
|
||||
_any_platform = _whereby_enabled or _daily_enabled or _livekit_enabled
|
||||
if _any_platform:
|
||||
beat_schedule["process_meetings"] = {
|
||||
"task": "reflector.worker.process.process_meetings",
|
||||
|
||||
@@ -869,13 +869,30 @@ async def process_meetings():
|
||||
elif has_had_sessions:
|
||||
should_deactivate = True
|
||||
logger_.info("Meeting ended - all participants left")
|
||||
elif current_time > end_date:
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - scheduled time ended with no participants",
|
||||
)
|
||||
else:
|
||||
logger_.debug("Meeting not yet started, keep it")
|
||||
elif not has_had_sessions:
|
||||
# No sessions recorded — either no one joined, or webhooks
|
||||
# didn't arrive (e.g. local dev without tunnel).
|
||||
meeting_start = meeting.start_date
|
||||
if meeting_start.tzinfo is None:
|
||||
meeting_start = meeting_start.replace(tzinfo=timezone.utc)
|
||||
age_minutes = (current_time - meeting_start).total_seconds() / 60
|
||||
is_scheduled = bool(meeting.calendar_event_id)
|
||||
|
||||
if is_scheduled and current_time > end_date:
|
||||
# Scheduled meeting past its end time with no participants
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - scheduled time ended with no participants",
|
||||
)
|
||||
elif not is_scheduled and age_minutes > 30:
|
||||
# On-the-fly meeting with no sessions after 30 min
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - no sessions after 30 min",
|
||||
age_minutes=round(age_minutes, 1),
|
||||
)
|
||||
else:
|
||||
logger_.debug("Meeting not yet started, keep it")
|
||||
|
||||
if should_deactivate:
|
||||
await meetings_controller.update_meeting(
|
||||
@@ -1170,3 +1187,311 @@ async def trigger_daily_reconciliation() -> None:
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Reconciliation trigger failed", error=str(e), exc_info=True)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# LiveKit multitrack recording tasks
|
||||
# ============================================================
|
||||
|
||||
|
||||
@shared_task
@asynctask
async def process_livekit_multitrack(
    room_name: str,
    meeting_id: str,
):
    """
    Process LiveKit multitrack recording by discovering tracks on S3.

    Tracks are discovered via S3 listing (source of truth), not webhooks.
    Called from room_finished webhook (fast-path) or beat task (fallback).

    Args:
        room_name: Full LiveKit room name (also keys the S3 prefix and lock).
        meeting_id: Reflector meeting id to attach the recording to.
    """
    from reflector.utils.livekit import (  # noqa: PLC0415
        recording_lock_key,
    )

    logger.info(
        "Processing LiveKit multitrack recording",
        room_name=room_name,
        meeting_id=meeting_id,
    )

    # Per-room Redis lock so the webhook fast-path and the beat task cannot
    # process the same recording concurrently; non-blocking, skip if held.
    lock_key = recording_lock_key(room_name)
    async with RedisAsyncLock(
        key=lock_key,
        timeout=600,
        extend_interval=60,
        skip_if_locked=True,
        blocking=False,
    ) as lock:
        if not lock.acquired:
            logger.warning(
                "LiveKit processing skipped - lock already held",
                room_name=room_name,
                lock_key=lock_key,
            )
            return

        await _process_livekit_multitrack_inner(room_name, meeting_id)
|
||||
|
||||
|
||||
async def _process_livekit_multitrack_inner(
    room_name: str,
    meeting_id: str,
):
    """Inner processing logic for LiveKit multitrack recording.

    Steps:
      1. Discover audio tracks by listing the room's S3 prefix, retrying
         while egress output may still be flushing.
      2. Parse participant identity / timestamp from track filenames.
      3. Resolve the meeting and its room.
      4. Create (or reuse) the Recording row.
      5. Create (or reuse) the Transcript row.
      6. Start the Hatchet DiarizationPipeline workflow and persist its id.

    Returns early (after logging) on any missing precondition.
    """
    # 1. Discover tracks by listing S3 prefix.
    # Wait briefly for egress files to finish flushing to S3 — the room_finished
    # webhook fires after empty_timeout, but egress finalization may still be in progress.
    import asyncio as _asyncio  # noqa: PLC0415

    from reflector.storage import get_source_storage  # noqa: PLC0415
    from reflector.utils.livekit import (  # noqa: PLC0415
        extract_livekit_base_room_name,
        filter_audio_tracks,
        parse_livekit_track_filepath,
    )

    EGRESS_FLUSH_DELAY = 10  # seconds — egress typically flushes within a few seconds
    EGRESS_RETRY_DELAY = 30  # seconds — retry if first listing finds nothing

    await _asyncio.sleep(EGRESS_FLUSH_DELAY)

    storage = get_source_storage("livekit")
    s3_prefix = f"livekit/{room_name}/"
    all_keys = await storage.list_objects(prefix=s3_prefix)

    # Filter to audio tracks only (.ogg) — skip .json manifests and .webm video
    audio_keys = filter_audio_tracks(all_keys) if all_keys else []

    if not audio_keys:
        # Retry once after a longer delay — egress may still be flushing
        logger.info(
            "No audio tracks found yet, retrying after delay",
            room_name=room_name,
            retry_delay=EGRESS_RETRY_DELAY,
        )
        await _asyncio.sleep(EGRESS_RETRY_DELAY)
        all_keys = await storage.list_objects(prefix=s3_prefix)
        audio_keys = filter_audio_tracks(all_keys) if all_keys else []

    # Sanity check: compare audio tracks against egress manifests.
    # Each Track Egress (audio or video) produces a .json manifest.
    # Video tracks produce .webm files. So expected audio count ≈ manifests - video files.
    if all_keys:
        manifest_count = sum(1 for k in all_keys if k.endswith(".json"))
        video_count = sum(1 for k in all_keys if k.endswith(".webm"))
        expected_audio = manifest_count - video_count
        if expected_audio > len(audio_keys) and expected_audio > 0:
            # Some audio tracks may still be flushing — wait and retry
            logger.info(
                "Expected more audio tracks based on manifests, waiting for late flushes",
                room_name=room_name,
                expected=expected_audio,
                found=len(audio_keys),
            )
            await _asyncio.sleep(EGRESS_RETRY_DELAY)
            all_keys = await storage.list_objects(prefix=s3_prefix)
            audio_keys = filter_audio_tracks(all_keys) if all_keys else []

    logger.info(
        "S3 track discovery complete",
        room_name=room_name,
        total_files=len(all_keys) if all_keys else 0,
        audio_files=len(audio_keys),
    )

    if not audio_keys:
        logger.warning(
            "No audio track files found on S3 after retries",
            room_name=room_name,
            s3_prefix=s3_prefix,
        )
        return

    # 2. Parse track info from filenames
    parsed_tracks = []
    for key in audio_keys:
        try:
            parsed = parse_livekit_track_filepath(key)
            parsed_tracks.append(parsed)
        except ValueError as e:
            logger.warning("Skipping unparseable track file", s3_key=key, error=str(e))

    if not parsed_tracks:
        logger.warning(
            "No valid track files found after parsing",
            room_name=room_name,
            raw_keys=all_keys,
        )
        return

    track_keys = [t.s3_key for t in parsed_tracks]

    # 3. Find meeting and room
    meeting = await meetings_controller.get_by_id(meeting_id)
    if not meeting:
        logger.error(
            "Meeting not found for LiveKit recording",
            meeting_id=meeting_id,
            room_name=room_name,
        )
        return

    base_room_name = extract_livekit_base_room_name(room_name)
    room = await rooms_controller.get_by_name(base_room_name)
    if not room:
        logger.error("Room not found", room_name=base_room_name)
        return

    # 4. Create recording — deterministic id keyed on the room name, so
    # reruns find the existing row instead of duplicating it.
    recording_id = f"lk-{room_name}"
    bucket_name = settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME or ""

    existing_recording = await recordings_controller.get_by_id(recording_id)
    if existing_recording and existing_recording.deleted_at is not None:
        logger.info("Skipping soft-deleted recording", recording_id=recording_id)
        return

    if not existing_recording:
        recording = await recordings_controller.create(
            Recording(
                id=recording_id,
                bucket_name=bucket_name,
                object_key=s3_prefix,
                recorded_at=datetime.now(timezone.utc),
                meeting_id=meeting.id,
                track_keys=track_keys,
            )
        )
    else:
        recording = existing_recording

    # 5. Create or get transcript
    transcript = await transcripts_controller.get_by_recording_id(recording.id)
    if transcript and transcript.deleted_at is not None:
        logger.info("Skipping soft-deleted transcript", recording_id=recording.id)
        return
    if not transcript:
        transcript = await transcripts_controller.add(
            "",
            source_kind=SourceKind.ROOM,
            source_language="en",
            target_language="en",
            user_id=room.user_id,
            recording_id=recording.id,
            share_mode="semi-private",
            meeting_id=meeting.id,
            room_id=room.id,
        )

    # 6. Start Hatchet pipeline (reuses DiarizationPipeline with source_platform="livekit")
    workflow_id = await HatchetClientManager.start_workflow(
        workflow_name="DiarizationPipeline",
        input_data={
            "recording_id": recording_id,
            "tracks": [
                {
                    "s3_key": t.s3_key,
                    "participant_identity": t.participant_identity,
                    "timestamp": t.timestamp.isoformat(),
                }
                for t in parsed_tracks
            ],
            "bucket_name": bucket_name,
            "transcript_id": transcript.id,
            "room_id": room.id,
            "source_platform": "livekit",
        },
        additional_metadata={
            "transcript_id": transcript.id,
            "recording_id": recording_id,
        },
    )
    logger.info(
        "Started LiveKit Hatchet workflow",
        workflow_id=workflow_id,
        transcript_id=transcript.id,
        room_name=room_name,
        num_tracks=len(parsed_tracks),
    )

    # Persist the workflow id so the transcript can be tracked / reprocessed
    await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})
|
||||
|
||||
|
||||
@shared_task
@asynctask
async def process_livekit_ended_meetings():
    """Check for inactive LiveKit meetings that need multitrack processing.

    Runs on a beat schedule. Catches cases where room_finished webhook was missed.
    Only processes meetings that:
    - Platform is "livekit"
    - is_active=False (already deactivated by process_meetings)
    - No associated transcript yet
    """
    from reflector.db.transcripts import transcripts_controller as tc  # noqa: PLC0415

    all_livekit = await meetings_controller.get_all_inactive_livekit()

    queued = 0
    for meeting in all_livekit:
        # Skip if already has a transcript
        existing = await tc.get_by_meeting_id(meeting.id)
        if existing:
            continue

        logger.info(
            "Found unprocessed inactive LiveKit meeting",
            meeting_id=meeting.id,
            room_name=meeting.room_name,
        )

        # Same task as the webhook fast-path; its per-room lock makes
        # duplicate enqueues harmless.
        process_livekit_multitrack.delay(
            room_name=meeting.room_name,
            meeting_id=meeting.id,
        )
        queued += 1

    if queued > 0:
        logger.info("Queued LiveKit multitrack processing", count=queued)
|
||||
|
||||
|
||||
@shared_task
@asynctask
async def reprocess_failed_livekit_recordings():
    """Reprocess LiveKit recordings that failed.

    Runs daily at 5 AM. Finds recordings with livekit prefix and error status.
    No-op when LIVEKIT_STORAGE_AWS_BUCKET_NAME is unset.
    """
    bucket_name = settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME
    if not bucket_name:
        return

    failed = await recordings_controller.get_multitrack_needing_reprocessing(
        bucket_name
    )
    # LiveKit recording ids are created as f"lk-{room_name}" elsewhere
    # in this module; filter other platforms out.
    livekit_failed = [r for r in failed if r.id.startswith("lk-")]

    for recording in livekit_failed:
        if not recording.meeting_id:
            logger.warning(
                "Skipping reprocess — no meeting_id",
                recording_id=recording.id,
            )
            continue

        meeting = await meetings_controller.get_by_id(recording.meeting_id)
        if not meeting:
            continue

        logger.info(
            "Reprocessing failed LiveKit recording",
            recording_id=recording.id,
            meeting_id=meeting.id,
        )

        process_livekit_multitrack.delay(
            room_name=meeting.room_name,
            meeting_id=meeting.id,
        )
|
||||
|
||||
@@ -171,5 +171,5 @@ async def test_multitrack_pipeline_end_to_end(
|
||||
assert len(messages) >= 1, "Should have received at least 1 email"
|
||||
email_msg = messages[0]
|
||||
assert (
|
||||
"Transcript Ready" in email_msg.get("Subject", "")
|
||||
"Reflector:" in email_msg.get("Subject", "")
|
||||
), f"Email subject should contain 'Transcript Ready', got: {email_msg.get('Subject')}"
|
||||
|
||||
408
server/tests/test_livekit_backend.py
Normal file
408
server/tests/test_livekit_backend.py
Normal file
@@ -0,0 +1,408 @@
|
||||
"""
|
||||
Tests for LiveKit backend: webhook verification, token generation,
|
||||
display_name sanitization, and platform client behavior.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
|
||||
# ── Webhook verification ──────────────────────────────────────
|
||||
|
||||
|
||||
class TestWebhookVerification:
    """verify_webhook must return None for anything that is not a valid
    signed JWT: empty/garbage auth headers, empty bodies, wrong secrets."""

    def _make_receiver(self):
        """Create a receiver with test credentials."""
        return create_webhook_receiver(
            api_key="test_key",
            api_secret="test_secret_that_is_long_enough_for_hmac",
        )

    def test_rejects_empty_auth_header(self):
        receiver = self._make_receiver()
        result = verify_webhook(receiver, b'{"event":"test"}', "")
        assert result is None

    def test_rejects_garbage_auth_header(self):
        receiver = self._make_receiver()
        result = verify_webhook(receiver, b'{"event":"test"}', "not-a-jwt")
        assert result is None

    def test_rejects_empty_body(self):
        receiver = self._make_receiver()
        result = verify_webhook(receiver, b"", "Bearer some.jwt.token")
        assert result is None

    def test_handles_bytes_body(self):
        receiver = self._make_receiver()
        # Should not crash on bytes input
        result = verify_webhook(receiver, b'{"event":"test"}', "invalid")
        assert result is None

    def test_handles_string_body(self):
        # str body instead of bytes — must also be handled without crashing
        receiver = self._make_receiver()
        result = verify_webhook(receiver, '{"event":"test"}', "invalid")
        assert result is None

    def test_rejects_wrong_secret(self):
        """Webhook signed with different secret should be rejected."""
        receiver = self._make_receiver()
        # A JWT signed with a different secret
        fake_jwt = "eyJhbGciOiJIUzI1NiJ9.eyJ0ZXN0IjoxfQ.wrong_signature"
        result = verify_webhook(receiver, b"{}", fake_jwt)
        assert result is None
|
||||
|
||||
|
||||
# ── Token generation ──────────────────────────────────────────
|
||||
|
||||
|
||||
class TestTokenGeneration:
|
||||
"""Test token generation using the LiveKit SDK directly (no client instantiation)."""
|
||||
|
||||
def _generate_token(
|
||||
self, room_name="room", identity="user", name=None, admin=False, ttl=86400
|
||||
):
|
||||
"""Generate a token using the SDK directly, avoiding LiveKitAPI client session."""
|
||||
from datetime import timedelta
|
||||
|
||||
from livekit.api import AccessToken, VideoGrants
|
||||
|
||||
token = AccessToken(
|
||||
api_key="test_key", api_secret="test_secret_that_is_long_enough_for_hmac"
|
||||
)
|
||||
token.identity = identity
|
||||
token.name = name or identity
|
||||
token.ttl = timedelta(seconds=ttl)
|
||||
token.with_grants(
|
||||
VideoGrants(
|
||||
room_join=True,
|
||||
room=room_name,
|
||||
can_publish=True,
|
||||
can_subscribe=True,
|
||||
room_admin=admin,
|
||||
)
|
||||
)
|
||||
return token.to_jwt()
|
||||
|
||||
def _decode_claims(self, token):
|
||||
import base64
|
||||
import json
|
||||
|
||||
payload = token.split(".")[1]
|
||||
payload += "=" * (4 - len(payload) % 4)
|
||||
return json.loads(base64.b64decode(payload))
|
||||
|
||||
def test_creates_valid_jwt(self):
|
||||
token = self._generate_token(
|
||||
room_name="test-room", identity="user123", name="Test User"
|
||||
)
|
||||
assert isinstance(token, str)
|
||||
assert len(token.split(".")) == 3
|
||||
|
||||
def test_token_includes_room_name(self):
|
||||
token = self._generate_token(room_name="my-room-20260401", identity="alice")
|
||||
claims = self._decode_claims(token)
|
||||
assert claims.get("video", {}).get("room") == "my-room-20260401"
|
||||
assert claims.get("sub") == "alice"
|
||||
|
||||
def test_token_respects_admin_flag(self):
|
||||
token = self._generate_token(identity="admin", admin=True)
|
||||
claims = self._decode_claims(token)
|
||||
assert claims["video"]["roomAdmin"] is True
|
||||
|
||||
def test_token_non_admin_by_default(self):
|
||||
token = self._generate_token(identity="user")
|
||||
claims = self._decode_claims(token)
|
||||
assert claims.get("video", {}).get("roomAdmin") in (None, False)
|
||||
|
||||
def test_ttl_is_timedelta(self):
|
||||
"""Verify ttl as timedelta works (previous bug: int caused TypeError)."""
|
||||
token = self._generate_token(ttl=3600)
|
||||
assert isinstance(token, str)
|
||||
|
||||
|
||||
# ── Display name sanitization ─────────────────────────────────
|
||||
|
||||
|
||||
class TestDisplayNameSanitization:
|
||||
"""Test the sanitization logic from rooms.py join endpoint."""
|
||||
|
||||
def _sanitize(self, display_name: str) -> str:
|
||||
"""Replicate the sanitization from rooms_join_meeting."""
|
||||
safe_name = re.sub(r"[^a-zA-Z0-9_-]", "_", display_name.strip())[:40]
|
||||
return safe_name
|
||||
|
||||
def test_normal_name(self):
|
||||
assert self._sanitize("Alice") == "Alice"
|
||||
|
||||
def test_name_with_spaces(self):
|
||||
assert self._sanitize("John Doe") == "John_Doe"
|
||||
|
||||
def test_name_with_special_chars(self):
|
||||
assert self._sanitize("user@email.com") == "user_email_com"
|
||||
|
||||
def test_name_with_unicode(self):
|
||||
result = self._sanitize("José García")
|
||||
assert result == "Jos__Garc_a"
|
||||
assert all(
|
||||
c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-"
|
||||
for c in result
|
||||
)
|
||||
|
||||
def test_name_with_emoji(self):
|
||||
result = self._sanitize("👋 Hello")
|
||||
assert "_" in result # Emoji replaced with underscore
|
||||
assert "Hello" in result
|
||||
|
||||
def test_very_long_name(self):
|
||||
long_name = "A" * 100
|
||||
result = self._sanitize(long_name)
|
||||
assert len(result) == 40
|
||||
|
||||
def test_empty_name(self):
|
||||
result = self._sanitize("")
|
||||
assert result == ""
|
||||
|
||||
def test_only_special_chars(self):
|
||||
result = self._sanitize("!!!")
|
||||
assert result == "___"
|
||||
|
||||
def test_whitespace_stripped(self):
|
||||
result = self._sanitize(" Alice ")
|
||||
assert result == "Alice"
|
||||
|
||||
def test_hyphens_preserved(self):
|
||||
assert self._sanitize("first-last") == "first-last"
|
||||
|
||||
def test_underscores_preserved(self):
|
||||
assert self._sanitize("first_last") == "first_last"
|
||||
|
||||
def test_html_injection(self):
|
||||
result = self._sanitize("<script>alert('xss')</script>")
|
||||
assert "<" not in result
|
||||
assert ">" not in result
|
||||
assert "'" not in result
|
||||
|
||||
|
||||
# ── S3 egress configuration ───────────────────────────────────
|
||||
|
||||
|
||||
class TestS3EgressConfig:
|
||||
"""Test S3Upload construction using the SDK directly."""
|
||||
|
||||
def test_build_s3_upload_requires_all_fields(self):
|
||||
# Missing fields should raise or produce invalid config
|
||||
# The validation happens in our client wrapper, not the SDK
|
||||
# Test the validation logic directly
|
||||
s3_bucket = None
|
||||
s3_access_key = "AKID"
|
||||
s3_secret_key = "secret"
|
||||
assert not all([s3_bucket, s3_access_key, s3_secret_key])
|
||||
|
||||
def test_s3_upload_with_credentials(self):
|
||||
from livekit.api import S3Upload
|
||||
|
||||
upload = S3Upload(
|
||||
access_key="AKID",
|
||||
secret="secret123",
|
||||
bucket="test-bucket",
|
||||
region="us-east-1",
|
||||
force_path_style=True,
|
||||
)
|
||||
assert upload.bucket == "test-bucket"
|
||||
assert upload.force_path_style is True
|
||||
|
||||
def test_s3_upload_with_endpoint(self):
|
||||
from livekit.api import S3Upload
|
||||
|
||||
upload = S3Upload(
|
||||
access_key="AKID",
|
||||
secret="secret",
|
||||
bucket="bucket",
|
||||
region="us-east-1",
|
||||
force_path_style=True,
|
||||
endpoint="http://garage:3900",
|
||||
)
|
||||
assert upload.endpoint == "http://garage:3900"
|
||||
|
||||
|
||||
# ── Platform detection ────────────────────────────────────────
|
||||
|
||||
|
||||
# ── Redis participant mapping ──────────────────────────────
|
||||
|
||||
|
||||
class TestParticipantIdentityMapping:
|
||||
"""Test the identity → user_id Redis mapping pattern."""
|
||||
|
||||
def test_mapping_key_format(self):
|
||||
room_name = "myroom-20260401172036"
|
||||
mapping_key = f"livekit:participant_map:{room_name}"
|
||||
assert mapping_key == "livekit:participant_map:myroom-20260401172036"
|
||||
|
||||
def test_identity_with_uuid_suffix_is_unique(self):
|
||||
import uuid
|
||||
|
||||
name = "Juan"
|
||||
id1 = f"{name}-{uuid.uuid4().hex[:6]}"
|
||||
id2 = f"{name}-{uuid.uuid4().hex[:6]}"
|
||||
assert id1 != id2
|
||||
assert id1.startswith("Juan-")
|
||||
assert id2.startswith("Juan-")
|
||||
|
||||
def test_strip_uuid_suffix_for_display(self):
|
||||
"""Pipeline strips UUID suffix for display name."""
|
||||
identity = "Juan-2bcea0"
|
||||
display_name = identity.rsplit("-", 1)[0] if "-" in identity else identity
|
||||
assert display_name == "Juan"
|
||||
|
||||
def test_strip_uuid_preserves_hyphenated_names(self):
|
||||
identity = "Mary-Jane-abc123"
|
||||
display_name = identity.rsplit("-", 1)[0] if "-" in identity else identity
|
||||
assert display_name == "Mary-Jane"
|
||||
|
||||
def test_anon_identity_no_user_id(self):
|
||||
"""Anonymous participants should not have a user_id mapping."""
|
||||
identity = "anon-abc123"
|
||||
# In the pipeline, anon identities don't get looked up
|
||||
assert identity.startswith("anon-")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_redis_hset_hgetall_roundtrip(self):
|
||||
"""Test the actual Redis operations used for participant mapping."""
|
||||
try:
|
||||
from reflector.redis_cache import get_async_redis_client
|
||||
|
||||
redis_client = await get_async_redis_client()
|
||||
test_key = "livekit:participant_map:__test_room__"
|
||||
|
||||
# Write
|
||||
await redis_client.hset(test_key, "Juan-abc123", "user-id-1")
|
||||
await redis_client.hset(test_key, "Alice-def456", "user-id-2")
|
||||
|
||||
# Read
|
||||
raw_map = await redis_client.hgetall(test_key)
|
||||
decoded = {
|
||||
k.decode() if isinstance(k, bytes) else k: v.decode()
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
for k, v in raw_map.items()
|
||||
}
|
||||
|
||||
assert decoded["Juan-abc123"] == "user-id-1"
|
||||
assert decoded["Alice-def456"] == "user-id-2"
|
||||
|
||||
# Cleanup
|
||||
await redis_client.delete(test_key)
|
||||
except Exception:
|
||||
pytest.skip("Redis not available")
|
||||
|
||||
|
||||
# ── Egress video cleanup safety ────────────────────────────────
|
||||
|
||||
|
||||
class TestEgressVideoCleanup:
|
||||
"""Ensure video cleanup logic NEVER deletes audio files."""
|
||||
|
||||
AUDIO_FILES = [
|
||||
"livekit/room-20260401/juan-abc123-2026-04-01T100000-TR_AMR3SWs74Divho.ogg",
|
||||
"livekit/room-20260401/alice-def456-2026-04-01T100030-TR_AMirKjdAvLteAZ.ogg",
|
||||
"livekit/room-20260401/bob-789abc-2026-04-01T100100-TR_AMyoSbM7tAQbYj.ogg",
|
||||
]
|
||||
|
||||
VIDEO_FILES = [
|
||||
"livekit/room-20260401/juan-abc123-2026-04-01T100000-TR_VC679dgMQBdfhT.webm",
|
||||
"livekit/room-20260401/alice-def456-2026-04-01T100030-TR_VCLsuRuxLp4eik.webm",
|
||||
]
|
||||
|
||||
MANIFEST_FILES = [
|
||||
"livekit/room-20260401/EG_K5sipvfB5fTM.json",
|
||||
"livekit/room-20260401/EG_nzwBsH9xzgoj.json",
|
||||
]
|
||||
|
||||
def _should_delete(self, filename: str) -> bool:
|
||||
"""Replicate the deletion logic from _handle_egress_ended."""
|
||||
return filename.endswith(".webm")
|
||||
|
||||
def test_audio_files_never_deleted(self):
|
||||
"""CRITICAL: Audio files must NEVER be marked for deletion."""
|
||||
for f in self.AUDIO_FILES:
|
||||
assert not self._should_delete(f), f"Audio file would be deleted: {f}"
|
||||
|
||||
def test_video_files_are_deleted(self):
|
||||
for f in self.VIDEO_FILES:
|
||||
assert self._should_delete(f), f"Video file NOT marked for deletion: {f}"
|
||||
|
||||
def test_manifests_are_kept(self):
|
||||
for f in self.MANIFEST_FILES:
|
||||
assert not self._should_delete(f), f"Manifest would be deleted: {f}"
|
||||
|
||||
def test_ogg_extension_never_matches_delete(self):
|
||||
"""Double-check: no .ogg file ever matches the deletion condition."""
|
||||
test_names = [
|
||||
"anything.ogg",
|
||||
"livekit/room/track.ogg",
|
||||
"video.ogg", # Even if someone names it "video.ogg"
|
||||
".ogg",
|
||||
"TR_VC_fake_video.ogg", # Video-like track ID but .ogg extension
|
||||
]
|
||||
for f in test_names:
|
||||
assert not self._should_delete(f), f".ogg file would be deleted: {f}"
|
||||
|
||||
def test_webm_always_matches_delete(self):
|
||||
test_names = [
|
||||
"anything.webm",
|
||||
"livekit/room/track.webm",
|
||||
"audio.webm", # Even if someone names it "audio.webm"
|
||||
".webm",
|
||||
]
|
||||
for f in test_names:
|
||||
assert self._should_delete(f), f".webm file NOT marked for deletion: {f}"
|
||||
|
||||
def test_unknown_extensions_are_kept(self):
|
||||
"""Unknown file types should NOT be deleted (safe by default)."""
|
||||
test_names = [
|
||||
"file.mp4",
|
||||
"file.wav",
|
||||
"file.mp3",
|
||||
"file.txt",
|
||||
"file",
|
||||
"",
|
||||
]
|
||||
for f in test_names:
|
||||
assert not self._should_delete(
|
||||
f
|
||||
), f"Unknown file type would be deleted: {f}"
|
||||
|
||||
|
||||
# ── Platform detection ────────────────────────────────────────
|
||||
|
||||
|
||||
class TestSourcePlatformDetection:
|
||||
"""Test the recording ID prefix-based platform detection from transcript_process.py."""
|
||||
|
||||
def test_livekit_prefix(self):
|
||||
recording_id = "lk-livekit-20260401234423"
|
||||
platform = "livekit" if recording_id.startswith("lk-") else "daily"
|
||||
assert platform == "livekit"
|
||||
|
||||
def test_daily_no_prefix(self):
|
||||
recording_id = "08fa0b24-9220-44c5-846c-3f116cf8e738"
|
||||
platform = "livekit" if recording_id.startswith("lk-") else "daily"
|
||||
assert platform == "daily"
|
||||
|
||||
def test_none_recording_id(self):
|
||||
recording_id = None
|
||||
platform = (
|
||||
"livekit" if recording_id and recording_id.startswith("lk-") else "daily"
|
||||
)
|
||||
assert platform == "daily"
|
||||
|
||||
def test_empty_recording_id(self):
|
||||
recording_id = ""
|
||||
platform = (
|
||||
"livekit" if recording_id and recording_id.startswith("lk-") else "daily"
|
||||
)
|
||||
assert platform == "daily"
|
||||
393
server/tests/test_livekit_track_processing.py
Normal file
393
server/tests/test_livekit_track_processing.py
Normal file
@@ -0,0 +1,393 @@
|
||||
"""
|
||||
Tests for LiveKit track processing: filepath parsing, offset calculation,
|
||||
and pad_track padding_seconds behavior.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from fractions import Fraction
|
||||
|
||||
import av
|
||||
import pytest
|
||||
|
||||
from reflector.utils.livekit import (
|
||||
LiveKitTrackFile,
|
||||
calculate_track_offsets,
|
||||
extract_livekit_base_room_name,
|
||||
filter_audio_tracks,
|
||||
parse_livekit_track_filepath,
|
||||
)
|
||||
|
||||
# ── Filepath parsing ──────────────────────────────────────────
|
||||
|
||||
|
||||
class TestParseLiveKitTrackFilepath:
|
||||
def test_parses_ogg_audio_track(self):
|
||||
result = parse_livekit_track_filepath(
|
||||
"livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg"
|
||||
)
|
||||
assert result.room_name == "myroom-20260401172036"
|
||||
assert result.participant_identity == "juan-4b82ed"
|
||||
assert result.track_id == "TR_AMR3SWs74Divho"
|
||||
assert result.timestamp == datetime(2026, 4, 1, 19, 57, 58, tzinfo=timezone.utc)
|
||||
|
||||
def test_parses_different_identities(self):
|
||||
r1 = parse_livekit_track_filepath(
|
||||
"livekit/room-20260401/alice-a1b2c3-2026-04-01T100000-TR_abc123.ogg"
|
||||
)
|
||||
r2 = parse_livekit_track_filepath(
|
||||
"livekit/room-20260401/bob_smith-d4e5f6-2026-04-01T100030-TR_def456.ogg"
|
||||
)
|
||||
assert r1.participant_identity == "alice-a1b2c3"
|
||||
assert r2.participant_identity == "bob_smith-d4e5f6"
|
||||
|
||||
def test_rejects_json_manifest(self):
|
||||
with pytest.raises(ValueError, match="doesn't match expected format"):
|
||||
parse_livekit_track_filepath("livekit/myroom-20260401/EG_K5sipvfB5fTM.json")
|
||||
|
||||
def test_rejects_webm_video(self):
|
||||
# webm files match the pattern but are filtered by filter_audio_tracks
|
||||
result = parse_livekit_track_filepath(
|
||||
"livekit/myroom-20260401/juan-4b82ed-2026-04-01T195727-TR_VC679dgMQBdfhT.webm"
|
||||
)
|
||||
# webm parses successfully (TR_ prefix matches video tracks too)
|
||||
assert result.track_id == "TR_VC679dgMQBdfhT"
|
||||
|
||||
def test_rejects_invalid_path(self):
|
||||
with pytest.raises(ValueError):
|
||||
parse_livekit_track_filepath("not/a/valid/path.ogg")
|
||||
|
||||
def test_rejects_missing_track_id(self):
|
||||
with pytest.raises(ValueError):
|
||||
parse_livekit_track_filepath("livekit/room/user-2026-04-01T100000.ogg")
|
||||
|
||||
def test_parses_timestamp_correctly(self):
|
||||
result = parse_livekit_track_filepath(
|
||||
"livekit/room-20260401/user-abc123-2026-12-25T235959-TR_test.ogg"
|
||||
)
|
||||
assert result.timestamp == datetime(
|
||||
2026, 12, 25, 23, 59, 59, tzinfo=timezone.utc
|
||||
)
|
||||
|
||||
|
||||
# ── Audio track filtering ─────────────────────────────────────
|
||||
|
||||
|
||||
class TestFilterAudioTracks:
|
||||
def test_filters_to_ogg_only(self):
|
||||
keys = [
|
||||
"livekit/room/EG_abc.json",
|
||||
"livekit/room/user-abc-2026-04-01T100000-TR_audio.ogg",
|
||||
"livekit/room/user-abc-2026-04-01T100000-TR_video.webm",
|
||||
"livekit/room/EG_def.json",
|
||||
"livekit/room/user2-def-2026-04-01T100030-TR_audio2.ogg",
|
||||
]
|
||||
result = filter_audio_tracks(keys)
|
||||
assert len(result) == 2
|
||||
assert all(k.endswith(".ogg") for k in result)
|
||||
|
||||
def test_empty_input(self):
|
||||
assert filter_audio_tracks([]) == []
|
||||
|
||||
def test_no_audio_tracks(self):
|
||||
keys = ["livekit/room/EG_abc.json", "livekit/room/user-TR_v.webm"]
|
||||
assert filter_audio_tracks(keys) == []
|
||||
|
||||
|
||||
# ── Offset calculation ─────────────────────────────────────────
|
||||
|
||||
|
||||
class TestCalculateTrackOffsets:
|
||||
def test_single_track_zero_offset(self):
|
||||
tracks = [
|
||||
LiveKitTrackFile(
|
||||
s3_key="k1",
|
||||
room_name="r",
|
||||
participant_identity="alice",
|
||||
timestamp=datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc),
|
||||
track_id="TR_1",
|
||||
)
|
||||
]
|
||||
offsets = calculate_track_offsets(tracks)
|
||||
assert len(offsets) == 1
|
||||
assert offsets[0][1] == 0.0
|
||||
|
||||
def test_two_tracks_correct_offset(self):
|
||||
tracks = [
|
||||
LiveKitTrackFile(
|
||||
s3_key="k1",
|
||||
room_name="r",
|
||||
participant_identity="alice",
|
||||
timestamp=datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc),
|
||||
track_id="TR_1",
|
||||
),
|
||||
LiveKitTrackFile(
|
||||
s3_key="k2",
|
||||
room_name="r",
|
||||
participant_identity="bob",
|
||||
timestamp=datetime(2026, 4, 1, 10, 1, 10, tzinfo=timezone.utc),
|
||||
track_id="TR_2",
|
||||
),
|
||||
]
|
||||
offsets = calculate_track_offsets(tracks)
|
||||
assert offsets[0][1] == 0.0 # alice (earliest)
|
||||
assert offsets[1][1] == 70.0 # bob (70 seconds later)
|
||||
|
||||
def test_three_tracks_earliest_is_zero(self):
|
||||
tracks = [
|
||||
LiveKitTrackFile(
|
||||
s3_key="k2",
|
||||
room_name="r",
|
||||
participant_identity="bob",
|
||||
timestamp=datetime(2026, 4, 1, 10, 0, 30, tzinfo=timezone.utc),
|
||||
track_id="TR_2",
|
||||
),
|
||||
LiveKitTrackFile(
|
||||
s3_key="k1",
|
||||
room_name="r",
|
||||
participant_identity="alice",
|
||||
timestamp=datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc),
|
||||
track_id="TR_1",
|
||||
),
|
||||
LiveKitTrackFile(
|
||||
s3_key="k3",
|
||||
room_name="r",
|
||||
participant_identity="charlie",
|
||||
timestamp=datetime(2026, 4, 1, 10, 1, 0, tzinfo=timezone.utc),
|
||||
track_id="TR_3",
|
||||
),
|
||||
]
|
||||
offsets = calculate_track_offsets(tracks)
|
||||
offset_map = {t.participant_identity: o for t, o in offsets}
|
||||
assert offset_map["alice"] == 0.0
|
||||
assert offset_map["bob"] == 30.0
|
||||
assert offset_map["charlie"] == 60.0
|
||||
|
||||
def test_empty_tracks(self):
|
||||
assert calculate_track_offsets([]) == []
|
||||
|
||||
def test_simultaneous_tracks_zero_offsets(self):
|
||||
ts = datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc)
|
||||
tracks = [
|
||||
LiveKitTrackFile(
|
||||
s3_key="k1",
|
||||
room_name="r",
|
||||
participant_identity="a",
|
||||
timestamp=ts,
|
||||
track_id="TR_1",
|
||||
),
|
||||
LiveKitTrackFile(
|
||||
s3_key="k2",
|
||||
room_name="r",
|
||||
participant_identity="b",
|
||||
timestamp=ts,
|
||||
track_id="TR_2",
|
||||
),
|
||||
]
|
||||
offsets = calculate_track_offsets(tracks)
|
||||
assert all(o == 0.0 for _, o in offsets)
|
||||
|
||||
|
||||
# ── Room name extraction ───────────────────────────────────────
|
||||
|
||||
|
||||
class TestExtractLiveKitBaseRoomName:
|
||||
def test_strips_timestamp_suffix(self):
|
||||
assert extract_livekit_base_room_name("myroom-20260401172036") == "myroom"
|
||||
|
||||
def test_preserves_hyphenated_name(self):
|
||||
assert (
|
||||
extract_livekit_base_room_name("my-room-name-20260401172036")
|
||||
== "my-room-name"
|
||||
)
|
||||
|
||||
def test_single_segment(self):
|
||||
assert extract_livekit_base_room_name("room-20260401") == "room"
|
||||
|
||||
|
||||
# ── pad_track padding_seconds behavior ─────────────────────────
|
||||
|
||||
|
||||
class TestPadTrackPaddingSeconds:
|
||||
"""Test that pad_track correctly uses pre-calculated padding_seconds
|
||||
for LiveKit (skipping container metadata) vs extracting from container
|
||||
for Daily (when padding_seconds is None).
|
||||
"""
|
||||
|
||||
def _make_test_ogg(self, path: str, duration_seconds: float = 5.0):
|
||||
"""Create a minimal OGG/Opus file for testing."""
|
||||
with av.open(path, "w", format="ogg") as out:
|
||||
stream = out.add_stream("libopus", rate=48000)
|
||||
stream.bit_rate = 64000
|
||||
samples_per_frame = 960 # Opus standard
|
||||
total_samples = int(duration_seconds * 48000)
|
||||
pts = 0
|
||||
while pts < total_samples:
|
||||
frame = av.AudioFrame(
|
||||
format="s16", layout="stereo", samples=samples_per_frame
|
||||
)
|
||||
# Fill with silence (zeros)
|
||||
frame.planes[0].update(bytes(samples_per_frame * 2 * 2)) # s16 * stereo
|
||||
frame.sample_rate = 48000
|
||||
frame.pts = pts
|
||||
frame.time_base = Fraction(1, 48000)
|
||||
for packet in stream.encode(frame):
|
||||
out.mux(packet)
|
||||
pts += samples_per_frame
|
||||
for packet in stream.encode(None):
|
||||
out.mux(packet)
|
||||
|
||||
def test_ogg_has_zero_start_time(self, tmp_path):
|
||||
"""Verify that OGG files (like LiveKit produces) have start_time=0,
|
||||
confirming why pre-calculated padding is needed."""
|
||||
ogg_path = str(tmp_path / "test.ogg")
|
||||
self._make_test_ogg(ogg_path)
|
||||
|
||||
with av.open(ogg_path) as container:
|
||||
from reflector.utils.audio_padding import (
|
||||
extract_stream_start_time_from_container,
|
||||
)
|
||||
|
||||
start_time = extract_stream_start_time_from_container(container, 0)
|
||||
|
||||
assert start_time <= 0.0, (
|
||||
"OGG files should have start_time<=0 (no usable offset), confirming "
|
||||
f"LiveKit tracks need pre-calculated padding_seconds. Got: {start_time}"
|
||||
)
|
||||
|
||||
def test_precalculated_padding_skips_metadata_extraction(self, tmp_path):
|
||||
"""When padding_seconds is set, pad_track should use it directly
|
||||
and NOT call extract_stream_start_time_from_container."""
|
||||
from reflector.hatchet.workflows.track_processing import TrackInput
|
||||
|
||||
input_data = TrackInput(
|
||||
track_index=0,
|
||||
s3_key="livekit/room/user-abc-2026-04-01T100000-TR_audio.ogg",
|
||||
bucket_name="test-bucket",
|
||||
transcript_id="test-transcript",
|
||||
source_platform="livekit",
|
||||
padding_seconds=70.0,
|
||||
)
|
||||
|
||||
assert input_data.padding_seconds == 70.0
|
||||
# The pad_track function checks: if input.padding_seconds is not None → use it
|
||||
# This means extract_stream_start_time_from_container is never called for LiveKit
|
||||
|
||||
def test_none_padding_falls_back_to_metadata(self, tmp_path):
|
||||
"""When padding_seconds is None (Daily), pad_track should extract
|
||||
start_time from container metadata."""
|
||||
from reflector.hatchet.workflows.track_processing import TrackInput
|
||||
|
||||
input_data = TrackInput(
|
||||
track_index=0,
|
||||
s3_key="daily/room/track.webm",
|
||||
bucket_name="test-bucket",
|
||||
transcript_id="test-transcript",
|
||||
source_platform="daily",
|
||||
padding_seconds=None,
|
||||
)
|
||||
|
||||
assert input_data.padding_seconds is None
|
||||
# pad_track will call extract_stream_start_time_from_container for this case
|
||||
|
||||
def test_zero_padding_returns_original_key(self):
|
||||
"""When padding_seconds=0.0, pad_track should return the original S3 key
|
||||
without applying any padding (same as start_time=0 from metadata)."""
|
||||
from reflector.hatchet.workflows.track_processing import TrackInput
|
||||
|
||||
input_data = TrackInput(
|
||||
track_index=0,
|
||||
s3_key="livekit/room/earliest-track.ogg",
|
||||
bucket_name="test-bucket",
|
||||
transcript_id="test-transcript",
|
||||
source_platform="livekit",
|
||||
padding_seconds=0.0,
|
||||
)
|
||||
|
||||
# padding_seconds=0.0 → start_time_seconds=0.0 → "no padding needed" branch
|
||||
assert input_data.padding_seconds == 0.0
|
||||
|
||||
|
||||
# ── Pipeline offset calculation (process_tracks logic) ─────────
|
||||
|
||||
|
||||
class TestProcessTracksOffsetCalculation:
|
||||
"""Test the offset calculation logic used in process_tracks
|
||||
for LiveKit source_platform."""
|
||||
|
||||
def test_livekit_offsets_from_timestamps(self):
|
||||
"""Simulate the offset calculation done in process_tracks."""
|
||||
tracks = [
|
||||
{
|
||||
"s3_key": "track1.ogg",
|
||||
"participant_identity": "admin-0129c3",
|
||||
"timestamp": "2026-04-01T23:44:50+00:00",
|
||||
},
|
||||
{
|
||||
"s3_key": "track2.ogg",
|
||||
"participant_identity": "juan-5a5b41",
|
||||
"timestamp": "2026-04-01T23:46:00+00:00",
|
||||
},
|
||||
]
|
||||
|
||||
# Replicate the logic from process_tracks
|
||||
timestamps = []
|
||||
for i, track in enumerate(tracks):
|
||||
ts_str = track.get("timestamp")
|
||||
if ts_str:
|
||||
ts = datetime.fromisoformat(ts_str)
|
||||
timestamps.append((i, ts))
|
||||
|
||||
earliest = min(ts for _, ts in timestamps)
|
||||
track_padding = {}
|
||||
for i, ts in timestamps:
|
||||
track_padding[i] = (ts - earliest).total_seconds()
|
||||
|
||||
assert track_padding[0] == 0.0 # admin (earliest)
|
||||
assert track_padding[1] == 70.0 # juan (70s later)
|
||||
|
||||
def test_daily_tracks_get_no_precalculated_padding(self):
|
||||
"""Daily tracks should NOT get padding_seconds (use container metadata)."""
|
||||
tracks = [
|
||||
{"s3_key": "daily-track1.webm"},
|
||||
{"s3_key": "daily-track2.webm"},
|
||||
]
|
||||
|
||||
# Daily tracks don't have "timestamp" field
|
||||
track_padding = {}
|
||||
source_platform = "daily"
|
||||
|
||||
if source_platform == "livekit":
|
||||
# This block should NOT execute for daily
|
||||
pass
|
||||
|
||||
# Daily tracks get no pre-calculated padding
|
||||
assert track_padding == {}
|
||||
for i, _ in enumerate(tracks):
|
||||
assert track_padding.get(i) is None
|
||||
|
||||
def test_livekit_missing_timestamp_graceful(self):
|
||||
"""If a LiveKit track is missing timestamp, it should be skipped."""
|
||||
tracks = [
|
||||
{
|
||||
"s3_key": "track1.ogg",
|
||||
"participant_identity": "alice",
|
||||
"timestamp": "2026-04-01T10:00:00+00:00",
|
||||
},
|
||||
{"s3_key": "track2.ogg", "participant_identity": "bob"}, # no timestamp
|
||||
]
|
||||
|
||||
timestamps = []
|
||||
for i, track in enumerate(tracks):
|
||||
ts_str = track.get("timestamp")
|
||||
if ts_str:
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_str)
|
||||
timestamps.append((i, ts))
|
||||
except (ValueError, TypeError):
|
||||
timestamps.append((i, None))
|
||||
else:
|
||||
timestamps.append((i, None))
|
||||
|
||||
valid = [(i, ts) for i, ts in timestamps if ts is not None]
|
||||
assert len(valid) == 1 # only alice has a timestamp
|
||||
assert valid[0][0] == 0 # track index 0
|
||||
@@ -452,9 +452,11 @@ async def test_anonymous_cannot_webrtc_record_when_not_public(client, monkeypatc
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_start_meeting_recording_when_not_public(
|
||||
async def test_anonymous_can_start_meeting_recording_when_not_public(
|
||||
client, monkeypatch
|
||||
):
|
||||
"""Anonymous users can start recording since it's triggered from the frontend
|
||||
and recording is at room level via Daily REST API."""
|
||||
monkeypatch.setattr(settings, "PUBLIC_MODE", False)
|
||||
|
||||
room = await rooms_controller.add(
|
||||
@@ -486,7 +488,8 @@ async def test_anonymous_cannot_start_meeting_recording_when_not_public(
|
||||
f"/meetings/{meeting.id}/recordings/start",
|
||||
json={"type": "cloud", "instanceId": "00000000-0000-0000-0000-000000000001"},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
# Should not be 401 (may fail for other reasons like no Daily API, but auth passes)
|
||||
assert resp.status_code != 401, f"Should not get 401: {resp.text}"
|
||||
|
||||
|
||||
# ======================================================================
|
||||
|
||||
136
server/uv.lock
generated
136
server/uv.lock
generated
@@ -74,7 +74,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -85,42 +85,42 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = "2026-01-03T17:29:44.822Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 1899449, upload-time = "2026-01-03T17:29:53.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = "2026-01-03T17:30:05.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/7e/cb94129302d78c46662b47f9897d642fd0b33bdfef4b73b20c6ced35aa4c/aiohttp-3.13.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8ea0c64d1bcbf201b285c2246c51a0c035ba3bbd306640007bc5844a3b4658c1", size = 760027, upload-time = "2026-03-28T17:15:33.022Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/cd/2db3c9397c3bd24216b203dd739945b04f8b87bb036c640da7ddb63c75ef/aiohttp-3.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f742e1fa45c0ed522b00ede565e18f97e4cf8d1883a712ac42d0339dfb0cce7", size = 508325, upload-time = "2026-03-28T17:15:34.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/a3/d28b2722ec13107f2e37a86b8a169897308bab6a3b9e071ecead9d67bd9b/aiohttp-3.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dcfb50ee25b3b7a1222a9123be1f9f89e56e67636b561441f0b304e25aaef8f", size = 502402, upload-time = "2026-03-28T17:15:36.409Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/d6/acd47b5f17c4430e555590990a4746efbcb2079909bb865516892bf85f37/aiohttp-3.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3262386c4ff370849863ea93b9ea60fd59c6cf56bf8f93beac625cf4d677c04d", size = 1771224, upload-time = "2026-03-28T17:15:38.223Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/af/af6e20113ba6a48fd1cd9e5832c4851e7613ef50c7619acdaee6ec5f1aff/aiohttp-3.13.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:473bb5aa4218dd254e9ae4834f20e31f5a0083064ac0136a01a62ddbae2eaa42", size = 1731530, upload-time = "2026-03-28T17:15:39.988Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/16/78a2f5d9c124ad05d5ce59a9af94214b6466c3491a25fb70760e98e9f762/aiohttp-3.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e56423766399b4c77b965f6aaab6c9546617b8994a956821cc507d00b91d978c", size = 1827925, upload-time = "2026-03-28T17:15:41.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/1f/79acf0974ced805e0e70027389fccbb7d728e6f30fcac725fb1071e63075/aiohttp-3.13.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8af249343fafd5ad90366a16d230fc265cf1149f26075dc9fe93cfd7c7173942", size = 1923579, upload-time = "2026-03-28T17:15:44.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/53/29f9e2054ea6900413f3b4c3eb9d8331f60678ec855f13ba8714c47fd48d/aiohttp-3.13.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bc0a5cf4f10ef5a2c94fdde488734b582a3a7a000b131263e27c9295bd682d9", size = 1767655, upload-time = "2026-03-28T17:15:45.911Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/57/462fe1d3da08109ba4aa8590e7aed57c059af2a7e80ec21f4bac5cfe1094/aiohttp-3.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5c7ff1028e3c9fc5123a865ce17df1cb6424d180c503b8517afbe89aa566e6be", size = 1630439, upload-time = "2026-03-28T17:15:48.11Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/4b/4813344aacdb8127263e3eec343d24e973421143826364fa9fc847f6283f/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ba5cf98b5dcb9bddd857da6713a503fa6d341043258ca823f0f5ab7ab4a94ee8", size = 1745557, upload-time = "2026-03-28T17:15:50.13Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/01/1ef1adae1454341ec50a789f03cfafe4c4ac9c003f6a64515ecd32fe4210/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d85965d3ba21ee4999e83e992fecb86c4614d6920e40705501c0a1f80a583c12", size = 1741796, upload-time = "2026-03-28T17:15:52.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/04/8cdd99af988d2aa6922714d957d21383c559835cbd43fbf5a47ddf2e0f05/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:49f0b18a9b05d79f6f37ddd567695943fcefb834ef480f17a4211987302b2dc7", size = 1805312, upload-time = "2026-03-28T17:15:54.407Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/7f/b48d5577338d4b25bbdbae35c75dbfd0493cb8886dc586fbfb2e90862239/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7f78cb080c86fbf765920e5f1ef35af3f24ec4314d6675d0a21eaf41f6f2679c", size = 1621751, upload-time = "2026-03-28T17:15:56.564Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/89/4eecad8c1858e6d0893c05929e22343e0ebe3aec29a8a399c65c3cc38311/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:67a3ec705534a614b68bbf1c70efa777a21c3da3895d1c44510a41f5a7ae0453", size = 1826073, upload-time = "2026-03-28T17:15:58.489Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/5c/9dc8293ed31b46c39c9c513ac7ca152b3c3d38e0ea111a530ad12001b827/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6630ec917e85c5356b2295744c8a97d40f007f96a1c76bf1928dc2e27465393", size = 1760083, upload-time = "2026-03-28T17:16:00.677Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/19/8bbf6a4994205d96831f97b7d21a0feed120136e6267b5b22d229c6dc4dc/aiohttp-3.13.4-cp311-cp311-win32.whl", hash = "sha256:54049021bc626f53a5394c29e8c444f726ee5a14b6e89e0ad118315b1f90f5e3", size = 439690, upload-time = "2026-03-28T17:16:02.902Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/f5/ac409ecd1007528d15c3e8c3a57d34f334c70d76cfb7128a28cffdebd4c1/aiohttp-3.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:c033f2bc964156030772d31cbf7e5defea181238ce1f87b9455b786de7d30145", size = 463824, upload-time = "2026-03-28T17:16:05.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1805,6 +1805,35 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/25/f4/ead6e0e37209b07c9baa3e984ccdb0348ca370b77cea3aaea8ddbb097e00/lightning_utilities-0.15.3-py3-none-any.whl", hash = "sha256:6c55f1bee70084a1cbeaa41ada96e4b3a0fea5909e844dd335bd80f5a73c5f91", size = 31906, upload-time = "2026-02-22T14:48:52.488Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "livekit-api"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "livekit-protocol" },
|
||||
{ name = "protobuf" },
|
||||
{ name = "pyjwt" },
|
||||
{ name = "types-protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b5/0a/ad3cce124e608c056d6390244ec4dd18c8a4b5f055693a95831da2119af7/livekit_api-1.1.0.tar.gz", hash = "sha256:f94c000534d3a9b506e6aed2f35eb88db1b23bdea33bb322f0144c4e9f73934e", size = 16649, upload-time = "2025-12-02T19:37:11.452Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/b9/8d8515e3e0e629ab07d399cf858b8fc7e0a02bbf6384a6592b285264b4b9/livekit_api-1.1.0-py3-none-any.whl", hash = "sha256:bfc1c2c65392eb3f580a2c28108269f0e79873f053578a677eee7bb1de8aa8fb", size = 19620, upload-time = "2025-12-02T19:37:10.075Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "livekit-protocol"
|
||||
version = "1.1.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "protobuf" },
|
||||
{ name = "types-protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8e/ca/d15e2a2cc8c8aa4ba621fe5f9ffd1806d88ac91c7b8fa4c09a3c0304dd92/livekit_protocol-1.1.3.tar.gz", hash = "sha256:cb4948d2513e81d91583f4a795bf80faa9026cedda509c5714999c7e33564287", size = 88746, upload-time = "2026-03-18T05:25:43.562Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/0e/f3d3e48628294df4559cffd0f8e1adf030127029e5a8da9beff9979090a0/livekit_protocol-1.1.3-py3-none-any.whl", hash = "sha256:fdae5640e064ab6549ec3d62d8bac75a3ef44d7ea73716069b419cbe8b360a5c", size = 107498, upload-time = "2026-03-18T05:25:42.077Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-cloud"
|
||||
version = "0.1.35"
|
||||
@@ -3364,6 +3393,7 @@ dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "icalendar" },
|
||||
{ name = "jsonschema" },
|
||||
{ name = "livekit-api" },
|
||||
{ name = "llama-index" },
|
||||
{ name = "llama-index-llms-openai-like" },
|
||||
{ name = "openai" },
|
||||
@@ -3445,6 +3475,7 @@ requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.24.1" },
|
||||
{ name = "icalendar", specifier = ">=6.0.0" },
|
||||
{ name = "jsonschema", specifier = ">=4.23.0" },
|
||||
{ name = "livekit-api", specifier = ">=1.1.0" },
|
||||
{ name = "llama-index", specifier = ">=0.12.52" },
|
||||
{ name = "llama-index-llms-openai-like", specifier = ">=0.4.0" },
|
||||
{ name = "openai", specifier = ">=1.59.7" },
|
||||
@@ -4210,8 +4241,8 @@ dependencies = [
|
||||
{ name = "typing-extensions", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:3d05017d19bc99741288e458888283a44b0ee881d53f05f72f8b1cfea8998122" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:a47b7986bee3f61ad217d8a8ce24605809ab425baf349f97de758815edd2ef54" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:3d05017d19bc99741288e458888283a44b0ee881d53f05f72f8b1cfea8998122" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:a47b7986bee3f61ad217d8a8ce24605809ab425baf349f97de758815edd2ef54" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4242,16 +4273,16 @@ dependencies = [
|
||||
{ name = "typing-extensions", marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4399,6 +4430,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-protobuf"
|
||||
version = "6.32.1.20260221"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5f/e2/9aa4a3b2469508bd7b4e2ae11cbedaf419222a09a1b94daffcd5efca4023/types_protobuf-6.32.1.20260221.tar.gz", hash = "sha256:6d5fb060a616bfb076cbb61b4b3c3969f5fc8bec5810f9a2f7e648ee5cbcbf6e", size = 64408, upload-time = "2026-02-21T03:55:13.916Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/e8/1fd38926f9cf031188fbc5a96694203ea6f24b0e34bd64a225ec6f6291ba/types_protobuf-6.32.1.20260221-py3-none-any.whl", hash = "sha256:da7cdd947975964a93c30bfbcc2c6841ee646b318d3816b033adc2c4eb6448e4", size = 77956, upload-time = "2026-02-21T03:55:12.894Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.14.1"
|
||||
|
||||
@@ -10,14 +10,17 @@ import {
|
||||
Badge,
|
||||
VStack,
|
||||
Icon,
|
||||
Tooltip,
|
||||
} from "@chakra-ui/react";
|
||||
import { LuLink, LuRefreshCw } from "react-icons/lu";
|
||||
import { FaStop } from "react-icons/fa";
|
||||
import { FaCalendarAlt } from "react-icons/fa";
|
||||
import type { components } from "../../../reflector-api";
|
||||
import {
|
||||
useRoomActiveMeetings,
|
||||
useRoomUpcomingMeetings,
|
||||
useRoomIcsSync,
|
||||
useMeetingDeactivate,
|
||||
} from "../../../lib/apiHooks";
|
||||
|
||||
type Room = components["schemas"]["Room"];
|
||||
@@ -107,6 +110,7 @@ const getZulipDisplay = (
|
||||
function MeetingStatus({ roomName }: { roomName: string }) {
|
||||
const activeMeetingsQuery = useRoomActiveMeetings(roomName);
|
||||
const upcomingMeetingsQuery = useRoomUpcomingMeetings(roomName);
|
||||
const deactivateMutation = useMeetingDeactivate();
|
||||
|
||||
const activeMeetings = activeMeetingsQuery.data || [];
|
||||
const upcomingMeetings = upcomingMeetingsQuery.data || [];
|
||||
@@ -121,14 +125,46 @@ function MeetingStatus({ roomName }: { roomName: string }) {
|
||||
meeting.calendar_metadata?.["title"] || "Active Meeting",
|
||||
);
|
||||
return (
|
||||
<VStack gap={1} alignItems="start">
|
||||
<Text fontSize="xs" color="gray.600" lineHeight={1}>
|
||||
{title}
|
||||
</Text>
|
||||
<Text fontSize="xs" color="gray.500" lineHeight={1}>
|
||||
{meeting.num_clients} participants
|
||||
</Text>
|
||||
</VStack>
|
||||
<Flex alignItems="center" gap={2}>
|
||||
<VStack gap={1} alignItems="start">
|
||||
<Text fontSize="xs" color="gray.600" lineHeight={1}>
|
||||
{title}
|
||||
</Text>
|
||||
<Text fontSize="xs" color="gray.500" lineHeight={1}>
|
||||
{meeting.num_clients} participants
|
||||
</Text>
|
||||
</VStack>
|
||||
{activeMeetings.length === 1 && (meeting.num_clients ?? 0) < 2 && (
|
||||
<Tooltip.Root openDelay={100}>
|
||||
<Tooltip.Trigger asChild>
|
||||
<IconButton
|
||||
aria-label="End meeting"
|
||||
size="xs"
|
||||
variant="ghost"
|
||||
color="red.500"
|
||||
_hover={{ bg: "transparent", color: "red.600" }}
|
||||
onClick={() =>
|
||||
deactivateMutation.mutate({
|
||||
params: { path: { meeting_id: meeting.id } },
|
||||
})
|
||||
}
|
||||
disabled={deactivateMutation.isPending}
|
||||
>
|
||||
{deactivateMutation.isPending ? (
|
||||
<Spinner size="xs" />
|
||||
) : (
|
||||
<FaStop />
|
||||
)}
|
||||
</IconButton>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Positioner>
|
||||
<Tooltip.Content>
|
||||
End this meeting and stop any active recordings
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Positioner>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
</Flex>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -74,6 +74,7 @@ const recordingTypeOptions: SelectOption[] = [
|
||||
const platformOptions: SelectOption[] = [
|
||||
{ label: "Whereby", value: "whereby" },
|
||||
{ label: "Daily", value: "daily" },
|
||||
{ label: "LiveKit", value: "livekit" },
|
||||
];
|
||||
|
||||
const roomInitialState = {
|
||||
@@ -309,10 +310,7 @@ export default function RoomsList() {
|
||||
return;
|
||||
}
|
||||
|
||||
const platform: "whereby" | "daily" =
|
||||
room.platform === "whereby" || room.platform === "daily"
|
||||
? room.platform
|
||||
: "daily";
|
||||
const platform = room.platform as "whereby" | "daily" | "livekit";
|
||||
|
||||
const roomData = {
|
||||
name: room.name,
|
||||
@@ -544,7 +542,10 @@ export default function RoomsList() {
|
||||
<Select.Root
|
||||
value={[room.platform]}
|
||||
onValueChange={(e) => {
|
||||
const newPlatform = e.value[0] as "whereby" | "daily";
|
||||
const newPlatform = e.value[0] as
|
||||
| "whereby"
|
||||
| "daily"
|
||||
| "livekit";
|
||||
const updates: Partial<typeof room> = {
|
||||
platform: newPlatform,
|
||||
};
|
||||
|
||||
@@ -337,6 +337,10 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
|
||||
[],
|
||||
);
|
||||
const emailIconUrl = useMemo(
|
||||
() => new URL("/email-icon-dark.svg", window.location.origin),
|
||||
[],
|
||||
);
|
||||
const emailIconDarkModeUrl = useMemo(
|
||||
() => new URL("/email-icon.svg", window.location.origin),
|
||||
[],
|
||||
);
|
||||
@@ -399,12 +403,13 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
|
||||
show
|
||||
? {
|
||||
iconPath: emailIconUrl.href,
|
||||
iconPathDarkMode: emailIconDarkModeUrl.href,
|
||||
label: "Email Transcript",
|
||||
tooltip: "Get transcript emailed to you",
|
||||
}
|
||||
: null,
|
||||
);
|
||||
}, [emailIconUrl, setCustomTrayButton]);
|
||||
}, [emailIconUrl, emailIconDarkModeUrl, setCustomTrayButton]);
|
||||
|
||||
if (authLastUserId === undefined) {
|
||||
return (
|
||||
|
||||
277
www/app/[roomName]/components/LiveKitRoom.tsx
Normal file
277
www/app/[roomName]/components/LiveKitRoom.tsx
Normal file
@@ -0,0 +1,277 @@
|
||||
"use client";
|
||||
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { Box, Spinner, Center, Text, IconButton } from "@chakra-ui/react";
|
||||
import { useRouter, useParams } from "next/navigation";
|
||||
import {
|
||||
LiveKitRoom as LKRoom,
|
||||
VideoConference,
|
||||
RoomAudioRenderer,
|
||||
PreJoin,
|
||||
type LocalUserChoices,
|
||||
} from "@livekit/components-react";
|
||||
import type { components } from "../../reflector-api";
|
||||
import { useAuth } from "../../lib/AuthProvider";
|
||||
import { useRoomJoinMeeting } from "../../lib/apiHooks";
|
||||
import { assertMeetingId } from "../../lib/types";
|
||||
import {
|
||||
ConsentDialogButton,
|
||||
RecordingIndicator,
|
||||
useConsentDialog,
|
||||
} from "../../lib/consent";
|
||||
import { useEmailTranscriptDialog } from "../../lib/emailTranscript";
|
||||
import { featureEnabled } from "../../lib/features";
|
||||
import { LuMail } from "react-icons/lu";
|
||||
|
||||
type Meeting = components["schemas"]["Meeting"];
|
||||
type Room = components["schemas"]["RoomDetails"];
|
||||
|
||||
interface LiveKitRoomProps {
|
||||
meeting: Meeting;
|
||||
room: Room;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract LiveKit WebSocket URL, room name, and token from the room_url.
|
||||
*
|
||||
* The backend returns room_url like: ws://host:7880?room=<name>&token=<jwt>
|
||||
* We split these for the LiveKit React SDK.
|
||||
*/
|
||||
function parseLiveKitUrl(roomUrl: string): {
|
||||
serverUrl: string;
|
||||
roomName: string | null;
|
||||
token: string | null;
|
||||
} {
|
||||
try {
|
||||
const url = new URL(roomUrl);
|
||||
const token = url.searchParams.get("token");
|
||||
const roomName = url.searchParams.get("room");
|
||||
url.searchParams.delete("token");
|
||||
url.searchParams.delete("room");
|
||||
// Strip trailing slash and leftover ? from URL API
|
||||
const serverUrl = url.toString().replace(/[?/]+$/, "");
|
||||
return { serverUrl, roomName, token };
|
||||
} catch {
|
||||
return { serverUrl: roomUrl, roomName: null, token: null };
|
||||
}
|
||||
}
|
||||
|
||||
export default function LiveKitRoom({ meeting, room }: LiveKitRoomProps) {
|
||||
const router = useRouter();
|
||||
const params = useParams();
|
||||
const auth = useAuth();
|
||||
const authLastUserId = auth.lastUserId;
|
||||
const roomName = params?.roomName as string;
|
||||
const meetingId = assertMeetingId(meeting.id);
|
||||
|
||||
const joinMutation = useRoomJoinMeeting();
|
||||
const [joinedMeeting, setJoinedMeeting] = useState<Meeting | null>(null);
|
||||
const [connectionError, setConnectionError] = useState(false);
|
||||
const [userChoices, setUserChoices] = useState<LocalUserChoices | null>(null);
|
||||
|
||||
// ── Consent dialog (same hooks as Daily/Whereby) ──────────
|
||||
const { showConsentButton, showRecordingIndicator } = useConsentDialog({
|
||||
meetingId,
|
||||
recordingType: meeting.recording_type,
|
||||
skipConsent: room.skip_consent,
|
||||
});
|
||||
|
||||
// ── Email transcript dialog ───────────────────────────────
|
||||
const userEmail =
|
||||
auth.status === "authenticated" || auth.status === "refreshing"
|
||||
? auth.user.email
|
||||
: null;
|
||||
const { showEmailModal } = useEmailTranscriptDialog({
|
||||
meetingId,
|
||||
userEmail,
|
||||
});
|
||||
const showEmailFeature = featureEnabled("emailTranscript");
|
||||
|
||||
// ── PreJoin defaults (persisted to localStorage for page refresh) ──
|
||||
const STORAGE_KEY = `livekit-username-${roomName}`;
|
||||
const defaultUsername = (() => {
|
||||
if (typeof window !== "undefined") {
|
||||
const saved = localStorage.getItem(STORAGE_KEY);
|
||||
if (saved) return saved;
|
||||
}
|
||||
if (auth.status === "authenticated" || auth.status === "refreshing") {
|
||||
return auth.user.email?.split("@")[0] || auth.user.id?.slice(0, 12) || "";
|
||||
}
|
||||
return "";
|
||||
})();
|
||||
const isJoining = !!userChoices && !joinedMeeting && !connectionError;
|
||||
|
||||
// ── Join meeting via backend API after PreJoin submit ─────
|
||||
useEffect(() => {
|
||||
if (
|
||||
authLastUserId === undefined ||
|
||||
!userChoices ||
|
||||
!meeting?.id ||
|
||||
!roomName
|
||||
)
|
||||
return;
|
||||
let cancelled = false;
|
||||
|
||||
async function join() {
|
||||
try {
|
||||
const result = await joinMutation.mutateAsync({
|
||||
params: {
|
||||
path: { room_name: roomName, meeting_id: meeting.id },
|
||||
query: { display_name: userChoices!.username || undefined },
|
||||
},
|
||||
});
|
||||
if (!cancelled) setJoinedMeeting(result);
|
||||
} catch (err) {
|
||||
console.error("Failed to join LiveKit meeting:", err);
|
||||
if (!cancelled) setConnectionError(true);
|
||||
}
|
||||
}
|
||||
|
||||
join();
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [meeting?.id, roomName, authLastUserId, userChoices]);
|
||||
|
||||
const handleDisconnected = useCallback(() => {
|
||||
router.push("/browse");
|
||||
}, [router]);
|
||||
|
||||
const handlePreJoinSubmit = useCallback(
|
||||
(choices: LocalUserChoices) => {
|
||||
// Persist username for page refresh
|
||||
if (choices.username) {
|
||||
localStorage.setItem(STORAGE_KEY, choices.username);
|
||||
}
|
||||
setUserChoices(choices);
|
||||
},
|
||||
[STORAGE_KEY],
|
||||
);
|
||||
|
||||
// ── PreJoin screen (name + device selection) ──────────────
|
||||
if (!userChoices) {
|
||||
return (
|
||||
<Box
|
||||
w="100vw"
|
||||
h="100vh"
|
||||
display="flex"
|
||||
alignItems="center"
|
||||
justifyContent="center"
|
||||
bg="gray.900"
|
||||
data-lk-theme="default"
|
||||
>
|
||||
<PreJoin
|
||||
defaults={{
|
||||
username: defaultUsername,
|
||||
audioEnabled: true,
|
||||
videoEnabled: true,
|
||||
}}
|
||||
onSubmit={handlePreJoinSubmit}
|
||||
userLabel="Display Name"
|
||||
/>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Loading / error states ────────────────────────────────
|
||||
if (isJoining) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.900">
|
||||
<Spinner color="blue.500" size="xl" />
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
if (connectionError) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Text fontSize="lg">Failed to connect to meeting</Text>
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
if (!joinedMeeting) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Spinner color="blue.500" size="xl" />
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
const {
|
||||
serverUrl,
|
||||
roomName: lkRoomName,
|
||||
token,
|
||||
} = parseLiveKitUrl(joinedMeeting.room_url);
|
||||
|
||||
if (
|
||||
serverUrl &&
|
||||
!serverUrl.startsWith("ws://") &&
|
||||
!serverUrl.startsWith("wss://")
|
||||
) {
|
||||
console.warn(
|
||||
`LiveKit serverUrl has unexpected scheme: ${serverUrl}. Expected ws:// or wss://`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!token || !lkRoomName) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Text fontSize="lg">
|
||||
{!token
|
||||
? "No access token received from server"
|
||||
: "No room name received from server"}
|
||||
</Text>
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Render ────────────────────────────────────────────────
|
||||
// The token already encodes the room name (in VideoGrants.room),
|
||||
// so LiveKit SDK joins the correct room from the token alone.
|
||||
return (
|
||||
<Box w="100vw" h="100vh" bg="black" position="relative">
|
||||
<LKRoom
|
||||
serverUrl={serverUrl}
|
||||
token={token}
|
||||
connect={true}
|
||||
audio={userChoices.audioEnabled}
|
||||
video={userChoices.videoEnabled}
|
||||
onDisconnected={handleDisconnected}
|
||||
data-lk-theme="default"
|
||||
style={{ height: "100%" }}
|
||||
>
|
||||
<VideoConference />
|
||||
<RoomAudioRenderer />
|
||||
</LKRoom>
|
||||
|
||||
{/* ── Floating overlay buttons (consent, email, extensible) ── */}
|
||||
{showConsentButton && (
|
||||
<ConsentDialogButton
|
||||
meetingId={meetingId}
|
||||
recordingType={meeting.recording_type}
|
||||
skipConsent={room.skip_consent}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showRecordingIndicator && <RecordingIndicator />}
|
||||
|
||||
{showEmailFeature && (
|
||||
<IconButton
|
||||
aria-label="Email transcript"
|
||||
position="absolute"
|
||||
top="56px"
|
||||
right="8px"
|
||||
zIndex={1000}
|
||||
colorPalette="blue"
|
||||
size="sm"
|
||||
onClick={showEmailModal}
|
||||
variant="solid"
|
||||
borderRadius="full"
|
||||
>
|
||||
<LuMail />
|
||||
</IconButton>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -14,6 +14,7 @@ import MeetingSelection from "../MeetingSelection";
|
||||
import useRoomDefaultMeeting from "../useRoomDefaultMeeting";
|
||||
import WherebyRoom from "./WherebyRoom";
|
||||
import DailyRoom from "./DailyRoom";
|
||||
import LiveKitRoom from "./LiveKitRoom";
|
||||
import { useAuth } from "../../lib/AuthProvider";
|
||||
import { useError } from "../../(errors)/errorContext";
|
||||
import { parseNonEmptyString } from "../../lib/utils";
|
||||
@@ -199,8 +200,9 @@ export default function RoomContainer(details: RoomDetails) {
|
||||
return <DailyRoom meeting={meeting} room={room} />;
|
||||
case "whereby":
|
||||
return <WherebyRoom meeting={meeting} room={room} />;
|
||||
default: {
|
||||
const _exhaustive: never = platform;
|
||||
case "livekit":
|
||||
return <LiveKitRoom meeting={meeting} room={room} />;
|
||||
default:
|
||||
return (
|
||||
<Box
|
||||
display="flex"
|
||||
@@ -213,6 +215,5 @@ export default function RoomContainer(details: RoomDetails) {
|
||||
<Text fontSize="lg">Unknown platform: {platform}</Text>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import "./styles/globals.scss";
|
||||
import "@livekit/components-styles";
|
||||
import { Metadata, Viewport } from "next";
|
||||
import { Poppins } from "next/font/google";
|
||||
import { ErrorProvider } from "./(errors)/errorContext";
|
||||
|
||||
60
www/app/reflector-api.d.ts
vendored
60
www/app/reflector-api.d.ts
vendored
@@ -911,6 +911,32 @@ export interface paths {
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/livekit/webhook": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
get?: never;
|
||||
put?: never;
|
||||
/**
|
||||
* Livekit Webhook
|
||||
* @description Handle LiveKit webhook events.
|
||||
*
|
||||
* LiveKit webhook events include:
|
||||
* - participant_joined / participant_left
|
||||
* - egress_started / egress_updated / egress_ended
|
||||
* - room_started / room_finished
|
||||
* - track_published / track_unpublished
|
||||
*/
|
||||
post: operations["v1_livekit_webhook"];
|
||||
delete?: never;
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/auth/login": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -1100,7 +1126,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -1821,7 +1847,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/** Daily Composed Video S3 Key */
|
||||
daily_composed_video_s3_key?: string | null;
|
||||
/** Daily Composed Video Duration */
|
||||
@@ -1921,7 +1947,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -1979,7 +2005,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -2358,7 +2384,7 @@ export interface components {
|
||||
/** Ics Enabled */
|
||||
ics_enabled?: boolean | null;
|
||||
/** Platform */
|
||||
platform?: ("whereby" | "daily") | null;
|
||||
platform?: ("whereby" | "daily" | "livekit") | null;
|
||||
/** Skip Consent */
|
||||
skip_consent?: boolean | null;
|
||||
/** Email Transcript To */
|
||||
@@ -3233,7 +3259,9 @@ export interface operations {
|
||||
};
|
||||
v1_rooms_join_meeting: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
query?: {
|
||||
display_name?: string | null;
|
||||
};
|
||||
header?: never;
|
||||
path: {
|
||||
room_name: string;
|
||||
@@ -4504,6 +4532,26 @@ export interface operations {
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_livekit_webhook: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": unknown;
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_login: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
|
||||
@@ -20,6 +20,8 @@
|
||||
"@fortawesome/fontawesome-svg-core": "^7.2.0",
|
||||
"@fortawesome/free-solid-svg-icons": "^7.2.0",
|
||||
"@fortawesome/react-fontawesome": "^3.2.0",
|
||||
"@livekit/components-react": "2.9.20",
|
||||
"@livekit/components-styles": "1.2.0",
|
||||
"@sentry/nextjs": "^10.40.0",
|
||||
"@tanstack/react-query": "^5.90.21",
|
||||
"@whereby.com/browser-sdk": "^3.18.21",
|
||||
@@ -30,6 +32,7 @@
|
||||
"fontawesome": "^5.6.3",
|
||||
"ioredis": "^5.10.0",
|
||||
"jest-worker": "^30.2.0",
|
||||
"livekit-client": "2.18.0",
|
||||
"lucide-react": "^0.575.0",
|
||||
"next": "16.1.7",
|
||||
"next-auth": "^4.24.13",
|
||||
|
||||
150
www/pnpm-lock.yaml
generated
150
www/pnpm-lock.yaml
generated
@@ -34,6 +34,12 @@ importers:
|
||||
'@fortawesome/react-fontawesome':
|
||||
specifier: ^3.2.0
|
||||
version: 3.2.0(@fortawesome/fontawesome-svg-core@7.2.0)(react@19.2.4)
|
||||
'@livekit/components-react':
|
||||
specifier: 2.9.20
|
||||
version: 2.9.20(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tslib@2.8.1)
|
||||
'@livekit/components-styles':
|
||||
specifier: 1.2.0
|
||||
version: 1.2.0
|
||||
'@sentry/nextjs':
|
||||
specifier: ^10.40.0
|
||||
version: 10.40.0(@opentelemetry/context-async-hooks@2.5.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.5.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.5.1(@opentelemetry/api@1.9.0))(next@16.1.7(@babel/core@7.29.0)(@opentelemetry/api@1.9.0)(babel-plugin-macros@3.1.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.97.3))(react@19.2.4)(webpack@5.105.3)
|
||||
@@ -64,6 +70,9 @@ importers:
|
||||
jest-worker:
|
||||
specifier: ^30.2.0
|
||||
version: 30.2.0
|
||||
livekit-client:
|
||||
specifier: 2.18.0
|
||||
version: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
lucide-react:
|
||||
specifier: ^0.575.0
|
||||
version: 0.575.0(react@19.2.4)
|
||||
@@ -343,6 +352,9 @@ packages:
|
||||
'@bcoe/v8-coverage@0.2.3':
|
||||
resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
|
||||
|
||||
'@bufbuild/protobuf@1.10.1':
|
||||
resolution: {integrity: sha512-wJ8ReQbHxsAfXhrf9ixl0aYbZorRuOWpBNzm8pL8ftmSxQx/wnJD5Eg861NwJU/czy2VXFIebCeZnZrI9rktIQ==}
|
||||
|
||||
'@chakra-ui/react@3.33.0':
|
||||
resolution: {integrity: sha512-HNbUFsFABjVL5IHBxsqtuT+AH/vQT1+xsEWrxnG0GBM2VjlzlMqlqCxNiDyQOsjLZXQC1ciCMbzPNcSCc63Y9w==}
|
||||
peerDependencies:
|
||||
@@ -445,6 +457,9 @@ packages:
|
||||
'@floating-ui/core@1.7.4':
|
||||
resolution: {integrity: sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==}
|
||||
|
||||
'@floating-ui/dom@1.7.4':
|
||||
resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==}
|
||||
|
||||
'@floating-ui/dom@1.7.5':
|
||||
resolution: {integrity: sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==}
|
||||
|
||||
@@ -767,6 +782,36 @@ packages:
|
||||
'@jridgewell/trace-mapping@0.3.31':
|
||||
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
|
||||
|
||||
'@livekit/components-core@0.12.13':
|
||||
resolution: {integrity: sha512-DQmi84afHoHjZ62wm8y+XPNIDHTwFHAltjd3lmyXj8UZHOY7wcza4vFt1xnghJOD5wLRY58L1dkAgAw59MgWvw==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
livekit-client: ^2.17.2
|
||||
tslib: ^2.6.2
|
||||
|
||||
'@livekit/components-react@2.9.20':
|
||||
resolution: {integrity: sha512-hjkYOsJj9Jbghb7wM5cI8HoVisKeL6Zcy1VnRWTLm0sqVbto8GJp/17T4Udx85mCPY6Jgh8I1Cv0yVzgz7CQtg==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
'@livekit/krisp-noise-filter': ^0.2.12 || ^0.3.0
|
||||
livekit-client: ^2.17.2
|
||||
react: '>=18'
|
||||
react-dom: '>=18'
|
||||
tslib: ^2.6.2
|
||||
peerDependenciesMeta:
|
||||
'@livekit/krisp-noise-filter':
|
||||
optional: true
|
||||
|
||||
'@livekit/components-styles@1.2.0':
|
||||
resolution: {integrity: sha512-74/rt0lDh6aHmOPmWAeDE9C4OrNW9RIdmhX/YRbovQBVNGNVWojRjl3FgQZ5LPFXO6l1maKB4JhXcBFENVxVvw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@livekit/mutex@1.1.1':
|
||||
resolution: {integrity: sha512-EsshAucklmpuUAfkABPxJNhzj9v2sG7JuzFDL4ML1oJQSV14sqrpTYnsaOudMAw9yOaW53NU3QQTlUQoRs4czw==}
|
||||
|
||||
'@livekit/protocol@1.44.0':
|
||||
resolution: {integrity: sha512-/vfhDUGcUKO8Q43r6i+5FrDhl5oZjm/X3U4x2Iciqvgn5C8qbj+57YPcWSJ1kyIZm5Cm6AV2nAPjMm3ETD/iyg==}
|
||||
|
||||
'@lukeed/csprng@1.1.0':
|
||||
resolution: {integrity: sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -1886,6 +1931,9 @@ packages:
|
||||
'@types/debug@4.1.12':
|
||||
resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
|
||||
|
||||
'@types/dom-mediacapture-record@1.0.22':
|
||||
resolution: {integrity: sha512-mUMZLK3NvwRLcAAT9qmcK+9p7tpU2FHdDsntR3YI4+GY88XrgG4XiE7u1Q2LAN2/FZOz/tdMDC3GQCR4T8nFuw==}
|
||||
|
||||
'@types/eslint-scope@3.7.7':
|
||||
resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==}
|
||||
|
||||
@@ -3839,6 +3887,9 @@ packages:
|
||||
jose@4.15.9:
|
||||
resolution: {integrity: sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==}
|
||||
|
||||
jose@6.2.2:
|
||||
resolution: {integrity: sha512-d7kPDd34KO/YnzaDOlikGpOurfF0ByC2sEV4cANCtdqLlTfBlw2p14O/5d/zv40gJPbIQxfES3nSx1/oYNyuZQ==}
|
||||
|
||||
js-levenshtein@1.1.6:
|
||||
resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
@@ -3984,6 +4035,11 @@ packages:
|
||||
lines-and-columns@1.2.4:
|
||||
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
||||
|
||||
livekit-client@2.18.0:
|
||||
resolution: {integrity: sha512-wjH4y0rw5fnkPmmaxutPhD4XcAq6goQszS8lw9PEpGXVwiRE6sI/ZH+mOT/s8AHJnEC3tjmfiMZ4MQt8BlaWew==}
|
||||
peerDependencies:
|
||||
'@types/dom-mediacapture-record': ^1
|
||||
|
||||
loader-runner@4.3.1:
|
||||
resolution: {integrity: sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==}
|
||||
engines: {node: '>=6.11.5'}
|
||||
@@ -3996,6 +4052,9 @@ packages:
|
||||
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
lodash.debounce@4.0.8:
|
||||
resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
|
||||
|
||||
lodash.defaults@4.2.0:
|
||||
resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==}
|
||||
|
||||
@@ -4005,6 +4064,14 @@ packages:
|
||||
lodash.memoize@4.1.2:
|
||||
resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==}
|
||||
|
||||
loglevel@1.9.1:
|
||||
resolution: {integrity: sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
|
||||
loglevel@1.9.2:
|
||||
resolution: {integrity: sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
|
||||
longest-streak@3.1.0:
|
||||
resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
|
||||
|
||||
@@ -4752,6 +4819,9 @@ packages:
|
||||
resolution: {integrity: sha512-K6p9y4ZyL9wPzA+PMDloNQPfoDGTiFYDvdlXznyGKgD10BJpcAosvATKrExRKOrNLgD8E7Um7WGW0lxsnOuNLg==}
|
||||
engines: {node: '>=4.0.0'}
|
||||
|
||||
rxjs@7.8.2:
|
||||
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
|
||||
|
||||
safe-array-concat@1.1.3:
|
||||
resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==}
|
||||
engines: {node: '>=0.4'}
|
||||
@@ -5133,6 +5203,9 @@ packages:
|
||||
resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
typed-emitter@2.1.0:
|
||||
resolution: {integrity: sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA==}
|
||||
|
||||
typescript-eslint@8.56.1:
|
||||
resolution: {integrity: sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
@@ -5228,6 +5301,12 @@ packages:
|
||||
'@types/react':
|
||||
optional: true
|
||||
|
||||
usehooks-ts@3.1.1:
|
||||
resolution: {integrity: sha512-I4diPp9Cq6ieSUH2wu+fDAVQO43xwtulo+fKEidHUwZPnYImbtkTjzIJYcDcJqxgmX31GVqNFURodvcgHcW0pA==}
|
||||
engines: {node: '>=16.15.0'}
|
||||
peerDependencies:
|
||||
react: ^16.8.0 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
||||
|
||||
util-deprecate@1.0.2:
|
||||
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
||||
|
||||
@@ -5662,6 +5741,8 @@ snapshots:
|
||||
|
||||
'@bcoe/v8-coverage@0.2.3': {}
|
||||
|
||||
'@bufbuild/protobuf@1.10.1': {}
|
||||
|
||||
'@chakra-ui/react@3.33.0(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
|
||||
dependencies:
|
||||
'@ark-ui/react': 5.34.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
@@ -5811,6 +5892,11 @@ snapshots:
|
||||
dependencies:
|
||||
'@floating-ui/utils': 0.2.10
|
||||
|
||||
'@floating-ui/dom@1.7.4':
|
||||
dependencies:
|
||||
'@floating-ui/core': 1.7.4
|
||||
'@floating-ui/utils': 0.2.10
|
||||
|
||||
'@floating-ui/dom@1.7.5':
|
||||
dependencies:
|
||||
'@floating-ui/core': 1.7.4
|
||||
@@ -6179,6 +6265,34 @@ snapshots:
|
||||
'@jridgewell/resolve-uri': 3.1.2
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
'@livekit/components-core@0.12.13(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(tslib@2.8.1)':
|
||||
dependencies:
|
||||
'@floating-ui/dom': 1.7.4
|
||||
livekit-client: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
loglevel: 1.9.1
|
||||
rxjs: 7.8.2
|
||||
tslib: 2.8.1
|
||||
|
||||
'@livekit/components-react@2.9.20(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tslib@2.8.1)':
|
||||
dependencies:
|
||||
'@livekit/components-core': 0.12.13(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(tslib@2.8.1)
|
||||
clsx: 2.1.1
|
||||
events: 3.3.0
|
||||
jose: 6.2.2
|
||||
livekit-client: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
react: 19.2.4
|
||||
react-dom: 19.2.4(react@19.2.4)
|
||||
tslib: 2.8.1
|
||||
usehooks-ts: 3.1.1(react@19.2.4)
|
||||
|
||||
'@livekit/components-styles@1.2.0': {}
|
||||
|
||||
'@livekit/mutex@1.1.1': {}
|
||||
|
||||
'@livekit/protocol@1.44.0':
|
||||
dependencies:
|
||||
'@bufbuild/protobuf': 1.10.1
|
||||
|
||||
'@lukeed/csprng@1.1.0': {}
|
||||
|
||||
'@lukeed/uuid@2.0.1':
|
||||
@@ -7259,6 +7373,8 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/ms': 2.1.0
|
||||
|
||||
'@types/dom-mediacapture-record@1.0.22': {}
|
||||
|
||||
'@types/eslint-scope@3.7.7':
|
||||
dependencies:
|
||||
'@types/eslint': 9.6.1
|
||||
@@ -9986,6 +10102,8 @@ snapshots:
|
||||
|
||||
jose@4.15.9: {}
|
||||
|
||||
jose@6.2.2: {}
|
||||
|
||||
js-levenshtein@1.1.6: {}
|
||||
|
||||
js-tokens@4.0.0: {}
|
||||
@@ -10101,6 +10219,19 @@ snapshots:
|
||||
|
||||
lines-and-columns@1.2.4: {}
|
||||
|
||||
livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22):
|
||||
dependencies:
|
||||
'@livekit/mutex': 1.1.1
|
||||
'@livekit/protocol': 1.44.0
|
||||
'@types/dom-mediacapture-record': 1.0.22
|
||||
events: 3.3.0
|
||||
jose: 6.2.2
|
||||
loglevel: 1.9.2
|
||||
sdp-transform: 2.15.0
|
||||
tslib: 2.8.1
|
||||
typed-emitter: 2.1.0
|
||||
webrtc-adapter: 9.0.4
|
||||
|
||||
loader-runner@4.3.1: {}
|
||||
|
||||
locate-path@5.0.0:
|
||||
@@ -10111,12 +10242,18 @@ snapshots:
|
||||
dependencies:
|
||||
p-locate: 5.0.0
|
||||
|
||||
lodash.debounce@4.0.8: {}
|
||||
|
||||
lodash.defaults@4.2.0: {}
|
||||
|
||||
lodash.isarguments@3.1.0: {}
|
||||
|
||||
lodash.memoize@4.1.2: {}
|
||||
|
||||
loglevel@1.9.1: {}
|
||||
|
||||
loglevel@1.9.2: {}
|
||||
|
||||
longest-streak@3.1.0: {}
|
||||
|
||||
loose-envify@1.4.0:
|
||||
@@ -11009,6 +11146,10 @@ snapshots:
|
||||
|
||||
runes@0.4.3: {}
|
||||
|
||||
rxjs@7.8.2:
|
||||
dependencies:
|
||||
tslib: 2.8.1
|
||||
|
||||
safe-array-concat@1.1.3:
|
||||
dependencies:
|
||||
call-bind: 1.0.8
|
||||
@@ -11462,6 +11603,10 @@ snapshots:
|
||||
possible-typed-array-names: 1.1.0
|
||||
reflect.getprototypeof: 1.0.10
|
||||
|
||||
typed-emitter@2.1.0:
|
||||
optionalDependencies:
|
||||
rxjs: 7.8.2
|
||||
|
||||
typescript-eslint@8.56.1(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3):
|
||||
dependencies:
|
||||
'@typescript-eslint/eslint-plugin': 8.56.1(@typescript-eslint/parser@8.56.1(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3))(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3)
|
||||
@@ -11585,6 +11730,11 @@ snapshots:
|
||||
optionalDependencies:
|
||||
'@types/react': 19.2.14
|
||||
|
||||
usehooks-ts@3.1.1(react@19.2.4):
|
||||
dependencies:
|
||||
lodash.debounce: 4.0.8
|
||||
react: 19.2.4
|
||||
|
||||
util-deprecate@1.0.2: {}
|
||||
|
||||
uuid-validate@0.0.3: {}
|
||||
|
||||
4
www/public/email-icon-dark.svg
Normal file
4
www/public/email-icon-dark.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="2" y="4" width="20" height="16" rx="2"/>
|
||||
<path d="m22 7-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 7"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 267 B |
Reference in New Issue
Block a user