mirror of
https://github.com/Monadical-SAS/reflector.git
synced 2026-04-23 13:45:18 +00:00
Compare commits
11 Commits
v0.42.0
...
dependabot
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2f14df4b44 | ||
|
|
7ed3b781ee | ||
|
|
26239f05a3 | ||
|
|
08c276e4f4 | ||
|
|
f4f94a0d99 | ||
|
|
df782107d7 | ||
|
|
bc8338fa4f | ||
|
|
b570d202dc | ||
|
|
8c4f5e9c0f | ||
|
|
ec8b49738e | ||
|
|
cc9c5cd4a5 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -3,6 +3,8 @@ server/.env
|
||||
server/.env.production
|
||||
.env
|
||||
Caddyfile
|
||||
livekit.yaml
|
||||
egress.yaml
|
||||
.env.hatchet
|
||||
server/exportdanswer
|
||||
.vercel
|
||||
|
||||
27
CHANGELOG.md
27
CHANGELOG.md
@@ -1,5 +1,32 @@
|
||||
# Changelog
|
||||
|
||||
## [0.44.0](https://github.com/GreyhavenHQ/reflector/compare/v0.43.0...v0.44.0) (2026-04-07)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* allow livekit to work with tunnels udp and tcp with quickguide ([#948](https://github.com/GreyhavenHQ/reflector/issues/948)) ([f4f94a0](https://github.com/GreyhavenHQ/reflector/commit/f4f94a0d9998030e5ef7f01935d99722045165ac))
|
||||
* Livekit - Selfhost video room solution ([#946](https://github.com/GreyhavenHQ/reflector/issues/946)) ([bc8338f](https://github.com/GreyhavenHQ/reflector/commit/bc8338fa4f136534f5f27784f5dd10d47cecf412))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* allow anonymous recording start and add light theme email icon ([#949](https://github.com/GreyhavenHQ/reflector/issues/949)) ([08c276e](https://github.com/GreyhavenHQ/reflector/commit/08c276e4f493c7960158a44e77062af1b30c3352))
|
||||
* deactivate meeting button and better deactivation heuristics ([#950](https://github.com/GreyhavenHQ/reflector/issues/950)) ([26239f0](https://github.com/GreyhavenHQ/reflector/commit/26239f05a34af07ebba764d669343c32e40e63bf))
|
||||
|
||||
## [0.43.0](https://github.com/GreyhavenHQ/reflector/compare/v0.42.0...v0.43.0) (2026-03-31)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* show trash for soft deleted transcripts and hard delete option ([#942](https://github.com/GreyhavenHQ/reflector/issues/942)) ([ec8b497](https://github.com/GreyhavenHQ/reflector/commit/ec8b49738e8e76f6e5d2496a42cb454ef6c2d7c7))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add parakeet as default transcriber and fix diarizer image ([#939](https://github.com/GreyhavenHQ/reflector/issues/939)) ([cc9c5cd](https://github.com/GreyhavenHQ/reflector/commit/cc9c5cd4a5f4123ef957ad82461ca37a727d1ba6))
|
||||
* cpu usage + email improvements ([#944](https://github.com/GreyhavenHQ/reflector/issues/944)) ([8c4f5e9](https://github.com/GreyhavenHQ/reflector/commit/8c4f5e9c0f893f4cb029595505b53136f04760f4))
|
||||
|
||||
## [0.42.0](https://github.com/GreyhavenHQ/reflector/compare/v0.41.0...v0.42.0) (2026-03-30)
|
||||
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ services:
|
||||
image: monadicalsas/reflector-backend:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:1250:1250"
|
||||
- "${BIND_HOST:-127.0.0.1}:1250:1250"
|
||||
- "40000-40100:40000-40100/udp"
|
||||
env_file:
|
||||
- ./server/.env
|
||||
@@ -116,7 +116,7 @@ services:
|
||||
image: monadicalsas/reflector-frontend:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:3000:3000"
|
||||
- "${BIND_HOST:-127.0.0.1}:3000:3000"
|
||||
env_file:
|
||||
- ./www/.env
|
||||
environment:
|
||||
@@ -339,7 +339,7 @@ services:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "127.0.0.1:8888:8888"
|
||||
- "0.0.0.0:8888:8888" # Hatchet dashboard (plain HTTP — no TLS)
|
||||
- "127.0.0.1:7078:7077"
|
||||
env_file:
|
||||
- ./.env.hatchet
|
||||
@@ -366,7 +366,7 @@ services:
|
||||
context: ./server
|
||||
dockerfile: Dockerfile
|
||||
image: monadicalsas/reflector-backend:latest
|
||||
profiles: [dailyco]
|
||||
profiles: [dailyco, livekit]
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- ./server/.env
|
||||
@@ -406,6 +406,40 @@ services:
|
||||
volumes:
|
||||
- server_data:/app/data
|
||||
|
||||
# ===========================================================
|
||||
# LiveKit — self-hosted open-source video platform
|
||||
# Activated via --profile livekit (auto-detected from LIVEKIT_API_KEY in server/.env)
|
||||
# ===========================================================
|
||||
|
||||
livekit-server:
|
||||
image: livekit/livekit-server:v1.10.1
|
||||
profiles: [livekit]
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "7880:7880" # Signaling (HTTP/WS)
|
||||
- "7881:7881" # WebRTC over TCP
|
||||
- "${LIVEKIT_UDP_PORTS:-44200-44300:44200-44300}/udp" # WebRTC ICE (range or single port for tunnels)
|
||||
volumes:
|
||||
- ./livekit.yaml:/etc/livekit.yaml:ro
|
||||
command: ["--config", "/etc/livekit.yaml"]
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_started
|
||||
|
||||
livekit-egress:
|
||||
image: livekit/egress:v1.12.0
|
||||
profiles: [livekit]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
EGRESS_CONFIG_FILE: /etc/egress.yaml
|
||||
volumes:
|
||||
- ./egress.yaml:/etc/egress.yaml:ro
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_started
|
||||
livekit-server:
|
||||
condition: service_started
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
redis_data:
|
||||
|
||||
@@ -95,6 +95,12 @@ DAILYCO_STORAGE_AWS_BUCKET_NAME=<your-bucket-from-daily-setup>
|
||||
DAILYCO_STORAGE_AWS_REGION=us-east-1
|
||||
DAILYCO_STORAGE_AWS_ROLE_ARN=<your-role-arn-from-daily-setup>
|
||||
|
||||
# Worker credentials for reading/deleting recordings from Daily's S3 bucket.
|
||||
# Required when transcript storage uses a different bucket or credentials
|
||||
# (e.g., selfhosted with Garage or a separate S3 account).
|
||||
DAILYCO_STORAGE_AWS_ACCESS_KEY_ID=<your-aws-access-key>
|
||||
DAILYCO_STORAGE_AWS_SECRET_ACCESS_KEY=<your-aws-secret-key>
|
||||
|
||||
# Transcript storage (should already be configured from main setup)
|
||||
# TRANSCRIPT_STORAGE_BACKEND=aws
|
||||
# TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=<your-key>
|
||||
@@ -103,6 +109,19 @@ DAILYCO_STORAGE_AWS_ROLE_ARN=<your-role-arn-from-daily-setup>
|
||||
# TRANSCRIPT_STORAGE_AWS_REGION=<your-bucket-region>
|
||||
```
|
||||
|
||||
:::info Two separate credential sets for Daily.co
|
||||
|
||||
- **`ROLE_ARN`** — Used by Daily's API to *write* recordings into your S3 bucket (configured via Daily dashboard).
|
||||
- **`ACCESS_KEY_ID` / `SECRET_ACCESS_KEY`** — Used by Reflector workers to *read* recordings for transcription and *delete* them on consent denial or permanent transcript deletion.
|
||||
|
||||
Required IAM permissions for the worker key on the Daily recordings bucket:
|
||||
- `s3:GetObject` — Download recording files for processing
|
||||
- `s3:DeleteObject` — Remove files on consent denial, trash destroy, or data retention cleanup
|
||||
- `s3:ListBucket` — Scan for recordings needing reprocessing
|
||||
|
||||
If the worker keys are not set, Reflector falls back to the transcript storage master key, which then needs cross-bucket access to the Daily bucket.
|
||||
:::
|
||||
|
||||
---
|
||||
|
||||
## Restart Services
|
||||
|
||||
97
docsv2/firewall-ports.md
Normal file
97
docsv2/firewall-ports.md
Normal file
@@ -0,0 +1,97 @@
|
||||
# Firewall & Port Requirements
|
||||
|
||||
Ports that need to be open on your server firewall, organized by deployment mode.
|
||||
|
||||
## With Caddy (--caddy or --ip or --domain)
|
||||
|
||||
Caddy acts as the reverse proxy. Most services are only accessible through Caddy on port 443.
|
||||
|
||||
| Port | Protocol | Direction | Service | Required? |
|
||||
|------|----------|-----------|---------|-----------|
|
||||
| 443 | TCP | Inbound | Caddy HTTPS — web app, API, LiveKit signaling (`/lk-ws`) | Yes |
|
||||
| 80 | TCP | Inbound | Caddy HTTP — redirects to HTTPS | Yes |
|
||||
| 44200-44300 | UDP | Inbound | LiveKit WebRTC media (audio/video) | Yes (if LiveKit enabled) |
|
||||
| 7881 | TCP | Inbound | LiveKit TCP media fallback (when UDP is blocked by client network) | Recommended |
|
||||
| 8888 | TCP | Inbound | Hatchet dashboard (plain HTTP, no TLS) | Optional (admin only) |
|
||||
|
||||
Ports that do NOT need to be open (proxied through Caddy):
|
||||
- 1250 (backend API)
|
||||
- 3000 (frontend)
|
||||
- 7880 (LiveKit signaling — proxied via `/lk-ws`)
|
||||
- 3900 (Garage S3)
|
||||
|
||||
## Without Caddy (direct access)
|
||||
|
||||
All services need direct port access. Use this only for local development or trusted networks.
|
||||
|
||||
| Port | Protocol | Direction | Service | Required? |
|
||||
|------|----------|-----------|---------|-----------|
|
||||
| 3000 | TCP | Inbound | Frontend (Next.js) | Yes |
|
||||
| 1250 | TCP | Inbound | Backend API (FastAPI) | Yes |
|
||||
| 7880 | TCP | Inbound | LiveKit signaling (WebSocket) | Yes (if LiveKit enabled) |
|
||||
| 7881 | TCP | Inbound | LiveKit TCP media fallback | Recommended |
|
||||
| 44200-44300 | UDP | Inbound | LiveKit WebRTC media | Yes (if LiveKit enabled) |
|
||||
| 40000-40100 | UDP | Inbound | Reflector WebRTC (browser recording) | Yes (if using browser WebRTC) |
|
||||
| 3900 | TCP | Inbound | Garage S3 (for presigned URLs in browser) | Yes (if using Garage) |
|
||||
| 8888 | TCP | Inbound | Hatchet dashboard | Optional |
|
||||
|
||||
> **Important:** Without Caddy, all traffic is plain HTTP. Browsers block microphone/camera access on non-HTTPS pages (except `localhost`). Use `--ip` (which implies Caddy) for any non-localhost deployment.
|
||||
|
||||
## Internal-Only Ports (never expose)
|
||||
|
||||
These ports are used between Docker containers and should NOT be open on the firewall:
|
||||
|
||||
| Port | Service | Purpose |
|
||||
|------|---------|---------|
|
||||
| 5432 | PostgreSQL | Database |
|
||||
| 6379 | Redis | Cache + message broker |
|
||||
| 7077 | Hatchet gRPC | Worker communication |
|
||||
|
||||
## Cloud Provider Firewall Examples
|
||||
|
||||
### DigitalOcean (with Caddy + LiveKit)
|
||||
|
||||
```bash
|
||||
# Create firewall
|
||||
doctl compute firewall create \
|
||||
--name reflector \
|
||||
--inbound-rules "protocol:tcp,ports:443,address:0.0.0.0/0 protocol:tcp,ports:80,address:0.0.0.0/0 protocol:udp,ports:44200-44300,address:0.0.0.0/0 protocol:tcp,ports:7881,address:0.0.0.0/0 protocol:tcp,ports:22,address:0.0.0.0/0" \
|
||||
--outbound-rules "protocol:tcp,ports:all,address:0.0.0.0/0 protocol:udp,ports:all,address:0.0.0.0/0" \
|
||||
--droplet-ids <DROPLET_ID>
|
||||
```
|
||||
|
||||
### AWS Security Group (with Caddy + LiveKit)
|
||||
|
||||
| Type | Port Range | Source | Description |
|
||||
|------|-----------|--------|-------------|
|
||||
| HTTPS | 443 | 0.0.0.0/0 | Web app + API + LiveKit signaling |
|
||||
| HTTP | 80 | 0.0.0.0/0 | Redirect to HTTPS |
|
||||
| Custom UDP | 44200-44300 | 0.0.0.0/0 | LiveKit WebRTC media |
|
||||
| Custom TCP | 7881 | 0.0.0.0/0 | LiveKit TCP fallback |
|
||||
| SSH | 22 | Your IP | Admin access |
|
||||
|
||||
### Ubuntu UFW (with Caddy + LiveKit)
|
||||
|
||||
```bash
|
||||
sudo ufw allow 443/tcp # Caddy HTTPS
|
||||
sudo ufw allow 80/tcp # HTTP redirect
|
||||
sudo ufw allow 7881/tcp # LiveKit TCP fallback
|
||||
sudo ufw allow 44200:44300/udp # LiveKit WebRTC media
|
||||
sudo ufw allow 22/tcp # SSH
|
||||
sudo ufw enable
|
||||
```
|
||||
|
||||
## Port Ranges Explained
|
||||
|
||||
### Why 44200-44300 for LiveKit?
|
||||
|
||||
LiveKit's WebRTC ICE candidates use UDP. The port range was chosen to avoid collisions:
|
||||
- **40000-40100** — Reflector's own WebRTC (browser recording)
|
||||
- **44200-44300** — LiveKit WebRTC
|
||||
- **49152-65535** — macOS ephemeral ports (reserved by OS)
|
||||
|
||||
The range is configurable in `livekit.yaml` under `rtc.port_range_start` / `rtc.port_range_end`. If changed, update `docker-compose.selfhosted.yml` port mapping to match.
|
||||
|
||||
### Why 101 ports?
|
||||
|
||||
101 UDP ports (44200-44300 inclusive) support ~100 concurrent WebRTC connections (roughly 50 participants with audio + video). For larger deployments, increase the range in both `livekit.yaml` and `docker-compose.selfhosted.yml`.
|
||||
297
docsv2/livekit-setup.md
Normal file
297
docsv2/livekit-setup.md
Normal file
@@ -0,0 +1,297 @@
|
||||
# LiveKit Setup (Self-Hosted Video Platform)
|
||||
|
||||
LiveKit is the recommended open-source, self-hosted video platform for Reflector. It replaces Daily.co for deployments that need free, fully self-hosted video rooms with per-participant audio recording.
|
||||
|
||||
> LiveKit runs alongside Daily.co and Whereby — you choose the platform per room. Existing Daily/Whereby setups are not affected.
|
||||
|
||||
## What LiveKit Provides
|
||||
|
||||
- **Video/audio rooms** — WebRTC-based conferencing via `livekit-server` (Go SFU)
|
||||
- **Per-participant audio recording** — Track Egress writes each participant's audio to S3 as a separate OGG/Opus file (no composite video, no Chrome dependency)
|
||||
- **S3-compatible storage** — works with Garage, MinIO, AWS S3, or any S3-compatible provider via `force_path_style`
|
||||
- **Webhook events** — participant join/leave, egress start/end, room lifecycle
|
||||
- **JWT access tokens** — per-participant tokens with granular permissions
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────┐
|
||||
Participants ────>│ livekit-server │ :7880 (WS signaling)
|
||||
(browser) │ (Go SFU) │ :7881 (TCP RTC)
|
||||
│ │ :44200-44300/udp (ICE)
|
||||
└────────┬────────┘
|
||||
│ media forwarding
|
||||
┌────────┴────────┐
|
||||
│ livekit-egress │ Track Egress
|
||||
│ (per-track OGG) │ writes to S3
|
||||
└────────┬────────┘
|
||||
│
|
||||
┌────────┴────────┐
|
||||
│ S3 Storage │ Garage / MinIO / AWS
|
||||
│ (audio tracks) │
|
||||
└─────────────────┘
|
||||
```
|
||||
|
||||
Both services share Redis with the existing Reflector stack (same instance, same db).
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Option 1: Via Setup Script (Recommended)
|
||||
|
||||
Pass `--livekit` to the setup script. It generates all credentials and config automatically:
|
||||
|
||||
```bash
|
||||
# First run — --livekit generates credentials and config files
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
|
||||
# Re-runs — LiveKit is auto-detected from existing LIVEKIT_API_KEY in server/.env
|
||||
./scripts/setup-selfhosted.sh
|
||||
```
|
||||
|
||||
The `--livekit` flag will:
|
||||
1. Generate `LIVEKIT_API_KEY` and `LIVEKIT_API_SECRET` (random credentials)
|
||||
2. Set `LIVEKIT_URL`, `LIVEKIT_PUBLIC_URL`, and storage credentials in `server/.env`
|
||||
3. Generate `livekit.yaml` and `egress.yaml` config files
|
||||
4. Set `DEFAULT_VIDEO_PLATFORM=livekit`
|
||||
5. Enable the `livekit` Docker Compose profile
|
||||
6. Start `livekit-server` and `livekit-egress` containers
|
||||
|
||||
On subsequent re-runs (without flags), the script detects the existing `LIVEKIT_API_KEY` in `server/.env` and re-enables the profile automatically.
|
||||
|
||||
### Option 2: Manual Setup
|
||||
|
||||
If you prefer manual configuration:
|
||||
|
||||
1. **Generate credentials:**
|
||||
|
||||
```bash
|
||||
export LK_KEY="reflector_$(openssl rand -hex 8)"
|
||||
export LK_SECRET="$(openssl rand -hex 32)"
|
||||
```
|
||||
|
||||
2. **Add to `server/.env`:**
|
||||
|
||||
```env
|
||||
# LiveKit connection
|
||||
LIVEKIT_URL=ws://livekit-server:7880
|
||||
LIVEKIT_API_KEY=$LK_KEY
|
||||
LIVEKIT_API_SECRET=$LK_SECRET
|
||||
LIVEKIT_PUBLIC_URL=wss://your-domain:7880 # or ws://your-ip:7880
|
||||
|
||||
# LiveKit egress S3 storage (reuse transcript storage or configure separately)
|
||||
LIVEKIT_STORAGE_AWS_BUCKET_NAME=reflector-bucket
|
||||
LIVEKIT_STORAGE_AWS_REGION=us-east-1
|
||||
LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID=your-key
|
||||
LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY=your-secret
|
||||
LIVEKIT_STORAGE_AWS_ENDPOINT_URL=http://garage:3900 # for Garage/MinIO
|
||||
|
||||
# Set LiveKit as default platform for new rooms
|
||||
DEFAULT_VIDEO_PLATFORM=livekit
|
||||
```
|
||||
|
||||
3. **Create `livekit.yaml`:**
|
||||
|
||||
```yaml
|
||||
port: 7880
|
||||
rtc:
|
||||
tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300
|
||||
redis:
|
||||
address: redis:6379
|
||||
keys:
|
||||
your_api_key: your_api_secret
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: your_api_key
|
||||
logging:
|
||||
level: info
|
||||
room:
|
||||
empty_timeout: 300
|
||||
max_participants: 0
|
||||
```
|
||||
|
||||
4. **Create `egress.yaml`:**
|
||||
|
||||
```yaml
|
||||
api_key: your_api_key
|
||||
api_secret: your_api_secret
|
||||
ws_url: ws://livekit-server:7880
|
||||
health_port: 7082
|
||||
log_level: info
|
||||
session_limits:
|
||||
file_output_max_duration: 4h
|
||||
```
|
||||
|
||||
5. **Start with the livekit profile:**
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.selfhosted.yml --profile livekit up -d livekit-server livekit-egress
|
||||
```
|
||||
|
||||
## Environment Variables Reference
|
||||
|
||||
### Required
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_URL` | Internal WebSocket URL (server -> LiveKit) | `ws://livekit-server:7880` |
|
||||
| `LIVEKIT_API_KEY` | API key for authentication | `reflector_a1b2c3d4e5f6` |
|
||||
| `LIVEKIT_API_SECRET` | API secret for token signing and webhooks | `64-char hex string` |
|
||||
|
||||
### Recommended
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_PUBLIC_URL` | Public WebSocket URL (browser -> LiveKit). **Must be reachable from participants' browsers**, not a Docker-internal address. Without `--domain`, set to `ws://<server-ip>:7880`. With `--domain`, set to `wss://<domain>:7880`. | `wss://reflector.example.com:7880` |
|
||||
| `LIVEKIT_WEBHOOK_SECRET` | Webhook verification secret. Defaults to `LIVEKIT_API_SECRET` if not set. Only needed if you want a separate secret for webhooks. | (same as API secret) |
|
||||
| `DEFAULT_VIDEO_PLATFORM` | Default platform for new rooms | `livekit` |
|
||||
|
||||
### Storage (for Track Egress)
|
||||
|
||||
Track Egress writes per-participant audio files to S3. If not configured, falls back to the transcript storage credentials.
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `LIVEKIT_STORAGE_AWS_BUCKET_NAME` | S3 bucket for egress output | `reflector-bucket` |
|
||||
| `LIVEKIT_STORAGE_AWS_REGION` | S3 region | `us-east-1` |
|
||||
| `LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID` | S3 access key | `GK...` |
|
||||
| `LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY` | S3 secret key | `...` |
|
||||
| `LIVEKIT_STORAGE_AWS_ENDPOINT_URL` | S3 endpoint (for Garage/MinIO) | `http://garage:3900` |
|
||||
|
||||
## Docker Compose Services
|
||||
|
||||
Two services are added under the `livekit` profile in `docker-compose.selfhosted.yml`:
|
||||
|
||||
### livekit-server
|
||||
|
||||
| Setting | Value |
|
||||
|---------|-------|
|
||||
| Image | `livekit/livekit-server:v1.10.1` |
|
||||
| Ports | 7880 (signaling), 7881 (TCP RTC), 44200-44300/udp (ICE) |
|
||||
| Config | `./livekit.yaml` mounted at `/etc/livekit.yaml` |
|
||||
| Depends on | Redis |
|
||||
|
||||
### livekit-egress
|
||||
|
||||
| Setting | Value |
|
||||
|---------|-------|
|
||||
| Image | `livekit/egress:v1.12.0` |
|
||||
| Config | `./egress.yaml` mounted at `/etc/egress.yaml` |
|
||||
| Depends on | Redis, livekit-server |
|
||||
|
||||
No `--cap-add=SYS_ADMIN` is needed because Track Egress does not use Chrome (that's only for Room Composite video recording, which we don't use).
|
||||
|
||||
## Port Ranges
|
||||
|
||||
| Range | Protocol | Service | Notes |
|
||||
|-------|----------|---------|-------|
|
||||
| 7880 | TCP | LiveKit signaling | WebSocket connections from browsers (direct, no Caddy) |
|
||||
| 7881 | TCP | LiveKit RTC over TCP | Fallback when UDP is blocked |
|
||||
| 44200-44300 | UDP | LiveKit ICE | WebRTC media. Avoids collision with Reflector WebRTC (40000-40100) and macOS ephemeral ports (49152-65535) |
|
||||
|
||||
### TLS / Caddy Integration
|
||||
|
||||
When `--caddy` is enabled (HTTPS), the setup script automatically:
|
||||
|
||||
1. Adds a `/lk-ws` reverse proxy route to the Caddyfile that proxies `wss://domain/lk-ws` → `ws://livekit-server:7880`
|
||||
2. Sets `LIVEKIT_PUBLIC_URL` to `wss://<domain>/lk-ws` (or `wss://<ip>/lk-ws`)
|
||||
|
||||
This avoids mixed-content blocking (browsers reject `ws://` connections on `https://` pages). Caddy handles TLS termination; LiveKit server itself runs plain WebSocket internally.
|
||||
|
||||
Without `--caddy`, browsers connect directly to LiveKit on port 7880 via `ws://`.
|
||||
|
||||
### Security Note: on_demand TLS
|
||||
|
||||
When using `--ip` (Caddy with self-signed certs), the Caddyfile uses `tls internal { on_demand }`. This generates certificates dynamically for any hostname/IP on first TLS request.
|
||||
|
||||
**Risk:** An attacker can trigger certificate generation for arbitrary hostnames by sending TLS requests with spoofed SNI values, causing disk and CPU usage. This is a low-severity resource exhaustion risk, not a data theft risk.
|
||||
|
||||
**Mitigations:**
|
||||
- For LAN/development use: not a concern (not internet-exposed)
|
||||
- For cloud VMs: restrict port 443 access via firewall to trusted IPs
|
||||
- For production: use `--domain` with a real domain name instead of `--ip` — Caddy uses Let's Encrypt (no `on_demand` needed)
|
||||
|
||||
| Deployment | `LIVEKIT_PUBLIC_URL` | How it works |
|
||||
|---|---|---|
|
||||
| localhost, no Caddy | `ws://localhost:7880` | Direct connection |
|
||||
| LAN IP, no Caddy | `ws://192.168.1.x:7880` | Direct connection |
|
||||
| IP + Caddy | `wss://192.168.1.x/lk-ws` | Caddy terminates TLS, proxies to LiveKit |
|
||||
| Domain + Caddy | `wss://example.com/lk-ws` | Caddy terminates TLS, proxies to LiveKit |
|
||||
|
||||
## Webhook Endpoint
|
||||
|
||||
LiveKit sends webhook events to `POST /v1/livekit/webhook`. Events handled:
|
||||
|
||||
| Event | Action |
|
||||
|-------|--------|
|
||||
| `participant_joined` | Logs participant join, updates meeting state |
|
||||
| `participant_left` | Logs participant leave |
|
||||
| `egress_started` | Logs recording start |
|
||||
| `egress_ended` | Logs recording completion with output file info |
|
||||
| `room_started` / `room_finished` | Logs room lifecycle |
|
||||
|
||||
Webhooks are authenticated via JWT in the `Authorization` header, verified using the API secret.
|
||||
|
||||
## Frontend
|
||||
|
||||
The LiveKit room component uses `@livekit/components-react` with the prebuilt `<VideoConference>` UI. It includes:
|
||||
|
||||
- Recording consent dialog (same as Daily/Whereby)
|
||||
- Email transcript button (feature-gated)
|
||||
- Extensible overlay buttons for custom actions
|
||||
|
||||
When a user joins a LiveKit room, the backend generates a JWT access token and returns it in the `room_url` query parameter. The frontend parses this and passes it to the LiveKit React SDK.
|
||||
|
||||
## Separate Server Deployment
|
||||
|
||||
For larger deployments (15+ participants, multiple simultaneous rooms), LiveKit can run on a dedicated server:
|
||||
|
||||
1. Run `livekit-server` and `livekit-egress` on a separate machine
|
||||
2. Point `LIVEKIT_URL` to the remote LiveKit server (e.g., `ws://livekit-host:7880`)
|
||||
3. Set `LIVEKIT_PUBLIC_URL` to the public-facing URL (e.g., `wss://livekit.example.com`)
|
||||
4. Configure the remote LiveKit's `webhook.urls` to point back to the Reflector server
|
||||
5. Both need access to the same Redis (or configure LiveKit's own Redis)
|
||||
6. Both need access to the same S3 storage
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### LiveKit server not starting
|
||||
|
||||
```bash
|
||||
# Check logs
|
||||
docker compose -f docker-compose.selfhosted.yml logs livekit-server --tail 30
|
||||
|
||||
# Verify config
|
||||
cat livekit.yaml
|
||||
|
||||
# Common issues:
|
||||
# - Redis not reachable (check redis service is running)
|
||||
# - Port 7880 already in use
|
||||
# - Invalid API key format in livekit.yaml
|
||||
```
|
||||
|
||||
### Participants can't connect
|
||||
|
||||
```bash
|
||||
# Check that LIVEKIT_PUBLIC_URL is accessible from the browser
|
||||
# It must be the URL the browser can reach, not the Docker-internal URL
|
||||
|
||||
# Check firewall allows ports 7880, 7881, and 44200-44300/udp
|
||||
sudo ufw status # or iptables -L
|
||||
|
||||
# Verify the access token is being generated
|
||||
docker compose -f docker-compose.selfhosted.yml logs server | grep livekit
|
||||
```
|
||||
|
||||
### Track Egress not writing files
|
||||
|
||||
```bash
|
||||
# Check egress logs
|
||||
docker compose -f docker-compose.selfhosted.yml logs livekit-egress --tail 30
|
||||
|
||||
# Verify S3 credentials
|
||||
# Egress receives S3 config per-request from the server, so check server/.env:
|
||||
grep LIVEKIT_STORAGE server/.env
|
||||
```
|
||||
73
docsv2/migrate-daily-to-livekit.md
Normal file
73
docsv2/migrate-daily-to-livekit.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# Migrating from Daily.co to LiveKit
|
||||
|
||||
This guide covers running LiveKit alongside Daily.co or fully replacing it.
|
||||
|
||||
## Both Platforms Run Simultaneously
|
||||
|
||||
LiveKit and Daily.co coexist — the platform is selected **per room**. You don't need to migrate all rooms at once.
|
||||
|
||||
- Existing Daily rooms continue to work as-is
|
||||
- New rooms can use LiveKit
|
||||
- Each room's `platform` field determines which video service is used
|
||||
- Transcripts, topics, summaries work identically regardless of platform
|
||||
|
||||
## Step 1: Enable LiveKit
|
||||
|
||||
Add `--livekit` to your setup command:
|
||||
|
||||
```bash
|
||||
# If currently running:
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy
|
||||
|
||||
# Add --livekit:
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
```
|
||||
|
||||
This starts `livekit-server` + `livekit-egress` containers alongside your existing stack.
|
||||
|
||||
## Step 2: Set Default Platform
|
||||
|
||||
The setup script automatically sets `DEFAULT_VIDEO_PLATFORM=livekit` in `server/.env`. This means **new rooms** default to LiveKit. Existing rooms keep their current platform.
|
||||
|
||||
To keep Daily as the default for new rooms:
|
||||
```bash
|
||||
# In server/.env, change:
|
||||
DEFAULT_VIDEO_PLATFORM=daily
|
||||
```
|
||||
|
||||
## Step 3: Switch Individual Rooms
|
||||
|
||||
In the Rooms admin page, edit any room and change the **Platform** dropdown from "Daily" to "LiveKit". The next meeting in that room will use LiveKit.
|
||||
|
||||
Previously recorded Daily transcripts for that room are unaffected.
|
||||
|
||||
## Step 4: (Optional) Remove Daily.co
|
||||
|
||||
Once all rooms use LiveKit and you no longer need Daily.co:
|
||||
|
||||
1. Remove `DAILY_API_KEY` and related Daily settings from `server/.env`
|
||||
2. Re-run the setup script — it won't activate the `dailyco` profile
|
||||
3. Hatchet workers are shared between Daily and LiveKit, so they continue running
|
||||
|
||||
Daily-specific services that stop:
|
||||
- `hatchet-worker-cpu` with `dailyco` profile (but continues if `livekit` profile is active)
|
||||
- Daily webhook polling tasks (`poll_daily_recordings`, etc.)
|
||||
|
||||
## What Changes for Users
|
||||
|
||||
| Feature | Daily.co | LiveKit |
|
||||
|---------|---------|---------|
|
||||
| Video/audio quality | Daily.co SFU | LiveKit SFU (comparable) |
|
||||
| Pre-join screen | Daily's built-in iframe | LiveKit PreJoin component (name + device selection) |
|
||||
| Recording | Starts via REST API from frontend | Auto Track Egress (automatic, no user action) |
|
||||
| Multitrack audio | Per-participant WebM tracks | Per-participant OGG tracks |
|
||||
| Transcript quality | Same pipeline | Same pipeline |
|
||||
| Self-hosted | No (SaaS only) | Yes (fully self-hosted) |
|
||||
|
||||
## Database Changes
|
||||
|
||||
None required. The `platform` field on rooms and meetings already supports `"livekit"`. LiveKit recordings use recording IDs prefixed with `lk-` to distinguish them from Daily recordings.
|
||||
|
||||
## Rollback
|
||||
|
||||
To revert a room back to Daily, just change the Platform dropdown back to "Daily" in the Rooms admin page. No data migration needed.
|
||||
@@ -170,6 +170,8 @@ These start regardless of which flags you pass:
|
||||
| `ollama-cpu` | `ollama-cpu` | Local Ollama LLM on CPU |
|
||||
| `garage` | `garage` | Local S3-compatible object storage |
|
||||
| `caddy` | `caddy` | Reverse proxy with SSL |
|
||||
| `dailyco` | `hatchet-worker-cpu` | Hatchet workflow workers for Daily.co multitrack processing |
|
||||
| `livekit` | `livekit-server`, `livekit-egress` | Self-hosted video platform + per-participant audio recording |
|
||||
|
||||
### The "transcription" Alias
|
||||
|
||||
@@ -206,11 +208,17 @@ Both the `gpu` and `cpu` services define a Docker network alias of `transcriptio
|
||||
│ :8000 │ └─────────┘ └─────────┘
|
||||
└───────────┘
|
||||
│
|
||||
┌─────┴─────┐ ┌─────────┐
|
||||
│ ollama │ │ garage │
|
||||
│(optional) │ │(optional│
|
||||
│ :11435 │ │ S3) │
|
||||
└───────────┘ └─────────┘
|
||||
┌─────┴─────┐ ┌─────────┐ ┌──────────────┐
|
||||
│ ollama │ │ garage │ │livekit-server│
|
||||
│(optional) │ │(optional│ │ (optional) │
|
||||
│ :11435 │ │ S3) │ │ :7880 │
|
||||
└───────────┘ └─────────┘ └──────┬───────┘
|
||||
│
|
||||
┌──────┴───────┐
|
||||
│livekit-egress│
|
||||
│ (Track Egress│
|
||||
│ to S3) │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
### How Services Interact
|
||||
@@ -320,7 +328,9 @@ You can point your own reverse proxy (nginx, Traefik, etc.) at these ports.
|
||||
|
||||
### WebRTC and UDP
|
||||
|
||||
The server exposes UDP ports 50000-50100 for WebRTC ICE candidates. The `WEBRTC_HOST` variable tells the server which IP to advertise in ICE candidates — this must be the server's actual IP address (not a domain), because WebRTC uses UDP which doesn't go through the HTTP reverse proxy.
|
||||
The server exposes UDP ports 40000-40100 for Reflector's own WebRTC ICE candidates. When LiveKit is enabled, it additionally uses ports 44200-44300/udp for its WebRTC ICE candidates. The `WEBRTC_HOST` variable tells the server which IP to advertise in ICE candidates — this must be the server's actual IP address (not a domain), because WebRTC uses UDP which doesn't go through the HTTP reverse proxy.
|
||||
|
||||
Port ranges are chosen to avoid collision with macOS ephemeral ports (49152-65535).
|
||||
|
||||
---
|
||||
|
||||
@@ -426,7 +436,10 @@ All services communicate over Docker's default bridge network. Only specific por
|
||||
| 3903 | Garage | `0.0.0.0:3903` | Garage admin API |
|
||||
| 8000 | GPU/CPU | `127.0.0.1:8000` | ML model API (localhost only) |
|
||||
| 11435 | Ollama | `127.0.0.1:11435` | Ollama API (localhost only) |
|
||||
| 50000-50100/udp | Server | `0.0.0.0:50000-50100` | WebRTC ICE candidates |
|
||||
| 40000-40100/udp | Server | `0.0.0.0:40000-40100` | Reflector WebRTC ICE candidates |
|
||||
| 7880 | LiveKit | `0.0.0.0:7880` | LiveKit signaling (WS) |
|
||||
| 7881 | LiveKit | `0.0.0.0:7881` | LiveKit RTC over TCP |
|
||||
| 44200-44300/udp | LiveKit | `0.0.0.0:44200-44300` | LiveKit WebRTC ICE candidates |
|
||||
|
||||
Services bound to `127.0.0.1` are only accessible from the host itself (not from the network). Caddy is the only service exposed to the internet on standard HTTP/HTTPS ports.
|
||||
|
||||
@@ -443,6 +456,8 @@ Inside the Docker network, services reach each other by their compose service na
|
||||
| `transcription` | GPU or CPU container (network alias) |
|
||||
| `ollama` / `ollama-cpu` | Ollama container |
|
||||
| `garage` | Garage S3 container |
|
||||
| `livekit-server` | LiveKit SFU server |
|
||||
| `livekit-egress` | LiveKit Track Egress service |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -144,6 +144,7 @@ Browse all available models at https://ollama.com/library.
|
||||
|
||||
| Flag | What it does |
|
||||
|------|-------------|
|
||||
| `--livekit` | Enables LiveKit self-hosted video platform. Generates API credentials, starts `livekit-server` + `livekit-egress`. See [LiveKit Setup](livekit-setup.md). |
|
||||
| `--garage` | Starts Garage (local S3-compatible storage). Auto-configures bucket, keys, and env vars. |
|
||||
| `--caddy` | Starts Caddy reverse proxy on ports 80/443 with self-signed cert. |
|
||||
| `--domain DOMAIN` | Use a real domain with Let's Encrypt auto-HTTPS (implies `--caddy`). Requires DNS A record pointing to this server and ports 80/443 open. |
|
||||
@@ -154,6 +155,20 @@ Without `--garage`, you **must** provide S3-compatible credentials (the script w
|
||||
|
||||
Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse proxy at `web:3000` (frontend) and `server:1250` (API).
|
||||
|
||||
## Video Platform (LiveKit)
|
||||
|
||||
For self-hosted video rooms with per-participant audio recording, add `--livekit` to your setup command:
|
||||
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
```
|
||||
|
||||
This generates LiveKit API credentials, creates config files (`livekit.yaml`, `egress.yaml`), and starts `livekit-server` (WebRTC SFU) + `livekit-egress` (per-participant audio recording to S3). LiveKit reuses the same Redis and S3 storage as the rest of the stack.
|
||||
|
||||
New rooms default to LiveKit when `DEFAULT_VIDEO_PLATFORM=livekit` is set (done automatically by the setup script). Existing Daily.co and Whereby rooms continue to work. On re-runs, the script detects the existing `LIVEKIT_API_KEY` in `server/.env` automatically.
|
||||
|
||||
> For detailed configuration, environment variables, ports, and troubleshooting, see [LiveKit Setup](livekit-setup.md).
|
||||
|
||||
**Using a domain (recommended for production):** Point a DNS A record at your server's IP, then pass `--domain your.domain.com`. Caddy will automatically obtain and renew a Let's Encrypt certificate. Ports 80 and 443 must be open.
|
||||
|
||||
**Without a domain:** `--caddy` alone uses a self-signed certificate. Browsers will show a security warning that must be accepted.
|
||||
@@ -305,6 +320,48 @@ TRANSCRIPT_STORAGE_AWS_REGION=us-east-1
|
||||
TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL=http://minio:9000
|
||||
```
|
||||
|
||||
### S3 IAM Permissions Reference
|
||||
|
||||
Reflector uses up to 3 separate S3 credential sets, each scoped to a specific bucket. When using AWS IAM in production, each key should have only the permissions it needs.
|
||||
|
||||
**Transcript storage key** (`TRANSCRIPT_STORAGE_AWS_*`) — the main bucket for processed files:
|
||||
|
||||
```json
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": ["s3:PutObject", "s3:GetObject", "s3:DeleteObject", "s3:ListBucket"],
|
||||
"Resource": ["arn:aws:s3:::reflector-media/*", "arn:aws:s3:::reflector-media"]
|
||||
}
|
||||
```
|
||||
|
||||
Used for: processed MP3 audio, waveform JSON, temporary pipeline files. Deletions happen during trash "Destroy", consent-denied cleanup, and public mode data retention.
|
||||
|
||||
**Daily.co worker key** (`DAILYCO_STORAGE_AWS_ACCESS_KEY_ID/SECRET_ACCESS_KEY`) — for reading and cleaning up Daily recordings:
|
||||
|
||||
```json
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": ["s3:GetObject", "s3:DeleteObject", "s3:ListBucket"],
|
||||
"Resource": ["arn:aws:s3:::your-daily-bucket/*", "arn:aws:s3:::your-daily-bucket"]
|
||||
}
|
||||
```
|
||||
|
||||
Used for: downloading multitrack recording files for processing, deleting track files and composed video on consent denial or trash destroy. No `s3:PutObject` needed — Daily's own API writes via the Role ARN.
|
||||
|
||||
**Whereby worker key** (`WHEREBY_STORAGE_AWS_ACCESS_KEY_ID/SECRET_ACCESS_KEY`) — same pattern as Daily:
|
||||
|
||||
```json
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": ["s3:GetObject", "s3:DeleteObject", "s3:ListBucket"],
|
||||
"Resource": ["arn:aws:s3:::your-whereby-bucket/*", "arn:aws:s3:::your-whereby-bucket"]
|
||||
}
|
||||
```
|
||||
|
||||
> **Fallback behavior:** If platform-specific worker keys are not set, Reflector falls back to the transcript storage master key with a bucket override. This means the master key would need cross-bucket access to the Daily/Whereby buckets. For least-privilege, configure platform-specific keys so each only accesses its own bucket.
|
||||
|
||||
> **Garage / single-bucket setups:** When using Garage or a single S3 bucket for everything, one master key with full permissions on that bucket is sufficient. The IAM scoping above only matters when using separate buckets per platform (typical in AWS production).
|
||||
|
||||
## What Authentication Enables
|
||||
|
||||
By default, Reflector runs in **public mode** (`AUTH_BACKEND=none`, `PUBLIC_MODE=true`) — anyone can create and view transcripts without logging in. Transcripts are anonymous (not linked to any user) and cannot be edited or deleted after creation.
|
||||
|
||||
144
docsv2/tunnel-setup.md
Normal file
144
docsv2/tunnel-setup.md
Normal file
@@ -0,0 +1,144 @@
|
||||
# Tunnel Setup (Self-Hosting Behind NAT)
|
||||
|
||||
Expose your self-hosted Reflector + LiveKit stack to the internet without port forwarding, static IPs, or cloud VMs using tunneling services.
|
||||
|
||||
## Requirements
|
||||
|
||||
You need **two tunnels**:
|
||||
|
||||
| Tunnel | Protocol | What it carries | Local port | Examples |
|
||||
|--------|----------|----------------|------------|----------|
|
||||
| **TCP tunnel** | TCP | Web app, API, LiveKit signaling (WebSocket) | 443 (Caddy) | playit.gg, ngrok, Cloudflare Tunnel, bore, frp |
|
||||
| **UDP tunnel** | UDP | WebRTC audio/video media | Assigned by tunnel service | playit.gg, frp |
|
||||
|
||||
> **Important:** Most tunneling services only support TCP. WebRTC media requires UDP. Make sure your chosen service supports UDP tunnels. As of writing, [playit.gg](https://playit.gg) is one of the few that supports both TCP and UDP (premium $3/mo).
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
Internet participants
|
||||
│
|
||||
├── TCP tunnel (HTTPS)
|
||||
│ └── tunnel service → your machine port 443 (Caddy)
|
||||
│ ├── /v1/* → server:1250 (API)
|
||||
│ ├── /lk-ws/* → livekit-server:7880 (signaling)
|
||||
│ └── /* → web:3000 (frontend)
|
||||
│
|
||||
└── UDP tunnel
|
||||
└── tunnel service → your machine port N (LiveKit ICE)
|
||||
```
|
||||
|
||||
## Setup
|
||||
|
||||
### Step 1: Create tunnels with your chosen service
|
||||
|
||||
Create two tunnels and note the public addresses:
|
||||
|
||||
- **TCP tunnel**: Points to your local port `443`
|
||||
- You'll get an address like `your-tunnel.example.com:PORT`
|
||||
- **UDP tunnel**: Points to a local port (e.g., `14139`)
|
||||
- You'll get an address like `udp-host.example.com:PORT`
|
||||
- **The local port must match the public port** (or LiveKit ICE candidates won't match). Set the local port to the same number as the public port assigned by the tunnel service.
|
||||
|
||||
### Step 2: Run the setup script
|
||||
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh <mode> --livekit --garage \
|
||||
--tunnels <TCP_ADDRESS>,<UDP_ADDRESS>
|
||||
```
|
||||
|
||||
Example:
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --cpu --livekit --garage \
|
||||
--tunnels my-tunnel.example.com:9055,udp-host.example.com:14139
|
||||
```
|
||||
|
||||
Or use separate flags:
|
||||
```bash
|
||||
./scripts/setup-selfhosted.sh --cpu --livekit --garage \
|
||||
--tunnel-tcp my-tunnel.example.com:9055 \
|
||||
--tunnel-udp udp-host.example.com:14139
|
||||
```
|
||||
|
||||
The script automatically:
|
||||
- Sets all URLs (API, frontend, LiveKit signaling) to the TCP tunnel address
|
||||
- Configures LiveKit with the UDP tunnel port and resolved IP for ICE candidates
|
||||
- Enables Caddy with self-signed TLS (catch-all on port 443)
|
||||
- Saves tunnel config for re-runs
|
||||
|
||||
### Step 3: Start the tunnel agent
|
||||
|
||||
Run your tunneling service's agent/client on the same machine. It must be running whenever you want external access.
|
||||
|
||||
### Step 4: Access
|
||||
|
||||
Share `https://<TCP_TUNNEL_ADDRESS>` with participants. They'll need to accept the self-signed certificate warning in their browser.
|
||||
|
||||
## Flag Reference
|
||||
|
||||
| Flag | Description |
|
||||
|------|-------------|
|
||||
| `--tunnels TCP,UDP` | Both tunnel addresses comma-separated (e.g., `host:9055,host:14139`) |
|
||||
| `--tunnel-tcp ADDR` | TCP tunnel address only (e.g., `host.example.com:9055`) |
|
||||
| `--tunnel-udp ADDR` | UDP tunnel address only (e.g., `host.example.com:14139`) |
|
||||
|
||||
Tunnel flags:
|
||||
- Imply `--caddy` (HTTPS required for browser mic/camera access)
|
||||
- Are mutually exclusive with `--ip` and `--domain`
|
||||
- Are saved to config memory (re-run without flags replays saved config)
|
||||
|
||||
## UDP Port Matching
|
||||
|
||||
LiveKit advertises ICE candidates with a specific IP and port. The browser connects to that exact address. If the tunnel's public port differs from the local port, ICE will fail.
|
||||
|
||||
**Correct setup:** Set the tunnel's local port to match its public port.
|
||||
|
||||
```
|
||||
Tunnel assigns public port 14139
|
||||
→ Set local port to 14139
|
||||
→ LiveKit listens on 14139 (udp_port in livekit.yaml)
|
||||
→ Docker maps 14139:14139/udp
|
||||
→ ICE candidates advertise tunnel_ip:14139
|
||||
→ Browser connects to tunnel_ip:14139 → tunnel → local:14139 → LiveKit
|
||||
```
|
||||
|
||||
If your tunneling service doesn't let you choose the local port, you'll need to update `livekit.yaml` manually with the assigned ports.
|
||||
|
||||
## TLS Certificate Warning
|
||||
|
||||
With tunnel services on non-standard ports (e.g., `:9055`), Let's Encrypt can't auto-provision certificates (it requires ports 80/443). Caddy uses `tls internal` which generates a self-signed certificate. Participants will see a browser warning they must accept.
|
||||
|
||||
**To avoid the warning:**
|
||||
- Use a tunnel service that provides port 443 for TCP
|
||||
- Or use a real domain with `--domain` on a server with a public IP
|
||||
|
||||
## Compatible Tunnel Services
|
||||
|
||||
| Service | TCP | UDP | Free tier | Notes |
|
||||
|---------|-----|-----|-----------|-------|
|
||||
| [playit.gg](https://playit.gg) | Yes (premium) | Yes (premium) | Limited | $3/mo premium. Supports both TCP + UDP. |
|
||||
| [ngrok](https://ngrok.com) | Yes | No | Limited | TCP only — needs a separate UDP tunnel for media |
|
||||
| [Cloudflare Tunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/) | Yes | No | Yes | TCP only — needs a separate UDP tunnel for media |
|
||||
| [bore](https://github.com/ekzhang/bore) | Yes | No | Self-hosted | TCP only |
|
||||
| [frp](https://github.com/fatedier/frp) | Yes | Yes | Self-hosted | Requires your own VPS to run the frp server |
|
||||
| [Tailscale Funnel](https://tailscale.com/kb/1223/funnel) | Yes | No | Free (3 nodes) | TCP only, requires Tailscale account |
|
||||
|
||||
For a full self-contained setup without a VPS, playit.gg (TCP + UDP) is currently the simplest option.
|
||||
|
||||
## Limitations
|
||||
|
||||
- **Latency**: Adds a hop through the tunnel service's relay servers
|
||||
- **Bandwidth**: Tunnel services may have bandwidth limits on free/cheap tiers
|
||||
- **Reliability**: Depends on the tunnel service's uptime
|
||||
- **Certificate warning**: Unavoidable with non-standard ports (see above)
|
||||
- **Single UDP port**: Tunnel mode uses a single UDP port instead of a range, which limits concurrent WebRTC connections (~50 participants max)
|
||||
- **Not production-grade**: Suitable for demos, small teams, development, and privacy-first setups. For production, use a server with a public IP.
|
||||
|
||||
## Comparison
|
||||
|
||||
| Approach | Cost | Setup | Data location | Port forwarding needed |
|
||||
|----------|------|-------|---------------|----------------------|
|
||||
| **Tunnel (this guide)** | $0-3/mo | Low | Your machine | No |
|
||||
| **Cloud VM** | $5-20/mo | Low | Cloud provider | No |
|
||||
| **Port forwarding** | $0 | Medium | Your machine | Yes (router config) |
|
||||
| **VPN mesh (Tailscale)** | $0 | Low | Your machine | No (VPN peers only) |
|
||||
26
egress.yaml.example
Normal file
26
egress.yaml.example
Normal file
@@ -0,0 +1,26 @@
|
||||
# LiveKit Egress configuration
|
||||
# Generated by setup-selfhosted.sh — do not edit manually.
|
||||
# See: https://docs.livekit.io/self-hosting/egress/
|
||||
|
||||
api_key: __LIVEKIT_API_KEY__
|
||||
api_secret: __LIVEKIT_API_SECRET__
|
||||
ws_url: ws://livekit-server:7880
|
||||
redis:
|
||||
address: redis:6379
|
||||
|
||||
# Health check
|
||||
health_port: 7082
|
||||
|
||||
# Logging
|
||||
log_level: info
|
||||
|
||||
# CPU cost limits (Track Egress only — no composite video)
|
||||
# Track Egress costs 1.0 CPU unit per track; hundreds can run on one instance.
|
||||
# Default max_cpu_utilization is 0.8 (80% of available cores).
|
||||
|
||||
# Session limits
|
||||
session_limits:
|
||||
file_output_max_duration: 4h # Max 4 hours per recording
|
||||
|
||||
# S3 storage is configured per-request via the API (not here).
|
||||
# The server passes S3 credentials when starting each Track Egress.
|
||||
@@ -114,8 +114,8 @@ modal secret create reflector-gpu REFLECTOR_GPU_APIKEY="$API_KEY"
|
||||
|
||||
# --- Deploy Functions ---
|
||||
echo ""
|
||||
echo "Deploying transcriber (Whisper)..."
|
||||
TRANSCRIBER_URL=$(modal deploy reflector_transcriber.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
|
||||
echo "Deploying transcriber (Parakeet)..."
|
||||
TRANSCRIBER_URL=$(modal deploy reflector_transcriber_parakeet.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
|
||||
if [ -z "$TRANSCRIBER_URL" ]; then
|
||||
echo "Error: Failed to deploy transcriber. Check Modal dashboard for details."
|
||||
exit 1
|
||||
|
||||
@@ -113,12 +113,14 @@ def download_pyannote_audio():
|
||||
|
||||
|
||||
diarizer_image = (
|
||||
modal.Image.debian_slim(python_version="3.10")
|
||||
modal.Image.from_registry(
|
||||
"nvidia/cuda:11.8.0-cudnn8-devel-ubuntu22.04", add_python="3.10"
|
||||
)
|
||||
.pip_install(
|
||||
"pyannote.audio==3.1.0",
|
||||
"requests",
|
||||
"onnx",
|
||||
"torchaudio",
|
||||
"torchaudio==2.0.1",
|
||||
"onnxruntime-gpu",
|
||||
"torch==2.0.0",
|
||||
"transformers==4.34.0",
|
||||
@@ -133,14 +135,6 @@ diarizer_image = (
|
||||
secrets=[modal.Secret.from_name("hf_token")],
|
||||
)
|
||||
.run_function(migrate_cache_llm)
|
||||
.env(
|
||||
{
|
||||
"LD_LIBRARY_PATH": (
|
||||
"/usr/local/lib/python3.10/site-packages/nvidia/cudnn/lib/:"
|
||||
"/opt/conda/lib/python3.10/site-packages/nvidia/cublas/lib/"
|
||||
)
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
|
||||
246
gpu/self_hosted/uv.lock
generated
246
gpu/self_hosted/uv.lock
generated
@@ -13,7 +13,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -24,76 +24,76 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/ac/892f4162df9b115b4758d615f32ec63d00f3084c705ff5526630887b9b42/aiohttp-3.13.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:63dd5e5b1e43b8fb1e91b79b7ceba1feba588b317d1edff385084fcc7a0a4538", size = 745744, upload-time = "2026-03-28T17:16:44.67Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/a9/c5b87e4443a2f0ea88cb3000c93a8fdad1ee63bffc9ded8d8c8e0d66efc6/aiohttp-3.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:746ac3cc00b5baea424dacddea3ec2c2702f9590de27d837aa67004db1eebc6e", size = 498178, upload-time = "2026-03-28T17:16:46.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/42/07e1b543a61250783650df13da8ddcdc0d0a5538b2bd15cef6e042aefc61/aiohttp-3.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bda8f16ea99d6a6705e5946732e48487a448be874e54a4f73d514660ff7c05d3", size = 498331, upload-time = "2026-03-28T17:16:48.9Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/d6/492f46bf0328534124772d0cf58570acae5b286ea25006900650f69dae0e/aiohttp-3.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b061e7b5f840391e3f64d0ddf672973e45c4cfff7a0feea425ea24e51530fc2", size = 1744414, upload-time = "2026-03-28T17:16:50.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/4d/e02627b2683f68051246215d2d62b2d2f249ff7a285e7a858dc47d6b6a14/aiohttp-3.13.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b252e8d5cd66184b570d0d010de742736e8a4fab22c58299772b0c5a466d4b21", size = 1719226, upload-time = "2026-03-28T17:16:53.173Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/6c/5d0a3394dd2b9f9aeba6e1b6065d0439e4b75d41f1fb09a3ec010b43552b/aiohttp-3.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20af8aad61d1803ff11152a26146d8d81c266aa8c5aa9b4504432abb965c36a0", size = 1782110, upload-time = "2026-03-28T17:16:55.362Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/2d/c20791e3437700a7441a7edfb59731150322424f5aadf635602d1d326101/aiohttp-3.13.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:13a5cc924b59859ad2adb1478e31f410a7ed46e92a2a619d6d1dd1a63c1a855e", size = 1884809, upload-time = "2026-03-28T17:16:57.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/94/d99dbfbd1924a87ef643833932eb2a3d9e5eee87656efea7d78058539eff/aiohttp-3.13.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:534913dfb0a644d537aebb4123e7d466d94e3be5549205e6a31f72368980a81a", size = 1764938, upload-time = "2026-03-28T17:17:00.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/61/3ce326a1538781deb89f6cf5e094e2029cd308ed1e21b2ba2278b08426f6/aiohttp-3.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:320e40192a2dcc1cf4b5576936e9652981ab596bf81eb309535db7e2f5b5672f", size = 1570697, upload-time = "2026-03-28T17:17:02.985Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/77/4ab5a546857bb3028fbaf34d6eea180267bdab022ee8b1168b1fcde4bfdd/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9e587fcfce2bcf06526a43cb705bdee21ac089096f2e271d75de9c339db3100c", size = 1702258, upload-time = "2026-03-28T17:17:05.28Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/63/d8f29021e39bc5af8e5d5e9da1b07976fb9846487a784e11e4f4eeda4666/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9eb9c2eea7278206b5c6c1441fdd9dc420c278ead3f3b2cc87f9b693698cc500", size = 1740287, upload-time = "2026-03-28T17:17:07.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/3a/cbc6b3b124859a11bc8055d3682c26999b393531ef926754a3445b99dfef/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:29be00c51972b04bf9d5c8f2d7f7314f48f96070ca40a873a53056e652e805f7", size = 1753011, upload-time = "2026-03-28T17:17:10.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/30/836278675205d58c1368b21520eab9572457cf19afd23759216c04483048/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:90c06228a6c3a7c9f776fe4fc0b7ff647fffd3bed93779a6913c804ae00c1073", size = 1566359, upload-time = "2026-03-28T17:17:12.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/b4/8032cc9b82d17e4277704ba30509eaccb39329dc18d6a35f05e424439e32/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a533ec132f05fd9a1d959e7f34184cd7d5e8511584848dab85faefbaac573069", size = 1785537, upload-time = "2026-03-28T17:17:14.721Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/7d/5873e98230bde59f493bf1f7c3e327486a4b5653fa401144704df5d00211/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1c946f10f413836f82ea4cfb90200d2a59578c549f00857e03111cf45ad01ca5", size = 1740752, upload-time = "2026-03-28T17:17:17.387Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/f2/13e46e0df051494d7d3c68b7f72d071f48c384c12716fc294f75d5b1a064/aiohttp-3.13.4-cp313-cp313-win32.whl", hash = "sha256:48708e2706106da6967eff5908c78ca3943f005ed6bcb75da2a7e4da94ef8c70", size = 433187, upload-time = "2026-03-28T17:17:19.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/c0/649856ee655a843c8f8664592cfccb73ac80ede6a8c8db33a25d810c12db/aiohttp-3.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:74a2eb058da44fa3a877a49e2095b591d4913308bb424c418b77beb160c55ce3", size = 459778, upload-time = "2026-03-28T17:17:21.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/29/6657cc37ae04cacc2dbf53fb730a06b6091cc4cbe745028e047c53e6d840/aiohttp-3.13.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:e0a2c961fc92abeff61d6444f2ce6ad35bb982db9fc8ff8a47455beacf454a57", size = 749363, upload-time = "2026-03-28T17:17:24.044Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/7f/30ccdf67ca3d24b610067dc63d64dcb91e5d88e27667811640644aa4a85d/aiohttp-3.13.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:153274535985a0ff2bff1fb6c104ed547cec898a09213d21b0f791a44b14d933", size = 499317, upload-time = "2026-03-28T17:17:26.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/13/e372dd4e68ad04ee25dafb050c7f98b0d91ea643f7352757e87231102555/aiohttp-3.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:351f3171e2458da3d731ce83f9e6b9619e325c45cbd534c7759750cabf453ad7", size = 500477, upload-time = "2026-03-28T17:17:28.279Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/fe/ee6298e8e586096fb6f5eddd31393d8544f33ae0792c71ecbb4c2bef98ac/aiohttp-3.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f989ac8bc5595ff761a5ccd32bdb0768a117f36dd1504b1c2c074ed5d3f4df9c", size = 1737227, upload-time = "2026-03-28T17:17:30.587Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b9/a7a0463a09e1a3fe35100f74324f23644bfc3383ac5fd5effe0722a5f0b7/aiohttp-3.13.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d36fc1709110ec1e87a229b201dd3ddc32aa01e98e7868083a794609b081c349", size = 1694036, upload-time = "2026-03-28T17:17:33.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/7c/8972ae3fb7be00a91aee6b644b2a6a909aedb2c425269a3bfd90115e6f8f/aiohttp-3.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42adaeea83cbdf069ab94f5103ce0787c21fb1a0153270da76b59d5578302329", size = 1786814, upload-time = "2026-03-28T17:17:36.035Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/01/c81e97e85c774decbaf0d577de7d848934e8166a3a14ad9f8aa5be329d28/aiohttp-3.13.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:92deb95469928cc41fd4b42a95d8012fa6df93f6b1c0a83af0ffbc4a5e218cde", size = 1866676, upload-time = "2026-03-28T17:17:38.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/5f/5b46fe8694a639ddea2cd035bf5729e4677ea882cb251396637e2ef1590d/aiohttp-3.13.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0c7c07c4257ef3a1df355f840bc62d133bcdef5c1c5ba75add3c08553e2eed", size = 1740842, upload-time = "2026-03-28T17:17:40.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/a2/0d4b03d011cca6b6b0acba8433193c1e484efa8d705ea58295590fe24203/aiohttp-3.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f062c45de8a1098cb137a1898819796a2491aec4e637a06b03f149315dff4d8f", size = 1566508, upload-time = "2026-03-28T17:17:43.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/17/e689fd500da52488ec5f889effd6404dece6a59de301e380f3c64f167beb/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:76093107c531517001114f0ebdb4f46858ce818590363e3e99a4a2280334454a", size = 1700569, upload-time = "2026-03-28T17:17:46.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/0d/66402894dbcf470ef7db99449e436105ea862c24f7ea4c95c683e635af35/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6f6ec32162d293b82f8b63a16edc80769662fbd5ae6fbd4936d3206a2c2cc63b", size = 1707407, upload-time = "2026-03-28T17:17:48.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/eb/af0ab1a3650092cbd8e14ef29e4ab0209e1460e1c299996c3f8288b3f1ff/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5903e2db3d202a00ad9f0ec35a122c005e85d90c9836ab4cda628f01edf425e2", size = 1752214, upload-time = "2026-03-28T17:17:51.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/bf/72326f8a98e4c666f292f03c385545963cc65e358835d2a7375037a97b57/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2d5bea57be7aca98dbbac8da046d99b5557c5cf4e28538c4c786313078aca09e", size = 1562162, upload-time = "2026-03-28T17:17:53.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/9f/13b72435f99151dd9a5469c96b3b5f86aa29b7e785ca7f35cf5e538f74c0/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bcf0c9902085976edc0232b75006ef38f89686901249ce14226b6877f88464fb", size = 1768904, upload-time = "2026-03-28T17:17:55.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/bc/28d4970e7d5452ac7776cdb5431a1164a0d9cf8bd2fffd67b4fb463aa56d/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3295f98bfeed2e867cab588f2a146a9db37a85e3ae9062abf46ba062bd29165", size = 1723378, upload-time = "2026-03-28T17:17:58.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/74/b32458ca1a7f34d65bdee7aef2036adbe0438123d3d53e2b083c453c24dd/aiohttp-3.13.4-cp314-cp314-win32.whl", hash = "sha256:a598a5c5767e1369d8f5b08695cab1d8160040f796c4416af76fd773d229b3c9", size = 438711, upload-time = "2026-03-28T17:18:00.728Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/b2/54b487316c2df3e03a8f3435e9636f8a81a42a69d942164830d193beb56a/aiohttp-3.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:c555db4bc7a264bead5a7d63d92d41a1122fcd39cc62a4db815f45ad46f9c2c8", size = 464977, upload-time = "2026-03-28T17:18:03.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/fb/e41b63c6ce71b07a59243bb8f3b457ee0c3402a619acb9d2c0d21ef0e647/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45abbbf09a129825d13c18c7d3182fecd46d9da3cfc383756145394013604ac1", size = 781549, upload-time = "2026-03-28T17:18:05.779Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/53/532b8d28df1e17e44c4d9a9368b78dcb6bf0b51037522136eced13afa9e8/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:74c80b2bc2c2adb7b3d1941b2b60701ee2af8296fc8aad8b8bc48bc25767266c", size = 514383, upload-time = "2026-03-28T17:18:08.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/1f/62e5d400603e8468cd635812d99cb81cfdc08127a3dc474c647615f31339/aiohttp-3.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c97989ae40a9746650fa196894f317dafc12227c808c774929dda0ff873a5954", size = 518304, upload-time = "2026-03-28T17:18:10.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/57/2326b37b10896447e3c6e0cbef4fe2486d30913639a5cfd1332b5d870f82/aiohttp-3.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dae86be9811493f9990ef44fff1685f5c1a3192e9061a71a109d527944eed551", size = 1893433, upload-time = "2026-03-28T17:18:13.121Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/b4/a24d82112c304afdb650167ef2fe190957d81cbddac7460bedd245f765aa/aiohttp-3.13.4-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1db491abe852ca2fa6cc48a3341985b0174b3741838e1341b82ac82c8bd9e871", size = 1755901, upload-time = "2026-03-28T17:18:16.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/2d/0883ef9d878d7846287f036c162a951968f22aabeef3ac97b0bea6f76d5d/aiohttp-3.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e5d701c0aad02a7dce72eef6b93226cf3734330f1a31d69ebbf69f33b86666e", size = 1876093, upload-time = "2026-03-28T17:18:18.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/52/9204bb59c014869b71971addad6778f005daa72a96eed652c496789d7468/aiohttp-3.13.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8ac32a189081ae0a10ba18993f10f338ec94341f0d5df8fff348043962f3c6f8", size = 1970815, upload-time = "2026-03-28T17:18:21.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/b5/e4eb20275a866dde0f570f411b36c6b48f7b53edfe4f4071aa1b0728098a/aiohttp-3.13.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e968cdaba43e45c73c3f306fca418c8009a957733bac85937c9f9cf3f4de27", size = 1816223, upload-time = "2026-03-28T17:18:24.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/23/e98075c5bb146aa61a1239ee1ac7714c85e814838d6cebbe37d3fe19214a/aiohttp-3.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca114790c9144c335d538852612d3e43ea0f075288f4849cf4b05d6cd2238ce7", size = 1649145, upload-time = "2026-03-28T17:18:27.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/c1/7bad8be33bb06c2bb224b6468874346026092762cbec388c3bdb65a368ee/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ea2e071661ba9cfe11eabbc81ac5376eaeb3061f6e72ec4cc86d7cdd1ffbdbbb", size = 1816562, upload-time = "2026-03-28T17:18:29.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/10/c00323348695e9a5e316825969c88463dcc24c7e9d443244b8a2c9cf2eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:34e89912b6c20e0fd80e07fa401fd218a410aa1ce9f1c2f1dad6db1bd0ce0927", size = 1800333, upload-time = "2026-03-28T17:18:32.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/43/9b2147a1df3559f49bd723e22905b46a46c068a53adb54abdca32c4de180/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0e217cf9f6a42908c52b46e42c568bd57adc39c9286ced31aaace614b6087965", size = 1820617, upload-time = "2026-03-28T17:18:35.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/7f/b3481a81e7a586d02e99387b18c6dafff41285f6efd3daa2124c01f87eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0c296f1221e21ba979f5ac1964c3b78cfde15c5c5f855ffd2caab337e9cd9182", size = 1643417, upload-time = "2026-03-28T17:18:37.949Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/72/07181226bc99ce1124e0f89280f5221a82d3ae6a6d9d1973ce429d48e52b/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d99a9d168ebaffb74f36d011750e490085ac418f4db926cce3989c8fe6cb6b1b", size = 1849286, upload-time = "2026-03-28T17:18:40.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/e6/1b3566e103eca6da5be4ae6713e112a053725c584e96574caf117568ffef/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cb19177205d93b881f3f89e6081593676043a6828f59c78c17a0fd6c1fbed2ba", size = 1782635, upload-time = "2026-03-28T17:18:43.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/58/1b11c71904b8d079eb0c39fe664180dd1e14bebe5608e235d8bfbadc8929/aiohttp-3.13.4-cp314-cp314t-win32.whl", hash = "sha256:c606aa5656dab6552e52ca368e43869c916338346bfaf6304e15c58fb113ea30", size = 472537, upload-time = "2026-03-28T17:18:46.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/8f/87c56a1a1977d7dddea5b31e12189665a140fdb48a71e9038ff90bb564ec/aiohttp-3.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:014dcc10ec8ab8db681f0d68e939d1e9286a5aa2b993cbbdb0db130853e02144", size = 506381, upload-time = "2026-03-28T17:18:48.74Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -764,17 +764,34 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "hf-xet"
|
||||
version = "1.1.9"
|
||||
version = "1.4.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/23/0f/5b60fc28ee7f8cc17a5114a584fd6b86e11c3e0a6e142a7f97a161e9640a/hf_xet-1.1.9.tar.gz", hash = "sha256:c99073ce404462e909f1d5839b2d14a3827b8fe75ed8aed551ba6609c026c803", size = 484242, upload-time = "2025-08-27T23:05:19.441Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/53/92/ec9ad04d0b5728dca387a45af7bc98fbb0d73b2118759f5f6038b61a57e8/hf_xet-1.4.3.tar.gz", hash = "sha256:8ddedb73c8c08928c793df2f3401ec26f95be7f7e516a7bee2fbb546f6676113", size = 670477, upload-time = "2026-03-31T22:40:07.874Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/de/12/56e1abb9a44cdef59a411fe8a8673313195711b5ecce27880eb9c8fa90bd/hf_xet-1.1.9-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a3b6215f88638dd7a6ff82cb4e738dcbf3d863bf667997c093a3c990337d1160", size = 2762553, upload-time = "2025-08-27T23:05:15.153Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/e6/2d0d16890c5f21b862f5df3146519c182e7f0ae49b4b4bf2bd8a40d0b05e/hf_xet-1.1.9-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9b486de7a64a66f9a172f4b3e0dfe79c9f0a93257c501296a2521a13495a698a", size = 2623216, upload-time = "2025-08-27T23:05:13.778Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/42/7e6955cf0621e87491a1fb8cad755d5c2517803cea174229b0ec00ff0166/hf_xet-1.1.9-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c5a840c2c4e6ec875ed13703a60e3523bc7f48031dfd750923b2a4d1a5fc3c", size = 3186789, upload-time = "2025-08-27T23:05:12.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/8b/759233bce05457f5f7ec062d63bbfd2d0c740b816279eaaa54be92aa452a/hf_xet-1.1.9-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:96a6139c9e44dad1c52c52520db0fffe948f6bce487cfb9d69c125f254bb3790", size = 3088747, upload-time = "2025-08-27T23:05:10.439Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/3c/28cc4db153a7601a996985bcb564f7b8f5b9e1a706c7537aad4b4809f358/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ad1022e9a998e784c97b2173965d07fe33ee26e4594770b7785a8cc8f922cd95", size = 3251429, upload-time = "2025-08-27T23:05:16.471Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/17/7caf27a1d101bfcb05be85850d4aa0a265b2e1acc2d4d52a48026ef1d299/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:86754c2d6d5afb11b0a435e6e18911a4199262fe77553f8c50d75e21242193ea", size = 3354643, upload-time = "2025-08-27T23:05:17.828Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/50/0c39c9eed3411deadcc98749a6699d871b822473f55fe472fad7c01ec588/hf_xet-1.1.9-cp37-abi3-win_amd64.whl", hash = "sha256:5aad3933de6b725d61d51034e04174ed1dce7a57c63d530df0014dea15a40127", size = 2804797, upload-time = "2025-08-27T23:05:20.77Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/43/724d307b34e353da0abd476e02f72f735cdd2bc86082dee1b32ea0bfee1d/hf_xet-1.4.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:7551659ba4f1e1074e9623996f28c3873682530aee0a846b7f2f066239228144", size = 3800935, upload-time = "2026-03-31T22:39:49.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/d2/8bee5996b699262edb87dbb54118d287c0e1b2fc78af7cdc41857ba5e3c4/hf_xet-1.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bee693ada985e7045997f05f081d0e12c4c08bd7626dc397f8a7c487e6c04f7f", size = 3558942, upload-time = "2026-03-31T22:39:47.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/a1/e993d09cbe251196fb60812b09a58901c468127b7259d2bf0f68bf6088eb/hf_xet-1.4.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21644b404bb0100fe3857892f752c4d09642586fd988e61501c95bbf44b393a3", size = 4207657, upload-time = "2026-03-31T22:39:39.69Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/44/9eb6d21e5c34c63e5e399803a6932fa983cabdf47c0ecbcfe7ea97684b8c/hf_xet-1.4.3-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:987f09cfe418237812896a6736b81b1af02a3a6dcb4b4944425c4c4fca7a7cf8", size = 3986765, upload-time = "2026-03-31T22:39:37.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/7b/8ad6f16fdb82f5f7284a34b5ec48645bd575bdcd2f6f0d1644775909c486/hf_xet-1.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:60cf7fc43a99da0a853345cf86d23738c03983ee5249613a6305d3e57a5dca74", size = 4188162, upload-time = "2026-03-31T22:39:58.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/c4/39d6e136cbeea9ca5a23aad4b33024319222adbdc059ebcda5fc7d9d5ff4/hf_xet-1.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2815a49a7a59f3e2edf0cf113ae88e8cb2ca2a221bf353fb60c609584f4884d4", size = 4424525, upload-time = "2026-03-31T22:40:00.225Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/f2/adc32dae6bdbc367853118b9878139ac869419a4ae7ba07185dc31251b76/hf_xet-1.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:42ee323265f1e6a81b0e11094564fb7f7e0ec75b5105ffd91ae63f403a11931b", size = 3671610, upload-time = "2026-03-31T22:40:10.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/19/25d897dcc3f81953e0c2cde9ec186c7a0fee413eb0c9a7a9130d87d94d3a/hf_xet-1.4.3-cp313-cp313t-win_arm64.whl", hash = "sha256:27c976ba60079fb8217f485b9c5c7fcd21c90b0367753805f87cb9f3cdc4418a", size = 3528529, upload-time = "2026-03-31T22:40:09.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/36/3e8f85ca9fe09b8de2b2e10c63b3b3353d7dda88a0b3d426dffbe7b8313b/hf_xet-1.4.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5251d5ece3a81815bae9abab41cf7ddb7bcb8f56411bce0827f4a3071c92fdc6", size = 3801019, upload-time = "2026-03-31T22:39:56.651Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/9c/defb6cb1de28bccb7bd8d95f6e60f72a3d3fa4cb3d0329c26fb9a488bfe7/hf_xet-1.4.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1feb0f3abeacee143367c326a128a2e2b60868ec12a36c225afb1d6c5a05e6d2", size = 3558746, upload-time = "2026-03-31T22:39:54.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/bd/8d001191893178ff8e826e46ad5299446e62b93cd164e17b0ffea08832ec/hf_xet-1.4.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8b301fc150290ca90b4fccd079829b84bb4786747584ae08b94b4577d82fb791", size = 4207692, upload-time = "2026-03-31T22:39:46.246Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/48/6790b402803250e9936435613d3a78b9aaeee7973439f0918848dde58309/hf_xet-1.4.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:d972fbe95ddc0d3c0fc49b31a8a69f47db35c1e3699bf316421705741aab6653", size = 3986281, upload-time = "2026-03-31T22:39:44.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/56/ea62552fe53db652a9099eda600b032d75554d0e86c12a73824bfedef88b/hf_xet-1.4.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c5b48db1ee344a805a1b9bd2cda9b6b65fe77ed3787bd6e87ad5521141d317cd", size = 4187414, upload-time = "2026-03-31T22:40:04.951Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/f5/bc1456d4638061bea997e6d2db60a1a613d7b200e0755965ec312dc1ef79/hf_xet-1.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:22bdc1f5fb8b15bf2831440b91d1c9bbceeb7e10c81a12e8d75889996a5c9da8", size = 4424368, upload-time = "2026-03-31T22:40:06.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/76/ab597bae87e1f06d18d3ecb8ed7f0d3c9a37037fc32ce76233d369273c64/hf_xet-1.4.3-cp314-cp314t-win_amd64.whl", hash = "sha256:0392c79b7cf48418cd61478c1a925246cf10639f4cd9d94368d8ca1e8df9ea07", size = 3672280, upload-time = "2026-03-31T22:40:16.401Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/05/2e462d34e23a09a74d73785dbed71cc5dbad82a72eee2ad60a72a554155d/hf_xet-1.4.3-cp314-cp314t-win_arm64.whl", hash = "sha256:681c92a07796325778a79d76c67011764ecc9042a8c3579332b61b63ae512075", size = 3528945, upload-time = "2026-03-31T22:40:14.995Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/9f/9c23e4a447b8f83120798f9279d0297a4d1360bdbf59ef49ebec78fe2545/hf_xet-1.4.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d0da85329eaf196e03e90b84c2d0aca53bd4573d097a75f99609e80775f98025", size = 3805048, upload-time = "2026-03-31T22:39:53.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/f8/7aacb8e5f4a7899d39c787b5984e912e6c18b11be136ef13947d7a66d265/hf_xet-1.4.3-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e23717ce4186b265f69afa66e6f0069fe7efbf331546f5c313d00e123dc84583", size = 3562178, upload-time = "2026-03-31T22:39:51.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/9a/a24b26dc8a65f0ecc0fe5be981a19e61e7ca963b85e062c083f3a9100529/hf_xet-1.4.3-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc360b70c815bf340ed56c7b8c63aacf11762a4b099b2fe2c9bd6d6068668c08", size = 4212320, upload-time = "2026-03-31T22:39:42.922Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/60/46d493db155d2ee2801b71fb1b0fd67696359047fdd8caee2c914cc50c79/hf_xet-1.4.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39f2d2e9654cd9b4319885733993807aab6de9dfbd34c42f0b78338d6617421f", size = 3991546, upload-time = "2026-03-31T22:39:41.335Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/f5/067363e1c96c6b17256910830d1b54099d06287e10f4ec6ec4e7e08371fc/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:49ad8a8cead2b56051aa84d7fce3e1335efe68df3cf6c058f22a65513885baac", size = 4193200, upload-time = "2026-03-31T22:40:01.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/4b/53951592882d9c23080c7644542fda34a3813104e9e11fa1a7d82d419cb8/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7716d62015477a70ea272d2d68cd7cad140f61c52ee452e133e139abfe2c17ba", size = 4429392, upload-time = "2026-03-31T22:40:03.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/21/75a6c175b4e79662ad8e62f46a40ce341d8d6b206b06b4320d07d55b188c/hf_xet-1.4.3-cp37-abi3-win_amd64.whl", hash = "sha256:6b591fcad34e272a5b02607485e4f2a1334aebf1bc6d16ce8eb1eb8978ac2021", size = 3677359, upload-time = "2026-03-31T22:40:13.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/7c/44314ecd0e89f8b2b51c9d9e5e7a60a9c1c82024ac471d415860557d3cd8/hf_xet-1.4.3-cp37-abi3-win_arm64.whl", hash = "sha256:7c2c7e20bcfcc946dc67187c203463f5e932e395845d098cc2a93f5b67ca0b47", size = 3533664, upload-time = "2026-03-31T22:40:12.152Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -829,21 +846,22 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.34.4"
|
||||
version = "1.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "filelock" },
|
||||
{ name = "fsspec" },
|
||||
{ name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
|
||||
{ name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
|
||||
{ name = "httpx" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "requests" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typer" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/40/68d9b286b125d9318ae95c8f8b206e8672e7244b0eea61ebb4a88037638c/huggingface_hub-1.9.1.tar.gz", hash = "sha256:442af372207cc24dcb089caf507fcd7dbc1217c11d6059a06f6b90afe64e8bd2", size = 750355, upload-time = "2026-04-07T13:47:59.167Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/af/10a89c54937dccf6c10792770f362d96dd67aedfde108e6e1fd7a0836789/huggingface_hub-1.9.1-py3-none-any.whl", hash = "sha256:8dae771b969b318203727a6c6c5209d25e661f6f0dd010fc09cc4a12cf81c657", size = 637356, upload-time = "2026-04-07T13:47:57.239Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2657,27 +2675,28 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "tokenizers"
|
||||
version = "0.22.0"
|
||||
version = "0.22.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "huggingface-hub" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/b4/c1ce3699e81977da2ace8b16d2badfd42b060e7d33d75c4ccdbf9dc920fa/tokenizers-0.22.0.tar.gz", hash = "sha256:2e33b98525be8453f355927f3cab312c36cd3e44f4d7e9e97da2fa94d0a49dcb", size = 362771, upload-time = "2025-08-29T10:25:33.914Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/b1/18c13648edabbe66baa85fe266a478a7931ddc0cd1ba618802eb7b8d9865/tokenizers-0.22.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:eaa9620122a3fb99b943f864af95ed14c8dfc0f47afa3b404ac8c16b3f2bb484", size = 3081954, upload-time = "2025-08-29T10:25:24.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/02/c3c454b641bd7c4f79e4464accfae9e7dfc913a777d2e561e168ae060362/tokenizers-0.22.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:71784b9ab5bf0ff3075bceeb198149d2c5e068549c0d18fe32d06ba0deb63f79", size = 2945644, upload-time = "2025-08-29T10:25:23.405Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/02/d10185ba2fd8c2d111e124c9d92de398aee0264b35ce433f79fb8472f5d0/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec5b71f668a8076802b0241a42387d48289f25435b86b769ae1837cad4172a17", size = 3254764, upload-time = "2025-08-29T10:25:12.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/89/17514bd7ef4bf5bfff58e2b131cec0f8d5cea2b1c8ffe1050a2c8de88dbb/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ea8562fa7498850d02a16178105b58803ea825b50dc9094d60549a7ed63654bb", size = 3161654, upload-time = "2025-08-29T10:25:15.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/d8/bac9f3a7ef6dcceec206e3857c3b61bb16c6b702ed7ae49585f5bd85c0ef/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4136e1558a9ef2e2f1de1555dcd573e1cbc4a320c1a06c4107a3d46dc8ac6e4b", size = 3511484, upload-time = "2025-08-29T10:25:20.477Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/27/9c9800eb6763683010a4851db4d1802d8cab9cec114c17056eccb4d4a6e0/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf5954de3962a5fd9781dc12048d24a1a6f1f5df038c6e95db328cd22964206", size = 3712829, upload-time = "2025-08-29T10:25:17.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/e3/b1726dbc1f03f757260fa21752e1921445b5bc350389a8314dd3338836db/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8337ca75d0731fc4860e6204cc24bb36a67d9736142aa06ed320943b50b1e7ed", size = 3408934, upload-time = "2025-08-29T10:25:18.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/61/aeab3402c26874b74bb67a7f2c4b569dde29b51032c5384db592e7b216f4/tokenizers-0.22.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a89264e26f63c449d8cded9061adea7b5de53ba2346fc7e87311f7e4117c1cc8", size = 3345585, upload-time = "2025-08-29T10:25:22.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/d3/498b4a8a8764cce0900af1add0f176ff24f475d4413d55b760b8cdf00893/tokenizers-0.22.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:790bad50a1b59d4c21592f9c3cf5e5cf9c3c7ce7e1a23a739f13e01fb1be377a", size = 9322986, upload-time = "2025-08-29T10:25:26.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/62/92378eb1c2c565837ca3cb5f9569860d132ab9d195d7950c1ea2681dffd0/tokenizers-0.22.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:76cf6757c73a10ef10bf06fa937c0ec7393d90432f543f49adc8cab3fb6f26cb", size = 9276630, upload-time = "2025-08-29T10:25:28.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f0/342d80457aa1cda7654327460f69db0d69405af1e4c453f4dc6ca7c4a76e/tokenizers-0.22.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1626cb186e143720c62c6c6b5371e62bbc10af60481388c0da89bc903f37ea0c", size = 9547175, upload-time = "2025-08-29T10:25:29.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/84/8aa9b4adfc4fbd09381e20a5bc6aa27040c9c09caa89988c01544e008d18/tokenizers-0.22.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:da589a61cbfea18ae267723d6b029b84598dc8ca78db9951d8f5beff72d8507c", size = 9692735, upload-time = "2025-08-29T10:25:32.089Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/24/83ee2b1dc76bfe05c3142e7d0ccdfe69f0ad2f1ebf6c726cea7f0874c0d0/tokenizers-0.22.0-cp39-abi3-win32.whl", hash = "sha256:dbf9d6851bddae3e046fedfb166f47743c1c7bd11c640f0691dd35ef0bcad3be", size = 2471915, upload-time = "2025-08-29T10:25:36.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/9b/0e0bf82214ee20231845b127aa4a8015936ad5a46779f30865d10e404167/tokenizers-0.22.0-cp39-abi3-win_amd64.whl", hash = "sha256:c78174859eeaee96021f248a56c801e36bfb6bd5b067f2e95aa82445ca324f00", size = 2680494, upload-time = "2025-08-29T10:25:35.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = "2026-01-05T10:41:02.158Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = "2026-01-05T10:40:51.139Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2804,7 +2823,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "transformers"
|
||||
version = "4.56.1"
|
||||
version = "5.0.0rc3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "filelock" },
|
||||
@@ -2817,10 +2836,11 @@ dependencies = [
|
||||
{ name = "safetensors" },
|
||||
{ name = "tokenizers" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typer-slim" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3f/a3/7c116a8d85f69ea7749cf4c2df79e64c35d028e5fc7ea0168f299d03b8c7/transformers-5.0.0rc3.tar.gz", hash = "sha256:a0315b92b7e087617ade42ec9e6e92ee7620541cc5d6a3331886c52cbe306f5c", size = 8388520, upload-time = "2026-01-14T16:49:02.952Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/f2/ae2b8968764253bdf38a48dee3c299b8d0bedf7c8ffbe3449fca9bd95338/transformers-5.0.0rc3-py3-none-any.whl", hash = "sha256:383fad27f4f73092d330e45fae384681e5c8521e1dc1cf6cb1a297780e68bf2d", size = 10107087, upload-time = "2026-01-14T16:48:59.393Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2838,17 +2858,29 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
version = "0.17.3"
|
||||
version = "0.24.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "click" },
|
||||
{ name = "rich" },
|
||||
{ name = "shellingham" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/dd/82/f4bfed3bc18c6ebd6f828320811bbe4098f92a31adf4040bee59c4ae02ea/typer-0.17.3.tar.gz", hash = "sha256:0c600503d472bcf98d29914d4dcd67f80c24cc245395e2e00ba3603c9332e8ba", size = 103517, upload-time = "2025-08-30T12:35:24.05Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/e8/b3d537470e8404659a6335e7af868e90657efb73916ef31ddf3d8b9cb237/typer-0.17.3-py3-none-any.whl", hash = "sha256:643919a79182ab7ac7581056d93c6a2b865b026adf2872c4d02c72758e6f095b", size = 46494, upload-time = "2025-08-30T12:35:22.391Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typer-slim"
|
||||
version = "0.24.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typer" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/a7/e6aecc4b4eb59598829a3b5076a93aff291b4fdaa2ded25efc4e1f4d219c/typer_slim-0.24.0.tar.gz", hash = "sha256:f0ed36127183f52ae6ced2ecb2521789995992c521a46083bfcdbb652d22ad34", size = 4776, upload-time = "2026-02-16T22:08:51.2Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/24/5480c20380dfd18cf33d14784096dca45a24eae6102e91d49a718d3b6855/typer_slim-0.24.0-py3-none-any.whl", hash = "sha256:d5d7ee1ee2834d5020c7c616ed5e0d0f29b9a4b1dd283bdebae198ec09778d0e", size = 3394, upload-time = "2026-02-16T22:08:49.92Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
34
livekit.yaml.example
Normal file
34
livekit.yaml.example
Normal file
@@ -0,0 +1,34 @@
|
||||
# LiveKit server configuration
|
||||
# Generated by setup-selfhosted.sh — do not edit manually.
|
||||
# See: https://docs.livekit.io/self-hosting/deployment/
|
||||
|
||||
port: 7880
|
||||
rtc:
|
||||
tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300
|
||||
# use_external_ip: true # Uncomment for production with public IP
|
||||
|
||||
redis:
|
||||
address: redis:6379
|
||||
|
||||
keys:
|
||||
# API key : API secret (generated by setup script)
|
||||
# devkey: secret
|
||||
__LIVEKIT_API_KEY__: __LIVEKIT_API_SECRET__
|
||||
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: __LIVEKIT_API_KEY__
|
||||
|
||||
logging:
|
||||
level: info
|
||||
|
||||
# Room settings
|
||||
room:
|
||||
empty_timeout: 300 # 5 minutes after last participant leaves
|
||||
max_participants: 0 # 0 = unlimited
|
||||
|
||||
# Track Egress only (no composite video)
|
||||
# Egress is configured via egress.yaml on the egress service
|
||||
@@ -26,6 +26,17 @@
|
||||
# (If omitted, configure an external OpenAI-compatible LLM in server/.env)
|
||||
#
|
||||
# Optional flags:
|
||||
# --livekit Enable LiveKit self-hosted video platform (generates credentials,
|
||||
# starts livekit-server + livekit-egress containers)
|
||||
# --ip IP Set the server's IP address for all URLs. Implies --caddy
|
||||
# (self-signed HTTPS, required for browser mic/camera access).
|
||||
# Mutually exclusive with --domain. Use for LAN or cloud VM access.
|
||||
# On Linux, IP is auto-detected; on macOS, use --ip to specify it.
|
||||
# --tunnels TCP,UDP Configure tunnel addresses for NAT traversal (e.g. playit.gg).
|
||||
# TCP=host:port for web/API/signaling, UDP=host:port for WebRTC media.
|
||||
# Implies --caddy. Mutually exclusive with --ip and --domain.
|
||||
# --tunnel-tcp ADDR TCP tunnel address only (e.g. --tunnel-tcp host.playit.gg:9055)
|
||||
# --tunnel-udp ADDR UDP tunnel address only (e.g. --tunnel-udp host.ply.gg:14139)
|
||||
# --garage Use Garage for local S3-compatible storage
|
||||
# --caddy Enable Caddy reverse proxy with auto-SSL
|
||||
# --domain DOMAIN Use a real domain for Caddy (enables Let's Encrypt auto-HTTPS)
|
||||
@@ -42,10 +53,10 @@
|
||||
# --build Build backend and frontend images from source instead of pulling
|
||||
#
|
||||
# Examples:
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --domain reflector.example.com
|
||||
# ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --hosted --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --livekit --garage --caddy --domain reflector.example.com
|
||||
# ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --hosted --livekit --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --cpu --padding modal --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --gpu --translation passthrough --garage --caddy
|
||||
# ./scripts/setup-selfhosted.sh --cpu --diarization modal --translation modal --garage
|
||||
@@ -58,9 +69,11 @@
|
||||
# Config memory: after a successful run, flags are saved to data/.selfhosted-last-args.
|
||||
# Re-running with no arguments replays the saved configuration automatically.
|
||||
#
|
||||
# The script auto-detects Daily.co (DAILY_API_KEY) and Whereby (WHEREBY_API_KEY)
|
||||
# from server/.env. If Daily.co is configured, Hatchet workflow services are
|
||||
# started automatically for multitrack recording processing.
|
||||
# The script auto-detects Daily.co (DAILY_API_KEY), Whereby (WHEREBY_API_KEY),
|
||||
# and LiveKit (LIVEKIT_API_KEY) from server/.env.
|
||||
# - Daily.co: enables Hatchet workflow services for multitrack recording processing.
|
||||
# - LiveKit: enables livekit-server + livekit-egress containers (self-hosted,
|
||||
# generates livekit.yaml and egress.yaml configs automatically).
|
||||
#
|
||||
# Idempotent — safe to re-run at any time.
|
||||
#
|
||||
@@ -207,9 +220,13 @@ fi
|
||||
MODEL_MODE="" # gpu or cpu (required, mutually exclusive)
|
||||
OLLAMA_MODE="" # ollama-gpu or ollama-cpu (optional)
|
||||
USE_GARAGE=false
|
||||
USE_LIVEKIT=false
|
||||
USE_CADDY=false
|
||||
CUSTOM_DOMAIN="" # optional domain for Let's Encrypt HTTPS
|
||||
CUSTOM_IP="" # optional --ip override (mutually exclusive with --caddy)
|
||||
BUILD_IMAGES=false # build backend/frontend from source
|
||||
TUNNEL_TCP="" # --tunnel-tcp: TCP tunnel address (e.g., host:port from playit.gg)
|
||||
TUNNEL_UDP="" # --tunnel-udp: UDP tunnel address (e.g., host:port from playit.gg)
|
||||
ADMIN_PASSWORD="" # optional admin password for password auth
|
||||
CUSTOM_CA="" # --custom-ca: path to dir or CA cert file
|
||||
USE_CUSTOM_CA=false # derived flag: true when --custom-ca is provided
|
||||
@@ -261,8 +278,44 @@ for i in "${!ARGS[@]}"; do
|
||||
OLLAMA_MODEL="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--garage) USE_GARAGE=true ;;
|
||||
--livekit) USE_LIVEKIT=true ;;
|
||||
--caddy) USE_CADDY=true ;;
|
||||
--ip)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--ip requires an IP address (e.g. --ip 192.168.0.100)"
|
||||
exit 1
|
||||
fi
|
||||
CUSTOM_IP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--build) BUILD_IMAGES=true ;;
|
||||
--tunnels)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnels requires TCP,UDP addresses (e.g. --tunnels host:9055,host:14139)"
|
||||
exit 1
|
||||
fi
|
||||
IFS=',' read -r TUNNEL_TCP TUNNEL_UDP <<< "${ARGS[$next_i]}"
|
||||
# Trim whitespace
|
||||
TUNNEL_TCP="${TUNNEL_TCP// /}"
|
||||
TUNNEL_UDP="${TUNNEL_UDP// /}"
|
||||
SKIP_NEXT=true ;;
|
||||
--tunnel-tcp)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnel-tcp requires a TCP tunnel address (e.g. --tunnel-tcp host:9055)"
|
||||
exit 1
|
||||
fi
|
||||
TUNNEL_TCP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--tunnel-udp)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
err "--tunnel-udp requires a UDP tunnel address (e.g. --tunnel-udp host:14139)"
|
||||
exit 1
|
||||
fi
|
||||
TUNNEL_UDP="${ARGS[$next_i]}"
|
||||
SKIP_NEXT=true ;;
|
||||
--password)
|
||||
next_i=$((i + 1))
|
||||
if [[ $next_i -ge ${#ARGS[@]} ]] || [[ "${ARGS[$next_i]}" == --* ]]; then
|
||||
@@ -356,6 +409,45 @@ for i in "${!ARGS[@]}"; do
|
||||
esac
|
||||
done
|
||||
|
||||
# --- Validate flag combinations ---
|
||||
if [[ -n "$CUSTOM_IP" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
err "--ip and --domain are mutually exclusive. Use --ip for IP-based access, or --domain for domain-based access."
|
||||
exit 1
|
||||
fi
|
||||
# --ip implies --caddy (browsers require HTTPS for mic/camera access on non-localhost)
|
||||
if [[ -n "$CUSTOM_IP" ]]; then
|
||||
USE_CADDY=true
|
||||
fi
|
||||
# Validate tunnel address format (must be host:port with numeric port)
|
||||
_validate_tunnel_addr() {
|
||||
local label="$1" addr="$2"
|
||||
if [[ -z "$addr" ]]; then return; fi
|
||||
if [[ "$addr" != *:* ]]; then
|
||||
err "$label address must be host:port (got: $addr)"
|
||||
exit 1
|
||||
fi
|
||||
local port="${addr##*:}"
|
||||
if ! [[ "$port" =~ ^[0-9]+$ ]] || [[ "$port" -lt 1 ]] || [[ "$port" -gt 65535 ]]; then
|
||||
err "$label port must be 1-65535 (got: $port)"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
_validate_tunnel_addr "--tunnel-tcp" "$TUNNEL_TCP"
|
||||
_validate_tunnel_addr "--tunnel-udp" "$TUNNEL_UDP"
|
||||
|
||||
# --tunnels / --tunnel-tcp implies --caddy
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
USE_CADDY=true
|
||||
fi
|
||||
if [[ -n "$TUNNEL_TCP" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
err "--tunnel-tcp and --domain are mutually exclusive."
|
||||
exit 1
|
||||
fi
|
||||
if [[ -n "$TUNNEL_TCP" ]] && [[ -n "$CUSTOM_IP" ]]; then
|
||||
err "--tunnel-tcp and --ip are mutually exclusive."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# --- Save CLI args for config memory (re-run without flags) ---
|
||||
if [[ $# -gt 0 ]]; then
|
||||
mkdir -p "$ROOT_DIR/data"
|
||||
@@ -505,6 +597,138 @@ if [[ "$HAS_OVERRIDES" == "true" ]]; then
|
||||
MODE_DISPLAY="$MODE_DISPLAY (overrides: transcript=$EFF_TRANSCRIPT, diarization=$EFF_DIARIZATION, translation=$EFF_TRANSLATION, padding=$EFF_PADDING, mixdown=$EFF_MIXDOWN)"
|
||||
fi
|
||||
|
||||
# =========================================================
|
||||
# LiveKit config generation helper
|
||||
# =========================================================
|
||||
_generate_livekit_config() {
|
||||
local lk_key lk_secret lk_url
|
||||
lk_key=$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY" || true)
|
||||
lk_secret=$(env_get "$SERVER_ENV" "LIVEKIT_API_SECRET" || true)
|
||||
lk_url=$(env_get "$SERVER_ENV" "LIVEKIT_URL" || true)
|
||||
|
||||
if [[ -z "$lk_key" ]] || [[ -z "$lk_secret" ]]; then
|
||||
warn "LIVEKIT_API_KEY or LIVEKIT_API_SECRET not set — generating random credentials"
|
||||
lk_key="reflector_$(openssl rand -hex 8)"
|
||||
lk_secret="$(openssl rand -hex 32)"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_API_KEY" "$lk_key"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_API_SECRET" "$lk_secret"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_URL" "ws://livekit-server:7880"
|
||||
ok "Generated LiveKit API credentials"
|
||||
fi
|
||||
|
||||
# Set internal URL for server->livekit communication
|
||||
if ! env_has_key "$SERVER_ENV" "LIVEKIT_URL" || [[ -z "$(env_get "$SERVER_ENV" "LIVEKIT_URL" || true)" ]]; then
|
||||
env_set "$SERVER_ENV" "LIVEKIT_URL" "ws://livekit-server:7880"
|
||||
fi
|
||||
|
||||
# Set public URL based on deployment mode.
|
||||
# When Caddy is enabled (HTTPS), LiveKit WebSocket is proxied through Caddy
|
||||
# at /lk-ws to avoid mixed-content blocking (browsers block ws:// on https:// pages).
|
||||
# When no Caddy, browsers connect directly to LiveKit on port 7880.
|
||||
local public_lk_url
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
# Tunnel mode: LiveKit signaling proxied through Caddy on the tunnel address
|
||||
public_lk_url="wss://${TUNNEL_TCP}/lk-ws"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
public_lk_url="wss://${CUSTOM_DOMAIN}/lk-ws"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
public_lk_url="wss://${PRIMARY_IP}/lk-ws"
|
||||
else
|
||||
public_lk_url="wss://localhost/lk-ws"
|
||||
fi
|
||||
else
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
public_lk_url="ws://${PRIMARY_IP}:7880"
|
||||
else
|
||||
public_lk_url="ws://localhost:7880"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "LIVEKIT_PUBLIC_URL" "$public_lk_url"
|
||||
env_set "$SERVER_ENV" "DEFAULT_VIDEO_PLATFORM" "livekit"
|
||||
|
||||
# LiveKit storage: always sync from transcript storage config.
|
||||
# Endpoint URL must match (changes between Caddy/no-Caddy runs).
|
||||
local ts_bucket ts_region ts_key ts_secret ts_endpoint
|
||||
ts_bucket=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_BUCKET_NAME" 2>/dev/null || echo "reflector-bucket")
|
||||
ts_region=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_REGION" 2>/dev/null || echo "us-east-1")
|
||||
ts_key=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID" 2>/dev/null || true)
|
||||
ts_secret=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY" 2>/dev/null || true)
|
||||
ts_endpoint=$(env_get "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" 2>/dev/null || true)
|
||||
env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_BUCKET_NAME" "$ts_bucket"
|
||||
env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_REGION" "$ts_region"
|
||||
[[ -n "$ts_key" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID" "$ts_key"
|
||||
[[ -n "$ts_secret" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY" "$ts_secret"
|
||||
[[ -n "$ts_endpoint" ]] && env_set "$SERVER_ENV" "LIVEKIT_STORAGE_AWS_ENDPOINT_URL" "$ts_endpoint"
|
||||
if [[ -z "$ts_key" ]] || [[ -z "$ts_secret" ]]; then
|
||||
warn "LiveKit storage: S3 credentials not found — Track Egress recording will fail!"
|
||||
warn "Configure LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID and LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY in server/.env"
|
||||
warn "Or run with --garage to auto-configure local S3 storage"
|
||||
else
|
||||
ok "LiveKit storage: synced from transcript storage config"
|
||||
fi
|
||||
|
||||
# Generate livekit.yaml
|
||||
# UDP tunnel mode: use single port matching the tunnel's public port
|
||||
local rtc_config
|
||||
if [[ -n "$TUNNEL_UDP" ]]; then
|
||||
local tunnel_udp_host tunnel_udp_port
|
||||
tunnel_udp_host="${TUNNEL_UDP%:*}"
|
||||
tunnel_udp_port="${TUNNEL_UDP##*:}"
|
||||
# Resolve tunnel hostname to IP for node_ip
|
||||
local tunnel_udp_ip
|
||||
tunnel_udp_ip=$(dig +short "$tunnel_udp_host" 2>/dev/null | head -1 || nslookup "$tunnel_udp_host" 2>/dev/null | grep "Address:" | tail -1 | awk '{print $2}' || true)
|
||||
if [[ -z "$tunnel_udp_ip" ]]; then
|
||||
warn "Could not resolve UDP tunnel hostname: $tunnel_udp_host"
|
||||
warn "Set node_ip manually in livekit.yaml after setup"
|
||||
tunnel_udp_ip="0.0.0.0"
|
||||
fi
|
||||
rtc_config=" tcp_port: 7881
|
||||
udp_port: ${tunnel_udp_port}
|
||||
node_ip: ${tunnel_udp_ip}
|
||||
use_external_ip: false"
|
||||
ok "LiveKit UDP: single port ${tunnel_udp_port}, node_ip=${tunnel_udp_ip} (via tunnel)"
|
||||
else
|
||||
rtc_config=" tcp_port: 7881
|
||||
port_range_start: 44200
|
||||
port_range_end: 44300"
|
||||
fi
|
||||
|
||||
cat > "$ROOT_DIR/livekit.yaml" << LKEOF
|
||||
port: 7880
|
||||
rtc:
|
||||
${rtc_config}
|
||||
redis:
|
||||
address: redis:6379
|
||||
keys:
|
||||
${lk_key}: ${lk_secret}
|
||||
webhook:
|
||||
urls:
|
||||
- http://server:1250/v1/livekit/webhook
|
||||
api_key: ${lk_key}
|
||||
logging:
|
||||
level: info
|
||||
room:
|
||||
empty_timeout: 300
|
||||
max_participants: 0
|
||||
LKEOF
|
||||
ok "Generated livekit.yaml"
|
||||
|
||||
# Generate egress.yaml (Track Egress only — no composite video)
|
||||
cat > "$ROOT_DIR/egress.yaml" << EGEOF
|
||||
api_key: ${lk_key}
|
||||
api_secret: ${lk_secret}
|
||||
ws_url: ws://livekit-server:7880
|
||||
redis:
|
||||
address: redis:6379
|
||||
health_port: 7082
|
||||
log_level: info
|
||||
session_limits:
|
||||
file_output_max_duration: 4h
|
||||
EGEOF
|
||||
ok "Generated egress.yaml"
|
||||
}
|
||||
|
||||
# =========================================================
|
||||
# Step 0: Prerequisites
|
||||
# =========================================================
|
||||
@@ -727,7 +951,10 @@ step_server_env() {
|
||||
|
||||
# Public-facing URLs
|
||||
local server_base_url
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
# Tunnel mode: public URL is the tunnel address with HTTPS (Caddy terminates TLS)
|
||||
server_base_url="https://$TUNNEL_TCP"
|
||||
elif [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
server_base_url="https://$CUSTOM_DOMAIN"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -737,13 +964,23 @@ step_server_env() {
|
||||
fi
|
||||
else
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
server_base_url="http://$PRIMARY_IP"
|
||||
server_base_url="http://$PRIMARY_IP:1250"
|
||||
else
|
||||
server_base_url="http://localhost:1250"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "BASE_URL" "$server_base_url"
|
||||
env_set "$SERVER_ENV" "CORS_ORIGIN" "$server_base_url"
|
||||
# CORS: allow the frontend origin (port 3000, not the API port)
|
||||
local cors_origin="${server_base_url}"
|
||||
if [[ "$USE_CADDY" != "true" ]]; then
|
||||
# Without Caddy, frontend is on port 3000, API on 1250
|
||||
cors_origin="${server_base_url/:1250/:3000}"
|
||||
# Safety: if substitution didn't change anything, construct explicitly
|
||||
if [[ "$cors_origin" == "$server_base_url" ]] && [[ -n "$PRIMARY_IP" ]]; then
|
||||
cors_origin="http://${PRIMARY_IP}:3000"
|
||||
fi
|
||||
fi
|
||||
env_set "$SERVER_ENV" "CORS_ORIGIN" "$cors_origin"
|
||||
|
||||
# WebRTC: advertise host IP in ICE candidates so browsers can reach the server
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -951,8 +1188,31 @@ step_server_env() {
|
||||
# Hatchet is always required (file, live, and multitrack pipelines all use it)
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_SERVER_URL" "http://hatchet:8888"
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_HOST_PORT" "hatchet:7077"
|
||||
env_set "$SERVER_ENV" "HATCHET_CLIENT_TLS_STRATEGY" "none"
|
||||
ok "Hatchet connectivity configured (workflow engine for processing pipelines)"
|
||||
|
||||
# BIND_HOST controls whether server/web ports are exposed on all interfaces
|
||||
local root_env="$ROOT_DIR/.env"
|
||||
touch "$root_env"
|
||||
if [[ "$USE_CADDY" == "true" ]]; then
|
||||
# With Caddy, services stay on localhost (Caddy is the public entry point)
|
||||
env_set "$root_env" "BIND_HOST" "127.0.0.1"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
# Without Caddy + detected IP, expose on all interfaces for direct access
|
||||
env_set "$root_env" "BIND_HOST" "0.0.0.0"
|
||||
ok "BIND_HOST=0.0.0.0 (ports exposed for direct access)"
|
||||
fi
|
||||
|
||||
# UDP ports for LiveKit (used by docker-compose for port mapping)
|
||||
if [[ -n "$TUNNEL_UDP" ]]; then
|
||||
local tunnel_udp_port="${TUNNEL_UDP##*:}"
|
||||
env_set "$root_env" "LIVEKIT_UDP_PORTS" "${tunnel_udp_port}:${tunnel_udp_port}"
|
||||
ok "LiveKit UDP: single port ${tunnel_udp_port} (via tunnel)"
|
||||
else
|
||||
# Default: full range for direct access
|
||||
env_set "$root_env" "LIVEKIT_UDP_PORTS" "44200-44300:44200-44300"
|
||||
fi
|
||||
|
||||
ok "server/.env ready"
|
||||
}
|
||||
|
||||
@@ -971,7 +1231,9 @@ step_www_env() {
|
||||
|
||||
# Public-facing URL for frontend
|
||||
local base_url
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
if [[ -n "$TUNNEL_TCP" ]]; then
|
||||
base_url="https://$TUNNEL_TCP"
|
||||
elif [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
base_url="https://$CUSTOM_DOMAIN"
|
||||
elif [[ "$USE_CADDY" == "true" ]]; then
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
@@ -980,18 +1242,26 @@ step_www_env() {
|
||||
base_url="https://localhost"
|
||||
fi
|
||||
else
|
||||
# No Caddy — user's proxy handles SSL. Use http for now, they'll override.
|
||||
# No Caddy — clients connect directly to services on their ports.
|
||||
if [[ -n "$PRIMARY_IP" ]]; then
|
||||
base_url="http://$PRIMARY_IP"
|
||||
base_url="http://$PRIMARY_IP:3000"
|
||||
else
|
||||
base_url="http://localhost"
|
||||
base_url="http://localhost:3000"
|
||||
fi
|
||||
fi
|
||||
|
||||
# API_URL: with Caddy, same origin (443 proxies both); without Caddy, API is on port 1250
|
||||
local api_url="$base_url"
|
||||
if [[ "$USE_CADDY" != "true" ]]; then
|
||||
api_url="${base_url/:3000/:1250}"
|
||||
# fallback if no port substitution happened (e.g. localhost without port)
|
||||
[[ "$api_url" == "$base_url" ]] && api_url="${base_url}:1250"
|
||||
fi
|
||||
|
||||
env_set "$WWW_ENV" "SITE_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "NEXTAUTH_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "NEXTAUTH_SECRET" "$NEXTAUTH_SECRET"
|
||||
env_set "$WWW_ENV" "API_URL" "$base_url"
|
||||
env_set "$WWW_ENV" "API_URL" "$api_url"
|
||||
env_set "$WWW_ENV" "WEBSOCKET_URL" "auto"
|
||||
env_set "$WWW_ENV" "SERVER_API_URL" "http://server:1250"
|
||||
env_set "$WWW_ENV" "KV_URL" "redis://redis:6379"
|
||||
@@ -1014,14 +1284,17 @@ step_www_env() {
|
||||
fi
|
||||
|
||||
# Enable rooms if any video platform is configured in server/.env
|
||||
local _daily_key="" _whereby_key=""
|
||||
local _daily_key="" _whereby_key="" _livekit_key=""
|
||||
if env_has_key "$SERVER_ENV" "DAILY_API_KEY"; then
|
||||
_daily_key=$(env_get "$SERVER_ENV" "DAILY_API_KEY")
|
||||
fi
|
||||
if env_has_key "$SERVER_ENV" "WHEREBY_API_KEY"; then
|
||||
_whereby_key=$(env_get "$SERVER_ENV" "WHEREBY_API_KEY")
|
||||
fi
|
||||
if [[ -n "$_daily_key" ]] || [[ -n "$_whereby_key" ]]; then
|
||||
if env_has_key "$SERVER_ENV" "LIVEKIT_API_KEY"; then
|
||||
_livekit_key=$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY")
|
||||
fi
|
||||
if [[ -n "$_daily_key" ]] || [[ -n "$_whereby_key" ]] || [[ -n "$_livekit_key" ]]; then
|
||||
env_set "$WWW_ENV" "FEATURE_ROOMS" "true"
|
||||
ok "Rooms feature enabled (video platform configured)"
|
||||
fi
|
||||
@@ -1110,7 +1383,13 @@ step_garage() {
|
||||
|
||||
# Write S3 credentials to server/.env
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_BACKEND" "aws"
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://garage:3900"
|
||||
# Endpoint URL: use public IP when no Caddy so presigned URLs work in the browser.
|
||||
# With Caddy, internal hostname is fine (Caddy proxies or browser never sees presigned URLs directly).
|
||||
if [[ "$USE_CADDY" != "true" ]] && [[ -n "$PRIMARY_IP" ]]; then
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://${PRIMARY_IP}:3900"
|
||||
else
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_ENDPOINT_URL" "http://garage:3900"
|
||||
fi
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_BUCKET_NAME" "reflector-media"
|
||||
env_set "$SERVER_ENV" "TRANSCRIPT_STORAGE_AWS_REGION" "garage"
|
||||
if [[ "$created_key" == "true" ]]; then
|
||||
@@ -1188,6 +1467,22 @@ step_caddyfile() {
|
||||
rm -rf "$caddyfile"
|
||||
fi
|
||||
|
||||
# LiveKit reverse proxy snippet (inserted into Caddyfile when --livekit is active)
|
||||
# LiveKit reverse proxy snippet (inserted into Caddyfile when --livekit is active).
|
||||
# Strips /lk-ws prefix so LiveKit server sees requests at its root /.
|
||||
local lk_proxy_block=""
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
lk_proxy_block="
|
||||
handle_path /lk-ws/* {
|
||||
reverse_proxy livekit-server:7880
|
||||
}
|
||||
handle_path /lk-ws {
|
||||
reverse_proxy livekit-server:7880
|
||||
}"
|
||||
fi
|
||||
|
||||
local hatchet_proxy_block=""
|
||||
|
||||
if [[ -n "$TLS_CERT_PATH" ]] && [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
# Custom domain with user-provided TLS certificate (from --custom-ca directory)
|
||||
cat > "$caddyfile" << CADDYEOF
|
||||
@@ -1199,7 +1494,7 @@ $CUSTOM_DOMAIN {
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1216,7 +1511,7 @@ $CUSTOM_DOMAIN {
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1225,17 +1520,19 @@ CADDYEOF
|
||||
ok "Created Caddyfile for $CUSTOM_DOMAIN (Let's Encrypt auto-HTTPS)"
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
# No domain, IP only: catch-all :443 with self-signed cert
|
||||
# (IP connections don't send SNI, so we can't match by address)
|
||||
# on_demand generates certs dynamically for any hostname/IP on first request
|
||||
cat > "$caddyfile" << CADDYEOF
|
||||
# Generated by setup-selfhosted.sh — self-signed cert for IP access
|
||||
:443 {
|
||||
tls internal
|
||||
tls internal {
|
||||
on_demand
|
||||
}
|
||||
handle /v1/* {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
handle /health {
|
||||
reverse_proxy server:1250
|
||||
}
|
||||
}${lk_proxy_block}${hatchet_proxy_block}
|
||||
handle {
|
||||
reverse_proxy web:3000
|
||||
}
|
||||
@@ -1249,21 +1546,8 @@ CADDYEOF
|
||||
ok "Caddyfile already exists"
|
||||
fi
|
||||
|
||||
# Add Hatchet dashboard route if Daily.co is detected
|
||||
if [[ "$DAILY_DETECTED" == "true" ]]; then
|
||||
if ! grep -q "hatchet" "$caddyfile" 2>/dev/null; then
|
||||
cat >> "$caddyfile" << CADDYEOF
|
||||
|
||||
# Hatchet workflow dashboard (Daily.co multitrack processing)
|
||||
:8888 {
|
||||
tls internal
|
||||
reverse_proxy hatchet:8888
|
||||
}
|
||||
CADDYEOF
|
||||
ok "Added Hatchet dashboard route to Caddyfile (port 8888)"
|
||||
else
|
||||
ok "Hatchet dashboard route already in Caddyfile"
|
||||
fi
|
||||
if [[ "$DAILY_DETECTED" == "true" ]] || [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
ok "Hatchet dashboard available at port 8888"
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -1467,7 +1751,7 @@ step_health() {
|
||||
info "Waiting for Hatchet workflow engine..."
|
||||
local hatchet_ok=false
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
if compose_cmd exec -T hatchet curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
hatchet_ok=true
|
||||
break
|
||||
fi
|
||||
@@ -1515,7 +1799,7 @@ step_hatchet_token() {
|
||||
# Wait for hatchet to be healthy
|
||||
local hatchet_ok=false
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
if compose_cmd exec -T hatchet curl -sf http://localhost:8888/api/live > /dev/null 2>&1; then
|
||||
hatchet_ok=true
|
||||
break
|
||||
fi
|
||||
@@ -1586,12 +1870,19 @@ main() {
|
||||
[[ "$BUILD_IMAGES" == "true" ]] && echo " Build: from source"
|
||||
echo ""
|
||||
|
||||
# Detect primary IP
|
||||
PRIMARY_IP=""
|
||||
if [[ "$OS" == "Linux" ]]; then
|
||||
PRIMARY_IP=$(hostname -I 2>/dev/null | awk '{print $1}' || true)
|
||||
if [[ "$PRIMARY_IP" == "127."* ]] || [[ -z "$PRIMARY_IP" ]]; then
|
||||
PRIMARY_IP=$(ip -4 route get 1 2>/dev/null | sed -n 's/.*src \([0-9.]*\).*/\1/p' || true)
|
||||
# Detect primary IP (--ip overrides auto-detection)
|
||||
if [[ -n "$CUSTOM_IP" ]]; then
|
||||
PRIMARY_IP="$CUSTOM_IP"
|
||||
ok "Using provided IP: $PRIMARY_IP"
|
||||
else
|
||||
PRIMARY_IP=""
|
||||
if [[ "$OS" == "Linux" ]]; then
|
||||
PRIMARY_IP=$(hostname -I 2>/dev/null | awk '{print $1}' || true)
|
||||
if [[ "$PRIMARY_IP" == "127."* ]] || [[ -z "$PRIMARY_IP" ]]; then
|
||||
PRIMARY_IP=$(ip -4 route get 1 2>/dev/null | sed -n 's/.*src \([0-9.]*\).*/\1/p' || true)
|
||||
fi
|
||||
elif [[ "$OS" == "Darwin" ]]; then
|
||||
PRIMARY_IP=$(detect_lan_ip)
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1621,14 +1912,21 @@ main() {
|
||||
# Auto-detect video platforms from server/.env (after step_server_env so file exists)
|
||||
DAILY_DETECTED=false
|
||||
WHEREBY_DETECTED=false
|
||||
LIVEKIT_DETECTED=false
|
||||
if env_has_key "$SERVER_ENV" "DAILY_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "DAILY_API_KEY")" ]]; then
|
||||
DAILY_DETECTED=true
|
||||
fi
|
||||
if env_has_key "$SERVER_ENV" "WHEREBY_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "WHEREBY_API_KEY")" ]]; then
|
||||
WHEREBY_DETECTED=true
|
||||
fi
|
||||
# LiveKit: enabled via --livekit flag OR pre-existing LIVEKIT_API_KEY in env
|
||||
if [[ "$USE_LIVEKIT" == "true" ]]; then
|
||||
LIVEKIT_DETECTED=true
|
||||
elif env_has_key "$SERVER_ENV" "LIVEKIT_API_KEY" && [[ -n "$(env_get "$SERVER_ENV" "LIVEKIT_API_KEY")" ]]; then
|
||||
LIVEKIT_DETECTED=true
|
||||
fi
|
||||
ANY_PLATFORM_DETECTED=false
|
||||
[[ "$DAILY_DETECTED" == "true" || "$WHEREBY_DETECTED" == "true" ]] && ANY_PLATFORM_DETECTED=true
|
||||
[[ "$DAILY_DETECTED" == "true" || "$WHEREBY_DETECTED" == "true" || "$LIVEKIT_DETECTED" == "true" ]] && ANY_PLATFORM_DETECTED=true
|
||||
|
||||
# Conditional profile activation for Daily.co
|
||||
if [[ "$DAILY_DETECTED" == "true" ]]; then
|
||||
@@ -1636,6 +1934,13 @@ main() {
|
||||
ok "Daily.co detected — enabling Hatchet workflow services"
|
||||
fi
|
||||
|
||||
# Conditional profile activation for LiveKit
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
COMPOSE_PROFILES+=("livekit")
|
||||
_generate_livekit_config
|
||||
ok "LiveKit enabled — livekit-server + livekit-egress"
|
||||
fi
|
||||
|
||||
# Generate .env.hatchet for hatchet dashboard config (always needed)
|
||||
local hatchet_server_url hatchet_cookie_domain
|
||||
if [[ -n "$CUSTOM_DOMAIN" ]]; then
|
||||
@@ -1683,10 +1988,12 @@ EOF
|
||||
echo " App: https://localhost (accept self-signed cert in browser)"
|
||||
echo " API: https://localhost/v1/"
|
||||
fi
|
||||
elif [[ -n "$PRIMARY_IP" ]]; then
|
||||
echo " App: http://$PRIMARY_IP:3000"
|
||||
echo " API: http://$PRIMARY_IP:1250"
|
||||
else
|
||||
echo " No Caddy — point your reverse proxy at:"
|
||||
echo " Frontend: web:3000 (or localhost:3000 from host)"
|
||||
echo " API: server:1250 (or localhost:1250 from host)"
|
||||
echo " App: http://localhost:3000"
|
||||
echo " API: http://localhost:1250"
|
||||
fi
|
||||
echo ""
|
||||
if [[ "$HAS_OVERRIDES" == "true" ]]; then
|
||||
@@ -1702,6 +2009,11 @@ EOF
|
||||
[[ "$USES_OLLAMA" != "true" ]] && echo " LLM: External (configure in server/.env)"
|
||||
[[ "$DAILY_DETECTED" == "true" ]] && echo " Video: Daily.co (live rooms + multitrack processing via Hatchet)"
|
||||
[[ "$WHEREBY_DETECTED" == "true" ]] && echo " Video: Whereby (live rooms)"
|
||||
if [[ "$LIVEKIT_DETECTED" == "true" ]]; then
|
||||
echo " Video: LiveKit (self-hosted, live rooms + track egress)"
|
||||
[[ -n "$TUNNEL_TCP" ]] && echo " Tunnel: TCP=$TUNNEL_TCP"
|
||||
[[ -n "$TUNNEL_UDP" ]] && echo " UDP=$TUNNEL_UDP"
|
||||
fi
|
||||
[[ "$ANY_PLATFORM_DETECTED" != "true" ]] && echo " Video: None (rooms disabled)"
|
||||
if [[ "$USE_CUSTOM_CA" == "true" ]]; then
|
||||
echo " CA: Custom (certs/ca.crt)"
|
||||
|
||||
@@ -42,6 +42,7 @@ dependencies = [
|
||||
"pydantic>=2.12.5",
|
||||
"aiosmtplib>=3.0.0",
|
||||
"email-validator>=2.0.0",
|
||||
"livekit-api>=1.1.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
||||
@@ -15,6 +15,7 @@ from reflector.metrics import metrics_init
|
||||
from reflector.settings import settings
|
||||
from reflector.views.config import router as config_router
|
||||
from reflector.views.daily import router as daily_router
|
||||
from reflector.views.livekit import router as livekit_router
|
||||
from reflector.views.meetings import router as meetings_router
|
||||
from reflector.views.rooms import router as rooms_router
|
||||
from reflector.views.rtc_offer import router as rtc_offer_router
|
||||
@@ -112,6 +113,7 @@ app.include_router(config_router, prefix="/v1")
|
||||
app.include_router(zulip_router, prefix="/v1")
|
||||
app.include_router(whereby_router, prefix="/v1")
|
||||
app.include_router(daily_router, prefix="/v1/daily")
|
||||
app.include_router(livekit_router, prefix="/v1/livekit")
|
||||
if auth_router:
|
||||
app.include_router(auth_router, prefix="/v1")
|
||||
add_pagination(app)
|
||||
|
||||
@@ -120,7 +120,8 @@ class Meeting(BaseModel):
|
||||
daily_composed_video_s3_key: str | None = None
|
||||
daily_composed_video_duration: int | None = None
|
||||
# Email recipients for transcript notification
|
||||
email_recipients: list[str] | None = None
|
||||
# Each entry is {"email": str, "include_link": bool} or a legacy plain str
|
||||
email_recipients: list[dict | str] | None = None
|
||||
|
||||
|
||||
class MeetingController:
|
||||
@@ -164,6 +165,17 @@ class MeetingController:
|
||||
results = await get_database().fetch_all(query)
|
||||
return [Meeting(**result) for result in results]
|
||||
|
||||
async def get_all_inactive_livekit(self) -> list[Meeting]:
|
||||
"""Get inactive LiveKit meetings (for multitrack processing discovery)."""
|
||||
query = meetings.select().where(
|
||||
sa.and_(
|
||||
meetings.c.is_active == sa.false(),
|
||||
meetings.c.platform == "livekit",
|
||||
)
|
||||
)
|
||||
results = await get_database().fetch_all(query)
|
||||
return [Meeting(**result) for result in results]
|
||||
|
||||
async def get_by_room_name(
|
||||
self,
|
||||
room_name: str,
|
||||
@@ -399,15 +411,27 @@ class MeetingController:
|
||||
async with get_database().transaction(isolation="serializable"):
|
||||
yield
|
||||
|
||||
async def add_email_recipient(self, meeting_id: str, email: str) -> list[str]:
|
||||
"""Add an email to the meeting's email_recipients list (no duplicates)."""
|
||||
async def add_email_recipient(
|
||||
self, meeting_id: str, email: str, *, include_link: bool = True
|
||||
) -> list[dict]:
|
||||
"""Add an email to the meeting's email_recipients list (no duplicates).
|
||||
|
||||
Each entry is stored as {"email": str, "include_link": bool}.
|
||||
Legacy plain-string entries are normalised on read.
|
||||
"""
|
||||
async with self.transaction():
|
||||
meeting = await self.get_by_id(meeting_id)
|
||||
if not meeting:
|
||||
raise ValueError(f"Meeting {meeting_id} not found")
|
||||
current = meeting.email_recipients or []
|
||||
if email not in current:
|
||||
current.append(email)
|
||||
# Normalise legacy string entries
|
||||
current: list[dict] = [
|
||||
entry
|
||||
if isinstance(entry, dict)
|
||||
else {"email": entry, "include_link": True}
|
||||
for entry in (meeting.email_recipients or [])
|
||||
]
|
||||
if not any(r["email"] == email for r in current):
|
||||
current.append({"email": email, "include_link": include_link})
|
||||
await self.update_meeting(meeting_id, email_recipients=current)
|
||||
return current
|
||||
|
||||
|
||||
@@ -78,6 +78,14 @@ class RecordingController:
|
||||
)
|
||||
await get_database().execute(query)
|
||||
|
||||
async def restore_by_id(self, id: str) -> None:
|
||||
query = recordings.update().where(recordings.c.id == id).values(deleted_at=None)
|
||||
await get_database().execute(query)
|
||||
|
||||
async def hard_delete_by_id(self, id: str) -> None:
|
||||
query = recordings.delete().where(recordings.c.id == id)
|
||||
await get_database().execute(query)
|
||||
|
||||
async def set_meeting_id(
|
||||
self,
|
||||
recording_id: NonEmptyString,
|
||||
|
||||
@@ -138,6 +138,7 @@ class SearchParameters(BaseModel):
|
||||
source_kind: SourceKind | None = None
|
||||
from_datetime: datetime | None = None
|
||||
to_datetime: datetime | None = None
|
||||
include_deleted: bool = False
|
||||
|
||||
|
||||
class SearchResultDB(BaseModel):
|
||||
@@ -387,7 +388,10 @@ class SearchController:
|
||||
transcripts.join(rooms, transcripts.c.room_id == rooms.c.id, isouter=True)
|
||||
)
|
||||
|
||||
base_query = base_query.where(transcripts.c.deleted_at.is_(None))
|
||||
if params.include_deleted:
|
||||
base_query = base_query.where(transcripts.c.deleted_at.isnot(None))
|
||||
else:
|
||||
base_query = base_query.where(transcripts.c.deleted_at.is_(None))
|
||||
|
||||
if params.query_text is not None:
|
||||
# because already initialized based on params.query_text presence above
|
||||
@@ -396,7 +400,13 @@ class SearchController:
|
||||
transcripts.c.search_vector_en.op("@@")(search_query)
|
||||
)
|
||||
|
||||
if params.user_id:
|
||||
if params.include_deleted:
|
||||
# Trash view: only show user's own deleted transcripts.
|
||||
# Defense-in-depth: require user_id to prevent leaking all users' trash.
|
||||
if not params.user_id:
|
||||
return [], 0
|
||||
base_query = base_query.where(transcripts.c.user_id == params.user_id)
|
||||
elif params.user_id:
|
||||
base_query = base_query.where(
|
||||
sqlalchemy.or_(
|
||||
transcripts.c.user_id == params.user_id, rooms.c.is_shared
|
||||
@@ -421,6 +431,8 @@ class SearchController:
|
||||
|
||||
if params.query_text is not None:
|
||||
order_by = sqlalchemy.desc(sqlalchemy.text("rank"))
|
||||
elif params.include_deleted:
|
||||
order_by = sqlalchemy.desc(transcripts.c.deleted_at)
|
||||
else:
|
||||
order_by = sqlalchemy.desc(transcripts.c.created_at)
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ from reflector.db.utils import is_postgresql
|
||||
from reflector.logger import logger
|
||||
from reflector.processors.types import Word as ProcessorWord
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_transcripts_storage
|
||||
from reflector.storage import get_source_storage, get_transcripts_storage
|
||||
from reflector.utils import generate_uuid4
|
||||
from reflector.utils.webvtt import topics_to_webvtt
|
||||
|
||||
@@ -486,6 +486,14 @@ class TranscriptController:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
|
||||
async def get_by_meeting_id(self, meeting_id: str) -> Transcript | None:
|
||||
"""Get a transcript by meeting_id (first match)."""
|
||||
query = transcripts.select().where(transcripts.c.meeting_id == meeting_id)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
|
||||
async def get_by_recording_id(
|
||||
self, recording_id: str, **kwargs
|
||||
) -> Transcript | None:
|
||||
@@ -676,6 +684,126 @@ class TranscriptController:
|
||||
)
|
||||
await get_database().execute(query)
|
||||
|
||||
async def restore_by_id(
|
||||
self,
|
||||
transcript_id: str,
|
||||
user_id: str | None = None,
|
||||
) -> bool:
|
||||
"""
|
||||
Restore a soft-deleted transcript by clearing deleted_at.
|
||||
|
||||
Also restores the associated recording if present.
|
||||
Returns True if the transcript was restored, False otherwise.
|
||||
"""
|
||||
transcript = await self.get_by_id(transcript_id)
|
||||
if not transcript:
|
||||
return False
|
||||
if transcript.deleted_at is None:
|
||||
return False
|
||||
if user_id is not None and transcript.user_id != user_id:
|
||||
return False
|
||||
|
||||
query = (
|
||||
transcripts.update()
|
||||
.where(transcripts.c.id == transcript_id)
|
||||
.values(deleted_at=None)
|
||||
)
|
||||
await get_database().execute(query)
|
||||
|
||||
if transcript.recording_id:
|
||||
try:
|
||||
await recordings_controller.restore_by_id(transcript.recording_id)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to restore recording",
|
||||
exc_info=e,
|
||||
recording_id=transcript.recording_id,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
async def hard_delete(self, transcript_id: str) -> None:
|
||||
"""
|
||||
Permanently delete a transcript, its recording, and all associated files.
|
||||
|
||||
Only deletes transcript-owned resources:
|
||||
- Transcript row and recording row from DB (first, to make data inaccessible)
|
||||
- Transcript audio in S3 storage
|
||||
- Recording files in S3 (both object_key and track_keys, since a recording can have both)
|
||||
- Local files (data_path directory)
|
||||
|
||||
Does NOT delete: meetings, consent records, rooms, or any shared entity.
|
||||
Requires the transcript to be soft-deleted first (deleted_at must be set).
|
||||
"""
|
||||
transcript = await self.get_by_id(transcript_id)
|
||||
if not transcript:
|
||||
return
|
||||
if transcript.deleted_at is None:
|
||||
return
|
||||
|
||||
# Collect file references before deleting DB rows
|
||||
recording = None
|
||||
recording_storage = None
|
||||
if transcript.recording_id:
|
||||
recording = await recordings_controller.get_by_id(transcript.recording_id)
|
||||
# Determine the correct storage backend for recording files.
|
||||
# Recordings from different platforms (daily, whereby) live in
|
||||
# platform-specific buckets with separate credentials.
|
||||
if recording and recording.meeting_id:
|
||||
from reflector.db.meetings import meetings_controller # noqa: PLC0415
|
||||
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
if meeting:
|
||||
recording_storage = get_source_storage(meeting.platform)
|
||||
if recording_storage is None:
|
||||
recording_storage = get_transcripts_storage()
|
||||
|
||||
# 1. Hard-delete DB rows first (makes data inaccessible immediately)
|
||||
if recording:
|
||||
await recordings_controller.hard_delete_by_id(recording.id)
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == transcript_id)
|
||||
)
|
||||
|
||||
# 2. Delete transcript audio from S3 (always uses transcript storage)
|
||||
transcript_storage = get_transcripts_storage()
|
||||
if transcript.audio_location == "storage" and not transcript.audio_deleted:
|
||||
try:
|
||||
await transcript_storage.delete_file(transcript.storage_audio_path)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to delete transcript audio from storage",
|
||||
exc_info=e,
|
||||
transcript_id=transcript_id,
|
||||
path=transcript.storage_audio_path,
|
||||
)
|
||||
|
||||
# 3. Delete recording files from S3 (both object_key and track_keys —
|
||||
# a recording can have both, unlike consent cleanup which uses elif).
|
||||
# Uses platform-specific storage resolved above.
|
||||
if recording and recording.bucket_name and recording_storage:
|
||||
keys_to_delete = []
|
||||
if recording.track_keys:
|
||||
keys_to_delete = recording.track_keys
|
||||
if recording.object_key:
|
||||
keys_to_delete.append(recording.object_key)
|
||||
|
||||
for key in keys_to_delete:
|
||||
try:
|
||||
await recording_storage.delete_file(
|
||||
key, bucket=recording.bucket_name
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to delete recording file",
|
||||
exc_info=e,
|
||||
key=key,
|
||||
bucket=recording.bucket_name,
|
||||
)
|
||||
|
||||
# 4. Delete local files
|
||||
transcript.unlink()
|
||||
|
||||
async def remove_by_recording_id(self, recording_id: str):
|
||||
"""
|
||||
Soft-delete a transcript by recording_id
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from html import escape
|
||||
|
||||
import aiosmtplib
|
||||
import structlog
|
||||
|
||||
from reflector.db.transcripts import Transcript
|
||||
from reflector.db.transcripts import SourceKind, Transcript
|
||||
from reflector.settings import settings
|
||||
from reflector.utils.transcript_formats import transcript_to_text_timestamped
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
@@ -18,35 +20,111 @@ def get_transcript_url(transcript: Transcript) -> str:
|
||||
return f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
|
||||
|
||||
|
||||
def _build_plain_text(transcript: Transcript, url: str) -> str:
|
||||
def _get_timestamped_text(transcript: Transcript) -> str:
|
||||
"""Build the full timestamped transcript text using existing utility."""
|
||||
if not transcript.topics:
|
||||
return ""
|
||||
is_multitrack = transcript.source_kind == SourceKind.ROOM
|
||||
return transcript_to_text_timestamped(
|
||||
transcript.topics, transcript.participants, is_multitrack=is_multitrack
|
||||
)
|
||||
|
||||
|
||||
def _build_plain_text(transcript: Transcript, url: str, include_link: bool) -> str:
|
||||
title = transcript.title or "Unnamed recording"
|
||||
lines = [
|
||||
f"Your transcript is ready: {title}",
|
||||
"",
|
||||
f"View it here: {url}",
|
||||
]
|
||||
lines = [f"Reflector: {title}", ""]
|
||||
|
||||
if transcript.short_summary:
|
||||
lines.extend(["", "Summary:", transcript.short_summary])
|
||||
lines.extend(["Summary:", transcript.short_summary, ""])
|
||||
|
||||
timestamped = _get_timestamped_text(transcript)
|
||||
if timestamped:
|
||||
lines.extend(["Transcript:", timestamped, ""])
|
||||
|
||||
if include_link:
|
||||
lines.append(f"View transcript: {url}")
|
||||
lines.append("")
|
||||
|
||||
lines.append(
|
||||
"This email was sent because you requested to receive "
|
||||
"the transcript from a meeting."
|
||||
)
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _build_html(transcript: Transcript, url: str) -> str:
|
||||
title = transcript.title or "Unnamed recording"
|
||||
def _build_html(transcript: Transcript, url: str, include_link: bool) -> str:
|
||||
title = escape(transcript.title or "Unnamed recording")
|
||||
|
||||
summary_html = ""
|
||||
if transcript.short_summary:
|
||||
summary_html = f"<p style='color:#555;'>{transcript.short_summary}</p>"
|
||||
summary_html = (
|
||||
f'<p style="color:#555;margin-bottom:16px;">'
|
||||
f"{escape(transcript.short_summary)}</p>"
|
||||
)
|
||||
|
||||
transcript_html = ""
|
||||
timestamped = _get_timestamped_text(transcript)
|
||||
if timestamped:
|
||||
# Build styled transcript lines
|
||||
styled_lines = []
|
||||
for line in timestamped.split("\n"):
|
||||
if not line.strip():
|
||||
continue
|
||||
# Lines are formatted as "[MM:SS] Speaker: text"
|
||||
if line.startswith("[") and "] " in line:
|
||||
bracket_end = line.index("] ")
|
||||
timestamp = escape(line[: bracket_end + 1])
|
||||
rest = line[bracket_end + 2 :]
|
||||
if ": " in rest:
|
||||
colon_pos = rest.index(": ")
|
||||
speaker = escape(rest[:colon_pos])
|
||||
text = escape(rest[colon_pos + 2 :])
|
||||
styled_lines.append(
|
||||
f'<div style="margin-bottom:4px;">'
|
||||
f'<span style="color:#888;font-size:12px;">{timestamp}</span> '
|
||||
f"<strong>{speaker}:</strong> {text}</div>"
|
||||
)
|
||||
else:
|
||||
styled_lines.append(
|
||||
f'<div style="margin-bottom:4px;">{escape(line)}</div>'
|
||||
)
|
||||
else:
|
||||
styled_lines.append(
|
||||
f'<div style="margin-bottom:4px;">{escape(line)}</div>'
|
||||
)
|
||||
|
||||
transcript_html = (
|
||||
'<h3 style="margin-top:20px;margin-bottom:8px;">Transcript</h3>'
|
||||
'<div style="background:#f7f7f7;padding:16px;border-radius:6px;'
|
||||
'font-size:13px;line-height:1.6;max-height:600px;overflow-y:auto;">'
|
||||
f"{''.join(styled_lines)}</div>"
|
||||
)
|
||||
|
||||
link_html = ""
|
||||
if include_link:
|
||||
link_html = (
|
||||
'<p style="margin-top:20px;">'
|
||||
f'<a href="{url}" style="display:inline-block;padding:10px 20px;'
|
||||
"background:#4A90D9;color:#fff;text-decoration:none;"
|
||||
'border-radius:4px;">View Transcript</a></p>'
|
||||
)
|
||||
|
||||
return f"""\
|
||||
<div style="font-family:sans-serif;max-width:600px;margin:0 auto;">
|
||||
<h2>Your transcript is ready</h2>
|
||||
<p><strong>{title}</strong></p>
|
||||
<h2 style="margin-bottom:4px;">{title}</h2>
|
||||
{summary_html}
|
||||
<p><a href="{url}" style="display:inline-block;padding:10px 20px;background:#4A90D9;color:#fff;text-decoration:none;border-radius:4px;">View Transcript</a></p>
|
||||
<p style="color:#999;font-size:12px;">This email was sent because you requested to receive the transcript from a meeting.</p>
|
||||
{transcript_html}
|
||||
{link_html}
|
||||
<p style="color:#999;font-size:12px;margin-top:20px;">This email was sent because you requested to receive the transcript from a meeting.</p>
|
||||
</div>"""
|
||||
|
||||
|
||||
async def send_transcript_email(to_emails: list[str], transcript: Transcript) -> int:
|
||||
async def send_transcript_email(
|
||||
to_emails: list[str],
|
||||
transcript: Transcript,
|
||||
*,
|
||||
include_link: bool = True,
|
||||
) -> int:
|
||||
"""Send transcript notification to all emails. Returns count sent."""
|
||||
if not is_email_configured() or not to_emails:
|
||||
return 0
|
||||
@@ -57,12 +135,12 @@ async def send_transcript_email(to_emails: list[str], transcript: Transcript) ->
|
||||
|
||||
for email_addr in to_emails:
|
||||
msg = MIMEMultipart("alternative")
|
||||
msg["Subject"] = f"Transcript Ready: {title}"
|
||||
msg["Subject"] = f"Reflector: {title}"
|
||||
msg["From"] = settings.SMTP_FROM_EMAIL
|
||||
msg["To"] = email_addr
|
||||
|
||||
msg.attach(MIMEText(_build_plain_text(transcript, url), "plain"))
|
||||
msg.attach(MIMEText(_build_html(transcript, url), "html"))
|
||||
msg.attach(MIMEText(_build_plain_text(transcript, url, include_link), "plain"))
|
||||
msg.attach(MIMEText(_build_html(transcript, url, include_link), "html"))
|
||||
|
||||
try:
|
||||
await aiosmtplib.send(
|
||||
|
||||
@@ -64,3 +64,9 @@ TIMEOUT_HEAVY = 1200 # Transcription, fan-out LLM tasks (Hatchet execution_time
|
||||
TIMEOUT_HEAVY_HTTP = (
|
||||
1150 # httpx timeout for transcribe_track — below 1200 so Hatchet doesn't race
|
||||
)
|
||||
TIMEOUT_EXTRA_HEAVY = (
|
||||
3600 # Detect Topics, fan-out LLM tasks (Hatchet execution_timeout)
|
||||
)
|
||||
TIMEOUT_EXTRA_HEAVY_HTTP = (
|
||||
3400 # httpx timeout for detect_topics — below 3600 so Hatchet doesn't race
|
||||
)
|
||||
|
||||
@@ -41,6 +41,7 @@ from reflector.hatchet.broadcast import (
|
||||
from reflector.hatchet.client import HatchetClientManager
|
||||
from reflector.hatchet.constants import (
|
||||
TIMEOUT_AUDIO,
|
||||
TIMEOUT_EXTRA_HEAVY,
|
||||
TIMEOUT_HEAVY,
|
||||
TIMEOUT_LONG,
|
||||
TIMEOUT_MEDIUM,
|
||||
@@ -272,8 +273,10 @@ def with_error_handling(
|
||||
)
|
||||
@with_error_handling(TaskName.GET_RECORDING)
|
||||
async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
"""Fetch recording metadata from Daily.co API."""
|
||||
ctx.log(f"get_recording: starting for recording_id={input.recording_id}")
|
||||
"""Fetch recording metadata. Platform-aware: Daily calls API, LiveKit skips."""
|
||||
ctx.log(
|
||||
f"get_recording: starting for recording_id={input.recording_id}, platform={input.source_platform}"
|
||||
)
|
||||
ctx.log(
|
||||
f"get_recording: transcript_id={input.transcript_id}, room_id={input.room_id}"
|
||||
)
|
||||
@@ -298,6 +301,18 @@ async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
)
|
||||
ctx.log(f"get_recording: status set to 'processing' and broadcasted")
|
||||
|
||||
# LiveKit: no external API call needed — metadata comes from S3 track listing
|
||||
if input.source_platform == "livekit":
|
||||
ctx.log(
|
||||
"get_recording: LiveKit platform — skipping API call (metadata from S3)"
|
||||
)
|
||||
return RecordingResult(
|
||||
id=input.recording_id,
|
||||
mtg_session_id=None,
|
||||
duration=0, # Duration calculated from tracks later
|
||||
)
|
||||
|
||||
# Daily.co: fetch recording metadata from API
|
||||
if not settings.DAILY_API_KEY:
|
||||
ctx.log("get_recording: ERROR - DAILY_API_KEY not configured")
|
||||
raise ValueError("DAILY_API_KEY not configured")
|
||||
@@ -331,11 +346,12 @@ async def get_recording(input: PipelineInput, ctx: Context) -> RecordingResult:
|
||||
)
|
||||
@with_error_handling(TaskName.GET_PARTICIPANTS)
|
||||
async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsResult:
|
||||
"""Fetch participant list from Daily.co API and update transcript in database."""
|
||||
ctx.log(f"get_participants: transcript_id={input.transcript_id}")
|
||||
"""Fetch participant list and update transcript. Platform-aware."""
|
||||
ctx.log(
|
||||
f"get_participants: transcript_id={input.transcript_id}, platform={input.source_platform}"
|
||||
)
|
||||
|
||||
recording = ctx.task_output(get_recording)
|
||||
mtg_session_id = recording.mtg_session_id
|
||||
async with fresh_db_connection():
|
||||
from reflector.db.transcripts import ( # noqa: PLC0415
|
||||
TranscriptDuration,
|
||||
@@ -346,8 +362,8 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
transcript = await transcripts_controller.get_by_id(input.transcript_id)
|
||||
if not transcript:
|
||||
raise ValueError(f"Transcript {input.transcript_id} not found")
|
||||
# Note: title NOT cleared - preserves existing titles
|
||||
# Duration from Daily API (seconds -> milliseconds) - master source
|
||||
|
||||
# Duration from recording metadata (seconds -> milliseconds)
|
||||
duration_ms = recording.duration * 1000 if recording.duration else 0
|
||||
await transcripts_controller.update(
|
||||
transcript,
|
||||
@@ -359,65 +375,141 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
},
|
||||
)
|
||||
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
|
||||
mtg_session_id = assert_non_none_and_non_empty(
|
||||
mtg_session_id, "mtg_session_id is required"
|
||||
)
|
||||
daily_api_key = assert_non_none_and_non_empty(
|
||||
settings.DAILY_API_KEY, "DAILY_API_KEY is required"
|
||||
)
|
||||
|
||||
async with DailyApiClient(
|
||||
api_key=daily_api_key, base_url=settings.DAILY_API_URL
|
||||
) as client:
|
||||
participants = await client.get_meeting_participants(mtg_session_id)
|
||||
|
||||
id_to_name = {}
|
||||
id_to_user_id = {}
|
||||
for p in participants.data:
|
||||
if p.user_name:
|
||||
id_to_name[p.participant_id] = p.user_name
|
||||
if p.user_id:
|
||||
id_to_user_id[p.participant_id] = p.user_id
|
||||
|
||||
track_keys = [t["s3_key"] for t in input.tracks]
|
||||
cam_audio_keys = filter_cam_audio_tracks(track_keys)
|
||||
if duration_ms:
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
|
||||
participants_list: list[ParticipantInfo] = []
|
||||
for idx, key in enumerate(cam_audio_keys):
|
||||
|
||||
if input.source_platform == "livekit":
|
||||
# LiveKit: participant identity is in the track dict or can be parsed from filepath
|
||||
from reflector.utils.livekit import (
|
||||
parse_livekit_track_filepath, # noqa: PLC0415
|
||||
)
|
||||
|
||||
# Look up identity → Reflector user_id mapping from Redis
|
||||
# (stored at join time in rooms.py)
|
||||
identity_to_user_id: dict[str, str] = {}
|
||||
try:
|
||||
parsed = parse_daily_recording_filename(key)
|
||||
participant_id = parsed.participant_id
|
||||
except ValueError as e:
|
||||
logger.error(
|
||||
"Failed to parse Daily recording filename",
|
||||
error=str(e),
|
||||
key=key,
|
||||
from reflector.db.meetings import (
|
||||
meetings_controller as mc, # noqa: PLC0415
|
||||
)
|
||||
from reflector.redis_cache import (
|
||||
get_async_redis_client, # noqa: PLC0415
|
||||
)
|
||||
continue
|
||||
|
||||
default_name = f"Speaker {idx}"
|
||||
name = id_to_name.get(participant_id, default_name)
|
||||
user_id = id_to_user_id.get(participant_id)
|
||||
meeting = (
|
||||
await mc.get_by_id(transcript.meeting_id)
|
||||
if transcript.meeting_id
|
||||
else None
|
||||
)
|
||||
if meeting:
|
||||
redis_client = await get_async_redis_client()
|
||||
mapping_key = f"livekit:participant_map:{meeting.room_name}"
|
||||
raw_map = await redis_client.hgetall(mapping_key)
|
||||
identity_to_user_id = {
|
||||
k.decode() if isinstance(k, bytes) else k: v.decode()
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
for k, v in raw_map.items()
|
||||
}
|
||||
ctx.log(
|
||||
f"get_participants: loaded {len(identity_to_user_id)} identity→user_id mappings from Redis"
|
||||
)
|
||||
except Exception as e:
|
||||
ctx.log(
|
||||
f"get_participants: could not load identity map from Redis: {e}"
|
||||
)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=participant_id, speaker=idx, name=name, user_id=user_id
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=participant_id,
|
||||
user_name=name,
|
||||
for idx, track in enumerate(input.tracks):
|
||||
identity = track.get("participant_identity")
|
||||
if not identity:
|
||||
# Reprocess path: parse from S3 key
|
||||
try:
|
||||
parsed = parse_livekit_track_filepath(track["s3_key"])
|
||||
identity = parsed.participant_identity
|
||||
except (ValueError, KeyError):
|
||||
identity = f"speaker-{idx}"
|
||||
|
||||
# Strip the uuid suffix from identity for display name
|
||||
# e.g., "Juan-2bcea0" → "Juan"
|
||||
display_name = (
|
||||
identity.rsplit("-", 1)[0] if "-" in identity else identity
|
||||
)
|
||||
reflector_user_id = identity_to_user_id.get(identity)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=identity,
|
||||
speaker=idx,
|
||||
name=display_name,
|
||||
user_id=reflector_user_id,
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=identity,
|
||||
user_name=display_name,
|
||||
speaker=idx,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# Daily.co: fetch participant names from API
|
||||
mtg_session_id = recording.mtg_session_id
|
||||
mtg_session_id = assert_non_none_and_non_empty(
|
||||
mtg_session_id, "mtg_session_id is required"
|
||||
)
|
||||
daily_api_key = assert_non_none_and_non_empty(
|
||||
settings.DAILY_API_KEY, "DAILY_API_KEY is required"
|
||||
)
|
||||
|
||||
async with DailyApiClient(
|
||||
api_key=daily_api_key, base_url=settings.DAILY_API_URL
|
||||
) as client:
|
||||
participants = await client.get_meeting_participants(mtg_session_id)
|
||||
|
||||
id_to_name = {}
|
||||
id_to_user_id = {}
|
||||
for p in participants.data:
|
||||
if p.user_name:
|
||||
id_to_name[p.participant_id] = p.user_name
|
||||
if p.user_id:
|
||||
id_to_user_id[p.participant_id] = p.user_id
|
||||
|
||||
track_keys = [t["s3_key"] for t in input.tracks]
|
||||
cam_audio_keys = filter_cam_audio_tracks(track_keys)
|
||||
|
||||
for idx, key in enumerate(cam_audio_keys):
|
||||
try:
|
||||
parsed = parse_daily_recording_filename(key)
|
||||
participant_id = parsed.participant_id
|
||||
except ValueError as e:
|
||||
logger.error(
|
||||
"Failed to parse Daily recording filename",
|
||||
error=str(e),
|
||||
key=key,
|
||||
)
|
||||
continue
|
||||
|
||||
default_name = f"Speaker {idx}"
|
||||
name = id_to_name.get(participant_id, default_name)
|
||||
user_id = id_to_user_id.get(participant_id)
|
||||
|
||||
participant = TranscriptParticipant(
|
||||
id=participant_id, speaker=idx, name=name, user_id=user_id
|
||||
)
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
participants_list.append(
|
||||
ParticipantInfo(
|
||||
participant_id=participant_id,
|
||||
user_name=name,
|
||||
speaker=idx,
|
||||
)
|
||||
)
|
||||
|
||||
ctx.log(f"get_participants complete: {len(participants_list)} participants")
|
||||
|
||||
@@ -432,6 +524,7 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[get_participants],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=3,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=30,
|
||||
@@ -439,11 +532,66 @@ async def get_participants(input: PipelineInput, ctx: Context) -> ParticipantsRe
|
||||
@with_error_handling(TaskName.PROCESS_TRACKS)
|
||||
async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksResult:
|
||||
"""Spawn child workflows for each track (dynamic fan-out)."""
|
||||
ctx.log(f"process_tracks: spawning {len(input.tracks)} track workflows")
|
||||
ctx.log(
|
||||
f"process_tracks: spawning {len(input.tracks)} track workflows, platform={input.source_platform}"
|
||||
)
|
||||
|
||||
participants_result = ctx.task_output(get_participants)
|
||||
source_language = participants_result.source_language
|
||||
|
||||
# For LiveKit: calculate padding offsets from filename timestamps.
|
||||
# OGG files don't have embedded start_time metadata, so we pre-calculate.
|
||||
track_padding: dict[int, float] = {}
|
||||
if input.source_platform == "livekit":
|
||||
from datetime import datetime # noqa: PLC0415
|
||||
|
||||
from reflector.utils.livekit import (
|
||||
parse_livekit_track_filepath, # noqa: PLC0415
|
||||
)
|
||||
|
||||
timestamps = []
|
||||
for i, track in enumerate(input.tracks):
|
||||
ts_str = track.get("timestamp")
|
||||
if ts_str:
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_str)
|
||||
timestamps.append((i, ts))
|
||||
except (ValueError, TypeError):
|
||||
ctx.log(
|
||||
f"process_tracks: could not parse timestamp for track {i}: {ts_str}"
|
||||
)
|
||||
timestamps.append((i, None))
|
||||
else:
|
||||
# Reprocess path: parse timestamp from S3 key
|
||||
try:
|
||||
parsed = parse_livekit_track_filepath(track["s3_key"])
|
||||
timestamps.append((i, parsed.timestamp))
|
||||
ctx.log(
|
||||
f"process_tracks: parsed timestamp from S3 key for track {i}: {parsed.timestamp}"
|
||||
)
|
||||
except (ValueError, KeyError):
|
||||
timestamps.append((i, None))
|
||||
|
||||
valid_timestamps = [(i, ts) for i, ts in timestamps if ts is not None]
|
||||
if valid_timestamps:
|
||||
earliest = min(ts for _, ts in valid_timestamps)
|
||||
# LiveKit Track Egress outputs OGG/Opus files, but the transcription
|
||||
# service only accepts WebM. The padding step converts OGG→WebM as a
|
||||
# side effect of applying the adelay filter. For the earliest track
|
||||
# (offset=0), we use a minimal padding to force this conversion.
|
||||
LIVEKIT_MIN_PADDING_SECONDS = (
|
||||
0.001 # 1ms — inaudible, forces OGG→WebM conversion
|
||||
)
|
||||
|
||||
for i, ts in valid_timestamps:
|
||||
offset = (ts - earliest).total_seconds()
|
||||
if offset == 0.0:
|
||||
offset = LIVEKIT_MIN_PADDING_SECONDS
|
||||
track_padding[i] = offset
|
||||
ctx.log(
|
||||
f"process_tracks: track {i} padding={offset}s (from filename timestamp)"
|
||||
)
|
||||
|
||||
bulk_runs = [
|
||||
track_workflow.create_bulk_run_item(
|
||||
input=TrackInput(
|
||||
@@ -453,6 +601,7 @@ async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksRes
|
||||
transcript_id=input.transcript_id,
|
||||
language=source_language,
|
||||
source_platform=input.source_platform,
|
||||
padding_seconds=track_padding.get(i),
|
||||
)
|
||||
)
|
||||
for i, track in enumerate(input.tracks)
|
||||
@@ -515,6 +664,7 @@ async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksRes
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[process_tracks],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_AUDIO),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=2,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=15,
|
||||
@@ -604,13 +754,31 @@ async def mixdown_tracks(input: PipelineInput, ctx: Context) -> MixdownResult:
|
||||
# else: modal backend already uploaded to output_url
|
||||
|
||||
async with fresh_db_connection():
|
||||
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
|
||||
from reflector.db.transcripts import ( # noqa: PLC0415
|
||||
TranscriptDuration,
|
||||
transcripts_controller,
|
||||
)
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(input.transcript_id)
|
||||
if transcript:
|
||||
await transcripts_controller.update(
|
||||
transcript, {"audio_location": "storage"}
|
||||
)
|
||||
update_data = {"audio_location": "storage"}
|
||||
# Set duration from mixdown if not already set (LiveKit: duration starts at 0)
|
||||
if not transcript.duration or transcript.duration == 0:
|
||||
update_data["duration"] = result.duration_ms
|
||||
await transcripts_controller.update(transcript, update_data)
|
||||
|
||||
# Broadcast duration update if it was missing
|
||||
if not transcript.duration or transcript.duration == 0:
|
||||
await append_event_and_broadcast(
|
||||
input.transcript_id,
|
||||
transcript,
|
||||
"DURATION",
|
||||
TranscriptDuration(duration=result.duration_ms),
|
||||
logger=logger,
|
||||
)
|
||||
ctx.log(
|
||||
f"mixdown_tracks: set duration={result.duration_ms}ms from mixdown"
|
||||
)
|
||||
|
||||
ctx.log(f"mixdown_tracks complete: {result.size} bytes to {storage_path}")
|
||||
|
||||
@@ -693,7 +861,8 @@ async def generate_waveform(input: PipelineInput, ctx: Context) -> WaveformResul
|
||||
|
||||
@daily_multitrack_pipeline.task(
|
||||
parents=[process_tracks],
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
execution_timeout=timedelta(seconds=TIMEOUT_EXTRA_HEAVY),
|
||||
schedule_timeout=timedelta(seconds=TIMEOUT_HEAVY),
|
||||
retries=3,
|
||||
backoff_factor=2.0,
|
||||
backoff_max_seconds=30,
|
||||
@@ -1277,6 +1446,7 @@ async def cleanup_consent(input: PipelineInput, ctx: Context) -> ConsentResult:
|
||||
return ConsentResult()
|
||||
|
||||
consent_denied = False
|
||||
meeting = None
|
||||
if transcript.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(transcript.meeting_id)
|
||||
if meeting:
|
||||
@@ -1339,6 +1509,22 @@ async def cleanup_consent(input: PipelineInput, ctx: Context) -> ConsentResult:
|
||||
logger.error(error_msg, exc_info=True)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
# Delete cloud video if present
|
||||
if meeting and meeting.daily_composed_video_s3_key:
|
||||
try:
|
||||
source_storage = get_source_storage("daily")
|
||||
await source_storage.delete_file(meeting.daily_composed_video_s3_key)
|
||||
await meetings_controller.update_meeting(
|
||||
meeting.id,
|
||||
daily_composed_video_s3_key=None,
|
||||
daily_composed_video_duration=None,
|
||||
)
|
||||
ctx.log(f"Deleted cloud video: {meeting.daily_composed_video_s3_key}")
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete cloud video: {e}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
if deletion_errors:
|
||||
logger.warning(
|
||||
"[Hatchet] cleanup_consent completed with errors",
|
||||
@@ -1349,7 +1535,7 @@ async def cleanup_consent(input: PipelineInput, ctx: Context) -> ConsentResult:
|
||||
ctx.log(f"cleanup_consent completed with {len(deletion_errors)} errors")
|
||||
else:
|
||||
await transcripts_controller.update(transcript, {"audio_deleted": True})
|
||||
ctx.log("cleanup_consent: all audio deleted successfully")
|
||||
ctx.log("cleanup_consent: all audio and video deleted successfully")
|
||||
|
||||
return ConsentResult()
|
||||
|
||||
@@ -1493,22 +1679,41 @@ async def send_email(input: PipelineInput, ctx: Context) -> EmailResult:
|
||||
if recording and recording.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
|
||||
recipients = (
|
||||
list(meeting.email_recipients)
|
||||
# Normalise meeting recipients (legacy strings → dicts)
|
||||
meeting_recipients: list[dict] = (
|
||||
[
|
||||
entry
|
||||
if isinstance(entry, dict)
|
||||
else {"email": entry, "include_link": True}
|
||||
for entry in (meeting.email_recipients or [])
|
||||
]
|
||||
if meeting and meeting.email_recipients
|
||||
else []
|
||||
)
|
||||
|
||||
# Also check room-level email
|
||||
# Room-level email always gets a link (room owner)
|
||||
from reflector.db.rooms import rooms_controller # noqa: PLC0415
|
||||
|
||||
room_email = None
|
||||
if transcript.room_id:
|
||||
room = await rooms_controller.get_by_id(transcript.room_id)
|
||||
if room and room.email_transcript_to:
|
||||
if room.email_transcript_to not in recipients:
|
||||
recipients.append(room.email_transcript_to)
|
||||
room_email = room.email_transcript_to
|
||||
|
||||
if not recipients:
|
||||
# Build two groups: with link and without link
|
||||
with_link = [
|
||||
r["email"] for r in meeting_recipients if r.get("include_link", True)
|
||||
]
|
||||
without_link = [
|
||||
r["email"] for r in meeting_recipients if not r.get("include_link", True)
|
||||
]
|
||||
|
||||
if room_email:
|
||||
if room_email not in with_link:
|
||||
with_link.append(room_email)
|
||||
without_link = [e for e in without_link if e != room_email]
|
||||
|
||||
if not with_link and not without_link:
|
||||
ctx.log("send_email skipped (no email recipients)")
|
||||
return EmailResult(skipped=True)
|
||||
|
||||
@@ -1516,7 +1721,15 @@ async def send_email(input: PipelineInput, ctx: Context) -> EmailResult:
|
||||
if meeting and meeting.email_recipients:
|
||||
await transcripts_controller.update(transcript, {"share_mode": "public"})
|
||||
|
||||
count = await send_transcript_email(recipients, transcript)
|
||||
count = 0
|
||||
if with_link:
|
||||
count += await send_transcript_email(
|
||||
with_link, transcript, include_link=True
|
||||
)
|
||||
if without_link:
|
||||
count += await send_transcript_email(
|
||||
without_link, transcript, include_link=False
|
||||
)
|
||||
ctx.log(f"send_email complete: sent {count} emails")
|
||||
|
||||
return EmailResult(emails_sent=count)
|
||||
|
||||
@@ -688,7 +688,10 @@ async def cleanup_consent(input: FilePipelineInput, ctx: Context) -> ConsentResu
|
||||
)
|
||||
from reflector.db.recordings import recordings_controller # noqa: PLC0415
|
||||
from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
|
||||
from reflector.storage import get_transcripts_storage # noqa: PLC0415
|
||||
from reflector.storage import ( # noqa: PLC0415
|
||||
get_source_storage,
|
||||
get_transcripts_storage,
|
||||
)
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(input.transcript_id)
|
||||
if not transcript:
|
||||
@@ -697,6 +700,7 @@ async def cleanup_consent(input: FilePipelineInput, ctx: Context) -> ConsentResu
|
||||
|
||||
consent_denied = False
|
||||
recording = None
|
||||
meeting = None
|
||||
if transcript.recording_id:
|
||||
recording = await recordings_controller.get_by_id(transcript.recording_id)
|
||||
if recording and recording.meeting_id:
|
||||
@@ -756,6 +760,22 @@ async def cleanup_consent(input: FilePipelineInput, ctx: Context) -> ConsentResu
|
||||
logger.error(error_msg, exc_info=True)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
# Delete cloud video if present
|
||||
if meeting and meeting.daily_composed_video_s3_key:
|
||||
try:
|
||||
source_storage = get_source_storage("daily")
|
||||
await source_storage.delete_file(meeting.daily_composed_video_s3_key)
|
||||
await meetings_controller.update_meeting(
|
||||
meeting.id,
|
||||
daily_composed_video_s3_key=None,
|
||||
daily_composed_video_duration=None,
|
||||
)
|
||||
ctx.log(f"Deleted cloud video: {meeting.daily_composed_video_s3_key}")
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete cloud video: {e}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
if deletion_errors:
|
||||
logger.warning(
|
||||
"[Hatchet] cleanup_consent completed with errors",
|
||||
@@ -764,7 +784,7 @@ async def cleanup_consent(input: FilePipelineInput, ctx: Context) -> ConsentResu
|
||||
)
|
||||
else:
|
||||
await transcripts_controller.update(transcript, {"audio_deleted": True})
|
||||
ctx.log("cleanup_consent: all audio deleted successfully")
|
||||
ctx.log("cleanup_consent: all audio and video deleted successfully")
|
||||
|
||||
return ConsentResult()
|
||||
|
||||
@@ -896,22 +916,41 @@ async def send_email(input: FilePipelineInput, ctx: Context) -> EmailResult:
|
||||
if recording and recording.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
|
||||
recipients = (
|
||||
list(meeting.email_recipients)
|
||||
# Normalise meeting recipients (legacy strings → dicts)
|
||||
meeting_recipients: list[dict] = (
|
||||
[
|
||||
entry
|
||||
if isinstance(entry, dict)
|
||||
else {"email": entry, "include_link": True}
|
||||
for entry in (meeting.email_recipients or [])
|
||||
]
|
||||
if meeting and meeting.email_recipients
|
||||
else []
|
||||
)
|
||||
|
||||
# Also check room-level email
|
||||
# Room-level email always gets a link (room owner)
|
||||
from reflector.db.rooms import rooms_controller # noqa: PLC0415
|
||||
|
||||
room_email = None
|
||||
if transcript.room_id:
|
||||
room = await rooms_controller.get_by_id(transcript.room_id)
|
||||
if room and room.email_transcript_to:
|
||||
if room.email_transcript_to not in recipients:
|
||||
recipients.append(room.email_transcript_to)
|
||||
room_email = room.email_transcript_to
|
||||
|
||||
if not recipients:
|
||||
# Build two groups: with link and without link
|
||||
with_link = [
|
||||
r["email"] for r in meeting_recipients if r.get("include_link", True)
|
||||
]
|
||||
without_link = [
|
||||
r["email"] for r in meeting_recipients if not r.get("include_link", True)
|
||||
]
|
||||
|
||||
if room_email:
|
||||
if room_email not in with_link:
|
||||
with_link.append(room_email)
|
||||
without_link = [e for e in without_link if e != room_email]
|
||||
|
||||
if not with_link and not without_link:
|
||||
ctx.log("send_email skipped (no email recipients)")
|
||||
return EmailResult(skipped=True)
|
||||
|
||||
@@ -919,7 +958,15 @@ async def send_email(input: FilePipelineInput, ctx: Context) -> EmailResult:
|
||||
if meeting and meeting.email_recipients:
|
||||
await transcripts_controller.update(transcript, {"share_mode": "public"})
|
||||
|
||||
count = await send_transcript_email(recipients, transcript)
|
||||
count = 0
|
||||
if with_link:
|
||||
count += await send_transcript_email(
|
||||
with_link, transcript, include_link=True
|
||||
)
|
||||
if without_link:
|
||||
count += await send_transcript_email(
|
||||
without_link, transcript, include_link=False
|
||||
)
|
||||
ctx.log(f"send_email complete: sent {count} emails")
|
||||
|
||||
return EmailResult(emails_sent=count)
|
||||
|
||||
@@ -397,22 +397,41 @@ async def send_email(input: LivePostPipelineInput, ctx: Context) -> EmailResult:
|
||||
if recording and recording.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
|
||||
recipients = (
|
||||
list(meeting.email_recipients)
|
||||
# Normalise meeting recipients (legacy strings → dicts)
|
||||
meeting_recipients: list[dict] = (
|
||||
[
|
||||
entry
|
||||
if isinstance(entry, dict)
|
||||
else {"email": entry, "include_link": True}
|
||||
for entry in (meeting.email_recipients or [])
|
||||
]
|
||||
if meeting and meeting.email_recipients
|
||||
else []
|
||||
)
|
||||
|
||||
# Also check room-level email
|
||||
# Room-level email always gets a link (room owner)
|
||||
from reflector.db.rooms import rooms_controller # noqa: PLC0415
|
||||
|
||||
room_email = None
|
||||
if transcript.room_id:
|
||||
room = await rooms_controller.get_by_id(transcript.room_id)
|
||||
if room and room.email_transcript_to:
|
||||
if room.email_transcript_to not in recipients:
|
||||
recipients.append(room.email_transcript_to)
|
||||
room_email = room.email_transcript_to
|
||||
|
||||
if not recipients:
|
||||
# Build two groups: with link and without link
|
||||
with_link = [
|
||||
r["email"] for r in meeting_recipients if r.get("include_link", True)
|
||||
]
|
||||
without_link = [
|
||||
r["email"] for r in meeting_recipients if not r.get("include_link", True)
|
||||
]
|
||||
|
||||
if room_email:
|
||||
if room_email not in with_link:
|
||||
with_link.append(room_email)
|
||||
without_link = [e for e in without_link if e != room_email]
|
||||
|
||||
if not with_link and not without_link:
|
||||
ctx.log("send_email skipped (no email recipients)")
|
||||
return EmailResult(skipped=True)
|
||||
|
||||
@@ -420,7 +439,15 @@ async def send_email(input: LivePostPipelineInput, ctx: Context) -> EmailResult:
|
||||
if meeting and meeting.email_recipients:
|
||||
await transcripts_controller.update(transcript, {"share_mode": "public"})
|
||||
|
||||
count = await send_transcript_email(recipients, transcript)
|
||||
count = 0
|
||||
if with_link:
|
||||
count += await send_transcript_email(
|
||||
with_link, transcript, include_link=True
|
||||
)
|
||||
if without_link:
|
||||
count += await send_transcript_email(
|
||||
without_link, transcript, include_link=False
|
||||
)
|
||||
ctx.log(f"send_email complete: sent {count} emails")
|
||||
|
||||
return EmailResult(emails_sent=count)
|
||||
|
||||
@@ -37,6 +37,9 @@ class TrackInput(BaseModel):
|
||||
transcript_id: str
|
||||
language: str = "en"
|
||||
source_platform: str = "daily"
|
||||
# Pre-calculated padding in seconds (from filename timestamps for LiveKit).
|
||||
# When set, overrides container metadata extraction for start_time.
|
||||
padding_seconds: float | None = None
|
||||
|
||||
|
||||
hatchet = HatchetClientManager.get_client()
|
||||
@@ -53,15 +56,19 @@ track_workflow = hatchet.workflow(name="TrackProcessing", input_validator=TrackI
|
||||
async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
|
||||
"""Pad single audio track with silence for alignment.
|
||||
|
||||
Extracts stream.start_time from WebM container metadata and applies
|
||||
silence padding using PyAV filter graph (adelay).
|
||||
For Daily: extracts stream.start_time from WebM container metadata.
|
||||
For LiveKit: uses pre-calculated padding_seconds from filename timestamps
|
||||
(OGG files don't have embedded start_time metadata).
|
||||
"""
|
||||
ctx.log(f"pad_track: track {input.track_index}, s3_key={input.s3_key}")
|
||||
ctx.log(
|
||||
f"pad_track: track {input.track_index}, s3_key={input.s3_key}, padding_seconds={input.padding_seconds}"
|
||||
)
|
||||
logger.info(
|
||||
"[Hatchet] pad_track",
|
||||
track_index=input.track_index,
|
||||
s3_key=input.s3_key,
|
||||
transcript_id=input.transcript_id,
|
||||
padding_seconds=input.padding_seconds,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -79,10 +86,16 @@ async def pad_track(input: TrackInput, ctx: Context) -> PadTrackResult:
|
||||
bucket=input.bucket_name,
|
||||
)
|
||||
|
||||
with av.open(source_url) as in_container:
|
||||
start_time_seconds = extract_stream_start_time_from_container(
|
||||
in_container, input.track_index, logger=logger
|
||||
)
|
||||
if input.padding_seconds is not None:
|
||||
# Pre-calculated offset (LiveKit: from filename timestamps)
|
||||
start_time_seconds = input.padding_seconds
|
||||
ctx.log(f"pad_track: using pre-calculated padding={start_time_seconds}s")
|
||||
else:
|
||||
# Extract from container metadata (Daily: WebM start_time)
|
||||
with av.open(source_url) as in_container:
|
||||
start_time_seconds = extract_stream_start_time_from_container(
|
||||
in_container, input.track_index, logger=logger
|
||||
)
|
||||
|
||||
# If no padding needed, return original S3 key
|
||||
if start_time_seconds <= 0:
|
||||
|
||||
12
server/reflector/livekit_api/__init__.py
Normal file
12
server/reflector/livekit_api/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""
|
||||
LiveKit API Module — thin wrapper around the livekit-api SDK.
|
||||
"""
|
||||
|
||||
from .client import LiveKitApiClient
|
||||
from .webhooks import create_webhook_receiver, verify_webhook
|
||||
|
||||
__all__ = [
|
||||
"LiveKitApiClient",
|
||||
"create_webhook_receiver",
|
||||
"verify_webhook",
|
||||
]
|
||||
195
server/reflector/livekit_api/client.py
Normal file
195
server/reflector/livekit_api/client.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
LiveKit API client wrapping the official livekit-api Python SDK.
|
||||
|
||||
Handles room management, access tokens, and Track Egress for
|
||||
per-participant audio recording to S3-compatible storage.
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from livekit.api import (
|
||||
AccessToken,
|
||||
AutoTrackEgress,
|
||||
CreateRoomRequest,
|
||||
DeleteRoomRequest,
|
||||
DirectFileOutput,
|
||||
EgressInfo,
|
||||
ListEgressRequest,
|
||||
ListParticipantsRequest,
|
||||
LiveKitAPI,
|
||||
Room,
|
||||
RoomEgress,
|
||||
S3Upload,
|
||||
StopEgressRequest,
|
||||
TrackEgressRequest,
|
||||
VideoGrants,
|
||||
)
|
||||
|
||||
|
||||
class LiveKitApiClient:
|
||||
"""Thin wrapper around LiveKitAPI for Reflector's needs."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url: str,
|
||||
api_key: str,
|
||||
api_secret: str,
|
||||
s3_bucket: str | None = None,
|
||||
s3_region: str | None = None,
|
||||
s3_access_key: str | None = None,
|
||||
s3_secret_key: str | None = None,
|
||||
s3_endpoint: str | None = None,
|
||||
):
|
||||
self._url = url
|
||||
self._api_key = api_key
|
||||
self._api_secret = api_secret
|
||||
self._s3_bucket = s3_bucket
|
||||
self._s3_region = s3_region or "us-east-1"
|
||||
self._s3_access_key = s3_access_key
|
||||
self._s3_secret_key = s3_secret_key
|
||||
self._s3_endpoint = s3_endpoint
|
||||
self._api = LiveKitAPI(url=url, api_key=api_key, api_secret=api_secret)
|
||||
|
||||
# ── Room management ──────────────────────────────────────────
|
||||
|
||||
async def create_room(
|
||||
self,
|
||||
name: str,
|
||||
empty_timeout: int = 300,
|
||||
max_participants: int = 0,
|
||||
enable_auto_track_egress: bool = False,
|
||||
track_egress_filepath: str = "livekit/{room_name}/{publisher_identity}-{time}",
|
||||
) -> Room:
|
||||
"""Create a LiveKit room.
|
||||
|
||||
Args:
|
||||
name: Room name (unique identifier).
|
||||
empty_timeout: Seconds to keep room alive after last participant leaves.
|
||||
max_participants: 0 = unlimited.
|
||||
enable_auto_track_egress: If True, automatically record each participant's
|
||||
audio track to S3 as a separate file (OGG/Opus).
|
||||
track_egress_filepath: S3 filepath template for auto track egress.
|
||||
Supports {room_name}, {publisher_identity}, {time}.
|
||||
"""
|
||||
egress = None
|
||||
if enable_auto_track_egress:
|
||||
egress = RoomEgress(
|
||||
tracks=AutoTrackEgress(
|
||||
filepath=track_egress_filepath,
|
||||
s3=self._build_s3_upload(),
|
||||
),
|
||||
)
|
||||
|
||||
req = CreateRoomRequest(
|
||||
name=name,
|
||||
empty_timeout=empty_timeout,
|
||||
max_participants=max_participants,
|
||||
egress=egress,
|
||||
)
|
||||
return await self._api.room.create_room(req)
|
||||
|
||||
async def delete_room(self, room_name: str) -> None:
|
||||
await self._api.room.delete_room(DeleteRoomRequest(room=room_name))
|
||||
|
||||
async def list_participants(self, room_name: str):
|
||||
resp = await self._api.room.list_participants(
|
||||
ListParticipantsRequest(room=room_name)
|
||||
)
|
||||
return resp.participants
|
||||
|
||||
# ── Access tokens ────────────────────────────────────────────
|
||||
|
||||
def create_access_token(
|
||||
self,
|
||||
room_name: str,
|
||||
participant_identity: str,
|
||||
participant_name: str | None = None,
|
||||
can_publish: bool = True,
|
||||
can_subscribe: bool = True,
|
||||
room_admin: bool = False,
|
||||
ttl_seconds: int = 86400,
|
||||
) -> str:
|
||||
"""Generate a JWT access token for a participant."""
|
||||
token = AccessToken(
|
||||
api_key=self._api_key,
|
||||
api_secret=self._api_secret,
|
||||
)
|
||||
token.identity = participant_identity
|
||||
token.name = participant_name or participant_identity
|
||||
token.ttl = timedelta(seconds=ttl_seconds)
|
||||
token.with_grants(
|
||||
VideoGrants(
|
||||
room_join=True,
|
||||
room=room_name,
|
||||
can_publish=can_publish,
|
||||
can_subscribe=can_subscribe,
|
||||
room_admin=room_admin,
|
||||
)
|
||||
)
|
||||
return token.to_jwt()
|
||||
|
||||
# ── Track Egress (per-participant audio recording) ───────────
|
||||
|
||||
def _build_s3_upload(self) -> S3Upload:
|
||||
"""Build S3Upload config for egress output."""
|
||||
if not all([self._s3_bucket, self._s3_access_key, self._s3_secret_key]):
|
||||
raise ValueError(
|
||||
"S3 storage not configured for LiveKit egress. "
|
||||
"Set LIVEKIT_STORAGE_AWS_* environment variables."
|
||||
)
|
||||
kwargs = {
|
||||
"access_key": self._s3_access_key,
|
||||
"secret": self._s3_secret_key,
|
||||
"bucket": self._s3_bucket,
|
||||
"region": self._s3_region,
|
||||
"force_path_style": True, # Required for Garage/MinIO
|
||||
}
|
||||
if self._s3_endpoint:
|
||||
kwargs["endpoint"] = self._s3_endpoint
|
||||
return S3Upload(**kwargs)
|
||||
|
||||
async def start_track_egress(
|
||||
self,
|
||||
room_name: str,
|
||||
track_sid: str,
|
||||
s3_filepath: str,
|
||||
) -> EgressInfo:
|
||||
"""Start Track Egress for a single audio track (writes OGG/Opus to S3).
|
||||
|
||||
Args:
|
||||
room_name: LiveKit room name.
|
||||
track_sid: Track SID to record.
|
||||
s3_filepath: S3 key path for the output file.
|
||||
"""
|
||||
req = TrackEgressRequest(
|
||||
room_name=room_name,
|
||||
track_id=track_sid,
|
||||
file=DirectFileOutput(
|
||||
filepath=s3_filepath,
|
||||
s3=self._build_s3_upload(),
|
||||
),
|
||||
)
|
||||
return await self._api.egress.start_track_egress(req)
|
||||
|
||||
async def list_egress(self, room_name: str | None = None) -> list[EgressInfo]:
|
||||
req = ListEgressRequest()
|
||||
if room_name:
|
||||
req.room_name = room_name
|
||||
resp = await self._api.egress.list_egress(req)
|
||||
return list(resp.items)
|
||||
|
||||
async def stop_egress(self, egress_id: str) -> EgressInfo:
|
||||
return await self._api.egress.stop_egress(
|
||||
StopEgressRequest(egress_id=egress_id)
|
||||
)
|
||||
|
||||
# ── Cleanup ──────────────────────────────────────────────────
|
||||
|
||||
async def close(self):
|
||||
await self._api.aclose()
|
||||
|
||||
async def __aenter__(self):
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
await self.close()
|
||||
52
server/reflector/livekit_api/webhooks.py
Normal file
52
server/reflector/livekit_api/webhooks.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
LiveKit webhook verification and event parsing.
|
||||
|
||||
LiveKit signs webhooks using the API secret as a JWT.
|
||||
The WebhookReceiver from the SDK handles verification.
|
||||
"""
|
||||
|
||||
from livekit.api import TokenVerifier, WebhookEvent, WebhookReceiver
|
||||
|
||||
from reflector.logger import logger
|
||||
|
||||
|
||||
def create_webhook_receiver(api_key: str, api_secret: str) -> WebhookReceiver:
|
||||
"""Create a WebhookReceiver for verifying LiveKit webhook signatures."""
|
||||
return WebhookReceiver(
|
||||
token_verifier=TokenVerifier(api_key=api_key, api_secret=api_secret)
|
||||
)
|
||||
|
||||
|
||||
def verify_webhook(
|
||||
receiver: WebhookReceiver,
|
||||
body: str | bytes,
|
||||
auth_header: str,
|
||||
) -> WebhookEvent | None:
|
||||
"""Verify and parse a LiveKit webhook event.
|
||||
|
||||
Returns the parsed WebhookEvent if valid, None if verification fails.
|
||||
Logs at different levels depending on failure type:
|
||||
- WARNING: invalid signature, expired token, malformed JWT (expected rejections)
|
||||
- ERROR: unexpected exceptions (potential bugs or attacks)
|
||||
"""
|
||||
if isinstance(body, bytes):
|
||||
body = body.decode("utf-8")
|
||||
try:
|
||||
return receiver.receive(body, auth_header)
|
||||
except (ValueError, KeyError) as e:
|
||||
# Expected verification failures (bad JWT, wrong key, expired, malformed)
|
||||
logger.warning(
|
||||
"LiveKit webhook verification failed",
|
||||
error=str(e),
|
||||
error_type=type(e).__name__,
|
||||
)
|
||||
return None
|
||||
except Exception as e:
|
||||
# Unexpected errors — log at ERROR for visibility (potential attack or SDK bug)
|
||||
logger.error(
|
||||
"Unexpected error during LiveKit webhook verification",
|
||||
error=str(e),
|
||||
error_type=type(e).__name__,
|
||||
exc_info=True,
|
||||
)
|
||||
return None
|
||||
@@ -61,7 +61,7 @@ from reflector.processors.types import (
|
||||
)
|
||||
from reflector.processors.types import Transcript as TranscriptProcessorType
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_transcripts_storage
|
||||
from reflector.storage import get_source_storage, get_transcripts_storage
|
||||
from reflector.views.transcripts import GetTranscriptTopic
|
||||
from reflector.ws_events import TranscriptEventName
|
||||
from reflector.ws_manager import WebsocketManager, get_ws_manager
|
||||
@@ -671,6 +671,22 @@ async def cleanup_consent(transcript: Transcript, logger: Logger):
|
||||
logger.error(error_msg, exc_info=e)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
# Delete cloud video if present
|
||||
if meeting and meeting.daily_composed_video_s3_key:
|
||||
try:
|
||||
source_storage = get_source_storage("daily")
|
||||
await source_storage.delete_file(meeting.daily_composed_video_s3_key)
|
||||
await meetings_controller.update_meeting(
|
||||
meeting.id,
|
||||
daily_composed_video_s3_key=None,
|
||||
daily_composed_video_duration=None,
|
||||
)
|
||||
logger.info(f"Deleted cloud video: {meeting.daily_composed_video_s3_key}")
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete cloud video: {e}"
|
||||
logger.error(error_msg, exc_info=e)
|
||||
deletion_errors.append(error_msg)
|
||||
|
||||
if deletion_errors:
|
||||
logger.warning(
|
||||
f"Consent cleanup completed with {len(deletion_errors)} errors",
|
||||
@@ -678,7 +694,7 @@ async def cleanup_consent(transcript: Transcript, logger: Logger):
|
||||
)
|
||||
else:
|
||||
await transcripts_controller.update(transcript, {"audio_deleted": True})
|
||||
logger.info("Consent cleanup done - all audio deleted")
|
||||
logger.info("Consent cleanup done - all audio and video deleted")
|
||||
|
||||
|
||||
@get_transcript
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from typing import Literal
|
||||
|
||||
Platform = Literal["whereby", "daily"]
|
||||
Platform = Literal["whereby", "daily", "livekit"]
|
||||
WHEREBY_PLATFORM: Platform = "whereby"
|
||||
DAILY_PLATFORM: Platform = "daily"
|
||||
LIVEKIT_PLATFORM: Platform = "livekit"
|
||||
|
||||
@@ -155,12 +155,17 @@ async def prepare_transcript_processing(validation: ValidationOk) -> PrepareResu
|
||||
)
|
||||
|
||||
if track_keys:
|
||||
# Detect platform from recording ID prefix
|
||||
source_platform = (
|
||||
"livekit" if recording_id and recording_id.startswith("lk-") else "daily"
|
||||
)
|
||||
return MultitrackProcessingConfig(
|
||||
bucket_name=bucket_name, # type: ignore (validated above)
|
||||
track_keys=track_keys,
|
||||
transcript_id=validation.transcript_id,
|
||||
recording_id=recording_id,
|
||||
room_id=validation.room_id,
|
||||
source_platform=source_platform,
|
||||
)
|
||||
|
||||
return FileProcessingConfig(
|
||||
|
||||
@@ -195,6 +195,23 @@ class Settings(BaseSettings):
|
||||
DAILY_WEBHOOK_UUID: str | None = (
|
||||
None # Webhook UUID for this environment. Not used by production code
|
||||
)
|
||||
|
||||
# LiveKit integration (self-hosted open-source video platform)
|
||||
LIVEKIT_URL: str | None = (
|
||||
None # e.g. ws://livekit:7880 (internal) or wss://livekit.example.com
|
||||
)
|
||||
LIVEKIT_API_KEY: str | None = None
|
||||
LIVEKIT_API_SECRET: str | None = None
|
||||
LIVEKIT_WEBHOOK_SECRET: str | None = None # Defaults to API_SECRET if not set
|
||||
# LiveKit egress S3 storage (Track Egress writes per-participant audio here)
|
||||
LIVEKIT_STORAGE_AWS_BUCKET_NAME: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_REGION: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None
|
||||
LIVEKIT_STORAGE_AWS_ENDPOINT_URL: str | None = None # For Garage/MinIO
|
||||
# Public URL for LiveKit (used in frontend room_url, e.g. wss://livekit.example.com)
|
||||
LIVEKIT_PUBLIC_URL: str | None = None
|
||||
|
||||
# Platform Configuration
|
||||
DEFAULT_VIDEO_PLATFORM: Platform = DAILY_PLATFORM
|
||||
|
||||
|
||||
@@ -57,6 +57,22 @@ def get_source_storage(platform: str) -> Storage:
|
||||
aws_secret_access_key=settings.WHEREBY_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
)
|
||||
|
||||
elif platform == "livekit":
|
||||
if (
|
||||
settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID
|
||||
and settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY
|
||||
and settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME
|
||||
):
|
||||
from reflector.storage.storage_aws import AwsStorage
|
||||
|
||||
return AwsStorage(
|
||||
aws_bucket_name=settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME,
|
||||
aws_region=settings.LIVEKIT_STORAGE_AWS_REGION or "us-east-1",
|
||||
aws_access_key_id=settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
aws_endpoint_url=settings.LIVEKIT_STORAGE_AWS_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
return get_transcripts_storage()
|
||||
|
||||
|
||||
|
||||
112
server/reflector/utils/livekit.py
Normal file
112
server/reflector/utils/livekit.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""
|
||||
LiveKit track file utilities.
|
||||
|
||||
Parse participant identity and timing from Auto Track Egress S3 filepaths.
|
||||
|
||||
Actual filepath format from LiveKit Auto Track Egress:
|
||||
livekit/{room_name}/{publisher_identity}-{ISO_timestamp}-{track_id}.{ext}
|
||||
|
||||
Examples:
|
||||
livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg
|
||||
livekit/myroom-20260401172036/juan2-63abcf-2026-04-01T195847-TR_AMyoSbM7tAQbYj.ogg
|
||||
livekit/myroom-20260401172036/EG_K5sipvfB5fTM.json (manifest, skip)
|
||||
livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195727-TR_VC679dgMQBdfhT.webm (video, skip)
|
||||
"""
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from reflector.utils.string import NonEmptyString
|
||||
|
||||
|
||||
@dataclass
|
||||
class LiveKitTrackFile:
|
||||
"""Parsed info from a LiveKit track egress filepath."""
|
||||
|
||||
s3_key: str
|
||||
room_name: str
|
||||
participant_identity: str
|
||||
timestamp: datetime # Parsed from ISO timestamp in filename
|
||||
track_id: str # LiveKit track ID (e.g., TR_AMR3SWs74Divho)
|
||||
|
||||
|
||||
# Pattern: livekit/{room_name}/{identity}-{ISO_date}T{time}-{track_id}.{ext}
|
||||
# The identity can contain alphanumeric, hyphens, underscores
|
||||
# ISO timestamp is like 2026-04-01T195758
|
||||
# Track ID starts with TR_
|
||||
_TRACK_FILENAME_PATTERN = re.compile(
|
||||
r"^livekit/(?P<room_name>[^/]+)/(?P<identity>.+?)-(?P<timestamp>\d{4}-\d{2}-\d{2}T\d{6})-(?P<track_id>TR_\w+)\.(?P<ext>\w+)$"
|
||||
)
|
||||
|
||||
|
||||
def parse_livekit_track_filepath(s3_key: str) -> LiveKitTrackFile:
|
||||
"""Parse a LiveKit track egress filepath into components.
|
||||
|
||||
Args:
|
||||
s3_key: S3 key like 'livekit/myroom-20260401/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg'
|
||||
|
||||
Returns:
|
||||
LiveKitTrackFile with parsed components.
|
||||
|
||||
Raises:
|
||||
ValueError: If the filepath doesn't match the expected format.
|
||||
"""
|
||||
match = _TRACK_FILENAME_PATTERN.match(s3_key)
|
||||
if not match:
|
||||
raise ValueError(
|
||||
f"LiveKit track filepath doesn't match expected format: {s3_key}"
|
||||
)
|
||||
|
||||
# Parse ISO-ish timestamp (e.g., 2026-04-01T195758 → datetime)
|
||||
ts_str = match.group("timestamp")
|
||||
try:
|
||||
ts = datetime.strptime(ts_str, "%Y-%m-%dT%H%M%S").replace(tzinfo=timezone.utc)
|
||||
except ValueError:
|
||||
raise ValueError(f"Cannot parse timestamp '{ts_str}' from: {s3_key}")
|
||||
|
||||
return LiveKitTrackFile(
|
||||
s3_key=s3_key,
|
||||
room_name=match.group("room_name"),
|
||||
participant_identity=match.group("identity"),
|
||||
timestamp=ts,
|
||||
track_id=match.group("track_id"),
|
||||
)
|
||||
|
||||
|
||||
def filter_audio_tracks(s3_keys: list[str]) -> list[str]:
|
||||
"""Filter S3 keys to only audio tracks (.ogg), excluding manifests and video."""
|
||||
return [k for k in s3_keys if k.endswith(".ogg")]
|
||||
|
||||
|
||||
def calculate_track_offsets(
|
||||
tracks: list[LiveKitTrackFile],
|
||||
) -> list[tuple[LiveKitTrackFile, float]]:
|
||||
"""Calculate silence padding offset for each track.
|
||||
|
||||
The earliest track starts at time zero. Each subsequent track
|
||||
gets (track_timestamp - earliest_timestamp) seconds of silence prepended.
|
||||
|
||||
Returns:
|
||||
List of (track, offset_seconds) tuples.
|
||||
"""
|
||||
if not tracks:
|
||||
return []
|
||||
|
||||
earliest = min(t.timestamp for t in tracks)
|
||||
return [(t, (t.timestamp - earliest).total_seconds()) for t in tracks]
|
||||
|
||||
|
||||
def extract_livekit_base_room_name(livekit_room_name: str) -> NonEmptyString:
|
||||
"""Extract base room name from LiveKit timestamped room name.
|
||||
|
||||
LiveKit rooms use the same naming as Daily: {base_name}-YYYYMMDDHHMMSS
|
||||
"""
|
||||
base_name = livekit_room_name.rsplit("-", 1)[0]
|
||||
assert base_name, f"Extracted base name is empty from: {livekit_room_name}"
|
||||
return NonEmptyString(base_name)
|
||||
|
||||
|
||||
def recording_lock_key(room_name: str) -> str:
|
||||
"""Redis lock key for preventing duplicate processing."""
|
||||
return f"livekit:processing:{room_name}"
|
||||
@@ -1,7 +1,7 @@
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_dailyco_storage, get_whereby_storage
|
||||
|
||||
from ..schemas.platform import WHEREBY_PLATFORM, Platform
|
||||
from ..schemas.platform import LIVEKIT_PLATFORM, WHEREBY_PLATFORM, Platform
|
||||
from .base import VideoPlatformClient, VideoPlatformConfig
|
||||
from .registry import get_platform_client
|
||||
|
||||
@@ -44,6 +44,27 @@ def get_platform_config(platform: Platform) -> VideoPlatformConfig:
|
||||
s3_region=daily_storage.region,
|
||||
aws_role_arn=daily_storage.role_credential,
|
||||
)
|
||||
elif platform == LIVEKIT_PLATFORM:
|
||||
if not settings.LIVEKIT_URL:
|
||||
raise ValueError(
|
||||
"LIVEKIT_URL is required when platform='livekit'. "
|
||||
"Set LIVEKIT_URL environment variable."
|
||||
)
|
||||
if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
|
||||
raise ValueError(
|
||||
"LIVEKIT_API_KEY and LIVEKIT_API_SECRET are required when platform='livekit'. "
|
||||
"Set LIVEKIT_API_KEY and LIVEKIT_API_SECRET environment variables."
|
||||
)
|
||||
return VideoPlatformConfig(
|
||||
api_key=settings.LIVEKIT_API_KEY,
|
||||
webhook_secret=settings.LIVEKIT_WEBHOOK_SECRET
|
||||
or settings.LIVEKIT_API_SECRET,
|
||||
api_url=settings.LIVEKIT_URL,
|
||||
s3_bucket=settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME,
|
||||
s3_region=settings.LIVEKIT_STORAGE_AWS_REGION,
|
||||
aws_access_key_id=settings.LIVEKIT_STORAGE_AWS_ACCESS_KEY_ID,
|
||||
aws_access_key_secret=settings.LIVEKIT_STORAGE_AWS_SECRET_ACCESS_KEY,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown platform: {platform}")
|
||||
|
||||
|
||||
192
server/reflector/video_platforms/livekit.py
Normal file
192
server/reflector/video_platforms/livekit.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""
|
||||
LiveKit video platform client for Reflector.
|
||||
|
||||
Self-hosted, open-source alternative to Daily.co.
|
||||
Uses Track Egress for per-participant audio recording (no composite video).
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import urlencode
|
||||
from uuid import uuid4
|
||||
|
||||
from reflector.db.rooms import Room
|
||||
from reflector.livekit_api.client import LiveKitApiClient
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
from reflector.logger import logger
|
||||
from reflector.settings import settings
|
||||
|
||||
from ..schemas.platform import Platform
|
||||
from ..utils.string import NonEmptyString
|
||||
from .base import ROOM_PREFIX_SEPARATOR, VideoPlatformClient
|
||||
from .models import MeetingData, SessionData, VideoPlatformConfig
|
||||
|
||||
|
||||
class LiveKitClient(VideoPlatformClient):
|
||||
PLATFORM_NAME: Platform = "livekit"
|
||||
TIMESTAMP_FORMAT = "%Y%m%d%H%M%S"
|
||||
|
||||
def __init__(self, config: VideoPlatformConfig):
|
||||
super().__init__(config)
|
||||
self._api_client = LiveKitApiClient(
|
||||
url=config.api_url or "",
|
||||
api_key=config.api_key,
|
||||
api_secret=config.webhook_secret, # LiveKit uses API secret for both auth and webhooks
|
||||
s3_bucket=config.s3_bucket,
|
||||
s3_region=config.s3_region,
|
||||
s3_access_key=config.aws_access_key_id,
|
||||
s3_secret_key=config.aws_access_key_secret,
|
||||
s3_endpoint=settings.LIVEKIT_STORAGE_AWS_ENDPOINT_URL,
|
||||
)
|
||||
self._webhook_receiver = create_webhook_receiver(
|
||||
api_key=config.api_key,
|
||||
api_secret=config.webhook_secret,
|
||||
)
|
||||
|
||||
async def create_meeting(
|
||||
self, room_name_prefix: NonEmptyString, end_date: datetime, room: Room
|
||||
) -> MeetingData:
|
||||
"""Create a LiveKit room for this meeting.
|
||||
|
||||
LiveKit rooms are created explicitly via API. A new room is created
|
||||
for each Reflector meeting (same pattern as Daily.co).
|
||||
"""
|
||||
now = datetime.now(timezone.utc)
|
||||
timestamp = now.strftime(self.TIMESTAMP_FORMAT)
|
||||
room_name = f"{room_name_prefix}{ROOM_PREFIX_SEPARATOR}{timestamp}"
|
||||
|
||||
# Calculate empty_timeout from end_date (seconds until expiry)
|
||||
# Ensure end_date is timezone-aware for subtraction
|
||||
end_date_aware = (
|
||||
end_date if end_date.tzinfo else end_date.replace(tzinfo=timezone.utc)
|
||||
)
|
||||
remaining = int((end_date_aware - now).total_seconds())
|
||||
empty_timeout = max(300, min(remaining, 86400)) # 5 min to 24 hours
|
||||
|
||||
# Enable auto track egress for cloud recording (per-participant audio to S3).
|
||||
# Gracefully degrade if S3 credentials are missing — room still works, just no recording.
|
||||
enable_recording = room.recording_type == "cloud"
|
||||
egress_enabled = False
|
||||
if enable_recording:
|
||||
try:
|
||||
self._api_client._build_s3_upload() # Validate credentials exist
|
||||
egress_enabled = True
|
||||
except ValueError:
|
||||
logger.warning(
|
||||
"S3 credentials not configured — room created without auto track egress. "
|
||||
"Set LIVEKIT_STORAGE_AWS_* to enable recording.",
|
||||
room_name=room_name,
|
||||
)
|
||||
|
||||
lk_room = await self._api_client.create_room(
|
||||
name=room_name,
|
||||
empty_timeout=empty_timeout,
|
||||
enable_auto_track_egress=egress_enabled,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"LiveKit room created",
|
||||
room_name=lk_room.name,
|
||||
room_sid=lk_room.sid,
|
||||
empty_timeout=empty_timeout,
|
||||
auto_track_egress=egress_enabled,
|
||||
)
|
||||
|
||||
# room_url includes the server URL + room name as query param.
|
||||
# The join endpoint in rooms.py appends the token as another query param.
|
||||
# Frontend parses: ws://host:7880?room=<name>&token=<jwt>
|
||||
public_url = settings.LIVEKIT_PUBLIC_URL or settings.LIVEKIT_URL or ""
|
||||
room_url = f"{public_url}?{urlencode({'room': lk_room.name})}"
|
||||
|
||||
return MeetingData(
|
||||
meeting_id=lk_room.sid or str(uuid4()),
|
||||
room_name=lk_room.name,
|
||||
room_url=room_url,
|
||||
host_room_url=room_url,
|
||||
platform=self.PLATFORM_NAME,
|
||||
extra_data={"livekit_room_sid": lk_room.sid},
|
||||
)
|
||||
|
||||
async def get_room_sessions(self, room_name: str) -> list[SessionData]:
|
||||
"""Get current participants in a LiveKit room.
|
||||
|
||||
For historical sessions, we rely on webhook-stored data (same as Daily).
|
||||
This returns currently-connected participants.
|
||||
"""
|
||||
try:
|
||||
participants = await self._api_client.list_participants(room_name)
|
||||
return [
|
||||
SessionData(
|
||||
session_id=p.sid,
|
||||
started_at=datetime.fromtimestamp(
|
||||
p.joined_at if p.joined_at else 0, tz=timezone.utc
|
||||
),
|
||||
ended_at=None, # Still active
|
||||
)
|
||||
for p in participants
|
||||
if p.sid # Skip empty entries
|
||||
]
|
||||
except Exception as e:
|
||||
logger.debug(
|
||||
"Could not list LiveKit participants (room may not exist)",
|
||||
room_name=room_name,
|
||||
error=str(e),
|
||||
)
|
||||
return []
|
||||
|
||||
async def upload_logo(self, room_name: str, logo_path: str) -> bool:
|
||||
# LiveKit doesn't have a logo upload concept; handled in frontend theming
|
||||
return True
|
||||
|
||||
def verify_webhook_signature(
|
||||
self, body: bytes, signature: str, timestamp: str | None = None
|
||||
) -> bool:
|
||||
"""Verify LiveKit webhook signature.
|
||||
|
||||
LiveKit sends the JWT in the Authorization header. The `signature`
|
||||
param here receives the Authorization header value.
|
||||
"""
|
||||
event = verify_webhook(self._webhook_receiver, body, signature)
|
||||
return event is not None
|
||||
|
||||
def create_access_token(
|
||||
self,
|
||||
room_name: str,
|
||||
participant_identity: str,
|
||||
participant_name: str | None = None,
|
||||
is_admin: bool = False,
|
||||
) -> str:
|
||||
"""Generate a LiveKit access token for a participant."""
|
||||
return self._api_client.create_access_token(
|
||||
room_name=room_name,
|
||||
participant_identity=participant_identity,
|
||||
participant_name=participant_name,
|
||||
room_admin=is_admin,
|
||||
)
|
||||
|
||||
async def start_track_egress(
|
||||
self,
|
||||
room_name: str,
|
||||
track_sid: str,
|
||||
s3_filepath: str,
|
||||
):
|
||||
"""Start Track Egress for a single audio track."""
|
||||
return await self._api_client.start_track_egress(
|
||||
room_name=room_name,
|
||||
track_sid=track_sid,
|
||||
s3_filepath=s3_filepath,
|
||||
)
|
||||
|
||||
async def list_egress(self, room_name: str | None = None):
|
||||
return await self._api_client.list_egress(room_name=room_name)
|
||||
|
||||
async def stop_egress(self, egress_id: str):
|
||||
return await self._api_client.stop_egress(egress_id=egress_id)
|
||||
|
||||
async def close(self):
|
||||
await self._api_client.close()
|
||||
|
||||
async def __aenter__(self):
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
await self.close()
|
||||
@@ -1,6 +1,11 @@
|
||||
from typing import Dict, Type
|
||||
|
||||
from ..schemas.platform import DAILY_PLATFORM, WHEREBY_PLATFORM, Platform
|
||||
from ..schemas.platform import (
|
||||
DAILY_PLATFORM,
|
||||
LIVEKIT_PLATFORM,
|
||||
WHEREBY_PLATFORM,
|
||||
Platform,
|
||||
)
|
||||
from .base import VideoPlatformClient, VideoPlatformConfig
|
||||
|
||||
_PLATFORMS: Dict[Platform, Type[VideoPlatformClient]] = {}
|
||||
@@ -26,10 +31,12 @@ def get_available_platforms() -> list[Platform]:
|
||||
|
||||
def _register_builtin_platforms():
|
||||
from .daily import DailyClient # noqa: PLC0415
|
||||
from .livekit import LiveKitClient # noqa: PLC0415
|
||||
from .whereby import WherebyClient # noqa: PLC0415
|
||||
|
||||
register_platform(WHEREBY_PLATFORM, WherebyClient)
|
||||
register_platform(DAILY_PLATFORM, DailyClient)
|
||||
register_platform(LIVEKIT_PLATFORM, LiveKitClient)
|
||||
|
||||
|
||||
_register_builtin_platforms()
|
||||
|
||||
246
server/reflector/views/livekit.py
Normal file
246
server/reflector/views/livekit.py
Normal file
@@ -0,0 +1,246 @@
|
||||
"""LiveKit webhook handler.
|
||||
|
||||
Processes LiveKit webhook events for participant tracking and
|
||||
Track Egress recording completion.
|
||||
|
||||
LiveKit sends webhooks as POST requests with JWT authentication
|
||||
in the Authorization header.
|
||||
|
||||
Webhooks are used as fast-path triggers and logging. Track discovery
|
||||
for the multitrack pipeline uses S3 listing (source of truth), not
|
||||
webhook data.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
from reflector.logger import logger as _logger
|
||||
from reflector.settings import settings
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
logger = _logger.bind(platform="livekit")
|
||||
|
||||
# Module-level receiver, lazily initialized on first webhook
|
||||
_webhook_receiver = None
|
||||
|
||||
|
||||
def _get_webhook_receiver():
|
||||
global _webhook_receiver
|
||||
if _webhook_receiver is None:
|
||||
if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
|
||||
raise ValueError("LiveKit not configured")
|
||||
_webhook_receiver = create_webhook_receiver(
|
||||
api_key=settings.LIVEKIT_API_KEY,
|
||||
api_secret=settings.LIVEKIT_WEBHOOK_SECRET or settings.LIVEKIT_API_SECRET,
|
||||
)
|
||||
return _webhook_receiver
|
||||
|
||||
|
||||
@router.post("/webhook")
|
||||
async def livekit_webhook(request: Request):
|
||||
"""Handle LiveKit webhook events.
|
||||
|
||||
LiveKit webhook events include:
|
||||
- participant_joined / participant_left
|
||||
- egress_started / egress_updated / egress_ended
|
||||
- room_started / room_finished
|
||||
- track_published / track_unpublished
|
||||
"""
|
||||
if not settings.LIVEKIT_API_KEY or not settings.LIVEKIT_API_SECRET:
|
||||
raise HTTPException(status_code=500, detail="LiveKit not configured")
|
||||
|
||||
body = await request.body()
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
|
||||
receiver = _get_webhook_receiver()
|
||||
event = verify_webhook(receiver, body, auth_header)
|
||||
if event is None:
|
||||
logger.warning(
|
||||
"Invalid LiveKit webhook signature",
|
||||
has_auth=bool(auth_header),
|
||||
has_body=bool(body),
|
||||
)
|
||||
raise HTTPException(status_code=401, detail="Invalid webhook signature")
|
||||
|
||||
event_type = event.event
|
||||
|
||||
match event_type:
|
||||
case "participant_joined":
|
||||
await _handle_participant_joined(event)
|
||||
case "participant_left":
|
||||
await _handle_participant_left(event)
|
||||
case "egress_started":
|
||||
await _handle_egress_started(event)
|
||||
case "egress_ended":
|
||||
await _handle_egress_ended(event)
|
||||
case "room_started":
|
||||
logger.info(
|
||||
"Room started",
|
||||
room_name=event.room.name if event.room else None,
|
||||
)
|
||||
case "room_finished":
|
||||
await _handle_room_finished(event)
|
||||
case "track_published" | "track_unpublished":
|
||||
logger.debug(
|
||||
f"Track event: {event_type}",
|
||||
room_name=event.room.name if event.room else None,
|
||||
participant=event.participant.identity if event.participant else None,
|
||||
)
|
||||
case _:
|
||||
logger.debug(
|
||||
"Unhandled LiveKit webhook event",
|
||||
event_type=event_type,
|
||||
)
|
||||
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
async def _handle_participant_joined(event):
|
||||
room_name = event.room.name if event.room else None
|
||||
participant = event.participant
|
||||
|
||||
if not room_name or not participant:
|
||||
logger.warning("participant_joined: missing room or participant data")
|
||||
return
|
||||
|
||||
meeting = await meetings_controller.get_by_room_name(room_name)
|
||||
if not meeting:
|
||||
logger.warning("participant_joined: meeting not found", room_name=room_name)
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"Participant joined",
|
||||
meeting_id=meeting.id,
|
||||
room_name=room_name,
|
||||
participant_identity=participant.identity,
|
||||
participant_sid=participant.sid,
|
||||
)
|
||||
|
||||
|
||||
async def _handle_participant_left(event):
|
||||
room_name = event.room.name if event.room else None
|
||||
participant = event.participant
|
||||
|
||||
if not room_name or not participant:
|
||||
logger.warning("participant_left: missing room or participant data")
|
||||
return
|
||||
|
||||
meeting = await meetings_controller.get_by_room_name(room_name)
|
||||
if not meeting:
|
||||
logger.warning("participant_left: meeting not found", room_name=room_name)
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"Participant left",
|
||||
meeting_id=meeting.id,
|
||||
room_name=room_name,
|
||||
participant_identity=participant.identity,
|
||||
participant_sid=participant.sid,
|
||||
)
|
||||
|
||||
|
||||
async def _handle_egress_started(event):
|
||||
egress = event.egress_info
|
||||
logger.info(
|
||||
"Egress started",
|
||||
room_name=egress.room_name if egress else None,
|
||||
egress_id=egress.egress_id if egress else None,
|
||||
)
|
||||
|
||||
|
||||
async def _handle_egress_ended(event):
|
||||
"""Handle Track Egress completion. Delete video files immediately to save storage.
|
||||
|
||||
AutoTrackEgress records ALL tracks (audio + video). Audio is kept for the
|
||||
transcription pipeline. Video files are unused and deleted on completion.
|
||||
This saves ~50x storage (video is 98% of egress output for HD cameras).
|
||||
"""
|
||||
egress = event.egress_info
|
||||
if not egress:
|
||||
logger.warning("egress_ended: no egress info in payload")
|
||||
return
|
||||
|
||||
# EGRESS_FAILED = 4
|
||||
if egress.status == 4:
|
||||
logger.error(
|
||||
"Egress failed",
|
||||
room_name=egress.room_name,
|
||||
egress_id=egress.egress_id,
|
||||
error=egress.error,
|
||||
)
|
||||
return
|
||||
|
||||
file_results = list(egress.file_results)
|
||||
logger.info(
|
||||
"Egress ended",
|
||||
room_name=egress.room_name,
|
||||
egress_id=egress.egress_id,
|
||||
status=egress.status,
|
||||
num_files=len(file_results),
|
||||
filenames=[f.filename for f in file_results] if file_results else [],
|
||||
)
|
||||
|
||||
# Delete video files (.webm) immediately — only audio (.ogg) is needed for transcription.
|
||||
# Video tracks are 50-90x larger than audio and unused by the pipeline.
|
||||
# JSON manifests are kept (lightweight metadata, ~430 bytes each).
|
||||
for file_result in file_results:
|
||||
filename = file_result.filename
|
||||
if filename and filename.endswith(".webm"):
|
||||
try:
|
||||
from reflector.storage import get_source_storage # noqa: PLC0415
|
||||
|
||||
storage = get_source_storage("livekit")
|
||||
await storage.delete_file(filename)
|
||||
logger.info(
|
||||
"Deleted video egress file",
|
||||
filename=filename,
|
||||
room_name=egress.room_name,
|
||||
)
|
||||
except Exception as e:
|
||||
# Non-critical — pipeline filters these out anyway
|
||||
logger.warning(
|
||||
"Failed to delete video egress file",
|
||||
filename=filename,
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
|
||||
async def _handle_room_finished(event):
|
||||
"""Fast-path: trigger multitrack processing when room closes.
|
||||
|
||||
This is an optimization — if missed, the process_livekit_ended_meetings
|
||||
beat task catches it within ~2 minutes.
|
||||
"""
|
||||
room_name = event.room.name if event.room else None
|
||||
if not room_name:
|
||||
logger.warning("room_finished: no room name in payload")
|
||||
return
|
||||
|
||||
logger.info("Room finished", room_name=room_name)
|
||||
|
||||
meeting = await meetings_controller.get_by_room_name(room_name)
|
||||
if not meeting:
|
||||
logger.warning("room_finished: meeting not found", room_name=room_name)
|
||||
return
|
||||
|
||||
# Deactivate the meeting — LiveKit room is destroyed, so process_meetings
|
||||
# can't detect this via API (list_participants returns empty for deleted rooms).
|
||||
if meeting.is_active:
|
||||
await meetings_controller.update_meeting(meeting.id, is_active=False)
|
||||
logger.info("room_finished: meeting deactivated", meeting_id=meeting.id)
|
||||
|
||||
# Import here to avoid circular imports (worker imports views)
|
||||
from reflector.worker.process import process_livekit_multitrack
|
||||
|
||||
process_livekit_multitrack.delay(
|
||||
room_name=room_name,
|
||||
meeting_id=meeting.id,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"room_finished: queued multitrack processing",
|
||||
meeting_id=meeting.id,
|
||||
room_name=room_name,
|
||||
)
|
||||
@@ -91,9 +91,7 @@ class StartRecordingRequest(BaseModel):
|
||||
async def start_recording(
|
||||
meeting_id: NonEmptyString,
|
||||
body: StartRecordingRequest,
|
||||
user: Annotated[
|
||||
Optional[auth.UserInfo], Depends(auth.current_user_optional_if_public_mode)
|
||||
],
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
) -> dict[str, Any]:
|
||||
"""Start cloud or raw-tracks recording via Daily.co REST API.
|
||||
|
||||
@@ -168,8 +166,9 @@ async def add_email_recipient(
|
||||
if not meeting:
|
||||
raise HTTPException(status_code=404, detail="Meeting not found")
|
||||
|
||||
include_link = user is not None
|
||||
recipients = await meetings_controller.add_email_recipient(
|
||||
meeting_id, request.email
|
||||
meeting_id, request.email, include_link=include_link
|
||||
)
|
||||
|
||||
return {"status": "success", "email_recipients": recipients}
|
||||
|
||||
@@ -554,6 +554,7 @@ async def rooms_join_meeting(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
display_name: str | None = None,
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(room_name)
|
||||
@@ -598,4 +599,51 @@ async def rooms_join_meeting(
|
||||
meeting = meeting.model_copy()
|
||||
meeting.room_url = add_query_param(meeting.room_url, "t", token)
|
||||
|
||||
elif meeting.platform == "livekit":
|
||||
import re
|
||||
import uuid
|
||||
|
||||
client = create_platform_client(meeting.platform)
|
||||
# Identity must be unique per participant to avoid S3 key collisions.
|
||||
# Format: {readable_name}-{short_uuid} ensures uniqueness even for same names.
|
||||
uid_suffix = uuid.uuid4().hex[:6]
|
||||
if display_name:
|
||||
safe_name = re.sub(r"[^a-zA-Z0-9_-]", "_", display_name.strip())[:40]
|
||||
participant_identity = (
|
||||
f"{safe_name}-{uid_suffix}" if safe_name else f"anon-{uid_suffix}"
|
||||
)
|
||||
elif user_id:
|
||||
email = getattr(user, "email", None)
|
||||
if email and "@" in email:
|
||||
participant_identity = f"{email.split('@')[0]}-{uid_suffix}"
|
||||
else:
|
||||
participant_identity = f"{user_id[:12]}-{uid_suffix}"
|
||||
else:
|
||||
participant_identity = f"anon-{uid_suffix}"
|
||||
participant_name = display_name or participant_identity
|
||||
|
||||
# Store identity → Reflector user_id mapping for the pipeline
|
||||
# (so TranscriptParticipant.user_id can be set correctly)
|
||||
if user_id:
|
||||
from reflector.redis_cache import get_async_redis_client # noqa: PLC0415
|
||||
|
||||
redis_client = await get_async_redis_client()
|
||||
mapping_key = f"livekit:participant_map:{meeting.room_name}"
|
||||
await redis_client.hset(mapping_key, participant_identity, user_id)
|
||||
await redis_client.expire(mapping_key, 7 * 86400) # 7 day TTL
|
||||
|
||||
token = client.create_access_token(
|
||||
room_name=meeting.room_name,
|
||||
participant_identity=participant_identity,
|
||||
participant_name=participant_name,
|
||||
is_admin=user_id == room.user_id if user_id else False,
|
||||
)
|
||||
# Close the platform client to release aiohttp session
|
||||
if hasattr(client, "close"):
|
||||
await client.close()
|
||||
|
||||
meeting = meeting.model_copy()
|
||||
# For LiveKit, room_url is the WS URL; token goes as a query param
|
||||
meeting.room_url = add_query_param(meeting.room_url, "token", token)
|
||||
|
||||
return meeting
|
||||
|
||||
@@ -309,6 +309,7 @@ async def transcripts_search(
|
||||
source_kind: Optional[SourceKind] = None,
|
||||
from_datetime: SearchFromDatetimeParam = None,
|
||||
to_datetime: SearchToDatetimeParam = None,
|
||||
include_deleted: bool = False,
|
||||
user: Annotated[
|
||||
Optional[auth.UserInfo], Depends(auth.current_user_optional_if_public_mode)
|
||||
] = None,
|
||||
@@ -316,6 +317,12 @@ async def transcripts_search(
|
||||
"""Full-text search across transcript titles and content."""
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
if include_deleted and not user_id:
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="Authentication required to view deleted transcripts",
|
||||
)
|
||||
|
||||
if from_datetime and to_datetime and from_datetime > to_datetime:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="'from' must be less than or equal to 'to'"
|
||||
@@ -330,6 +337,7 @@ async def transcripts_search(
|
||||
source_kind=source_kind,
|
||||
from_datetime=from_datetime,
|
||||
to_datetime=to_datetime,
|
||||
include_deleted=include_deleted,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(search_params)
|
||||
@@ -615,6 +623,54 @@ async def transcript_delete(
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@router.post("/transcripts/{transcript_id}/restore", response_model=DeletionStatus)
|
||||
async def transcript_restore(
|
||||
transcript_id: str,
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
"""Restore a soft-deleted transcript."""
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
if transcript.deleted_at is None:
|
||||
raise HTTPException(status_code=400, detail="Transcript is not deleted")
|
||||
if not transcripts_controller.user_can_mutate(transcript, user_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
await transcripts_controller.restore_by_id(transcript.id, user_id=user_id)
|
||||
await get_ws_manager().send_json(
|
||||
room_id=f"user:{user_id}",
|
||||
message={"event": "TRANSCRIPT_RESTORED", "data": {"id": transcript.id}},
|
||||
)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@router.delete("/transcripts/{transcript_id}/destroy", response_model=DeletionStatus)
|
||||
async def transcript_destroy(
|
||||
transcript_id: str,
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
"""Permanently delete a transcript and all associated files."""
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
if transcript.deleted_at is None:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Transcript must be soft-deleted first"
|
||||
)
|
||||
if not transcripts_controller.user_can_mutate(transcript, user_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
await transcripts_controller.hard_delete(transcript.id)
|
||||
await get_ws_manager().send_json(
|
||||
room_id=f"user:{user_id}",
|
||||
message={"event": "TRANSCRIPT_DELETED", "data": {"id": transcript.id}},
|
||||
)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@router.get(
|
||||
"/transcripts/{transcript_id}/topics",
|
||||
response_model=list[GetTranscriptTopic],
|
||||
@@ -741,5 +797,7 @@ async def transcript_send_email(
|
||||
)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
sent = await send_transcript_email([request.email], transcript)
|
||||
sent = await send_transcript_email(
|
||||
[request.email], transcript, include_link=(transcript.share_mode == "public")
|
||||
)
|
||||
return SendEmailResponse(sent=sent)
|
||||
|
||||
@@ -83,7 +83,25 @@ def build_beat_schedule(
|
||||
else:
|
||||
logger.info("Daily.co beat tasks disabled (no DAILY_API_KEY)")
|
||||
|
||||
_any_platform = _whereby_enabled or _daily_enabled
|
||||
_livekit_enabled = bool(settings.LIVEKIT_API_KEY and settings.LIVEKIT_URL)
|
||||
if _livekit_enabled:
|
||||
beat_schedule["process_livekit_ended_meetings"] = {
|
||||
"task": "reflector.worker.process.process_livekit_ended_meetings",
|
||||
"schedule": 120, # Every 2 minutes
|
||||
}
|
||||
beat_schedule["reprocess_failed_livekit_recordings"] = {
|
||||
"task": "reflector.worker.process.reprocess_failed_livekit_recordings",
|
||||
"schedule": crontab(hour=5, minute=0),
|
||||
}
|
||||
logger.info(
|
||||
"LiveKit beat tasks enabled",
|
||||
tasks=[
|
||||
"process_livekit_ended_meetings",
|
||||
"reprocess_failed_livekit_recordings",
|
||||
],
|
||||
)
|
||||
|
||||
_any_platform = _whereby_enabled or _daily_enabled or _livekit_enabled
|
||||
if _any_platform:
|
||||
beat_schedule["process_meetings"] = {
|
||||
"task": "reflector.worker.process.process_meetings",
|
||||
@@ -146,7 +164,6 @@ else:
|
||||
app.conf.broker_connection_retry_on_startup = True
|
||||
app.autodiscover_tasks(
|
||||
[
|
||||
"reflector.pipelines.main_live_pipeline",
|
||||
"reflector.worker.healthcheck",
|
||||
"reflector.worker.process",
|
||||
"reflector.worker.cleanup",
|
||||
|
||||
@@ -12,6 +12,7 @@ from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
from pydantic import ValidationError
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.dailyco_api import FinishedRecordingResponse, RecordingResponse
|
||||
from reflector.db.daily_participant_sessions import (
|
||||
DailyParticipantSession,
|
||||
@@ -25,9 +26,6 @@ from reflector.db.transcripts import (
|
||||
transcripts_controller,
|
||||
)
|
||||
from reflector.hatchet.client import HatchetClientManager
|
||||
from reflector.pipelines.main_live_pipeline import asynctask
|
||||
from reflector.pipelines.topic_processing import EmptyPipeline
|
||||
from reflector.processors import AudioFileWriterProcessor
|
||||
from reflector.processors.audio_waveform_processor import AudioWaveformProcessor
|
||||
from reflector.redis_cache import RedisAsyncLock
|
||||
from reflector.settings import settings
|
||||
@@ -871,13 +869,30 @@ async def process_meetings():
|
||||
elif has_had_sessions:
|
||||
should_deactivate = True
|
||||
logger_.info("Meeting ended - all participants left")
|
||||
elif current_time > end_date:
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - scheduled time ended with no participants",
|
||||
)
|
||||
else:
|
||||
logger_.debug("Meeting not yet started, keep it")
|
||||
elif not has_had_sessions:
|
||||
# No sessions recorded — either no one joined, or webhooks
|
||||
# didn't arrive (e.g. local dev without tunnel).
|
||||
meeting_start = meeting.start_date
|
||||
if meeting_start.tzinfo is None:
|
||||
meeting_start = meeting_start.replace(tzinfo=timezone.utc)
|
||||
age_minutes = (current_time - meeting_start).total_seconds() / 60
|
||||
is_scheduled = bool(meeting.calendar_event_id)
|
||||
|
||||
if is_scheduled and current_time > end_date:
|
||||
# Scheduled meeting past its end time with no participants
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - scheduled time ended with no participants",
|
||||
)
|
||||
elif not is_scheduled and age_minutes > 30:
|
||||
# On-the-fly meeting with no sessions after 30 min
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - no sessions after 30 min",
|
||||
age_minutes=round(age_minutes, 1),
|
||||
)
|
||||
else:
|
||||
logger_.debug("Meeting not yet started, keep it")
|
||||
|
||||
if should_deactivate:
|
||||
await meetings_controller.update_meeting(
|
||||
@@ -908,6 +923,11 @@ async def convert_audio_and_waveform(transcript) -> None:
|
||||
transcript_id=transcript.id,
|
||||
)
|
||||
|
||||
from reflector.pipelines.topic_processing import EmptyPipeline # noqa: PLC0415
|
||||
from reflector.processors.audio_file_writer import (
|
||||
AudioFileWriterProcessor, # noqa: PLC0415
|
||||
)
|
||||
|
||||
upload_path = transcript.data_path / "upload.webm"
|
||||
mp3_path = transcript.audio_mp3_filename
|
||||
|
||||
@@ -1167,3 +1187,311 @@ async def trigger_daily_reconciliation() -> None:
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Reconciliation trigger failed", error=str(e), exc_info=True)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# LiveKit multitrack recording tasks
|
||||
# ============================================================
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def process_livekit_multitrack(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
):
|
||||
"""
|
||||
Process LiveKit multitrack recording by discovering tracks on S3.
|
||||
|
||||
Tracks are discovered via S3 listing (source of truth), not webhooks.
|
||||
Called from room_finished webhook (fast-path) or beat task (fallback).
|
||||
"""
|
||||
from reflector.utils.livekit import ( # noqa: PLC0415
|
||||
recording_lock_key,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Processing LiveKit multitrack recording",
|
||||
room_name=room_name,
|
||||
meeting_id=meeting_id,
|
||||
)
|
||||
|
||||
lock_key = recording_lock_key(room_name)
|
||||
async with RedisAsyncLock(
|
||||
key=lock_key,
|
||||
timeout=600,
|
||||
extend_interval=60,
|
||||
skip_if_locked=True,
|
||||
blocking=False,
|
||||
) as lock:
|
||||
if not lock.acquired:
|
||||
logger.warning(
|
||||
"LiveKit processing skipped - lock already held",
|
||||
room_name=room_name,
|
||||
lock_key=lock_key,
|
||||
)
|
||||
return
|
||||
|
||||
await _process_livekit_multitrack_inner(room_name, meeting_id)
|
||||
|
||||
|
||||
async def _process_livekit_multitrack_inner(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
):
|
||||
"""Inner processing logic for LiveKit multitrack recording."""
|
||||
# 1. Discover tracks by listing S3 prefix.
|
||||
# Wait briefly for egress files to finish flushing to S3 — the room_finished
|
||||
# webhook fires after empty_timeout, but egress finalization may still be in progress.
|
||||
import asyncio as _asyncio # noqa: PLC0415
|
||||
|
||||
from reflector.storage import get_source_storage # noqa: PLC0415
|
||||
from reflector.utils.livekit import ( # noqa: PLC0415
|
||||
extract_livekit_base_room_name,
|
||||
filter_audio_tracks,
|
||||
parse_livekit_track_filepath,
|
||||
)
|
||||
|
||||
EGRESS_FLUSH_DELAY = 10 # seconds — egress typically flushes within a few seconds
|
||||
EGRESS_RETRY_DELAY = 30 # seconds — retry if first listing finds nothing
|
||||
|
||||
await _asyncio.sleep(EGRESS_FLUSH_DELAY)
|
||||
|
||||
storage = get_source_storage("livekit")
|
||||
s3_prefix = f"livekit/{room_name}/"
|
||||
all_keys = await storage.list_objects(prefix=s3_prefix)
|
||||
|
||||
# Filter to audio tracks only (.ogg) — skip .json manifests and .webm video
|
||||
audio_keys = filter_audio_tracks(all_keys) if all_keys else []
|
||||
|
||||
if not audio_keys:
|
||||
# Retry once after a longer delay — egress may still be flushing
|
||||
logger.info(
|
||||
"No audio tracks found yet, retrying after delay",
|
||||
room_name=room_name,
|
||||
retry_delay=EGRESS_RETRY_DELAY,
|
||||
)
|
||||
await _asyncio.sleep(EGRESS_RETRY_DELAY)
|
||||
all_keys = await storage.list_objects(prefix=s3_prefix)
|
||||
audio_keys = filter_audio_tracks(all_keys) if all_keys else []
|
||||
|
||||
# Sanity check: compare audio tracks against egress manifests.
|
||||
# Each Track Egress (audio or video) produces a .json manifest.
|
||||
# Video tracks produce .webm files. So expected audio count ≈ manifests - video files.
|
||||
if all_keys:
|
||||
manifest_count = sum(1 for k in all_keys if k.endswith(".json"))
|
||||
video_count = sum(1 for k in all_keys if k.endswith(".webm"))
|
||||
expected_audio = manifest_count - video_count
|
||||
if expected_audio > len(audio_keys) and expected_audio > 0:
|
||||
# Some audio tracks may still be flushing — wait and retry
|
||||
logger.info(
|
||||
"Expected more audio tracks based on manifests, waiting for late flushes",
|
||||
room_name=room_name,
|
||||
expected=expected_audio,
|
||||
found=len(audio_keys),
|
||||
)
|
||||
await _asyncio.sleep(EGRESS_RETRY_DELAY)
|
||||
all_keys = await storage.list_objects(prefix=s3_prefix)
|
||||
audio_keys = filter_audio_tracks(all_keys) if all_keys else []
|
||||
|
||||
logger.info(
|
||||
"S3 track discovery complete",
|
||||
room_name=room_name,
|
||||
total_files=len(all_keys) if all_keys else 0,
|
||||
audio_files=len(audio_keys),
|
||||
)
|
||||
|
||||
if not audio_keys:
|
||||
logger.warning(
|
||||
"No audio track files found on S3 after retries",
|
||||
room_name=room_name,
|
||||
s3_prefix=s3_prefix,
|
||||
)
|
||||
return
|
||||
|
||||
# 2. Parse track info from filenames
|
||||
parsed_tracks = []
|
||||
for key in audio_keys:
|
||||
try:
|
||||
parsed = parse_livekit_track_filepath(key)
|
||||
parsed_tracks.append(parsed)
|
||||
except ValueError as e:
|
||||
logger.warning("Skipping unparseable track file", s3_key=key, error=str(e))
|
||||
|
||||
if not parsed_tracks:
|
||||
logger.warning(
|
||||
"No valid track files found after parsing",
|
||||
room_name=room_name,
|
||||
raw_keys=all_keys,
|
||||
)
|
||||
return
|
||||
|
||||
track_keys = [t.s3_key for t in parsed_tracks]
|
||||
|
||||
# 3. Find meeting and room
|
||||
meeting = await meetings_controller.get_by_id(meeting_id)
|
||||
if not meeting:
|
||||
logger.error(
|
||||
"Meeting not found for LiveKit recording",
|
||||
meeting_id=meeting_id,
|
||||
room_name=room_name,
|
||||
)
|
||||
return
|
||||
|
||||
base_room_name = extract_livekit_base_room_name(room_name)
|
||||
room = await rooms_controller.get_by_name(base_room_name)
|
||||
if not room:
|
||||
logger.error("Room not found", room_name=base_room_name)
|
||||
return
|
||||
|
||||
# 4. Create recording
|
||||
recording_id = f"lk-{room_name}"
|
||||
bucket_name = settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME or ""
|
||||
|
||||
existing_recording = await recordings_controller.get_by_id(recording_id)
|
||||
if existing_recording and existing_recording.deleted_at is not None:
|
||||
logger.info("Skipping soft-deleted recording", recording_id=recording_id)
|
||||
return
|
||||
|
||||
if not existing_recording:
|
||||
recording = await recordings_controller.create(
|
||||
Recording(
|
||||
id=recording_id,
|
||||
bucket_name=bucket_name,
|
||||
object_key=s3_prefix,
|
||||
recorded_at=datetime.now(timezone.utc),
|
||||
meeting_id=meeting.id,
|
||||
track_keys=track_keys,
|
||||
)
|
||||
)
|
||||
else:
|
||||
recording = existing_recording
|
||||
|
||||
# 5. Create or get transcript
|
||||
transcript = await transcripts_controller.get_by_recording_id(recording.id)
|
||||
if transcript and transcript.deleted_at is not None:
|
||||
logger.info("Skipping soft-deleted transcript", recording_id=recording.id)
|
||||
return
|
||||
if not transcript:
|
||||
transcript = await transcripts_controller.add(
|
||||
"",
|
||||
source_kind=SourceKind.ROOM,
|
||||
source_language="en",
|
||||
target_language="en",
|
||||
user_id=room.user_id,
|
||||
recording_id=recording.id,
|
||||
share_mode="semi-private",
|
||||
meeting_id=meeting.id,
|
||||
room_id=room.id,
|
||||
)
|
||||
|
||||
# 6. Start Hatchet pipeline (reuses DiarizationPipeline with source_platform="livekit")
|
||||
workflow_id = await HatchetClientManager.start_workflow(
|
||||
workflow_name="DiarizationPipeline",
|
||||
input_data={
|
||||
"recording_id": recording_id,
|
||||
"tracks": [
|
||||
{
|
||||
"s3_key": t.s3_key,
|
||||
"participant_identity": t.participant_identity,
|
||||
"timestamp": t.timestamp.isoformat(),
|
||||
}
|
||||
for t in parsed_tracks
|
||||
],
|
||||
"bucket_name": bucket_name,
|
||||
"transcript_id": transcript.id,
|
||||
"room_id": room.id,
|
||||
"source_platform": "livekit",
|
||||
},
|
||||
additional_metadata={
|
||||
"transcript_id": transcript.id,
|
||||
"recording_id": recording_id,
|
||||
},
|
||||
)
|
||||
logger.info(
|
||||
"Started LiveKit Hatchet workflow",
|
||||
workflow_id=workflow_id,
|
||||
transcript_id=transcript.id,
|
||||
room_name=room_name,
|
||||
num_tracks=len(parsed_tracks),
|
||||
)
|
||||
|
||||
await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def process_livekit_ended_meetings():
|
||||
"""Check for inactive LiveKit meetings that need multitrack processing.
|
||||
|
||||
Runs on a beat schedule. Catches cases where room_finished webhook was missed.
|
||||
Only processes meetings that:
|
||||
- Platform is "livekit"
|
||||
- is_active=False (already deactivated by process_meetings)
|
||||
- No associated transcript yet
|
||||
"""
|
||||
from reflector.db.transcripts import transcripts_controller as tc # noqa: PLC0415
|
||||
|
||||
all_livekit = await meetings_controller.get_all_inactive_livekit()
|
||||
|
||||
queued = 0
|
||||
for meeting in all_livekit:
|
||||
# Skip if already has a transcript
|
||||
existing = await tc.get_by_meeting_id(meeting.id)
|
||||
if existing:
|
||||
continue
|
||||
|
||||
logger.info(
|
||||
"Found unprocessed inactive LiveKit meeting",
|
||||
meeting_id=meeting.id,
|
||||
room_name=meeting.room_name,
|
||||
)
|
||||
|
||||
process_livekit_multitrack.delay(
|
||||
room_name=meeting.room_name,
|
||||
meeting_id=meeting.id,
|
||||
)
|
||||
queued += 1
|
||||
|
||||
if queued > 0:
|
||||
logger.info("Queued LiveKit multitrack processing", count=queued)
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def reprocess_failed_livekit_recordings():
|
||||
"""Reprocess LiveKit recordings that failed.
|
||||
|
||||
Runs daily at 5 AM. Finds recordings with livekit prefix and error status.
|
||||
"""
|
||||
bucket_name = settings.LIVEKIT_STORAGE_AWS_BUCKET_NAME
|
||||
if not bucket_name:
|
||||
return
|
||||
|
||||
failed = await recordings_controller.get_multitrack_needing_reprocessing(
|
||||
bucket_name
|
||||
)
|
||||
livekit_failed = [r for r in failed if r.id.startswith("lk-")]
|
||||
|
||||
for recording in livekit_failed:
|
||||
if not recording.meeting_id:
|
||||
logger.warning(
|
||||
"Skipping reprocess — no meeting_id",
|
||||
recording_id=recording.id,
|
||||
)
|
||||
continue
|
||||
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
if not meeting:
|
||||
continue
|
||||
|
||||
logger.info(
|
||||
"Reprocessing failed LiveKit recording",
|
||||
recording_id=recording.id,
|
||||
meeting_id=meeting.id,
|
||||
)
|
||||
|
||||
process_livekit_multitrack.delay(
|
||||
room_name=meeting.room_name,
|
||||
meeting_id=meeting.id,
|
||||
)
|
||||
|
||||
@@ -8,8 +8,8 @@ import structlog
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.pipelines.main_live_pipeline import asynctask
|
||||
from reflector.utils.webhook import (
|
||||
WebhookRoomPayload,
|
||||
WebhookTestPayload,
|
||||
|
||||
@@ -113,6 +113,7 @@ TranscriptWsEvent = Annotated[
|
||||
UserEventName = Literal[
|
||||
"TRANSCRIPT_CREATED",
|
||||
"TRANSCRIPT_DELETED",
|
||||
"TRANSCRIPT_RESTORED",
|
||||
"TRANSCRIPT_STATUS",
|
||||
"TRANSCRIPT_FINAL_TITLE",
|
||||
"TRANSCRIPT_DURATION",
|
||||
@@ -161,6 +162,15 @@ class UserWsTranscriptDeleted(BaseModel):
|
||||
data: UserTranscriptDeletedData
|
||||
|
||||
|
||||
class UserTranscriptRestoredData(BaseModel):
|
||||
id: NonEmptyString
|
||||
|
||||
|
||||
class UserWsTranscriptRestored(BaseModel):
|
||||
event: Literal["TRANSCRIPT_RESTORED"] = "TRANSCRIPT_RESTORED"
|
||||
data: UserTranscriptRestoredData
|
||||
|
||||
|
||||
class UserWsTranscriptStatus(BaseModel):
|
||||
event: Literal["TRANSCRIPT_STATUS"] = "TRANSCRIPT_STATUS"
|
||||
data: UserTranscriptStatusData
|
||||
@@ -180,6 +190,7 @@ UserWsEvent = Annotated[
|
||||
Union[
|
||||
UserWsTranscriptCreated,
|
||||
UserWsTranscriptDeleted,
|
||||
UserWsTranscriptRestored,
|
||||
UserWsTranscriptStatus,
|
||||
UserWsTranscriptFinalTitle,
|
||||
UserWsTranscriptDuration,
|
||||
|
||||
@@ -107,7 +107,8 @@ class WebsocketManager:
|
||||
while True:
|
||||
# timeout=1.0 prevents tight CPU loop when no messages available
|
||||
message = await pubsub_subscriber.get_message(
|
||||
ignore_subscribe_messages=True
|
||||
ignore_subscribe_messages=True,
|
||||
timeout=1.0,
|
||||
)
|
||||
if message is not None:
|
||||
room_id = message["channel"].decode("utf-8")
|
||||
|
||||
@@ -171,5 +171,5 @@ async def test_multitrack_pipeline_end_to_end(
|
||||
assert len(messages) >= 1, "Should have received at least 1 email"
|
||||
email_msg = messages[0]
|
||||
assert (
|
||||
"Transcript Ready" in email_msg.get("Subject", "")
|
||||
"Reflector:" in email_msg.get("Subject", "")
|
||||
), f"Email subject should contain 'Transcript Ready', got: {email_msg.get('Subject')}"
|
||||
|
||||
206
server/tests/test_email.py
Normal file
206
server/tests/test_email.py
Normal file
@@ -0,0 +1,206 @@
|
||||
"""Tests for reflector.email — transcript email composition and sending."""
|
||||
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.transcripts import (
|
||||
SourceKind,
|
||||
Transcript,
|
||||
TranscriptParticipant,
|
||||
TranscriptTopic,
|
||||
)
|
||||
from reflector.email import (
|
||||
_build_html,
|
||||
_build_plain_text,
|
||||
get_transcript_url,
|
||||
send_transcript_email,
|
||||
)
|
||||
from reflector.processors.types import Word
|
||||
|
||||
|
||||
def _make_transcript(
|
||||
*,
|
||||
title: str | None = "Weekly Standup",
|
||||
short_summary: str | None = "Team discussed sprint progress.",
|
||||
with_topics: bool = True,
|
||||
share_mode: str = "private",
|
||||
source_kind: SourceKind = SourceKind.FILE,
|
||||
) -> Transcript:
|
||||
topics = []
|
||||
participants = []
|
||||
if with_topics:
|
||||
participants = [
|
||||
TranscriptParticipant(id="p1", speaker=0, name="Alice"),
|
||||
TranscriptParticipant(id="p2", speaker=1, name="Bob"),
|
||||
]
|
||||
topics = [
|
||||
TranscriptTopic(
|
||||
title="Intro",
|
||||
summary="Greetings",
|
||||
timestamp=0.0,
|
||||
duration=10.0,
|
||||
words=[
|
||||
Word(text="Hello", start=0.0, end=0.5, speaker=0),
|
||||
Word(text="everyone", start=0.5, end=1.0, speaker=0),
|
||||
Word(text="Thanks", start=5.0, end=5.5, speaker=1),
|
||||
Word(text="for", start=5.5, end=5.8, speaker=1),
|
||||
Word(text="joining", start=5.8, end=6.2, speaker=1),
|
||||
],
|
||||
),
|
||||
]
|
||||
return Transcript(
|
||||
id="tx-123",
|
||||
title=title,
|
||||
short_summary=short_summary,
|
||||
topics=topics,
|
||||
participants=participants,
|
||||
share_mode=share_mode,
|
||||
source_kind=source_kind,
|
||||
)
|
||||
|
||||
|
||||
URL = "http://localhost:3000/transcripts/tx-123"
|
||||
|
||||
|
||||
class TestBuildPlainText:
|
||||
def test_full_content_with_link(self):
|
||||
t = _make_transcript()
|
||||
text = _build_plain_text(t, URL, include_link=True)
|
||||
|
||||
assert text.startswith("Reflector: Weekly Standup")
|
||||
assert "Team discussed sprint progress." in text
|
||||
assert "[00:00] Alice:" in text
|
||||
assert "[00:05] Bob:" in text
|
||||
assert URL in text
|
||||
|
||||
def test_full_content_without_link(self):
|
||||
t = _make_transcript()
|
||||
text = _build_plain_text(t, URL, include_link=False)
|
||||
|
||||
assert "Reflector: Weekly Standup" in text
|
||||
assert "Team discussed sprint progress." in text
|
||||
assert "[00:00] Alice:" in text
|
||||
assert URL not in text
|
||||
|
||||
def test_no_summary(self):
|
||||
t = _make_transcript(short_summary=None)
|
||||
text = _build_plain_text(t, URL, include_link=True)
|
||||
|
||||
assert "Summary:" not in text
|
||||
assert "[00:00] Alice:" in text
|
||||
|
||||
def test_no_topics(self):
|
||||
t = _make_transcript(with_topics=False)
|
||||
text = _build_plain_text(t, URL, include_link=True)
|
||||
|
||||
assert "Transcript:" not in text
|
||||
assert "Reflector: Weekly Standup" in text
|
||||
|
||||
def test_unnamed_recording(self):
|
||||
t = _make_transcript(title=None)
|
||||
text = _build_plain_text(t, URL, include_link=True)
|
||||
|
||||
assert "Reflector: Unnamed recording" in text
|
||||
|
||||
|
||||
class TestBuildHtml:
|
||||
def test_full_content_with_link(self):
|
||||
t = _make_transcript()
|
||||
html = _build_html(t, URL, include_link=True)
|
||||
|
||||
assert "Weekly Standup" in html
|
||||
assert "Team discussed sprint progress." in html
|
||||
assert "Alice" in html
|
||||
assert "Bob" in html
|
||||
assert URL in html
|
||||
assert "View Transcript" in html
|
||||
|
||||
def test_full_content_without_link(self):
|
||||
t = _make_transcript()
|
||||
html = _build_html(t, URL, include_link=False)
|
||||
|
||||
assert "Weekly Standup" in html
|
||||
assert "Alice" in html
|
||||
assert URL not in html
|
||||
assert "View Transcript" not in html
|
||||
|
||||
def test_no_summary(self):
|
||||
t = _make_transcript(short_summary=None)
|
||||
html = _build_html(t, URL, include_link=True)
|
||||
|
||||
assert "sprint progress" not in html
|
||||
assert "Alice" in html
|
||||
|
||||
def test_no_topics(self):
|
||||
t = _make_transcript(with_topics=False)
|
||||
html = _build_html(t, URL, include_link=True)
|
||||
|
||||
assert "Transcript" not in html or "View Transcript" in html
|
||||
|
||||
def test_html_escapes_title(self):
|
||||
t = _make_transcript(title='<script>alert("xss")</script>')
|
||||
html = _build_html(t, URL, include_link=True)
|
||||
|
||||
assert "<script>" not in html
|
||||
assert "<script>" in html
|
||||
|
||||
|
||||
class TestGetTranscriptUrl:
|
||||
def test_url_format(self):
|
||||
t = _make_transcript()
|
||||
url = get_transcript_url(t)
|
||||
assert url.endswith("/transcripts/tx-123")
|
||||
|
||||
|
||||
class TestSendTranscriptEmail:
|
||||
@pytest.mark.asyncio
|
||||
async def test_include_link_default_true(self):
|
||||
t = _make_transcript()
|
||||
with (
|
||||
patch("reflector.email.is_email_configured", return_value=True),
|
||||
patch(
|
||||
"reflector.email.aiosmtplib.send", new_callable=AsyncMock
|
||||
) as mock_send,
|
||||
):
|
||||
count = await send_transcript_email(["a@test.com"], t)
|
||||
|
||||
assert count == 1
|
||||
call_args = mock_send.call_args
|
||||
msg = call_args[0][0]
|
||||
assert msg["Subject"] == "Reflector: Weekly Standup"
|
||||
# Default include_link=True, so HTML part should contain the URL
|
||||
html_part = msg.get_payload()[1].get_payload()
|
||||
assert "/transcripts/tx-123" in html_part
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_include_link_false(self):
|
||||
t = _make_transcript()
|
||||
with (
|
||||
patch("reflector.email.is_email_configured", return_value=True),
|
||||
patch(
|
||||
"reflector.email.aiosmtplib.send", new_callable=AsyncMock
|
||||
) as mock_send,
|
||||
):
|
||||
count = await send_transcript_email(["a@test.com"], t, include_link=False)
|
||||
|
||||
assert count == 1
|
||||
msg = mock_send.call_args[0][0]
|
||||
html_part = msg.get_payload()[1].get_payload()
|
||||
assert "/transcripts/tx-123" not in html_part
|
||||
plain_part = msg.get_payload()[0].get_payload()
|
||||
assert "/transcripts/tx-123" not in plain_part
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_skips_when_not_configured(self):
|
||||
t = _make_transcript()
|
||||
with patch("reflector.email.is_email_configured", return_value=False):
|
||||
count = await send_transcript_email(["a@test.com"], t)
|
||||
assert count == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_skips_empty_recipients(self):
|
||||
t = _make_transcript()
|
||||
with patch("reflector.email.is_email_configured", return_value=True):
|
||||
count = await send_transcript_email([], t)
|
||||
assert count == 0
|
||||
408
server/tests/test_livekit_backend.py
Normal file
408
server/tests/test_livekit_backend.py
Normal file
@@ -0,0 +1,408 @@
|
||||
"""
|
||||
Tests for LiveKit backend: webhook verification, token generation,
|
||||
display_name sanitization, and platform client behavior.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.livekit_api.webhooks import create_webhook_receiver, verify_webhook
|
||||
|
||||
# ── Webhook verification ──────────────────────────────────────
|
||||
|
||||
|
||||
class TestWebhookVerification:
|
||||
def _make_receiver(self):
|
||||
"""Create a receiver with test credentials."""
|
||||
return create_webhook_receiver(
|
||||
api_key="test_key",
|
||||
api_secret="test_secret_that_is_long_enough_for_hmac",
|
||||
)
|
||||
|
||||
def test_rejects_empty_auth_header(self):
|
||||
receiver = self._make_receiver()
|
||||
result = verify_webhook(receiver, b'{"event":"test"}', "")
|
||||
assert result is None
|
||||
|
||||
def test_rejects_garbage_auth_header(self):
|
||||
receiver = self._make_receiver()
|
||||
result = verify_webhook(receiver, b'{"event":"test"}', "not-a-jwt")
|
||||
assert result is None
|
||||
|
||||
def test_rejects_empty_body(self):
|
||||
receiver = self._make_receiver()
|
||||
result = verify_webhook(receiver, b"", "Bearer some.jwt.token")
|
||||
assert result is None
|
||||
|
||||
def test_handles_bytes_body(self):
|
||||
receiver = self._make_receiver()
|
||||
# Should not crash on bytes input
|
||||
result = verify_webhook(receiver, b'{"event":"test"}', "invalid")
|
||||
assert result is None
|
||||
|
||||
def test_handles_string_body(self):
|
||||
receiver = self._make_receiver()
|
||||
result = verify_webhook(receiver, '{"event":"test"}', "invalid")
|
||||
assert result is None
|
||||
|
||||
def test_rejects_wrong_secret(self):
|
||||
"""Webhook signed with different secret should be rejected."""
|
||||
receiver = self._make_receiver()
|
||||
# A JWT signed with a different secret
|
||||
fake_jwt = "eyJhbGciOiJIUzI1NiJ9.eyJ0ZXN0IjoxfQ.wrong_signature"
|
||||
result = verify_webhook(receiver, b"{}", fake_jwt)
|
||||
assert result is None
|
||||
|
||||
|
||||
# ── Token generation ──────────────────────────────────────────
|
||||
|
||||
|
||||
class TestTokenGeneration:
|
||||
"""Test token generation using the LiveKit SDK directly (no client instantiation)."""
|
||||
|
||||
def _generate_token(
|
||||
self, room_name="room", identity="user", name=None, admin=False, ttl=86400
|
||||
):
|
||||
"""Generate a token using the SDK directly, avoiding LiveKitAPI client session."""
|
||||
from datetime import timedelta
|
||||
|
||||
from livekit.api import AccessToken, VideoGrants
|
||||
|
||||
token = AccessToken(
|
||||
api_key="test_key", api_secret="test_secret_that_is_long_enough_for_hmac"
|
||||
)
|
||||
token.identity = identity
|
||||
token.name = name or identity
|
||||
token.ttl = timedelta(seconds=ttl)
|
||||
token.with_grants(
|
||||
VideoGrants(
|
||||
room_join=True,
|
||||
room=room_name,
|
||||
can_publish=True,
|
||||
can_subscribe=True,
|
||||
room_admin=admin,
|
||||
)
|
||||
)
|
||||
return token.to_jwt()
|
||||
|
||||
def _decode_claims(self, token):
|
||||
import base64
|
||||
import json
|
||||
|
||||
payload = token.split(".")[1]
|
||||
payload += "=" * (4 - len(payload) % 4)
|
||||
return json.loads(base64.b64decode(payload))
|
||||
|
||||
def test_creates_valid_jwt(self):
|
||||
token = self._generate_token(
|
||||
room_name="test-room", identity="user123", name="Test User"
|
||||
)
|
||||
assert isinstance(token, str)
|
||||
assert len(token.split(".")) == 3
|
||||
|
||||
def test_token_includes_room_name(self):
|
||||
token = self._generate_token(room_name="my-room-20260401", identity="alice")
|
||||
claims = self._decode_claims(token)
|
||||
assert claims.get("video", {}).get("room") == "my-room-20260401"
|
||||
assert claims.get("sub") == "alice"
|
||||
|
||||
def test_token_respects_admin_flag(self):
|
||||
token = self._generate_token(identity="admin", admin=True)
|
||||
claims = self._decode_claims(token)
|
||||
assert claims["video"]["roomAdmin"] is True
|
||||
|
||||
def test_token_non_admin_by_default(self):
|
||||
token = self._generate_token(identity="user")
|
||||
claims = self._decode_claims(token)
|
||||
assert claims.get("video", {}).get("roomAdmin") in (None, False)
|
||||
|
||||
def test_ttl_is_timedelta(self):
|
||||
"""Verify ttl as timedelta works (previous bug: int caused TypeError)."""
|
||||
token = self._generate_token(ttl=3600)
|
||||
assert isinstance(token, str)
|
||||
|
||||
|
||||
# ── Display name sanitization ─────────────────────────────────
|
||||
|
||||
|
||||
class TestDisplayNameSanitization:
    """Test the sanitization logic from rooms.py join endpoint."""

    def _sanitize(self, display_name: str) -> str:
        """Replicate the sanitization from rooms_join_meeting."""
        # Strip surrounding whitespace, replace any disallowed character
        # with "_", then cap the result at 40 characters.
        stripped = display_name.strip()
        return re.sub(r"[^a-zA-Z0-9_-]", "_", stripped)[:40]

    def test_normal_name(self):
        assert self._sanitize("Alice") == "Alice"

    def test_name_with_spaces(self):
        assert self._sanitize("John Doe") == "John_Doe"

    def test_name_with_special_chars(self):
        assert self._sanitize("user@email.com") == "user_email_com"

    def test_name_with_unicode(self):
        sanitized = self._sanitize("José García")
        assert sanitized == "Jos__Garc_a"
        # Every surviving character must come from the allowed alphabet.
        allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-"
        assert set(sanitized).issubset(set(allowed))

    def test_name_with_emoji(self):
        sanitized = self._sanitize("👋 Hello")
        assert "_" in sanitized  # Emoji replaced with underscore
        assert "Hello" in sanitized

    def test_very_long_name(self):
        hundred_chars = "A" * 100
        assert len(self._sanitize(hundred_chars)) == 40

    def test_empty_name(self):
        assert self._sanitize("") == ""

    def test_only_special_chars(self):
        assert self._sanitize("!!!") == "___"

    def test_whitespace_stripped(self):
        assert self._sanitize("  Alice  ") == "Alice"

    def test_hyphens_preserved(self):
        assert self._sanitize("first-last") == "first-last"

    def test_underscores_preserved(self):
        assert self._sanitize("first_last") == "first_last"

    def test_html_injection(self):
        sanitized = self._sanitize("<script>alert('xss')</script>")
        for dangerous in ("<", ">", "'"):
            assert dangerous not in sanitized
|
||||
|
||||
|
||||
# ── S3 egress configuration ───────────────────────────────────
|
||||
|
||||
|
||||
class TestS3EgressConfig:
|
||||
"""Test S3Upload construction using the SDK directly."""
|
||||
|
||||
def test_build_s3_upload_requires_all_fields(self):
|
||||
# Missing fields should raise or produce invalid config
|
||||
# The validation happens in our client wrapper, not the SDK
|
||||
# Test the validation logic directly
|
||||
s3_bucket = None
|
||||
s3_access_key = "AKID"
|
||||
s3_secret_key = "secret"
|
||||
assert not all([s3_bucket, s3_access_key, s3_secret_key])
|
||||
|
||||
def test_s3_upload_with_credentials(self):
|
||||
from livekit.api import S3Upload
|
||||
|
||||
upload = S3Upload(
|
||||
access_key="AKID",
|
||||
secret="secret123",
|
||||
bucket="test-bucket",
|
||||
region="us-east-1",
|
||||
force_path_style=True,
|
||||
)
|
||||
assert upload.bucket == "test-bucket"
|
||||
assert upload.force_path_style is True
|
||||
|
||||
def test_s3_upload_with_endpoint(self):
|
||||
from livekit.api import S3Upload
|
||||
|
||||
upload = S3Upload(
|
||||
access_key="AKID",
|
||||
secret="secret",
|
||||
bucket="bucket",
|
||||
region="us-east-1",
|
||||
force_path_style=True,
|
||||
endpoint="http://garage:3900",
|
||||
)
|
||||
assert upload.endpoint == "http://garage:3900"
|
||||
|
||||
|
||||
# ── Platform detection ────────────────────────────────────────
|
||||
|
||||
|
||||
# ── Redis participant mapping ──────────────────────────────
|
||||
|
||||
|
||||
class TestParticipantIdentityMapping:
    """Test the identity → user_id Redis mapping pattern."""

    def test_mapping_key_format(self):
        # Key layout: livekit:participant_map:<room_name>
        room_name = "myroom-20260401172036"
        key = f"livekit:participant_map:{room_name}"
        assert key == "livekit:participant_map:myroom-20260401172036"

    def test_identity_with_uuid_suffix_is_unique(self):
        import uuid

        base = "Juan"
        first = f"{base}-{uuid.uuid4().hex[:6]}"
        second = f"{base}-{uuid.uuid4().hex[:6]}"
        assert first != second
        assert first.startswith("Juan-")
        assert second.startswith("Juan-")

    def test_strip_uuid_suffix_for_display(self):
        """Pipeline strips UUID suffix for display name."""
        identity = "Juan-2bcea0"
        if "-" in identity:
            display_name = identity.rsplit("-", 1)[0]
        else:
            display_name = identity
        assert display_name == "Juan"

    def test_strip_uuid_preserves_hyphenated_names(self):
        # Only the LAST hyphen-separated segment (the UUID) is dropped.
        identity = "Mary-Jane-abc123"
        if "-" in identity:
            display_name = identity.rsplit("-", 1)[0]
        else:
            display_name = identity
        assert display_name == "Mary-Jane"

    def test_anon_identity_no_user_id(self):
        """Anonymous participants should not have a user_id mapping."""
        identity = "anon-abc123"
        # In the pipeline, anon identities don't get looked up
        assert identity.startswith("anon-")

    @pytest.mark.asyncio
    async def test_redis_hset_hgetall_roundtrip(self):
        """Test the actual Redis operations used for participant mapping."""
        try:
            from reflector.redis_cache import get_async_redis_client

            redis_client = await get_async_redis_client()
            test_key = "livekit:participant_map:__test_room__"

            # Write two identity → user_id entries.
            await redis_client.hset(test_key, "Juan-abc123", "user-id-1")
            await redis_client.hset(test_key, "Alice-def456", "user-id-2")

            # Read back, decoding bytes keys/values when present.
            raw_map = await redis_client.hgetall(test_key)
            decoded = {}
            for k, v in raw_map.items():
                field = k.decode() if isinstance(k, bytes) else k
                value = v.decode() if isinstance(v, bytes) else v
                decoded[field] = value

            assert decoded["Juan-abc123"] == "user-id-1"
            assert decoded["Alice-def456"] == "user-id-2"

            # Cleanup
            await redis_client.delete(test_key)
        except Exception:
            # Best-effort: environments without Redis skip instead of failing.
            pytest.skip("Redis not available")
|
||||
|
||||
|
||||
# ── Egress video cleanup safety ────────────────────────────────
|
||||
|
||||
|
||||
class TestEgressVideoCleanup:
    """Ensure video cleanup logic NEVER deletes audio files."""

    AUDIO_FILES = [
        "livekit/room-20260401/juan-abc123-2026-04-01T100000-TR_AMR3SWs74Divho.ogg",
        "livekit/room-20260401/alice-def456-2026-04-01T100030-TR_AMirKjdAvLteAZ.ogg",
        "livekit/room-20260401/bob-789abc-2026-04-01T100100-TR_AMyoSbM7tAQbYj.ogg",
    ]

    VIDEO_FILES = [
        "livekit/room-20260401/juan-abc123-2026-04-01T100000-TR_VC679dgMQBdfhT.webm",
        "livekit/room-20260401/alice-def456-2026-04-01T100030-TR_VCLsuRuxLp4eik.webm",
    ]

    MANIFEST_FILES = [
        "livekit/room-20260401/EG_K5sipvfB5fTM.json",
        "livekit/room-20260401/EG_nzwBsH9xzgoj.json",
    ]

    def _should_delete(self, filename: str) -> bool:
        """Replicate the deletion logic from _handle_egress_ended."""
        # Only the video extension is ever eligible for deletion.
        return filename.endswith(".webm")

    def test_audio_files_never_deleted(self):
        """CRITICAL: Audio files must NEVER be marked for deletion."""
        for path in self.AUDIO_FILES:
            assert not self._should_delete(path), f"Audio file would be deleted: {path}"

    def test_video_files_are_deleted(self):
        for path in self.VIDEO_FILES:
            assert self._should_delete(path), f"Video file NOT marked for deletion: {path}"

    def test_manifests_are_kept(self):
        for path in self.MANIFEST_FILES:
            assert not self._should_delete(path), f"Manifest would be deleted: {path}"

    def test_ogg_extension_never_matches_delete(self):
        """Double-check: no .ogg file ever matches the deletion condition."""
        candidates = [
            "anything.ogg",
            "livekit/room/track.ogg",
            "video.ogg",  # Even if someone names it "video.ogg"
            ".ogg",
            "TR_VC_fake_video.ogg",  # Video-like track ID but .ogg extension
        ]
        for path in candidates:
            assert not self._should_delete(path), f".ogg file would be deleted: {path}"

    def test_webm_always_matches_delete(self):
        candidates = [
            "anything.webm",
            "livekit/room/track.webm",
            "audio.webm",  # Even if someone names it "audio.webm"
            ".webm",
        ]
        for path in candidates:
            assert self._should_delete(path), f".webm file NOT marked for deletion: {path}"

    def test_unknown_extensions_are_kept(self):
        """Unknown file types should NOT be deleted (safe by default)."""
        candidates = [
            "file.mp4",
            "file.wav",
            "file.mp3",
            "file.txt",
            "file",
            "",
        ]
        for path in candidates:
            assert not self._should_delete(path), f"Unknown file type would be deleted: {path}"
|
||||
|
||||
|
||||
# ── Platform detection ────────────────────────────────────────
|
||||
|
||||
|
||||
class TestSourcePlatformDetection:
    """Test the recording ID prefix-based platform detection from transcript_process.py."""

    def test_livekit_prefix(self):
        rec_id = "lk-livekit-20260401234423"
        if rec_id.startswith("lk-"):
            detected = "livekit"
        else:
            detected = "daily"
        assert detected == "livekit"

    def test_daily_no_prefix(self):
        # Daily recording IDs are plain UUIDs with no "lk-" prefix.
        rec_id = "08fa0b24-9220-44c5-846c-3f116cf8e738"
        if rec_id.startswith("lk-"):
            detected = "livekit"
        else:
            detected = "daily"
        assert detected == "daily"

    def test_none_recording_id(self):
        # A falsy ID short-circuits before startswith, avoiding AttributeError.
        rec_id = None
        detected = "livekit" if rec_id and rec_id.startswith("lk-") else "daily"
        assert detected == "daily"

    def test_empty_recording_id(self):
        rec_id = ""
        detected = "livekit" if rec_id and rec_id.startswith("lk-") else "daily"
        assert detected == "daily"
|
||||
393
server/tests/test_livekit_track_processing.py
Normal file
393
server/tests/test_livekit_track_processing.py
Normal file
@@ -0,0 +1,393 @@
|
||||
"""
|
||||
Tests for LiveKit track processing: filepath parsing, offset calculation,
|
||||
and pad_track padding_seconds behavior.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from fractions import Fraction
|
||||
|
||||
import av
|
||||
import pytest
|
||||
|
||||
from reflector.utils.livekit import (
|
||||
LiveKitTrackFile,
|
||||
calculate_track_offsets,
|
||||
extract_livekit_base_room_name,
|
||||
filter_audio_tracks,
|
||||
parse_livekit_track_filepath,
|
||||
)
|
||||
|
||||
# ── Filepath parsing ──────────────────────────────────────────
|
||||
|
||||
|
||||
class TestParseLiveKitTrackFilepath:
    def test_parses_ogg_audio_track(self):
        parsed = parse_livekit_track_filepath(
            "livekit/myroom-20260401172036/juan-4b82ed-2026-04-01T195758-TR_AMR3SWs74Divho.ogg"
        )
        assert parsed.room_name == "myroom-20260401172036"
        assert parsed.participant_identity == "juan-4b82ed"
        assert parsed.track_id == "TR_AMR3SWs74Divho"
        assert parsed.timestamp == datetime(2026, 4, 1, 19, 57, 58, tzinfo=timezone.utc)

    def test_parses_different_identities(self):
        first = parse_livekit_track_filepath(
            "livekit/room-20260401/alice-a1b2c3-2026-04-01T100000-TR_abc123.ogg"
        )
        second = parse_livekit_track_filepath(
            "livekit/room-20260401/bob_smith-d4e5f6-2026-04-01T100030-TR_def456.ogg"
        )
        assert first.participant_identity == "alice-a1b2c3"
        assert second.participant_identity == "bob_smith-d4e5f6"

    def test_rejects_json_manifest(self):
        with pytest.raises(ValueError, match="doesn't match expected format"):
            parse_livekit_track_filepath("livekit/myroom-20260401/EG_K5sipvfB5fTM.json")

    def test_rejects_webm_video(self):
        # webm files match the pattern but are filtered by filter_audio_tracks
        parsed = parse_livekit_track_filepath(
            "livekit/myroom-20260401/juan-4b82ed-2026-04-01T195727-TR_VC679dgMQBdfhT.webm"
        )
        # webm parses successfully (TR_ prefix matches video tracks too)
        assert parsed.track_id == "TR_VC679dgMQBdfhT"

    def test_rejects_invalid_path(self):
        with pytest.raises(ValueError):
            parse_livekit_track_filepath("not/a/valid/path.ogg")

    def test_rejects_missing_track_id(self):
        with pytest.raises(ValueError):
            parse_livekit_track_filepath("livekit/room/user-2026-04-01T100000.ogg")

    def test_parses_timestamp_correctly(self):
        parsed = parse_livekit_track_filepath(
            "livekit/room-20260401/user-abc123-2026-12-25T235959-TR_test.ogg"
        )
        expected_ts = datetime(2026, 12, 25, 23, 59, 59, tzinfo=timezone.utc)
        assert parsed.timestamp == expected_ts
|
||||
|
||||
|
||||
# ── Audio track filtering ─────────────────────────────────────
|
||||
|
||||
|
||||
class TestFilterAudioTracks:
    def test_filters_to_ogg_only(self):
        keys = [
            "livekit/room/EG_abc.json",
            "livekit/room/user-abc-2026-04-01T100000-TR_audio.ogg",
            "livekit/room/user-abc-2026-04-01T100000-TR_video.webm",
            "livekit/room/EG_def.json",
            "livekit/room/user2-def-2026-04-01T100030-TR_audio2.ogg",
        ]
        kept = filter_audio_tracks(keys)
        # Manifests and video tracks are dropped; only .ogg survives.
        assert len(kept) == 2
        assert all(key.endswith(".ogg") for key in kept)

    def test_empty_input(self):
        assert filter_audio_tracks([]) == []

    def test_no_audio_tracks(self):
        non_audio = ["livekit/room/EG_abc.json", "livekit/room/user-TR_v.webm"]
        assert filter_audio_tracks(non_audio) == []
|
||||
|
||||
|
||||
# ── Offset calculation ─────────────────────────────────────────
|
||||
|
||||
|
||||
class TestCalculateTrackOffsets:
    @staticmethod
    def _track(s3_key, identity, timestamp, track_id):
        # Small builder to keep the test bodies compact; all tracks in
        # these tests share the same room name.
        return LiveKitTrackFile(
            s3_key=s3_key,
            room_name="r",
            participant_identity=identity,
            timestamp=timestamp,
            track_id=track_id,
        )

    def test_single_track_zero_offset(self):
        tracks = [
            self._track(
                "k1", "alice", datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc), "TR_1"
            )
        ]
        offsets = calculate_track_offsets(tracks)
        assert len(offsets) == 1
        assert offsets[0][1] == 0.0

    def test_two_tracks_correct_offset(self):
        tracks = [
            self._track(
                "k1", "alice", datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc), "TR_1"
            ),
            self._track(
                "k2", "bob", datetime(2026, 4, 1, 10, 1, 10, tzinfo=timezone.utc), "TR_2"
            ),
        ]
        offsets = calculate_track_offsets(tracks)
        assert offsets[0][1] == 0.0  # alice (earliest)
        assert offsets[1][1] == 70.0  # bob (70 seconds later)

    def test_three_tracks_earliest_is_zero(self):
        # Deliberately out of chronological order on input.
        tracks = [
            self._track(
                "k2", "bob", datetime(2026, 4, 1, 10, 0, 30, tzinfo=timezone.utc), "TR_2"
            ),
            self._track(
                "k1", "alice", datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc), "TR_1"
            ),
            self._track(
                "k3",
                "charlie",
                datetime(2026, 4, 1, 10, 1, 0, tzinfo=timezone.utc),
                "TR_3",
            ),
        ]
        offsets = calculate_track_offsets(tracks)
        by_identity = {track.participant_identity: off for track, off in offsets}
        assert by_identity["alice"] == 0.0
        assert by_identity["bob"] == 30.0
        assert by_identity["charlie"] == 60.0

    def test_empty_tracks(self):
        assert calculate_track_offsets([]) == []

    def test_simultaneous_tracks_zero_offsets(self):
        shared_ts = datetime(2026, 4, 1, 10, 0, 0, tzinfo=timezone.utc)
        tracks = [
            self._track("k1", "a", shared_ts, "TR_1"),
            self._track("k2", "b", shared_ts, "TR_2"),
        ]
        offsets = calculate_track_offsets(tracks)
        assert all(off == 0.0 for _, off in offsets)
|
||||
|
||||
|
||||
# ── Room name extraction ───────────────────────────────────────
|
||||
|
||||
|
||||
class TestExtractLiveKitBaseRoomName:
    def test_strips_timestamp_suffix(self):
        assert extract_livekit_base_room_name("myroom-20260401172036") == "myroom"

    def test_preserves_hyphenated_name(self):
        # Only the trailing timestamp segment is removed; interior
        # hyphens stay part of the base name.
        base = extract_livekit_base_room_name("my-room-name-20260401172036")
        assert base == "my-room-name"

    def test_single_segment(self):
        assert extract_livekit_base_room_name("room-20260401") == "room"
|
||||
|
||||
|
||||
# ── pad_track padding_seconds behavior ─────────────────────────
|
||||
|
||||
|
||||
class TestPadTrackPaddingSeconds:
    """Test that pad_track correctly uses pre-calculated padding_seconds
    for LiveKit (skipping container metadata) vs extracting from container
    for Daily (when padding_seconds is None).
    """

    def _make_test_ogg(self, path: str, duration_seconds: float = 5.0):
        """Create a minimal OGG/Opus file for testing."""
        with av.open(path, "w", format="ogg") as out:
            stream = out.add_stream("libopus", rate=48000)
            stream.bit_rate = 64000
            samples_per_frame = 960  # Opus standard
            total_samples = int(duration_seconds * 48000)
            # Emit silent frames covering the requested duration.
            for pts in range(0, total_samples, samples_per_frame):
                frame = av.AudioFrame(
                    format="s16", layout="stereo", samples=samples_per_frame
                )
                frame.planes[0].update(bytes(samples_per_frame * 2 * 2))  # s16 * stereo
                frame.sample_rate = 48000
                frame.pts = pts
                frame.time_base = Fraction(1, 48000)
                for packet in stream.encode(frame):
                    out.mux(packet)
            # Flush the encoder.
            for packet in stream.encode(None):
                out.mux(packet)

    def test_ogg_has_zero_start_time(self, tmp_path):
        """Verify that OGG files (like LiveKit produces) have start_time=0,
        confirming why pre-calculated padding is needed."""
        ogg_path = str(tmp_path / "test.ogg")
        self._make_test_ogg(ogg_path)

        with av.open(ogg_path) as container:
            from reflector.utils.audio_padding import (
                extract_stream_start_time_from_container,
            )

            start_time = extract_stream_start_time_from_container(container, 0)

        assert start_time <= 0.0, (
            "OGG files should have start_time<=0 (no usable offset), confirming "
            f"LiveKit tracks need pre-calculated padding_seconds. Got: {start_time}"
        )

    def test_precalculated_padding_skips_metadata_extraction(self, tmp_path):
        """When padding_seconds is set, pad_track should use it directly
        and NOT call extract_stream_start_time_from_container."""
        from reflector.hatchet.workflows.track_processing import TrackInput

        track_input = TrackInput(
            track_index=0,
            s3_key="livekit/room/user-abc-2026-04-01T100000-TR_audio.ogg",
            bucket_name="test-bucket",
            transcript_id="test-transcript",
            source_platform="livekit",
            padding_seconds=70.0,
        )

        assert track_input.padding_seconds == 70.0
        # The pad_track function checks: if input.padding_seconds is not None → use it
        # This means extract_stream_start_time_from_container is never called for LiveKit

    def test_none_padding_falls_back_to_metadata(self, tmp_path):
        """When padding_seconds is None (Daily), pad_track should extract
        start_time from container metadata."""
        from reflector.hatchet.workflows.track_processing import TrackInput

        track_input = TrackInput(
            track_index=0,
            s3_key="daily/room/track.webm",
            bucket_name="test-bucket",
            transcript_id="test-transcript",
            source_platform="daily",
            padding_seconds=None,
        )

        assert track_input.padding_seconds is None
        # pad_track will call extract_stream_start_time_from_container for this case

    def test_zero_padding_returns_original_key(self):
        """When padding_seconds=0.0, pad_track should return the original S3 key
        without applying any padding (same as start_time=0 from metadata)."""
        from reflector.hatchet.workflows.track_processing import TrackInput

        track_input = TrackInput(
            track_index=0,
            s3_key="livekit/room/earliest-track.ogg",
            bucket_name="test-bucket",
            transcript_id="test-transcript",
            source_platform="livekit",
            padding_seconds=0.0,
        )

        # padding_seconds=0.0 → start_time_seconds=0.0 → "no padding needed" branch
        assert track_input.padding_seconds == 0.0
|
||||
|
||||
|
||||
# ── Pipeline offset calculation (process_tracks logic) ─────────
|
||||
|
||||
|
||||
class TestProcessTracksOffsetCalculation:
    """Test the offset calculation logic used in process_tracks
    for LiveKit source_platform."""

    def test_livekit_offsets_from_timestamps(self):
        """Simulate the offset calculation done in process_tracks."""
        tracks = [
            {
                "s3_key": "track1.ogg",
                "participant_identity": "admin-0129c3",
                "timestamp": "2026-04-01T23:44:50+00:00",
            },
            {
                "s3_key": "track2.ogg",
                "participant_identity": "juan-5a5b41",
                "timestamp": "2026-04-01T23:46:00+00:00",
            },
        ]

        # Replicate the logic from process_tracks: parse each timestamp,
        # then pad every track relative to the earliest one.
        parsed = []
        for idx, track in enumerate(tracks):
            raw_ts = track.get("timestamp")
            if raw_ts:
                parsed.append((idx, datetime.fromisoformat(raw_ts)))

        earliest = min(ts for _, ts in parsed)
        track_padding = {idx: (ts - earliest).total_seconds() for idx, ts in parsed}

        assert track_padding[0] == 0.0  # admin (earliest)
        assert track_padding[1] == 70.0  # juan (70s later)

    def test_daily_tracks_get_no_precalculated_padding(self):
        """Daily tracks should NOT get padding_seconds (use container metadata)."""
        tracks = [
            {"s3_key": "daily-track1.webm"},
            {"s3_key": "daily-track2.webm"},
        ]

        # Daily tracks don't have "timestamp" field
        track_padding = {}
        source_platform = "daily"

        if source_platform == "livekit":
            # This block should NOT execute for daily
            pass

        # Daily tracks get no pre-calculated padding
        assert track_padding == {}
        for idx, _ in enumerate(tracks):
            assert track_padding.get(idx) is None

    def test_livekit_missing_timestamp_graceful(self):
        """If a LiveKit track is missing timestamp, it should be skipped."""
        tracks = [
            {
                "s3_key": "track1.ogg",
                "participant_identity": "alice",
                "timestamp": "2026-04-01T10:00:00+00:00",
            },
            {"s3_key": "track2.ogg", "participant_identity": "bob"},  # no timestamp
        ]

        timestamps = []
        for idx, track in enumerate(tracks):
            raw_ts = track.get("timestamp")
            parsed = None
            if raw_ts:
                try:
                    parsed = datetime.fromisoformat(raw_ts)
                except (ValueError, TypeError):
                    parsed = None
            timestamps.append((idx, parsed))

        valid = [(i, ts) for i, ts in timestamps if ts is not None]
        assert len(valid) == 1  # only alice has a timestamp
        assert valid[0][0] == 0  # track index 0
|
||||
@@ -452,9 +452,11 @@ async def test_anonymous_cannot_webrtc_record_when_not_public(client, monkeypatc
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_start_meeting_recording_when_not_public(
|
||||
async def test_anonymous_can_start_meeting_recording_when_not_public(
|
||||
client, monkeypatch
|
||||
):
|
||||
"""Anonymous users can start recording since it's triggered from the frontend
|
||||
and recording is at room level via Daily REST API."""
|
||||
monkeypatch.setattr(settings, "PUBLIC_MODE", False)
|
||||
|
||||
room = await rooms_controller.add(
|
||||
@@ -486,7 +488,8 @@ async def test_anonymous_cannot_start_meeting_recording_when_not_public(
|
||||
f"/meetings/{meeting.id}/recordings/start",
|
||||
json={"type": "cloud", "instanceId": "00000000-0000-0000-0000-000000000001"},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
# Should not be 401 (may fail for other reasons like no Daily API, but auth passes)
|
||||
assert resp.status_code != 401, f"Should not get 401: {resp.text}"
|
||||
|
||||
|
||||
# ======================================================================
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.recordings import Recording, recordings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.transcripts import SourceKind, transcripts_controller
|
||||
@@ -390,3 +394,463 @@ async def test_transcripts_list_filtered_by_room_id(authenticated_client, client
|
||||
ids = [t["id"] for t in items]
|
||||
assert in_room.id in ids
|
||||
assert other.id not in ids
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Restore tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_restore(authenticated_client, client):
    """Soft-delete then restore, verify accessible again."""
    resp = await client.post("/transcripts", json={"name": "restore-me"})
    assert resp.status_code == 200
    tid = resp.json()["id"]

    # Soft-delete hides the transcript…
    resp = await client.delete(f"/transcripts/{tid}")
    assert resp.status_code == 200
    resp = await client.get(f"/transcripts/{tid}")
    assert resp.status_code == 404

    # …and restore brings it back.
    resp = await client.post(f"/transcripts/{tid}/restore")
    assert resp.status_code == 200
    assert resp.json()["status"] == "ok"

    resp = await client.get(f"/transcripts/{tid}")
    assert resp.status_code == 200
    assert resp.json()["name"] == "restore-me"

    # deleted_at is cleared in the DB as well.
    transcript = await transcripts_controller.get_by_id(tid)
    assert transcript.deleted_at is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_restore_recording_also_restored(authenticated_client, client):
    """Restoring a transcript also restores its recording."""
    recording = await recordings_controller.create(
        Recording(
            bucket_name="test-bucket",
            object_key="restore-test.mp4",
            recorded_at=datetime.now(timezone.utc),
        )
    )
    transcript = await transcripts_controller.add(
        name="restore-with-recording",
        source_kind=SourceKind.ROOM,
        recording_id=recording.id,
        user_id="randomuserid",
    )

    # Soft-delete the transcript; the linked recording follows it.
    resp = await client.delete(f"/transcripts/{transcript.id}")
    assert resp.status_code == 200
    rec = await recordings_controller.get_by_id(recording.id)
    assert rec.deleted_at is not None

    # Restore via the API.
    resp = await client.post(f"/transcripts/{transcript.id}/restore")
    assert resp.status_code == 200

    # Both records come back un-deleted.
    rec = await recordings_controller.get_by_id(recording.id)
    assert rec.deleted_at is None
    tr = await transcripts_controller.get_by_id(transcript.id)
    assert tr.deleted_at is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_restore_not_deleted(authenticated_client, client):
    """Restoring a non-deleted transcript returns 400."""
    resp = await client.post("/transcripts", json={"name": "not-deleted"})
    assert resp.status_code == 200
    tid = resp.json()["id"]

    # Restore without a prior soft-delete is rejected.
    resp = await client.post(f"/transcripts/{tid}/restore")
    assert resp.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_restore_not_found(authenticated_client, client):
    """Restoring a nonexistent transcript returns 404."""
    resp = await client.post("/transcripts/nonexistent-id/restore")
    assert resp.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_restore_forbidden(authenticated_client, client):
    """Cannot restore another user's deleted transcript."""
    # Transcript owned by a different user, soft-deleted directly in the DB.
    transcript = await transcripts_controller.add(
        name="other-user-restore",
        source_kind=SourceKind.FILE,
        user_id="some-other-user",
    )
    await transcripts_controller.remove_by_id(transcript.id, user_id="some-other-user")

    # The authenticated_client user ("randomuserid") must be refused.
    resp = await client.post(f"/transcripts/{transcript.id}/restore")
    assert resp.status_code == 403
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Destroy tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
def mock_destroy_storage():
    """Mock storage backends so hard_delete doesn't require S3 credentials."""
    fake_transcripts = AsyncMock(delete_file=AsyncMock())
    fake_sources = AsyncMock(delete_file=AsyncMock())
    with (
        patch(
            "reflector.db.transcripts.get_transcripts_storage",
            return_value=fake_transcripts,
        ),
        patch(
            "reflector.db.transcripts.get_source_storage",
            return_value=fake_sources,
        ),
    ):
        yield
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy(authenticated_client, client, mock_destroy_storage):
    """Soft-delete then destroy, verify transcript gone from DB."""
    resp = await client.post("/transcripts", json={"name": "destroy-me"})
    assert resp.status_code == 200
    tid = resp.json()["id"]

    # Destroy requires a prior soft-delete.
    resp = await client.delete(f"/transcripts/{tid}")
    assert resp.status_code == 200

    resp = await client.delete(f"/transcripts/{tid}/destroy")
    assert resp.status_code == 200
    assert resp.json()["status"] == "ok"

    # Hard delete: the row no longer exists at all.
    assert await transcripts_controller.get_by_id(tid) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_not_soft_deleted(authenticated_client, client):
    """Cannot destroy a transcript that hasn't been soft-deleted."""
    resp = await client.post("/transcripts", json={"name": "not-soft-deleted"})
    assert resp.status_code == 200
    tid = resp.json()["id"]

    # Destroy without soft-delete first is rejected.
    resp = await client.delete(f"/transcripts/{tid}/destroy")
    assert resp.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_with_recording(
    authenticated_client, client, mock_destroy_storage
):
    """Destroying a transcript also hard-deletes its recording from DB."""
    recording = await recordings_controller.create(
        Recording(
            bucket_name="test-bucket",
            object_key="destroy-test.mp4",
            recorded_at=datetime.now(timezone.utc),
        )
    )
    transcript = await transcripts_controller.add(
        name="destroy-with-recording",
        source_kind=SourceKind.ROOM,
        recording_id=recording.id,
        user_id="randomuserid",
    )

    # Soft-delete, then destroy.
    resp = await client.delete(f"/transcripts/{transcript.id}")
    assert resp.status_code == 200
    resp = await client.delete(f"/transcripts/{transcript.id}/destroy")
    assert resp.status_code == 200

    # Both rows are hard-deleted.
    assert await transcripts_controller.get_by_id(transcript.id) is None
    assert await recordings_controller.get_by_id(recording.id) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_forbidden(authenticated_client, client):
    """Cannot destroy another user's deleted transcript."""
    transcript = await transcripts_controller.add(
        name="other-user-destroy",
        source_kind=SourceKind.FILE,
        user_id="some-other-user",
    )
    await transcripts_controller.remove_by_id(transcript.id, user_id="some-other-user")

    # The authenticated_client user ("randomuserid") must be refused.
    resp = await client.delete(f"/transcripts/{transcript.id}/destroy")
    assert resp.status_code == 403
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Isolation tests — verify unrelated data is NOT deleted
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_does_not_delete_meeting(
    authenticated_client, client, mock_destroy_storage
):
    """Destroying a transcript must NOT delete its associated meeting."""
    owner = "randomuserid"
    room = await rooms_controller.add(
        name="room-for-meeting-isolation",
        user_id=owner,
        zulip_auto_post=False,
        zulip_stream="",
        zulip_topic="",
        is_locked=False,
        room_mode="normal",
        recording_type="cloud",
        recording_trigger="automatic-2nd-participant",
        is_shared=False,
        webhook_url="",
        webhook_secret="",
    )
    now = datetime.now(timezone.utc)
    meeting = await meetings_controller.create(
        id="meeting-isolation-test",
        room_name=room.name,
        room_url="https://example.com/room",
        host_room_url="https://example.com/room-host",
        start_date=now,
        end_date=now + timedelta(hours=1),
        room=room,
    )
    rec = await recordings_controller.create(
        Recording(
            bucket_name="test-bucket",
            object_key="meeting-iso.mp4",
            recorded_at=now,
            meeting_id=meeting.id,
        )
    )
    tr = await transcripts_controller.add(
        name="transcript-with-meeting",
        source_kind=SourceKind.ROOM,
        recording_id=rec.id,
        meeting_id=meeting.id,
        room_id=room.id,
        user_id=owner,
    )

    # Soft-delete via the controller, then hard-delete through the API.
    await transcripts_controller.remove_by_id(tr.id, user_id=owner)
    resp = await client.delete(f"/transcripts/{tr.id}/destroy")
    assert resp.status_code == 200

    # The transcript and its recording are gone...
    assert await transcripts_controller.get_by_id(tr.id) is None
    assert await recordings_controller.get_by_id(rec.id) is None

    # ...but the meeting survives untouched.
    fetched = await meetings_controller.get_by_id(meeting.id)
    assert fetched is not None
    assert fetched.id == meeting.id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_does_not_affect_other_transcripts(
    authenticated_client, client, mock_destroy_storage
):
    """Destroying one transcript must not affect another transcript or its recording."""
    user_id = "randomuserid"

    async def _make_recording(object_key):
        # Minimal recording fixture for this test.
        return await recordings_controller.create(
            Recording(
                bucket_name="test-bucket",
                object_key=object_key,
                recorded_at=datetime.now(timezone.utc),
            )
        )

    async def _make_transcript(name, recording_id):
        # File-sourced transcript owned by the test user.
        return await transcripts_controller.add(
            name=name,
            source_kind=SourceKind.FILE,
            recording_id=recording_id,
            user_id=user_id,
        )

    rec1 = await _make_recording("sibling1.mp4")
    rec2 = await _make_recording("sibling2.mp4")
    t1 = await _make_transcript("sibling-1", rec1.id)
    t2 = await _make_transcript("sibling-2", rec2.id)

    # Soft-delete t1, then destroy it through the API.
    await transcripts_controller.remove_by_id(t1.id, user_id=user_id)
    resp = await client.delete(f"/transcripts/{t1.id}/destroy")
    assert resp.status_code == 200

    # t1 and its recording are fully removed.
    assert await transcripts_controller.get_by_id(t1.id) is None
    assert await recordings_controller.get_by_id(rec1.id) is None

    # The sibling transcript and recording remain, not even soft-deleted.
    surviving_transcript = await transcripts_controller.get_by_id(t2.id)
    assert surviving_transcript is not None
    assert surviving_transcript.deleted_at is None
    surviving_recording = await recordings_controller.get_by_id(rec2.id)
    assert surviving_recording is not None
    assert surviving_recording.deleted_at is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transcript_destroy_meeting_with_multiple_transcripts(
    authenticated_client, client, mock_destroy_storage
):
    """Destroying one transcript from a meeting must not affect the other
    transcript, its recording, or the shared meeting."""
    user_id = "randomuserid"
    room = await rooms_controller.add(
        name="room-multi-transcript",
        user_id=user_id,
        zulip_auto_post=False,
        zulip_stream="",
        zulip_topic="",
        is_locked=False,
        room_mode="normal",
        recording_type="cloud",
        recording_trigger="automatic-2nd-participant",
        is_shared=False,
        webhook_url="",
        webhook_secret="",
    )
    now = datetime.now(timezone.utc)
    meeting = await meetings_controller.create(
        id="meeting-multi-transcript-test",
        room_name=room.name,
        room_url="https://example.com/room",
        host_room_url="https://example.com/room-host",
        start_date=now,
        end_date=now + timedelta(hours=1),
        room=room,
    )

    async def _recording_for_meeting(object_key):
        # Recording attached to the shared meeting.
        return await recordings_controller.create(
            Recording(
                bucket_name="test-bucket",
                object_key=object_key,
                recorded_at=now,
                meeting_id=meeting.id,
            )
        )

    async def _transcript_for_meeting(name, recording_id):
        # Room-sourced transcript attached to the shared meeting/room.
        return await transcripts_controller.add(
            name=name,
            source_kind=SourceKind.ROOM,
            recording_id=recording_id,
            meeting_id=meeting.id,
            room_id=room.id,
            user_id=user_id,
        )

    rec1 = await _recording_for_meeting("multi1.mp4")
    rec2 = await _recording_for_meeting("multi2.mp4")
    t1 = await _transcript_for_meeting("multi-t1", rec1.id)
    t2 = await _transcript_for_meeting("multi-t2", rec2.id)

    # Soft-delete then hard-delete the first transcript only.
    await transcripts_controller.remove_by_id(t1.id, user_id=user_id)
    resp = await client.delete(f"/transcripts/{t1.id}/destroy")
    assert resp.status_code == 200

    # The destroyed transcript and its recording are gone.
    assert await transcripts_controller.get_by_id(t1.id) is None
    assert await recordings_controller.get_by_id(rec1.id) is None

    # Its sibling transcript, that recording, and the shared meeting all remain.
    assert (await transcripts_controller.get_by_id(t2.id)) is not None
    assert (await recordings_controller.get_by_id(rec2.id)) is not None
    assert (await meetings_controller.get_by_id(meeting.id)) is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Search tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_search_include_deleted(authenticated_client, client):
    """Search with include_deleted=true returns only deleted transcripts."""
    create_resp = await client.post("/transcripts", json={"name": "search-deleted"})
    assert create_resp.status_code == 200
    tid = create_resp.json()["id"]

    # Soft-delete the freshly created transcript.
    delete_resp = await client.delete(f"/transcripts/{tid}")
    assert delete_resp.status_code == 200

    async def _search_ids(**extra_params):
        # Run a search and return the ids of all results.
        params = {"q": "", **extra_params}
        resp = await client.get("/transcripts/search", params=params)
        assert resp.status_code == 200
        return [r["id"] for r in resp.json()["results"]]

    # A default search hides the deleted transcript...
    assert tid not in await _search_ids()
    # ...while include_deleted surfaces it.
    assert tid in await _search_ids(include_deleted=True)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_search_exclude_deleted_by_default(authenticated_client, client):
    """Normal search excludes deleted transcripts by default."""
    create_resp = await client.post(
        "/transcripts", json={"name": "search-exclude-deleted"}
    )
    assert create_resp.status_code == 200
    tid = create_resp.json()["id"]

    async def _default_search_ids():
        # Default search (no include_deleted flag); returns result ids.
        resp = await client.get("/transcripts/search", params={"q": ""})
        assert resp.status_code == 200
        return [r["id"] for r in resp.json()["results"]]

    # Visible in search while alive.
    assert tid in await _default_search_ids()

    # Soft-delete it.
    delete_resp = await client.delete(f"/transcripts/{tid}")
    assert delete_resp.status_code == 200

    # Gone from the default search afterwards.
    assert tid not in await _default_search_ids()
|
||||
|
||||
420
server/uv.lock
generated
420
server/uv.lock
generated
@@ -74,7 +74,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -85,42 +85,42 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = "2026-01-03T17:29:44.822Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 1899449, upload-time = "2026-01-03T17:29:53.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = "2026-01-03T17:30:05.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/7e/cb94129302d78c46662b47f9897d642fd0b33bdfef4b73b20c6ced35aa4c/aiohttp-3.13.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8ea0c64d1bcbf201b285c2246c51a0c035ba3bbd306640007bc5844a3b4658c1", size = 760027, upload-time = "2026-03-28T17:15:33.022Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/cd/2db3c9397c3bd24216b203dd739945b04f8b87bb036c640da7ddb63c75ef/aiohttp-3.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f742e1fa45c0ed522b00ede565e18f97e4cf8d1883a712ac42d0339dfb0cce7", size = 508325, upload-time = "2026-03-28T17:15:34.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/a3/d28b2722ec13107f2e37a86b8a169897308bab6a3b9e071ecead9d67bd9b/aiohttp-3.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dcfb50ee25b3b7a1222a9123be1f9f89e56e67636b561441f0b304e25aaef8f", size = 502402, upload-time = "2026-03-28T17:15:36.409Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/d6/acd47b5f17c4430e555590990a4746efbcb2079909bb865516892bf85f37/aiohttp-3.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3262386c4ff370849863ea93b9ea60fd59c6cf56bf8f93beac625cf4d677c04d", size = 1771224, upload-time = "2026-03-28T17:15:38.223Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/af/af6e20113ba6a48fd1cd9e5832c4851e7613ef50c7619acdaee6ec5f1aff/aiohttp-3.13.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:473bb5aa4218dd254e9ae4834f20e31f5a0083064ac0136a01a62ddbae2eaa42", size = 1731530, upload-time = "2026-03-28T17:15:39.988Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/16/78a2f5d9c124ad05d5ce59a9af94214b6466c3491a25fb70760e98e9f762/aiohttp-3.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e56423766399b4c77b965f6aaab6c9546617b8994a956821cc507d00b91d978c", size = 1827925, upload-time = "2026-03-28T17:15:41.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/1f/79acf0974ced805e0e70027389fccbb7d728e6f30fcac725fb1071e63075/aiohttp-3.13.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8af249343fafd5ad90366a16d230fc265cf1149f26075dc9fe93cfd7c7173942", size = 1923579, upload-time = "2026-03-28T17:15:44.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/53/29f9e2054ea6900413f3b4c3eb9d8331f60678ec855f13ba8714c47fd48d/aiohttp-3.13.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bc0a5cf4f10ef5a2c94fdde488734b582a3a7a000b131263e27c9295bd682d9", size = 1767655, upload-time = "2026-03-28T17:15:45.911Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/57/462fe1d3da08109ba4aa8590e7aed57c059af2a7e80ec21f4bac5cfe1094/aiohttp-3.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5c7ff1028e3c9fc5123a865ce17df1cb6424d180c503b8517afbe89aa566e6be", size = 1630439, upload-time = "2026-03-28T17:15:48.11Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/4b/4813344aacdb8127263e3eec343d24e973421143826364fa9fc847f6283f/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ba5cf98b5dcb9bddd857da6713a503fa6d341043258ca823f0f5ab7ab4a94ee8", size = 1745557, upload-time = "2026-03-28T17:15:50.13Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/01/1ef1adae1454341ec50a789f03cfafe4c4ac9c003f6a64515ecd32fe4210/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d85965d3ba21ee4999e83e992fecb86c4614d6920e40705501c0a1f80a583c12", size = 1741796, upload-time = "2026-03-28T17:15:52.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/04/8cdd99af988d2aa6922714d957d21383c559835cbd43fbf5a47ddf2e0f05/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:49f0b18a9b05d79f6f37ddd567695943fcefb834ef480f17a4211987302b2dc7", size = 1805312, upload-time = "2026-03-28T17:15:54.407Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/7f/b48d5577338d4b25bbdbae35c75dbfd0493cb8886dc586fbfb2e90862239/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7f78cb080c86fbf765920e5f1ef35af3f24ec4314d6675d0a21eaf41f6f2679c", size = 1621751, upload-time = "2026-03-28T17:15:56.564Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/89/4eecad8c1858e6d0893c05929e22343e0ebe3aec29a8a399c65c3cc38311/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:67a3ec705534a614b68bbf1c70efa777a21c3da3895d1c44510a41f5a7ae0453", size = 1826073, upload-time = "2026-03-28T17:15:58.489Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/5c/9dc8293ed31b46c39c9c513ac7ca152b3c3d38e0ea111a530ad12001b827/aiohttp-3.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6630ec917e85c5356b2295744c8a97d40f007f96a1c76bf1928dc2e27465393", size = 1760083, upload-time = "2026-03-28T17:16:00.677Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/19/8bbf6a4994205d96831f97b7d21a0feed120136e6267b5b22d229c6dc4dc/aiohttp-3.13.4-cp311-cp311-win32.whl", hash = "sha256:54049021bc626f53a5394c29e8c444f726ee5a14b6e89e0ad118315b1f90f5e3", size = 439690, upload-time = "2026-03-28T17:16:02.902Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/f5/ac409ecd1007528d15c3e8c3a57d34f334c70d76cfb7128a28cffdebd4c1/aiohttp-3.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:c033f2bc964156030772d31cbf7e5defea181238ce1f87b9455b786de7d30145", size = 463824, upload-time = "2026-03-28T17:16:05.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -375,31 +375,19 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "banks"
|
||||
version = "2.2.0"
|
||||
version = "2.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "deprecated" },
|
||||
{ name = "filetype" },
|
||||
{ name = "griffe" },
|
||||
{ name = "jinja2" },
|
||||
{ name = "platformdirs" },
|
||||
{ name = "pydantic" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/7d/f8/25ef24814f77f3fd7f0fd3bd1ef3749e38a9dbd23502fbb53034de49900c/banks-2.2.0.tar.gz", hash = "sha256:d1446280ce6e00301e3e952dd754fd8cee23ff277d29ed160994a84d0d7ffe62", size = 179052, upload-time = "2025-07-18T16:28:26.892Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/47/5d/54c79aaaa9aa1278af24cae98d81d6ef635ad840f046bc2ccb5041ddeb1b/banks-2.4.1.tar.gz", hash = "sha256:8cbf1553f14c44d4f7e9c2064ad9212ce53ee4da000b2f8308d548b60db56655", size = 188033, upload-time = "2026-02-17T11:21:14.855Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/d6/f9168956276934162ec8d48232f9920f2985ee45aa7602e3c6b4bc203613/banks-2.2.0-py3-none-any.whl", hash = "sha256:963cd5c85a587b122abde4f4064078def35c50c688c1b9d36f43c92503854e7d", size = 29244, upload-time = "2025-07-18T16:28:27.835Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "beautifulsoup4"
|
||||
version = "4.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "soupsieve" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/5a/f38b49e8b225b0c774e97c9495e52ab9ccdf6d82bde68c513bd736820eb2/banks-2.4.1-py3-none-any.whl", hash = "sha256:40e6d9b6e9b69fb403fa31f2853b3297e4919c1b6f2179b2119d2d4473c6ed13", size = 35032, upload-time = "2026-02-17T11:21:13.236Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -909,15 +897,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "defusedxml"
|
||||
version = "0.7.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deprecated"
|
||||
version = "1.2.18"
|
||||
@@ -1359,17 +1338,18 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "hf-xet"
|
||||
version = "1.1.5"
|
||||
version = "1.4.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694", size = 495969, upload-time = "2025-06-20T21:48:38.007Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/53/92/ec9ad04d0b5728dca387a45af7bc98fbb0d73b2118759f5f6038b61a57e8/hf_xet-1.4.3.tar.gz", hash = "sha256:8ddedb73c8c08928c793df2f3401ec26f95be7f7e516a7bee2fbb546f6676113", size = 670477, upload-time = "2026-03-31T22:40:07.874Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23", size = 2687929, upload-time = "2025-06-20T21:48:32.284Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8", size = 2556338, upload-time = "2025-06-20T21:48:30.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1", size = 3102894, upload-time = "2025-06-20T21:48:28.114Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18", size = 3002134, upload-time = "2025-06-20T21:48:25.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14", size = 3171009, upload-time = "2025-06-20T21:48:33.987Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a", size = 3279245, upload-time = "2025-06-20T21:48:36.051Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245", size = 2738931, upload-time = "2025-06-20T21:48:39.482Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/9f/9c23e4a447b8f83120798f9279d0297a4d1360bdbf59ef49ebec78fe2545/hf_xet-1.4.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d0da85329eaf196e03e90b84c2d0aca53bd4573d097a75f99609e80775f98025", size = 3805048, upload-time = "2026-03-31T22:39:53.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/f8/7aacb8e5f4a7899d39c787b5984e912e6c18b11be136ef13947d7a66d265/hf_xet-1.4.3-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e23717ce4186b265f69afa66e6f0069fe7efbf331546f5c313d00e123dc84583", size = 3562178, upload-time = "2026-03-31T22:39:51.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/9a/a24b26dc8a65f0ecc0fe5be981a19e61e7ca963b85e062c083f3a9100529/hf_xet-1.4.3-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc360b70c815bf340ed56c7b8c63aacf11762a4b099b2fe2c9bd6d6068668c08", size = 4212320, upload-time = "2026-03-31T22:39:42.922Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/60/46d493db155d2ee2801b71fb1b0fd67696359047fdd8caee2c914cc50c79/hf_xet-1.4.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39f2d2e9654cd9b4319885733993807aab6de9dfbd34c42f0b78338d6617421f", size = 3991546, upload-time = "2026-03-31T22:39:41.335Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/f5/067363e1c96c6b17256910830d1b54099d06287e10f4ec6ec4e7e08371fc/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:49ad8a8cead2b56051aa84d7fce3e1335efe68df3cf6c058f22a65513885baac", size = 4193200, upload-time = "2026-03-31T22:40:01.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/4b/53951592882d9c23080c7644542fda34a3813104e9e11fa1a7d82d419cb8/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7716d62015477a70ea272d2d68cd7cad140f61c52ee452e133e139abfe2c17ba", size = 4429392, upload-time = "2026-03-31T22:40:03.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/21/75a6c175b4e79662ad8e62f46a40ce341d8d6b206b06b4320d07d55b188c/hf_xet-1.4.3-cp37-abi3-win_amd64.whl", hash = "sha256:6b591fcad34e272a5b02607485e4f2a1334aebf1bc6d16ce8eb1eb8978ac2021", size = 3677359, upload-time = "2026-03-31T22:40:13.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/7c/44314ecd0e89f8b2b51c9d9e5e7a60a9c1c82024ac471d415860557d3cd8/hf_xet-1.4.3-cp37-abi3-win_arm64.whl", hash = "sha256:7c2c7e20bcfcc946dc67187c203463f5e932e395845d098cc2a93f5b67ca0b47", size = 3533664, upload-time = "2026-03-31T22:40:12.152Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1440,21 +1420,22 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.33.4"
|
||||
version = "1.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "filelock" },
|
||||
{ name = "fsspec" },
|
||||
{ name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
|
||||
{ name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
|
||||
{ name = "httpx" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "requests" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typer" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4b/9e/9366b7349fc125dd68b9d384a0fea84d67b7497753fe92c71b67e13f47c4/huggingface_hub-0.33.4.tar.gz", hash = "sha256:6af13478deae120e765bfd92adad0ae1aec1ad8c439b46f23058ad5956cbca0a", size = 426674, upload-time = "2025-07-11T12:32:48.694Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/40/68d9b286b125d9318ae95c8f8b206e8672e7244b0eea61ebb4a88037638c/huggingface_hub-1.9.1.tar.gz", hash = "sha256:442af372207cc24dcb089caf507fcd7dbc1217c11d6059a06f6b90afe64e8bd2", size = 750355, upload-time = "2026-04-07T13:47:59.167Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/46/7b/98daa50a2db034cab6cd23a3de04fa2358cb691593d28e9130203eb7a805/huggingface_hub-0.33.4-py3-none-any.whl", hash = "sha256:09f9f4e7ca62547c70f8b82767eefadd2667f4e116acba2e3e62a5a81815a7bb", size = 515339, upload-time = "2025-07-11T12:32:46.346Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/af/10a89c54937dccf6c10792770f362d96dd67aedfde108e6e1fd7a0836789/huggingface_hub-1.9.1-py3-none-any.whl", hash = "sha256:8dae771b969b318203727a6c6c5209d25e661f6f0dd010fc09cc4a12cf81c657", size = 637356, upload-time = "2026-04-07T13:47:57.239Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1806,73 +1787,52 @@ wheels = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-cloud"
|
||||
version = "0.1.35"
|
||||
name = "livekit-api"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "httpx" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "aiohttp" },
|
||||
{ name = "livekit-protocol" },
|
||||
{ name = "protobuf" },
|
||||
{ name = "pyjwt" },
|
||||
{ name = "types-protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9b/72/816e6e900448e1b4a8137d90e65876b296c5264a23db6ae888bd3e6660ba/llama_cloud-0.1.35.tar.gz", hash = "sha256:200349d5d57424d7461f304cdb1355a58eea3e6ca1e6b0d75c66b2e937216983", size = 106403, upload-time = "2025-07-28T17:22:06.41Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b5/0a/ad3cce124e608c056d6390244ec4dd18c8a4b5f055693a95831da2119af7/livekit_api-1.1.0.tar.gz", hash = "sha256:f94c000534d3a9b506e6aed2f35eb88db1b23bdea33bb322f0144c4e9f73934e", size = 16649, upload-time = "2025-12-02T19:37:11.452Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/d2/8d18a021ab757cea231428404f21fe3186bf1ebaac3f57a73c379483fd3f/llama_cloud-0.1.35-py3-none-any.whl", hash = "sha256:b7abab4423118e6f638d2f326749e7a07c6426543bea6da99b623c715b22af71", size = 303280, upload-time = "2025-07-28T17:22:04.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/b9/8d8515e3e0e629ab07d399cf858b8fc7e0a02bbf6384a6592b285264b4b9/livekit_api-1.1.0-py3-none-any.whl", hash = "sha256:bfc1c2c65392eb3f580a2c28108269f0e79873f053578a677eee7bb1de8aa8fb", size = 19620, upload-time = "2025-12-02T19:37:10.075Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-cloud-services"
|
||||
version = "0.6.54"
|
||||
name = "livekit-protocol"
|
||||
version = "1.1.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "llama-cloud" },
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "platformdirs" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "tenacity" },
|
||||
{ name = "protobuf" },
|
||||
{ name = "types-protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8a/0c/8ca87d33bea0340a8ed791f36390112aeb29fd3eebfd64b6aef6204a03f0/llama_cloud_services-0.6.54.tar.gz", hash = "sha256:baf65d9bffb68f9dca98ac6e22908b6675b2038b021e657ead1ffc0e43cbd45d", size = 53468, upload-time = "2025-08-01T20:09:20.988Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8e/ca/d15e2a2cc8c8aa4ba621fe5f9ffd1806d88ac91c7b8fa4c09a3c0304dd92/livekit_protocol-1.1.3.tar.gz", hash = "sha256:cb4948d2513e81d91583f4a795bf80faa9026cedda509c5714999c7e33564287", size = 88746, upload-time = "2026-03-18T05:25:43.562Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/48/4e295e3f791b279885a2e584f71e75cbe4ac84e93bba3c36e2668f60a8ac/llama_cloud_services-0.6.54-py3-none-any.whl", hash = "sha256:07f595f7a0ba40c6a1a20543d63024ca7600fe65c4811d1951039977908997be", size = 63874, upload-time = "2025-08-01T20:09:20.076Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/0e/f3d3e48628294df4559cffd0f8e1adf030127029e5a8da9beff9979090a0/livekit_protocol-1.1.3-py3-none-any.whl", hash = "sha256:fdae5640e064ab6549ec3d62d8bac75a3ef44d7ea73716069b419cbe8b360a5c", size = 107498, upload-time = "2026-03-18T05:25:42.077Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index"
|
||||
version = "0.13.0"
|
||||
version = "0.14.20"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-cli" },
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "llama-index-embeddings-openai" },
|
||||
{ name = "llama-index-indices-managed-llama-cloud" },
|
||||
{ name = "llama-index-llms-openai" },
|
||||
{ name = "llama-index-readers-file" },
|
||||
{ name = "llama-index-readers-llama-parse" },
|
||||
{ name = "nltk" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/43/d4a19822e828f02d45d20a73c3d6e2e3dbec8faa0c107d8f851e5fccb192/llama_index-0.13.0.tar.gz", hash = "sha256:00f4c61d96a83af5d770a992006f0039eb671c2a64eaab9da3660bee921177f2", size = 8000, upload-time = "2025-07-31T16:07:44.173Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/24/1b/7b360f7395485c77a81e514ba86fac577a28c799a5737925dd221adc5b9a/llama_index-0.14.20.tar.gz", hash = "sha256:aa6895cee1366a1ab256715fb2f526d57fe346708c76e77d6f319380de70223b", size = 8566, upload-time = "2026-04-03T19:55:46.792Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/df/3da1e5fcee560d78106357b6d91794b68637b866fc152d46ee3331ffed9b/llama_index-0.13.0-py3-none-any.whl", hash = "sha256:028986e73d948b8119dbf2ed6aa2719ece34b4e2d66dd91ae3473de672fc1361", size = 7027, upload-time = "2025-07-31T16:07:42.736Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-cli"
|
||||
version = "0.5.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "llama-index-embeddings-openai" },
|
||||
{ name = "llama-index-llms-openai" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d2/e3/ac6928586e20cfd327a2a38a00781cbc8fae923edcd0316c23e38aae1537/llama_index_cli-0.5.1.tar.gz", hash = "sha256:0446159d85c56c29022c1c830c9886f670d5f59d69343c3c029a3b20eda1a9d8", size = 24821, upload-time = "2025-09-12T15:22:44.064Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/16/b53af5b23921d1e18f57b7a79d557b34554df295c63f5c59d5bee1f5fb47/llama_index_cli-0.5.1-py3-none-any.whl", hash = "sha256:5429b2fd7960df7724c2955b6e6901f6fa910b7b5ecef411c979a8b545a6b7e2", size = 28179, upload-time = "2025-09-12T15:22:43.169Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/37/bc6d45dd6207b82220da7c977aff9238c7b3f55b26d63dc2dfefaf3c394f/llama_index-0.14.20-py3-none-any.whl", hash = "sha256:bf43c6d785ced39a5e12605425bffcc9f79fc1bfe9ff831ea8babec6c1a2adef", size = 7114, upload-time = "2026-04-03T19:55:48.599Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-core"
|
||||
version = "0.13.6"
|
||||
version = "0.14.20"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
@@ -1898,136 +1858,81 @@ dependencies = [
|
||||
{ name = "sqlalchemy", extra = ["asyncio"] },
|
||||
{ name = "tenacity" },
|
||||
{ name = "tiktoken" },
|
||||
{ name = "tinytag" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspect" },
|
||||
{ name = "wrapt" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2d/f8/4f6e2bbc34ec6586456727a644960a1ff2d9db60b92071e213ad9d160456/llama_index_core-0.13.6.tar.gz", hash = "sha256:80315a6bd1f9804f48c1870eff1a0315bf9fe5a413747d53eb88a8ebb2602b97", size = 7232179, upload-time = "2025-09-07T03:27:26.544Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/38/2c/9a1f613fcd59c583c1b4d529948785fd153f97b076e7b0f170d86106357d/llama_index_core-0.14.20.tar.gz", hash = "sha256:5ddb7ecba2131ecd0a452cd730c5361a407d3ffcdcfb1a319525ed8c9a7c423b", size = 11599236, upload-time = "2026-04-03T19:54:52.108Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/35/23/7e497216ece6e041c6a271f2b7952e5609729da0dcdf09dd3f25a4efc1b9/llama_index_core-0.13.6-py3-none-any.whl", hash = "sha256:67bec3c06a8105cd82d83db0f8c3122f4e4d8a4b9c7a2768cced6a2686ddb331", size = 7575324, upload-time = "2025-09-07T03:27:19.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/27/0f0e01c239efddc178713379341aabee7a54ffa8e0a4162ff05a0ab950e0/llama_index_core-0.14.20-py3-none-any.whl", hash = "sha256:c666e395879e73a0aa6c751e5f4c8a8e8637df50f6e66ab9ae6e5d932c816126", size = 11945381, upload-time = "2026-04-03T19:54:55.711Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-embeddings-openai"
|
||||
version = "0.5.1"
|
||||
version = "0.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "openai" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/10/36/90336d054a5061a3f5bc17ac2c18ef63d9d84c55c14d557de484e811ea4d/llama_index_embeddings_openai-0.5.1.tar.gz", hash = "sha256:1c89867a48b0d0daa3d2d44f5e76b394b2b2ef9935932daf921b9e77939ccda8", size = 7020, upload-time = "2025-09-08T20:17:44.681Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/52/eb56a4887501651fb17400f7f571c1878109ff698efbe0bbac9165a5603d/llama_index_embeddings_openai-0.6.0.tar.gz", hash = "sha256:eb3e6606be81cb89125073e23c97c0a6119dabb4827adbd14697c2029ad73f29", size = 7629, upload-time = "2026-03-12T20:21:27.234Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/23/4a/8ab11026cf8deff8f555aa73919be0bac48332683111e5fc4290f352dc50/llama_index_embeddings_openai-0.5.1-py3-none-any.whl", hash = "sha256:a2fcda3398bbd987b5ce3f02367caee8e84a56b930fdf43cc1d059aa9fd20ca5", size = 7011, upload-time = "2025-09-08T20:17:44.015Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-indices-managed-llama-cloud"
|
||||
version = "0.9.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "deprecated" },
|
||||
{ name = "llama-cloud" },
|
||||
{ name = "llama-index-core" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/4a/79044fcb3209583d1ffe0c2a7c19dddfb657a03faeb9fe0cf5a74027e646/llama_index_indices_managed_llama_cloud-0.9.4.tar.gz", hash = "sha256:b5e00752ab30564abf19c57595a2107f5697c3b03b085817b4fca84a38ebbd59", size = 15146, upload-time = "2025-09-08T20:29:58.673Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/6a/0e33245df06afc9766c46a1fe92687be8a09da5d0d0128bc08d84a9f5efa/llama_index_indices_managed_llama_cloud-0.9.4-py3-none-any.whl", hash = "sha256:535a08811046803ca6ab7f8e9d510e926aa5306608b02201ad3d9d21701383bc", size = 17005, upload-time = "2025-09-08T20:29:57.876Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/d1/4bb0b80f4057903110060f617ef519197194b3ff5dd6153d850c8f5676fa/llama_index_embeddings_openai-0.6.0-py3-none-any.whl", hash = "sha256:039bb1007ad4267e25ddb89a206dfdab862bfb87d58da4271a3919e4f9df4d61", size = 7666, upload-time = "2026-03-12T20:21:28.079Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-instrumentation"
|
||||
version = "0.3.0"
|
||||
version = "0.5.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "deprecated" },
|
||||
{ name = "pydantic" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/57/76123657bf6f175382ceddee9af66507c37d603475cbf0968df8dfea9de2/llama_index_instrumentation-0.3.0.tar.gz", hash = "sha256:77741c1d9861ead080e6f98350625971488d1e046bede91cec9e0ce2f63ea34a", size = 42651, upload-time = "2025-07-17T17:41:20.468Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4e/d0/671b23ccff255c9bce132a84ffd5a6f4541ceefdeab9c1786b08c9722f2e/llama_index_instrumentation-0.5.0.tar.gz", hash = "sha256:eeb724648b25d149de882a5ac9e21c5acb1ce780da214bda2b075341af29ad8e", size = 43831, upload-time = "2026-03-12T20:17:06.742Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/d4/9377a53ea2f9bdd33f5ccff78ac863705657f422bb686cad4896b058ce46/llama_index_instrumentation-0.3.0-py3-none-any.whl", hash = "sha256:edfcd71aedc453dbdb4a7073a1e39ddef6ae2c13601a4cba6f2dfea38f48eeff", size = 15011, upload-time = "2025-07-17T17:41:19.723Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/45/6dcaccef44e541ffa138e4b45e33e0d40ab2a7d845338483954fcf77bc75/llama_index_instrumentation-0.5.0-py3-none-any.whl", hash = "sha256:aaab83cddd9dd434278891012d8995f47a3bc7ed1736a371db90965348c56a21", size = 16444, upload-time = "2026-03-12T20:17:05.957Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-llms-openai"
|
||||
version = "0.5.6"
|
||||
version = "0.7.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "openai" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d7/fe/ac57ecb9b5ea4243e097fbc3f5de22f6bd1a787b72a7c80542af80afbf4d/llama_index_llms_openai-0.5.6.tar.gz", hash = "sha256:92533e83be2eb321d84a01a84fb2bf4506bf684c410cd94ccb29ae6c949a27d4", size = 24239, upload-time = "2025-09-08T20:46:25.018Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/65/27/18a7fd0873023aed145332dab5a09b95b298e4fff1c21685eaf22b629d87/llama_index_llms_openai-0.7.5.tar.gz", hash = "sha256:54123e679a7cddc1f2e969f278a4654050730daf84691731a0c53ae14feac3c7", size = 27423, upload-time = "2026-03-30T16:30:33.973Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/e2/d78be8cbc645668eba088223d63114a076758626fe12e3b4ec9efa2ba342/llama_index_llms_openai-0.5.6-py3-none-any.whl", hash = "sha256:a93a897fe733a6d7b668cbc6cca546e644054ddf5497821141b2d4b5ffb6ea80", size = 25368, upload-time = "2025-09-08T20:46:23.79Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/62/a847e9a94c2f92926c30188259f9f86e019dcc45122bbb222dea35a74c02/llama_index_llms_openai-0.7.5-py3-none-any.whl", hash = "sha256:c302c6386873420df3714c3d538f45379b6de27ab6a531f30c67419b39a538f5", size = 28492, upload-time = "2026-03-30T16:30:32.979Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-llms-openai-like"
|
||||
version = "0.5.1"
|
||||
version = "0.7.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "llama-index-llms-openai" },
|
||||
{ name = "transformers" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8f/81/41b328a13262c287a1e56bb93eff1564164db53ad5773961c378e23dba36/llama_index_llms_openai_like-0.5.1.tar.gz", hash = "sha256:77044a5c2d1e4743435751dd9d39a2281bc9de969f9b90196fe4e2b9f773a352", size = 4899, upload-time = "2025-09-08T20:29:47.603Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9d/9f/0d98d022a08f43d4374998072636ec50a7cb50009bbb9a2761f5b26a78cc/llama_index_llms_openai_like-0.7.1.tar.gz", hash = "sha256:ce7cef3686b1e62d7c08134b4d8ca56706cca816e4c4098eaede33002829a6f9", size = 5177, upload-time = "2026-03-13T16:15:58.156Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/63/57/ab83d7e098a88dc101d56c22584f279dd632785af3bc1e9b84b9b598264d/llama_index_llms_openai_like-0.5.1-py3-none-any.whl", hash = "sha256:0d196d9cd71f7a695a647767c3b09b8e532031f15a86a8d8519645bf77ac3b75", size = 4594, upload-time = "2025-09-08T20:29:46.883Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-readers-file"
|
||||
version = "0.5.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "defusedxml" },
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "pandas" },
|
||||
{ name = "pypdf" },
|
||||
{ name = "striprtf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a0/e5/dccfb495dbc40f50fcfb799db2287ac5dca4a16a3b09bae61a4ccb1788d3/llama_index_readers_file-0.5.6.tar.gz", hash = "sha256:1c08b14facc2dfe933622aaa26dc7d2a7a6023c42d3db896a2c948789edaf1ea", size = 32535, upload-time = "2025-12-24T16:04:16.421Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/c3/8d28eaa962e073e6735d80847dda9fd3525cb9ff5974ae82dd20621a5a02/llama_index_readers_file-0.5.6-py3-none-any.whl", hash = "sha256:32e83f9adb4e4803e6c7cef746c44fa0949013b1cb76f06f422e9491d198dbda", size = 51832, upload-time = "2025-12-24T16:04:17.307Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-readers-llama-parse"
|
||||
version = "0.5.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core" },
|
||||
{ name = "llama-parse" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/77/5bfaab20e6ec8428dbf2352e18be550c957602723d69383908176b5686cd/llama_index_readers_llama_parse-0.5.1.tar.gz", hash = "sha256:2b78b73faa933e30e6c69df351e4e9f36dfe2ae142e2ab3969ddd2ac48930e37", size = 3858, upload-time = "2025-09-08T20:41:29.201Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/68/81/52410c7245dcbf1a54756a9ce3892cdd167ec0b884d696de1304ca3f452e/llama_index_readers_llama_parse-0.5.1-py3-none-any.whl", hash = "sha256:0d41450ed29b0c49c024e206ef6c8e662b1854e77a1c5faefed3b958be54f880", size = 3203, upload-time = "2025-09-08T20:41:28.438Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/22/a1e1ec1112c69ca0a379cd72691c36cdbcba78362622ce9a27e5a97965cc/llama_index_llms_openai_like-0.7.1-py3-none-any.whl", hash = "sha256:831f1144077c6f9ea7a62e987b7f2af00310dded3056edca2cb509f70a3e650a", size = 4860, upload-time = "2026-03-13T16:15:59.113Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-workflows"
|
||||
version = "1.2.0"
|
||||
version = "2.17.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-instrumentation" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/26/9d/9dc7adc10d9976582bf50b074883986cb36b46f2fe45cf60550767300a29/llama_index_workflows-1.2.0.tar.gz", hash = "sha256:f6b19f01a340a1afb1d2fd2285c9dce346e304a3aae519e6103059f5afb2609f", size = 1019113, upload-time = "2025-07-23T18:32:47.86Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f7/36/07f0a6c7173e33f46d6e1754b73c0b40ba5368bf623fb2727d5889cc2f93/llama_index_workflows-2.17.3.tar.gz", hash = "sha256:85f6dcdbf214700ab0741dc3225ad4eaaf1c90fd9f0e082588aa70c4735b26c3", size = 87703, upload-time = "2026-04-07T21:59:10.662Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/36/c1/5190f102a042d36a6a495de27510c2d6e3aca98f892895bfacdcf9109c1d/llama_index_workflows-1.2.0-py3-none-any.whl", hash = "sha256:5722a7ce137e00361025768789e7e77720cd66f855791050183a3c540b6e5b8c", size = 37463, upload-time = "2025-07-23T18:32:46.294Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-parse"
|
||||
version = "0.6.43"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-cloud-services" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/79/62/22e3f73a2b33b9db1523573611281010c8258bf1d17408913e8e46bdfe58/llama_parse-0.6.43.tar.gz", hash = "sha256:d88e91c97e37f77b2619111ef43c02b7da61125f821cf77f918996eb48200d78", size = 3536, upload-time = "2025-07-08T18:20:58.786Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/24/8497595be04a8a0209536e9ce70d4132f8f8e001986f4c700414b3777758/llama_parse-0.6.43-py3-none-any.whl", hash = "sha256:fe435309638c4fdec4fec31f97c5031b743c92268962d03b99bd76704f566c32", size = 4944, upload-time = "2025-07-08T18:20:57.089Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/d9/b83117e1482cbfcbffca565a070e2e1c228f840f1139dc83dd21bf1f5212/llama_index_workflows-2.17.3-py3-none-any.whl", hash = "sha256:5299775835b521a7ecca0099ad7a9b14e1ce26eb83277fbcc14071dfac54a404", size = 111543, upload-time = "2026-04-07T21:59:09.406Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2371,7 +2276,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.97.0"
|
||||
version = "2.30.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
@@ -2383,9 +2288,9 @@ dependencies = [
|
||||
{ name = "tqdm" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e0/c6/b8d66e4f3b95493a8957065b24533333c927dc23817abe397f13fe589c6e/openai-1.97.0.tar.gz", hash = "sha256:0be349569ccaa4fb54f97bb808423fd29ccaeb1246ee1be762e0c81a47bae0aa", size = 493850, upload-time = "2025-07-16T16:37:35.196Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/88/15/52580c8fbc16d0675d516e8749806eda679b16de1e4434ea06fb6feaa610/openai-2.30.0.tar.gz", hash = "sha256:92f7661c990bda4b22a941806c83eabe4896c3094465030dd882a71abe80c885", size = 676084, upload-time = "2026-03-25T22:08:59.96Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/91/1f1cf577f745e956b276a8b1d3d76fa7a6ee0c2b05db3b001b900f2c71db/openai-1.97.0-py3-none-any.whl", hash = "sha256:a1c24d96f4609f3f7f51c9e1c2606d97cc6e334833438659cfd687e9c972c610", size = 764953, upload-time = "2025-07-16T16:37:33.135Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/9e/5bfa2270f902d5b92ab7d41ce0475b8630572e71e349b2a4996d14bdda93/openai-2.30.0-py3-none-any.whl", hash = "sha256:9a5ae616888eb2748ec5e0c5b955a51592e0b201a11f4262db920f2a78c5231d", size = 1146656, upload-time = "2026-03-25T22:08:58.2Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2983,15 +2888,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pypdf"
|
||||
version = "6.9.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/31/83/691bdb309306232362503083cb15777491045dd54f45393a317dc7d8082f/pypdf-6.9.2.tar.gz", hash = "sha256:7f850faf2b0d4ab936582c05da32c52214c2b089d61a316627b5bfb5b0dab46c", size = 5311837, upload-time = "2026-03-23T14:53:27.983Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/7e/c85f41243086a8fe5d1baeba527cb26a1918158a565932b41e0f7c0b32e9/pypdf-6.9.2-py3-none-any.whl", hash = "sha256:662cf29bcb419a36a1365232449624ab40b7c2d0cfc28e54f42eeecd1fd7e844", size = 333744, upload-time = "2026-03-23T14:53:26.573Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyreadline3"
|
||||
version = "3.5.4"
|
||||
@@ -3364,6 +3260,7 @@ dependencies = [
|
||||
{ name = "httpx" },
|
||||
{ name = "icalendar" },
|
||||
{ name = "jsonschema" },
|
||||
{ name = "livekit-api" },
|
||||
{ name = "llama-index" },
|
||||
{ name = "llama-index-llms-openai-like" },
|
||||
{ name = "openai" },
|
||||
@@ -3445,6 +3342,7 @@ requires-dist = [
|
||||
{ name = "httpx", specifier = ">=0.24.1" },
|
||||
{ name = "icalendar", specifier = ">=6.0.0" },
|
||||
{ name = "jsonschema", specifier = ">=4.23.0" },
|
||||
{ name = "livekit-api", specifier = ">=1.1.0" },
|
||||
{ name = "llama-index", specifier = ">=0.12.52" },
|
||||
{ name = "llama-index-llms-openai-like", specifier = ">=0.4.0" },
|
||||
{ name = "openai", specifier = ">=1.59.7" },
|
||||
@@ -3939,15 +3837,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl", hash = "sha256:1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9", size = 1019162, upload-time = "2025-01-25T09:16:59.573Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "soxr"
|
||||
version = "1.0.0"
|
||||
@@ -4043,15 +3932,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "striprtf"
|
||||
version = "0.0.26"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/25/20/3d419008265346452d09e5dadfd5d045b64b40d8fc31af40588e6c76997a/striprtf-0.0.26.tar.gz", hash = "sha256:fdb2bba7ac440072d1c41eab50d8d74ae88f60a8b6575c6e2c7805dc462093aa", size = 6258, upload-time = "2023-07-20T14:30:36.29Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/cf/0fea4f4ba3fc2772ac2419278aa9f6964124d4302117d61bc055758e000c/striprtf-0.0.26-py3-none-any.whl", hash = "sha256:8c8f9d32083cdc2e8bfb149455aa1cc5a4e0a035893bedc75db8b73becb3a1bb", size = 6914, upload-time = "2023-07-20T14:30:35.338Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "structlog"
|
||||
version = "25.4.0"
|
||||
@@ -4138,29 +4018,39 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinytag"
|
||||
version = "2.2.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/96/59/8a8cb2331e2602b53e4dc06960f57d1387a2b18e7efd24e5f9cb60ea4925/tinytag-2.2.1.tar.gz", hash = "sha256:e6d06610ebe7cd66fd07be2d3b9495914ab32654a5e47657bb8cd44c2484523c", size = 38214, upload-time = "2026-03-15T18:48:01.11Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/34/d50e338631baaf65ec5396e70085e5de0b52b24b28db1ffbc1c6e82190dc/tinytag-2.2.1-py3-none-any.whl", hash = "sha256:ed8b1e6d25367937e3321e054f4974f9abfde1a3e0a538824c87da377130c2b6", size = 32927, upload-time = "2026-03-15T18:47:59.613Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokenizers"
|
||||
version = "0.21.2"
|
||||
version = "0.22.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "huggingface-hub" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hash = "sha256:fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77", size = 351545, upload-time = "2025-06-24T10:24:52.449Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec", size = 2875206, upload-time = "2025-06-24T10:24:42.755Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f", size = 2732655, upload-time = "2025-06-24T10:24:41.56Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12", size = 3019202, upload-time = "2025-06-24T10:24:31.791Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91", size = 2934539, upload-time = "2025-06-24T10:24:34.567Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb", size = 3248665, upload-time = "2025-06-24T10:24:39.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab", size = 3451305, upload-time = "2025-06-24T10:24:36.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae", size = 3214757, upload-time = "2025-06-24T10:24:37.784Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020", size = 3121887, upload-time = "2025-06-24T10:24:40.293Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19", size = 9091965, upload-time = "2025-06-24T10:24:44.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d", size = 9053372, upload-time = "2025-06-24T10:24:46.455Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365", size = 9298632, upload-time = "2025-06-24T10:24:48.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958", size = 9470074, upload-time = "2025-06-24T10:24:50.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl", hash = "sha256:cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962", size = 2330115, upload-time = "2025-06-24T10:24:55.069Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl", hash = "sha256:58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98", size = 2509918, upload-time = "2025-06-24T10:24:53.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = "2026-01-05T10:41:02.158Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = "2026-01-05T10:40:51.139Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4210,8 +4100,8 @@ dependencies = [
|
||||
{ name = "typing-extensions", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:3d05017d19bc99741288e458888283a44b0ee881d53f05f72f8b1cfea8998122" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:a47b7986bee3f61ad217d8a8ce24605809ab425baf349f97de758815edd2ef54" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:3d05017d19bc99741288e458888283a44b0ee881d53f05f72f8b1cfea8998122" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:a47b7986bee3f61ad217d8a8ce24605809ab425baf349f97de758815edd2ef54" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4242,16 +4132,16 @@ dependencies = [
|
||||
{ name = "typing-extensions", marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d" },
|
||||
{ url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-linux_s390x.whl", hash = "sha256:2bfc013dd6efdc8f8223a0241d3529af9f315dffefb53ffa3bf14d3f10127da6" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:680129efdeeec3db5da3f88ee5d28c1b1e103b774aef40f9d638e2cce8f8d8d8" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cb06175284673a581dd91fb1965662ae4ecaba6e5c357aa0ea7bb8b84b6b7eeb" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_amd64.whl", hash = "sha256:7631ef49fbd38d382909525b83696dc12a55d68492ade4ace3883c62b9fc140f" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp311-cp311-win_arm64.whl", hash = "sha256:41e6fc5ec0914fcdce44ccf338b1d19a441b55cafdd741fd0bf1af3f9e4cfd14" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-linux_s390x.whl", hash = "sha256:0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d" },
|
||||
{ url = "https://download-r2.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4365,7 +4255,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "transformers"
|
||||
version = "4.53.2"
|
||||
version = "5.0.0rc3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "filelock" },
|
||||
@@ -4378,25 +4268,47 @@ dependencies = [
|
||||
{ name = "safetensors" },
|
||||
{ name = "tokenizers" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typer-slim" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4c/67/80f51466ec447028fd84469b208eb742533ce06cc8fad2e3181380199e5c/transformers-4.53.2.tar.gz", hash = "sha256:6c3ed95edfb1cba71c4245758f1b4878c93bf8cde77d076307dacb2cbbd72be2", size = 9201233, upload-time = "2025-07-11T12:39:08.742Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3f/a3/7c116a8d85f69ea7749cf4c2df79e64c35d028e5fc7ea0168f299d03b8c7/transformers-5.0.0rc3.tar.gz", hash = "sha256:a0315b92b7e087617ade42ec9e6e92ee7620541cc5d6a3331886c52cbe306f5c", size = 8388520, upload-time = "2026-01-14T16:49:02.952Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/96/88/beb33a79a382fcd2aed0be5222bdc47f41e4bfe7aaa90ae1374f1d8ea2af/transformers-4.53.2-py3-none-any.whl", hash = "sha256:db8f4819bb34f000029c73c3c557e7d06fc1b8e612ec142eecdae3947a9c78bf", size = 10826609, upload-time = "2025-07-11T12:39:05.461Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/f2/ae2b8968764253bdf38a48dee3c299b8d0bedf7c8ffbe3449fca9bd95338/transformers-5.0.0rc3-py3-none-any.whl", hash = "sha256:383fad27f4f73092d330e45fae384681e5c8521e1dc1cf6cb1a297780e68bf2d", size = 10107087, upload-time = "2026-01-14T16:48:59.393Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
version = "0.16.0"
|
||||
version = "0.24.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "click" },
|
||||
{ name = "rich" },
|
||||
{ name = "shellingham" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625, upload-time = "2025-05-26T14:30:31.824Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typer-slim"
|
||||
version = "0.24.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typer" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/a7/e6aecc4b4eb59598829a3b5076a93aff291b4fdaa2ded25efc4e1f4d219c/typer_slim-0.24.0.tar.gz", hash = "sha256:f0ed36127183f52ae6ced2ecb2521789995992c521a46083bfcdbb652d22ad34", size = 4776, upload-time = "2026-02-16T22:08:51.2Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/24/5480c20380dfd18cf33d14784096dca45a24eae6102e91d49a718d3b6855/typer_slim-0.24.0-py3-none-any.whl", hash = "sha256:d5d7ee1ee2834d5020c7c616ed5e0d0f29b9a4b1dd283bdebae198ec09778d0e", size = 3394, upload-time = "2026-02-16T22:08:49.92Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-protobuf"
|
||||
version = "6.32.1.20260221"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5f/e2/9aa4a3b2469508bd7b4e2ae11cbedaf419222a09a1b94daffcd5efca4023/types_protobuf-6.32.1.20260221.tar.gz", hash = "sha256:6d5fb060a616bfb076cbb61b4b3c3969f5fc8bec5810f9a2f7e648ee5cbcbf6e", size = 64408, upload-time = "2026-02-21T03:55:13.916Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/e8/1fd38926f9cf031188fbc5a96694203ea6f24b0e34bd64a225ec6f6291ba/types_protobuf-6.32.1.20260221-py3-none-any.whl", hash = "sha256:da7cdd947975964a93c30bfbcc2c6841ee646b318d3816b033adc2c4eb6448e4", size = 77956, upload-time = "2026-02-21T03:55:12.894Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -34,11 +34,11 @@ export default function DeleteTranscriptDialog({
|
||||
<Dialog.Positioner>
|
||||
<Dialog.Content>
|
||||
<Dialog.Header fontSize="lg" fontWeight="bold">
|
||||
Delete transcript
|
||||
Move to Trash
|
||||
</Dialog.Header>
|
||||
<Dialog.Body>
|
||||
Are you sure you want to delete this transcript? This action cannot
|
||||
be undone.
|
||||
This transcript will be moved to the trash. You can restore it later
|
||||
from the Trash view.
|
||||
{title && (
|
||||
<Text mt={3} fontWeight="600">
|
||||
{title}
|
||||
@@ -71,7 +71,7 @@ export default function DeleteTranscriptDialog({
|
||||
ml={3}
|
||||
disabled={!!isLoading}
|
||||
>
|
||||
Delete
|
||||
Move to Trash
|
||||
</Button>
|
||||
</Dialog.Footer>
|
||||
</Dialog.Content>
|
||||
|
||||
83
www/app/(app)/browse/_components/DestroyTranscriptDialog.tsx
Normal file
83
www/app/(app)/browse/_components/DestroyTranscriptDialog.tsx
Normal file
@@ -0,0 +1,83 @@
|
||||
import React from "react";
|
||||
import { Button, Dialog, Text } from "@chakra-ui/react";
|
||||
|
||||
interface DestroyTranscriptDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onConfirm: () => void;
|
||||
cancelRef: React.RefObject<any>;
|
||||
isLoading?: boolean;
|
||||
title?: string;
|
||||
date?: string;
|
||||
source?: string;
|
||||
}
|
||||
|
||||
export default function DestroyTranscriptDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onConfirm,
|
||||
cancelRef,
|
||||
isLoading,
|
||||
title,
|
||||
date,
|
||||
source,
|
||||
}: DestroyTranscriptDialogProps) {
|
||||
return (
|
||||
<Dialog.Root
|
||||
open={isOpen}
|
||||
onOpenChange={(e) => {
|
||||
if (!e.open) onClose();
|
||||
}}
|
||||
initialFocusEl={() => cancelRef.current}
|
||||
>
|
||||
<Dialog.Backdrop />
|
||||
<Dialog.Positioner>
|
||||
<Dialog.Content>
|
||||
<Dialog.Header fontSize="lg" fontWeight="bold">
|
||||
Permanently Destroy Transcript
|
||||
</Dialog.Header>
|
||||
<Dialog.Body>
|
||||
<Text color="red.600" fontWeight="medium">
|
||||
This will permanently delete this transcript and all its
|
||||
associated audio files. This action cannot be undone.
|
||||
</Text>
|
||||
{title && (
|
||||
<Text mt={3} fontWeight="600">
|
||||
{title}
|
||||
</Text>
|
||||
)}
|
||||
{date && (
|
||||
<Text color="gray.600" fontSize="sm">
|
||||
Date: {date}
|
||||
</Text>
|
||||
)}
|
||||
{source && (
|
||||
<Text color="gray.600" fontSize="sm">
|
||||
Source: {source}
|
||||
</Text>
|
||||
)}
|
||||
</Dialog.Body>
|
||||
<Dialog.Footer>
|
||||
<Button
|
||||
ref={cancelRef as any}
|
||||
onClick={onClose}
|
||||
disabled={!!isLoading}
|
||||
variant="outline"
|
||||
colorPalette="gray"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
colorPalette="red"
|
||||
onClick={onConfirm}
|
||||
ml={3}
|
||||
disabled={!!isLoading}
|
||||
>
|
||||
Destroy
|
||||
</Button>
|
||||
</Dialog.Footer>
|
||||
</Dialog.Content>
|
||||
</Dialog.Positioner>
|
||||
</Dialog.Root>
|
||||
);
|
||||
}
|
||||
@@ -1,8 +1,9 @@
|
||||
"use client";
|
||||
|
||||
import React from "react";
|
||||
import { Box, Stack, Link, Heading } from "@chakra-ui/react";
|
||||
import { Box, Stack, Link, Heading, Flex } from "@chakra-ui/react";
|
||||
import NextLink from "next/link";
|
||||
import { LuTrash2 } from "react-icons/lu";
|
||||
import type { components } from "../../../reflector-api";
|
||||
|
||||
type Room = components["schemas"]["Room"];
|
||||
@@ -13,6 +14,9 @@ interface FilterSidebarProps {
|
||||
selectedSourceKind: SourceKind | null;
|
||||
selectedRoomId: string;
|
||||
onFilterChange: (sourceKind: SourceKind | null, roomId: string) => void;
|
||||
isTrashView: boolean;
|
||||
onTrashClick: () => void;
|
||||
isAuthenticated: boolean;
|
||||
}
|
||||
|
||||
export default function FilterSidebar({
|
||||
@@ -20,6 +24,9 @@ export default function FilterSidebar({
|
||||
selectedSourceKind,
|
||||
selectedRoomId,
|
||||
onFilterChange,
|
||||
isTrashView,
|
||||
onTrashClick,
|
||||
isAuthenticated,
|
||||
}: FilterSidebarProps) {
|
||||
const myRooms = rooms.filter((room) => !room.is_shared);
|
||||
const sharedRooms = rooms.filter((room) => room.is_shared);
|
||||
@@ -32,8 +39,14 @@ export default function FilterSidebar({
|
||||
fontSize="sm"
|
||||
href="#"
|
||||
onClick={() => onFilterChange(null, "")}
|
||||
color={selectedSourceKind === null ? "blue.500" : "gray.600"}
|
||||
fontWeight={selectedSourceKind === null ? "bold" : "normal"}
|
||||
color={
|
||||
!isTrashView && selectedSourceKind === null
|
||||
? "blue.500"
|
||||
: "gray.600"
|
||||
}
|
||||
fontWeight={
|
||||
!isTrashView && selectedSourceKind === null ? "bold" : "normal"
|
||||
}
|
||||
>
|
||||
All Transcripts
|
||||
</Link>
|
||||
@@ -51,12 +64,16 @@ export default function FilterSidebar({
|
||||
href="#"
|
||||
onClick={() => onFilterChange("room", room.id)}
|
||||
color={
|
||||
selectedSourceKind === "room" && selectedRoomId === room.id
|
||||
!isTrashView &&
|
||||
selectedSourceKind === "room" &&
|
||||
selectedRoomId === room.id
|
||||
? "blue.500"
|
||||
: "gray.600"
|
||||
}
|
||||
fontWeight={
|
||||
selectedSourceKind === "room" && selectedRoomId === room.id
|
||||
!isTrashView &&
|
||||
selectedSourceKind === "room" &&
|
||||
selectedRoomId === room.id
|
||||
? "bold"
|
||||
: "normal"
|
||||
}
|
||||
@@ -79,12 +96,16 @@ export default function FilterSidebar({
|
||||
href="#"
|
||||
onClick={() => onFilterChange("room" as SourceKind, room.id)}
|
||||
color={
|
||||
selectedSourceKind === "room" && selectedRoomId === room.id
|
||||
!isTrashView &&
|
||||
selectedSourceKind === "room" &&
|
||||
selectedRoomId === room.id
|
||||
? "blue.500"
|
||||
: "gray.600"
|
||||
}
|
||||
fontWeight={
|
||||
selectedSourceKind === "room" && selectedRoomId === room.id
|
||||
!isTrashView &&
|
||||
selectedSourceKind === "room" &&
|
||||
selectedRoomId === room.id
|
||||
? "bold"
|
||||
: "normal"
|
||||
}
|
||||
@@ -101,9 +122,15 @@ export default function FilterSidebar({
|
||||
as={NextLink}
|
||||
href="#"
|
||||
onClick={() => onFilterChange("live", "")}
|
||||
color={selectedSourceKind === "live" ? "blue.500" : "gray.600"}
|
||||
color={
|
||||
!isTrashView && selectedSourceKind === "live"
|
||||
? "blue.500"
|
||||
: "gray.600"
|
||||
}
|
||||
_hover={{ color: "blue.300" }}
|
||||
fontWeight={selectedSourceKind === "live" ? "bold" : "normal"}
|
||||
fontWeight={
|
||||
!isTrashView && selectedSourceKind === "live" ? "bold" : "normal"
|
||||
}
|
||||
fontSize="sm"
|
||||
>
|
||||
Live Transcripts
|
||||
@@ -112,13 +139,39 @@ export default function FilterSidebar({
|
||||
as={NextLink}
|
||||
href="#"
|
||||
onClick={() => onFilterChange("file", "")}
|
||||
color={selectedSourceKind === "file" ? "blue.500" : "gray.600"}
|
||||
color={
|
||||
!isTrashView && selectedSourceKind === "file"
|
||||
? "blue.500"
|
||||
: "gray.600"
|
||||
}
|
||||
_hover={{ color: "blue.300" }}
|
||||
fontWeight={selectedSourceKind === "file" ? "bold" : "normal"}
|
||||
fontWeight={
|
||||
!isTrashView && selectedSourceKind === "file" ? "bold" : "normal"
|
||||
}
|
||||
fontSize="sm"
|
||||
>
|
||||
Uploaded Files
|
||||
</Link>
|
||||
|
||||
{isAuthenticated && (
|
||||
<>
|
||||
<Box borderBottomWidth="1px" my={2} />
|
||||
<Link
|
||||
as={NextLink}
|
||||
href="#"
|
||||
onClick={onTrashClick}
|
||||
color={isTrashView ? "red.600" : "red.500"}
|
||||
_hover={{ color: "red.400" }}
|
||||
fontWeight={isTrashView ? "bold" : "normal"}
|
||||
fontSize="sm"
|
||||
>
|
||||
<Flex align="center" gap={1}>
|
||||
<LuTrash2 />
|
||||
Trash
|
||||
</Flex>
|
||||
</Link>
|
||||
</>
|
||||
)}
|
||||
</Stack>
|
||||
</Box>
|
||||
);
|
||||
|
||||
@@ -1,17 +1,21 @@
|
||||
import React from "react";
|
||||
import { IconButton, Icon, Menu } from "@chakra-ui/react";
|
||||
import { LuMenu, LuTrash, LuRotateCw } from "react-icons/lu";
|
||||
import { IconButton, Menu } from "@chakra-ui/react";
|
||||
import { LuMenu, LuTrash, LuRotateCw, LuUndo2 } from "react-icons/lu";
|
||||
|
||||
interface TranscriptActionsMenuProps {
|
||||
transcriptId: string;
|
||||
onDelete: (transcriptId: string) => void;
|
||||
onReprocess: (transcriptId: string) => void;
|
||||
onDelete?: (transcriptId: string) => void;
|
||||
onReprocess?: (transcriptId: string) => void;
|
||||
onRestore?: (transcriptId: string) => void;
|
||||
onDestroy?: (transcriptId: string) => void;
|
||||
}
|
||||
|
||||
export default function TranscriptActionsMenu({
|
||||
transcriptId,
|
||||
onDelete,
|
||||
onReprocess,
|
||||
onRestore,
|
||||
onDestroy,
|
||||
}: TranscriptActionsMenuProps) {
|
||||
return (
|
||||
<Menu.Root closeOnSelect={true} lazyMount={true}>
|
||||
@@ -22,21 +26,42 @@ export default function TranscriptActionsMenu({
|
||||
</Menu.Trigger>
|
||||
<Menu.Positioner>
|
||||
<Menu.Content>
|
||||
<Menu.Item
|
||||
value="reprocess"
|
||||
onClick={() => onReprocess(transcriptId)}
|
||||
>
|
||||
<LuRotateCw /> Reprocess
|
||||
</Menu.Item>
|
||||
<Menu.Item
|
||||
value="delete"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onDelete(transcriptId);
|
||||
}}
|
||||
>
|
||||
<LuTrash /> Delete
|
||||
</Menu.Item>
|
||||
{onReprocess && (
|
||||
<Menu.Item
|
||||
value="reprocess"
|
||||
onClick={() => onReprocess(transcriptId)}
|
||||
>
|
||||
<LuRotateCw /> Reprocess
|
||||
</Menu.Item>
|
||||
)}
|
||||
{onDelete && (
|
||||
<Menu.Item
|
||||
value="delete"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onDelete(transcriptId);
|
||||
}}
|
||||
>
|
||||
<LuTrash /> Delete
|
||||
</Menu.Item>
|
||||
)}
|
||||
{onRestore && (
|
||||
<Menu.Item value="restore" onClick={() => onRestore(transcriptId)}>
|
||||
<LuUndo2 /> Restore
|
||||
</Menu.Item>
|
||||
)}
|
||||
{onDestroy && (
|
||||
<Menu.Item
|
||||
value="destroy"
|
||||
color="red.500"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onDestroy(transcriptId);
|
||||
}}
|
||||
>
|
||||
<LuTrash /> Destroy
|
||||
</Menu.Item>
|
||||
)}
|
||||
</Menu.Content>
|
||||
</Menu.Positioner>
|
||||
</Menu.Root>
|
||||
|
||||
@@ -29,8 +29,11 @@ interface TranscriptCardsProps {
|
||||
results: SearchResult[];
|
||||
query: string;
|
||||
isLoading?: boolean;
|
||||
onDelete: (transcriptId: string) => void;
|
||||
onReprocess: (transcriptId: string) => void;
|
||||
isTrash?: boolean;
|
||||
onDelete?: (transcriptId: string) => void;
|
||||
onReprocess?: (transcriptId: string) => void;
|
||||
onRestore?: (transcriptId: string) => void;
|
||||
onDestroy?: (transcriptId: string) => void;
|
||||
}
|
||||
|
||||
function highlightText(text: string, query: string): React.ReactNode {
|
||||
@@ -102,13 +105,19 @@ const transcriptHref = (
|
||||
function TranscriptCard({
|
||||
result,
|
||||
query,
|
||||
isTrash,
|
||||
onDelete,
|
||||
onReprocess,
|
||||
onRestore,
|
||||
onDestroy,
|
||||
}: {
|
||||
result: SearchResult;
|
||||
query: string;
|
||||
onDelete: (transcriptId: string) => void;
|
||||
onReprocess: (transcriptId: string) => void;
|
||||
isTrash?: boolean;
|
||||
onDelete?: (transcriptId: string) => void;
|
||||
onReprocess?: (transcriptId: string) => void;
|
||||
onRestore?: (transcriptId: string) => void;
|
||||
onDestroy?: (transcriptId: string) => void;
|
||||
}) {
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
|
||||
@@ -136,22 +145,36 @@ function TranscriptCard({
|
||||
};
|
||||
|
||||
return (
|
||||
<Box borderWidth={1} p={4} borderRadius="md" fontSize="sm">
|
||||
<Box
|
||||
borderWidth={1}
|
||||
p={4}
|
||||
borderRadius="md"
|
||||
fontSize="sm"
|
||||
borderLeftWidth={isTrash ? "3px" : 1}
|
||||
borderLeftColor={isTrash ? "red.400" : undefined}
|
||||
bg={isTrash ? "gray.50" : undefined}
|
||||
>
|
||||
<Flex justify="space-between" alignItems="flex-start" gap="2">
|
||||
<Box>
|
||||
<TranscriptStatusIcon status={result.status} />
|
||||
</Box>
|
||||
<Box flex="1">
|
||||
{/* Title with highlighting and text fragment for deep linking */}
|
||||
<Link
|
||||
as={NextLink}
|
||||
href={transcriptHref(result.id, mainSnippet, query)}
|
||||
fontWeight="600"
|
||||
display="block"
|
||||
mb={2}
|
||||
>
|
||||
{highlightText(resultTitle, query)}
|
||||
</Link>
|
||||
{/* Title — plain text in trash (deleted transcripts return 404) */}
|
||||
{isTrash ? (
|
||||
<Text fontWeight="600" mb={2} color="gray.600">
|
||||
{highlightText(resultTitle, query)}
|
||||
</Text>
|
||||
) : (
|
||||
<Link
|
||||
as={NextLink}
|
||||
href={transcriptHref(result.id, mainSnippet, query)}
|
||||
fontWeight="600"
|
||||
display="block"
|
||||
mb={2}
|
||||
>
|
||||
{highlightText(resultTitle, query)}
|
||||
</Link>
|
||||
)}
|
||||
|
||||
{/* Metadata - Horizontal on desktop, vertical on mobile */}
|
||||
<Flex
|
||||
@@ -272,8 +295,10 @@ function TranscriptCard({
|
||||
</Box>
|
||||
<TranscriptActionsMenu
|
||||
transcriptId={result.id}
|
||||
onDelete={onDelete}
|
||||
onReprocess={onReprocess}
|
||||
onDelete={isTrash ? undefined : onDelete}
|
||||
onReprocess={isTrash ? undefined : onReprocess}
|
||||
onRestore={isTrash ? onRestore : undefined}
|
||||
onDestroy={isTrash ? onDestroy : undefined}
|
||||
/>
|
||||
</Flex>
|
||||
</Box>
|
||||
@@ -284,8 +309,11 @@ export default function TranscriptCards({
|
||||
results,
|
||||
query,
|
||||
isLoading,
|
||||
isTrash,
|
||||
onDelete,
|
||||
onReprocess,
|
||||
onRestore,
|
||||
onDestroy,
|
||||
}: TranscriptCardsProps) {
|
||||
return (
|
||||
<Box position="relative">
|
||||
@@ -315,8 +343,11 @@ export default function TranscriptCards({
|
||||
key={result.id}
|
||||
result={result}
|
||||
query={query}
|
||||
isTrash={isTrash}
|
||||
onDelete={onDelete}
|
||||
onReprocess={onReprocess}
|
||||
onRestore={onRestore}
|
||||
onDestroy={onDestroy}
|
||||
/>
|
||||
))}
|
||||
</Stack>
|
||||
|
||||
@@ -19,6 +19,7 @@ import {
|
||||
parseAsStringLiteral,
|
||||
} from "nuqs";
|
||||
import { LuX } from "react-icons/lu";
|
||||
import { toaster } from "../../components/ui/toaster";
|
||||
import type { components } from "../../reflector-api";
|
||||
|
||||
type Room = components["schemas"]["Room"];
|
||||
@@ -29,6 +30,9 @@ import {
|
||||
useTranscriptsSearch,
|
||||
useTranscriptDelete,
|
||||
useTranscriptProcess,
|
||||
useTranscriptRestore,
|
||||
useTranscriptDestroy,
|
||||
useAuthReady,
|
||||
} from "../../lib/apiHooks";
|
||||
import FilterSidebar from "./_components/FilterSidebar";
|
||||
import Pagination, {
|
||||
@@ -40,6 +44,7 @@ import Pagination, {
|
||||
} from "./_components/Pagination";
|
||||
import TranscriptCards from "./_components/TranscriptCards";
|
||||
import DeleteTranscriptDialog from "./_components/DeleteTranscriptDialog";
|
||||
import DestroyTranscriptDialog from "./_components/DestroyTranscriptDialog";
|
||||
import { formatLocalDate } from "../../lib/time";
|
||||
import { RECORD_A_MEETING_URL } from "../../api/urls";
|
||||
import { useUserName } from "../../lib/useUserName";
|
||||
@@ -175,14 +180,17 @@ const UnderSearchFormFilterIndicators: React.FC<{
|
||||
|
||||
const EmptyResult: React.FC<{
|
||||
searchQuery: string;
|
||||
}> = ({ searchQuery }) => {
|
||||
isTrash?: boolean;
|
||||
}> = ({ searchQuery, isTrash }) => {
|
||||
return (
|
||||
<Flex flexDir="column" alignItems="center" justifyContent="center" py={8}>
|
||||
<Text textAlign="center">
|
||||
{searchQuery
|
||||
? `No results found for "${searchQuery}". Try adjusting your search terms.`
|
||||
: "No transcripts found, but you can "}
|
||||
{!searchQuery && (
|
||||
{isTrash
|
||||
? "Trash is empty."
|
||||
: searchQuery
|
||||
? `No results found for "${searchQuery}". Try adjusting your search terms.`
|
||||
: "No transcripts found, but you can "}
|
||||
{!isTrash && !searchQuery && (
|
||||
<>
|
||||
<Link href={RECORD_A_MEETING_URL} color="blue.500">
|
||||
record a meeting
|
||||
@@ -196,6 +204,8 @@ const EmptyResult: React.FC<{
|
||||
};
|
||||
|
||||
export default function TranscriptBrowser() {
|
||||
const { isAuthenticated } = useAuthReady();
|
||||
|
||||
const [urlSearchQuery, setUrlSearchQuery] = useQueryState(
|
||||
"q",
|
||||
parseAsString.withDefault("").withOptions({ shallow: false }),
|
||||
@@ -216,6 +226,12 @@ export default function TranscriptBrowser() {
|
||||
parseAsString.withDefault("").withOptions({ shallow: false }),
|
||||
);
|
||||
|
||||
const [urlTrash, setUrlTrash] = useQueryState(
|
||||
"trash",
|
||||
parseAsStringLiteral(["1"] as const).withOptions({ shallow: false }),
|
||||
);
|
||||
const isTrashView = urlTrash === "1";
|
||||
|
||||
const [urlPage, setPage] = useQueryState(
|
||||
"page",
|
||||
parseAsInteger.withDefault(1).withOptions({ shallow: false }),
|
||||
@@ -231,7 +247,7 @@ export default function TranscriptBrowser() {
|
||||
return;
|
||||
}
|
||||
_setSafePage(maybePage.value);
|
||||
}, [urlPage]);
|
||||
}, [urlPage, setPage]);
|
||||
|
||||
const pageSize = 20;
|
||||
|
||||
@@ -240,11 +256,12 @@ export default function TranscriptBrowser() {
|
||||
() => ({
|
||||
q: urlSearchQuery,
|
||||
extras: {
|
||||
room_id: urlRoomId || undefined,
|
||||
source_kind: urlSourceKind || undefined,
|
||||
room_id: isTrashView ? undefined : urlRoomId || undefined,
|
||||
source_kind: isTrashView ? undefined : urlSourceKind || undefined,
|
||||
include_deleted: isTrashView ? true : undefined,
|
||||
},
|
||||
}),
|
||||
[urlSearchQuery, urlRoomId, urlSourceKind],
|
||||
[urlSearchQuery, urlRoomId, urlSourceKind, isTrashView],
|
||||
);
|
||||
|
||||
const {
|
||||
@@ -266,35 +283,55 @@ export default function TranscriptBrowser() {
|
||||
|
||||
const totalPages = getTotalPages(totalResults, pageSize);
|
||||
|
||||
// reset pagination when search results change (detected by total change; good enough approximation)
|
||||
// reset pagination when search filters change
|
||||
useEffect(() => {
|
||||
// operation is idempotent
|
||||
setPage(FIRST_PAGE).then(() => {});
|
||||
}, [JSON.stringify(searchFilters)]);
|
||||
}, [searchFilters, setPage]);
|
||||
|
||||
const userName = useUserName();
|
||||
const [deletionLoading, setDeletionLoading] = useState(false);
|
||||
const [actionLoading, setActionLoading] = useState(false);
|
||||
const cancelRef = React.useRef(null);
|
||||
const destroyCancelRef = React.useRef(null);
|
||||
|
||||
// Delete (soft-delete / move to trash)
|
||||
const [transcriptToDeleteId, setTranscriptToDeleteId] =
|
||||
React.useState<string>();
|
||||
|
||||
// Destroy (hard-delete)
|
||||
const [transcriptToDestroyId, setTranscriptToDestroyId] =
|
||||
React.useState<string>();
|
||||
|
||||
const handleFilterTranscripts = (
|
||||
sourceKind: SourceKind | null,
|
||||
roomId: string,
|
||||
) => {
|
||||
if (isTrashView) {
|
||||
setUrlTrash(null);
|
||||
}
|
||||
setUrlSourceKind(sourceKind);
|
||||
setUrlRoomId(roomId);
|
||||
setPage(1);
|
||||
};
|
||||
|
||||
const handleTrashClick = () => {
|
||||
setUrlTrash(isTrashView ? null : "1");
|
||||
setUrlSourceKind(null);
|
||||
setUrlRoomId(null);
|
||||
setPage(1);
|
||||
};
|
||||
|
||||
const onCloseDeletion = () => setTranscriptToDeleteId(undefined);
|
||||
const onCloseDestroy = () => setTranscriptToDestroyId(undefined);
|
||||
|
||||
const deleteTranscript = useTranscriptDelete();
|
||||
const processTranscript = useTranscriptProcess();
|
||||
const restoreTranscript = useTranscriptRestore();
|
||||
const destroyTranscript = useTranscriptDestroy();
|
||||
|
||||
const confirmDeleteTranscript = (transcriptId: string) => {
|
||||
if (deletionLoading) return;
|
||||
setDeletionLoading(true);
|
||||
if (actionLoading) return;
|
||||
setActionLoading(true);
|
||||
deleteTranscript.mutate(
|
||||
{
|
||||
params: {
|
||||
@@ -303,12 +340,12 @@ export default function TranscriptBrowser() {
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
setDeletionLoading(false);
|
||||
setActionLoading(false);
|
||||
onCloseDeletion();
|
||||
reloadSearch();
|
||||
},
|
||||
onError: () => {
|
||||
setDeletionLoading(false);
|
||||
setActionLoading(false);
|
||||
},
|
||||
},
|
||||
);
|
||||
@@ -322,18 +359,83 @@ export default function TranscriptBrowser() {
|
||||
});
|
||||
};
|
||||
|
||||
const handleRestoreTranscript = (transcriptId: string) => {
|
||||
if (actionLoading) return;
|
||||
setActionLoading(true);
|
||||
restoreTranscript.mutate(
|
||||
{
|
||||
params: {
|
||||
path: { transcript_id: transcriptId },
|
||||
},
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
setActionLoading(false);
|
||||
reloadSearch();
|
||||
toaster.create({
|
||||
duration: 3000,
|
||||
render: () => (
|
||||
<Box bg="green.500" color="white" px={4} py={3} borderRadius="md">
|
||||
<Text fontWeight="bold">Transcript restored</Text>
|
||||
</Box>
|
||||
),
|
||||
});
|
||||
},
|
||||
onError: () => {
|
||||
setActionLoading(false);
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
const confirmDestroyTranscript = (transcriptId: string) => {
|
||||
if (actionLoading) return;
|
||||
setActionLoading(true);
|
||||
destroyTranscript.mutate(
|
||||
{
|
||||
params: {
|
||||
path: { transcript_id: transcriptId },
|
||||
},
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
setActionLoading(false);
|
||||
onCloseDestroy();
|
||||
reloadSearch();
|
||||
},
|
||||
onError: () => {
|
||||
setActionLoading(false);
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
// Dialog data for delete
|
||||
const transcriptToDelete = results?.find(
|
||||
(i) => i.id === transcriptToDeleteId,
|
||||
);
|
||||
const dialogTitle = transcriptToDelete?.title || "Unnamed Transcript";
|
||||
const dialogDate = transcriptToDelete?.created_at
|
||||
const deleteDialogTitle = transcriptToDelete?.title || "Unnamed Transcript";
|
||||
const deleteDialogDate = transcriptToDelete?.created_at
|
||||
? formatLocalDate(transcriptToDelete.created_at)
|
||||
: undefined;
|
||||
const dialogSource =
|
||||
const deleteDialogSource =
|
||||
transcriptToDelete?.source_kind === "room" && transcriptToDelete?.room_id
|
||||
? transcriptToDelete.room_name || transcriptToDelete.room_id
|
||||
: transcriptToDelete?.source_kind;
|
||||
|
||||
// Dialog data for destroy
|
||||
const transcriptToDestroy = results?.find(
|
||||
(i) => i.id === transcriptToDestroyId,
|
||||
);
|
||||
const destroyDialogTitle = transcriptToDestroy?.title || "Unnamed Transcript";
|
||||
const destroyDialogDate = transcriptToDestroy?.created_at
|
||||
? formatLocalDate(transcriptToDestroy.created_at)
|
||||
: undefined;
|
||||
const destroyDialogSource =
|
||||
transcriptToDestroy?.source_kind === "room" && transcriptToDestroy?.room_id
|
||||
? transcriptToDestroy.room_name || transcriptToDestroy.room_id
|
||||
: transcriptToDestroy?.source_kind;
|
||||
|
||||
if (searchLoading && results.length === 0) {
|
||||
return (
|
||||
<Flex
|
||||
@@ -361,17 +463,24 @@ export default function TranscriptBrowser() {
|
||||
mb={4}
|
||||
>
|
||||
<Heading size="lg">
|
||||
{userName ? `${userName}'s Transcriptions` : "Your Transcriptions"}{" "}
|
||||
{(searchLoading || deletionLoading) && <Spinner size="sm" />}
|
||||
{isTrashView
|
||||
? "Trash"
|
||||
: userName
|
||||
? `${userName}'s Transcriptions`
|
||||
: "Your Transcriptions"}{" "}
|
||||
{(searchLoading || actionLoading) && <Spinner size="sm" />}
|
||||
</Heading>
|
||||
</Flex>
|
||||
|
||||
<Flex flexDir={{ base: "column", md: "row" }}>
|
||||
<FilterSidebar
|
||||
rooms={rooms}
|
||||
selectedSourceKind={urlSourceKind}
|
||||
selectedRoomId={urlRoomId}
|
||||
selectedSourceKind={isTrashView ? null : urlSourceKind}
|
||||
selectedRoomId={isTrashView ? "" : urlRoomId}
|
||||
onFilterChange={handleFilterTranscripts}
|
||||
isTrashView={isTrashView}
|
||||
onTrashClick={handleTrashClick}
|
||||
isAuthenticated={isAuthenticated}
|
||||
/>
|
||||
|
||||
<Flex
|
||||
@@ -384,8 +493,8 @@ export default function TranscriptBrowser() {
|
||||
>
|
||||
<SearchForm
|
||||
setPage={setPage}
|
||||
sourceKind={urlSourceKind}
|
||||
roomId={urlRoomId}
|
||||
sourceKind={isTrashView ? null : urlSourceKind}
|
||||
roomId={isTrashView ? null : urlRoomId}
|
||||
searchQuery={urlSearchQuery}
|
||||
setSearchQuery={setUrlSearchQuery}
|
||||
setSourceKind={setUrlSourceKind}
|
||||
@@ -406,12 +515,15 @@ export default function TranscriptBrowser() {
|
||||
results={results}
|
||||
query={urlSearchQuery}
|
||||
isLoading={searchLoading}
|
||||
onDelete={setTranscriptToDeleteId}
|
||||
onReprocess={handleProcessTranscript}
|
||||
isTrash={isTrashView}
|
||||
onDelete={isTrashView ? undefined : setTranscriptToDeleteId}
|
||||
onReprocess={isTrashView ? undefined : handleProcessTranscript}
|
||||
onRestore={isTrashView ? handleRestoreTranscript : undefined}
|
||||
onDestroy={isTrashView ? setTranscriptToDestroyId : undefined}
|
||||
/>
|
||||
|
||||
{!searchLoading && results.length === 0 && (
|
||||
<EmptyResult searchQuery={urlSearchQuery} />
|
||||
<EmptyResult searchQuery={urlSearchQuery} isTrash={isTrashView} />
|
||||
)}
|
||||
</Flex>
|
||||
</Flex>
|
||||
@@ -423,10 +535,24 @@ export default function TranscriptBrowser() {
|
||||
transcriptToDeleteId && confirmDeleteTranscript(transcriptToDeleteId)
|
||||
}
|
||||
cancelRef={cancelRef}
|
||||
isLoading={deletionLoading}
|
||||
title={dialogTitle}
|
||||
date={dialogDate}
|
||||
source={dialogSource}
|
||||
isLoading={actionLoading}
|
||||
title={deleteDialogTitle}
|
||||
date={deleteDialogDate}
|
||||
source={deleteDialogSource}
|
||||
/>
|
||||
|
||||
<DestroyTranscriptDialog
|
||||
isOpen={!!transcriptToDestroyId}
|
||||
onClose={onCloseDestroy}
|
||||
onConfirm={() =>
|
||||
transcriptToDestroyId &&
|
||||
confirmDestroyTranscript(transcriptToDestroyId)
|
||||
}
|
||||
cancelRef={destroyCancelRef}
|
||||
isLoading={actionLoading}
|
||||
title={destroyDialogTitle}
|
||||
date={destroyDialogDate}
|
||||
source={destroyDialogSource}
|
||||
/>
|
||||
</Flex>
|
||||
);
|
||||
|
||||
@@ -10,14 +10,17 @@ import {
|
||||
Badge,
|
||||
VStack,
|
||||
Icon,
|
||||
Tooltip,
|
||||
} from "@chakra-ui/react";
|
||||
import { LuLink, LuRefreshCw } from "react-icons/lu";
|
||||
import { FaStop } from "react-icons/fa";
|
||||
import { FaCalendarAlt } from "react-icons/fa";
|
||||
import type { components } from "../../../reflector-api";
|
||||
import {
|
||||
useRoomActiveMeetings,
|
||||
useRoomUpcomingMeetings,
|
||||
useRoomIcsSync,
|
||||
useMeetingDeactivate,
|
||||
} from "../../../lib/apiHooks";
|
||||
|
||||
type Room = components["schemas"]["Room"];
|
||||
@@ -107,6 +110,7 @@ const getZulipDisplay = (
|
||||
function MeetingStatus({ roomName }: { roomName: string }) {
|
||||
const activeMeetingsQuery = useRoomActiveMeetings(roomName);
|
||||
const upcomingMeetingsQuery = useRoomUpcomingMeetings(roomName);
|
||||
const deactivateMutation = useMeetingDeactivate();
|
||||
|
||||
const activeMeetings = activeMeetingsQuery.data || [];
|
||||
const upcomingMeetings = upcomingMeetingsQuery.data || [];
|
||||
@@ -121,14 +125,46 @@ function MeetingStatus({ roomName }: { roomName: string }) {
|
||||
meeting.calendar_metadata?.["title"] || "Active Meeting",
|
||||
);
|
||||
return (
|
||||
<VStack gap={1} alignItems="start">
|
||||
<Text fontSize="xs" color="gray.600" lineHeight={1}>
|
||||
{title}
|
||||
</Text>
|
||||
<Text fontSize="xs" color="gray.500" lineHeight={1}>
|
||||
{meeting.num_clients} participants
|
||||
</Text>
|
||||
</VStack>
|
||||
<Flex alignItems="center" gap={2}>
|
||||
<VStack gap={1} alignItems="start">
|
||||
<Text fontSize="xs" color="gray.600" lineHeight={1}>
|
||||
{title}
|
||||
</Text>
|
||||
<Text fontSize="xs" color="gray.500" lineHeight={1}>
|
||||
{meeting.num_clients} participants
|
||||
</Text>
|
||||
</VStack>
|
||||
{activeMeetings.length === 1 && (meeting.num_clients ?? 0) < 2 && (
|
||||
<Tooltip.Root openDelay={100}>
|
||||
<Tooltip.Trigger asChild>
|
||||
<IconButton
|
||||
aria-label="End meeting"
|
||||
size="xs"
|
||||
variant="ghost"
|
||||
color="red.500"
|
||||
_hover={{ bg: "transparent", color: "red.600" }}
|
||||
onClick={() =>
|
||||
deactivateMutation.mutate({
|
||||
params: { path: { meeting_id: meeting.id } },
|
||||
})
|
||||
}
|
||||
disabled={deactivateMutation.isPending}
|
||||
>
|
||||
{deactivateMutation.isPending ? (
|
||||
<Spinner size="xs" />
|
||||
) : (
|
||||
<FaStop />
|
||||
)}
|
||||
</IconButton>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Positioner>
|
||||
<Tooltip.Content>
|
||||
End this meeting and stop any active recordings
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Positioner>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
</Flex>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -74,6 +74,7 @@ const recordingTypeOptions: SelectOption[] = [
|
||||
const platformOptions: SelectOption[] = [
|
||||
{ label: "Whereby", value: "whereby" },
|
||||
{ label: "Daily", value: "daily" },
|
||||
{ label: "LiveKit", value: "livekit" },
|
||||
];
|
||||
|
||||
const roomInitialState = {
|
||||
@@ -309,10 +310,7 @@ export default function RoomsList() {
|
||||
return;
|
||||
}
|
||||
|
||||
const platform: "whereby" | "daily" =
|
||||
room.platform === "whereby" || room.platform === "daily"
|
||||
? room.platform
|
||||
: "daily";
|
||||
const platform = room.platform as "whereby" | "daily" | "livekit";
|
||||
|
||||
const roomData = {
|
||||
name: room.name,
|
||||
@@ -544,7 +542,10 @@ export default function RoomsList() {
|
||||
<Select.Root
|
||||
value={[room.platform]}
|
||||
onValueChange={(e) => {
|
||||
const newPlatform = e.value[0] as "whereby" | "daily";
|
||||
const newPlatform = e.value[0] as
|
||||
| "whereby"
|
||||
| "daily"
|
||||
| "livekit";
|
||||
const updates: Partial<typeof room> = {
|
||||
platform: newPlatform,
|
||||
};
|
||||
|
||||
@@ -212,8 +212,13 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
|
||||
const showConsentModalRef = useRef(showConsentModal);
|
||||
showConsentModalRef.current = showConsentModal;
|
||||
|
||||
const userEmail =
|
||||
auth.status === "authenticated" || auth.status === "refreshing"
|
||||
? auth.user.email
|
||||
: null;
|
||||
const { showEmailModal } = useEmailTranscriptDialog({
|
||||
meetingId: assertMeetingId(meeting.id),
|
||||
userEmail,
|
||||
});
|
||||
const showEmailModalRef = useRef(showEmailModal);
|
||||
showEmailModalRef.current = showEmailModal;
|
||||
@@ -332,6 +337,10 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
|
||||
[],
|
||||
);
|
||||
const emailIconUrl = useMemo(
|
||||
() => new URL("/email-icon-dark.svg", window.location.origin),
|
||||
[],
|
||||
);
|
||||
const emailIconDarkModeUrl = useMemo(
|
||||
() => new URL("/email-icon.svg", window.location.origin),
|
||||
[],
|
||||
);
|
||||
@@ -394,12 +403,13 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
|
||||
show
|
||||
? {
|
||||
iconPath: emailIconUrl.href,
|
||||
iconPathDarkMode: emailIconDarkModeUrl.href,
|
||||
label: "Email Transcript",
|
||||
tooltip: "Get transcript emailed to you",
|
||||
}
|
||||
: null,
|
||||
);
|
||||
}, [emailIconUrl, setCustomTrayButton]);
|
||||
}, [emailIconUrl, emailIconDarkModeUrl, setCustomTrayButton]);
|
||||
|
||||
if (authLastUserId === undefined) {
|
||||
return (
|
||||
|
||||
277
www/app/[roomName]/components/LiveKitRoom.tsx
Normal file
277
www/app/[roomName]/components/LiveKitRoom.tsx
Normal file
@@ -0,0 +1,277 @@
|
||||
"use client";
|
||||
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { Box, Spinner, Center, Text, IconButton } from "@chakra-ui/react";
|
||||
import { useRouter, useParams } from "next/navigation";
|
||||
import {
|
||||
LiveKitRoom as LKRoom,
|
||||
VideoConference,
|
||||
RoomAudioRenderer,
|
||||
PreJoin,
|
||||
type LocalUserChoices,
|
||||
} from "@livekit/components-react";
|
||||
import type { components } from "../../reflector-api";
|
||||
import { useAuth } from "../../lib/AuthProvider";
|
||||
import { useRoomJoinMeeting } from "../../lib/apiHooks";
|
||||
import { assertMeetingId } from "../../lib/types";
|
||||
import {
|
||||
ConsentDialogButton,
|
||||
RecordingIndicator,
|
||||
useConsentDialog,
|
||||
} from "../../lib/consent";
|
||||
import { useEmailTranscriptDialog } from "../../lib/emailTranscript";
|
||||
import { featureEnabled } from "../../lib/features";
|
||||
import { LuMail } from "react-icons/lu";
|
||||
|
||||
type Meeting = components["schemas"]["Meeting"];
|
||||
type Room = components["schemas"]["RoomDetails"];
|
||||
|
||||
interface LiveKitRoomProps {
|
||||
meeting: Meeting;
|
||||
room: Room;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract LiveKit WebSocket URL, room name, and token from the room_url.
|
||||
*
|
||||
* The backend returns room_url like: ws://host:7880?room=<name>&token=<jwt>
|
||||
* We split these for the LiveKit React SDK.
|
||||
*/
|
||||
function parseLiveKitUrl(roomUrl: string): {
|
||||
serverUrl: string;
|
||||
roomName: string | null;
|
||||
token: string | null;
|
||||
} {
|
||||
try {
|
||||
const url = new URL(roomUrl);
|
||||
const token = url.searchParams.get("token");
|
||||
const roomName = url.searchParams.get("room");
|
||||
url.searchParams.delete("token");
|
||||
url.searchParams.delete("room");
|
||||
// Strip trailing slash and leftover ? from URL API
|
||||
const serverUrl = url.toString().replace(/[?/]+$/, "");
|
||||
return { serverUrl, roomName, token };
|
||||
} catch {
|
||||
return { serverUrl: roomUrl, roomName: null, token: null };
|
||||
}
|
||||
}
|
||||
|
||||
export default function LiveKitRoom({ meeting, room }: LiveKitRoomProps) {
|
||||
const router = useRouter();
|
||||
const params = useParams();
|
||||
const auth = useAuth();
|
||||
const authLastUserId = auth.lastUserId;
|
||||
const roomName = params?.roomName as string;
|
||||
const meetingId = assertMeetingId(meeting.id);
|
||||
|
||||
const joinMutation = useRoomJoinMeeting();
|
||||
const [joinedMeeting, setJoinedMeeting] = useState<Meeting | null>(null);
|
||||
const [connectionError, setConnectionError] = useState(false);
|
||||
const [userChoices, setUserChoices] = useState<LocalUserChoices | null>(null);
|
||||
|
||||
// ── Consent dialog (same hooks as Daily/Whereby) ──────────
|
||||
const { showConsentButton, showRecordingIndicator } = useConsentDialog({
|
||||
meetingId,
|
||||
recordingType: meeting.recording_type,
|
||||
skipConsent: room.skip_consent,
|
||||
});
|
||||
|
||||
// ── Email transcript dialog ───────────────────────────────
|
||||
const userEmail =
|
||||
auth.status === "authenticated" || auth.status === "refreshing"
|
||||
? auth.user.email
|
||||
: null;
|
||||
const { showEmailModal } = useEmailTranscriptDialog({
|
||||
meetingId,
|
||||
userEmail,
|
||||
});
|
||||
const showEmailFeature = featureEnabled("emailTranscript");
|
||||
|
||||
// ── PreJoin defaults (persisted to localStorage for page refresh) ──
|
||||
const STORAGE_KEY = `livekit-username-${roomName}`;
|
||||
const defaultUsername = (() => {
|
||||
if (typeof window !== "undefined") {
|
||||
const saved = localStorage.getItem(STORAGE_KEY);
|
||||
if (saved) return saved;
|
||||
}
|
||||
if (auth.status === "authenticated" || auth.status === "refreshing") {
|
||||
return auth.user.email?.split("@")[0] || auth.user.id?.slice(0, 12) || "";
|
||||
}
|
||||
return "";
|
||||
})();
|
||||
const isJoining = !!userChoices && !joinedMeeting && !connectionError;
|
||||
|
||||
// ── Join meeting via backend API after PreJoin submit ─────
|
||||
useEffect(() => {
|
||||
if (
|
||||
authLastUserId === undefined ||
|
||||
!userChoices ||
|
||||
!meeting?.id ||
|
||||
!roomName
|
||||
)
|
||||
return;
|
||||
let cancelled = false;
|
||||
|
||||
async function join() {
|
||||
try {
|
||||
const result = await joinMutation.mutateAsync({
|
||||
params: {
|
||||
path: { room_name: roomName, meeting_id: meeting.id },
|
||||
query: { display_name: userChoices!.username || undefined },
|
||||
},
|
||||
});
|
||||
if (!cancelled) setJoinedMeeting(result);
|
||||
} catch (err) {
|
||||
console.error("Failed to join LiveKit meeting:", err);
|
||||
if (!cancelled) setConnectionError(true);
|
||||
}
|
||||
}
|
||||
|
||||
join();
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [meeting?.id, roomName, authLastUserId, userChoices]);
|
||||
|
||||
const handleDisconnected = useCallback(() => {
|
||||
router.push("/browse");
|
||||
}, [router]);
|
||||
|
||||
const handlePreJoinSubmit = useCallback(
|
||||
(choices: LocalUserChoices) => {
|
||||
// Persist username for page refresh
|
||||
if (choices.username) {
|
||||
localStorage.setItem(STORAGE_KEY, choices.username);
|
||||
}
|
||||
setUserChoices(choices);
|
||||
},
|
||||
[STORAGE_KEY],
|
||||
);
|
||||
|
||||
// ── PreJoin screen (name + device selection) ──────────────
|
||||
if (!userChoices) {
|
||||
return (
|
||||
<Box
|
||||
w="100vw"
|
||||
h="100vh"
|
||||
display="flex"
|
||||
alignItems="center"
|
||||
justifyContent="center"
|
||||
bg="gray.900"
|
||||
data-lk-theme="default"
|
||||
>
|
||||
<PreJoin
|
||||
defaults={{
|
||||
username: defaultUsername,
|
||||
audioEnabled: true,
|
||||
videoEnabled: true,
|
||||
}}
|
||||
onSubmit={handlePreJoinSubmit}
|
||||
userLabel="Display Name"
|
||||
/>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Loading / error states ────────────────────────────────
|
||||
if (isJoining) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.900">
|
||||
<Spinner color="blue.500" size="xl" />
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
if (connectionError) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Text fontSize="lg">Failed to connect to meeting</Text>
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
if (!joinedMeeting) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Spinner color="blue.500" size="xl" />
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
const {
|
||||
serverUrl,
|
||||
roomName: lkRoomName,
|
||||
token,
|
||||
} = parseLiveKitUrl(joinedMeeting.room_url);
|
||||
|
||||
if (
|
||||
serverUrl &&
|
||||
!serverUrl.startsWith("ws://") &&
|
||||
!serverUrl.startsWith("wss://")
|
||||
) {
|
||||
console.warn(
|
||||
`LiveKit serverUrl has unexpected scheme: ${serverUrl}. Expected ws:// or wss://`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!token || !lkRoomName) {
|
||||
return (
|
||||
<Center h="100vh" bg="gray.50">
|
||||
<Text fontSize="lg">
|
||||
{!token
|
||||
? "No access token received from server"
|
||||
: "No room name received from server"}
|
||||
</Text>
|
||||
</Center>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Render ────────────────────────────────────────────────
|
||||
// The token already encodes the room name (in VideoGrants.room),
|
||||
// so LiveKit SDK joins the correct room from the token alone.
|
||||
return (
|
||||
<Box w="100vw" h="100vh" bg="black" position="relative">
|
||||
<LKRoom
|
||||
serverUrl={serverUrl}
|
||||
token={token}
|
||||
connect={true}
|
||||
audio={userChoices.audioEnabled}
|
||||
video={userChoices.videoEnabled}
|
||||
onDisconnected={handleDisconnected}
|
||||
data-lk-theme="default"
|
||||
style={{ height: "100%" }}
|
||||
>
|
||||
<VideoConference />
|
||||
<RoomAudioRenderer />
|
||||
</LKRoom>
|
||||
|
||||
{/* ── Floating overlay buttons (consent, email, extensible) ── */}
|
||||
{showConsentButton && (
|
||||
<ConsentDialogButton
|
||||
meetingId={meetingId}
|
||||
recordingType={meeting.recording_type}
|
||||
skipConsent={room.skip_consent}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showRecordingIndicator && <RecordingIndicator />}
|
||||
|
||||
{showEmailFeature && (
|
||||
<IconButton
|
||||
aria-label="Email transcript"
|
||||
position="absolute"
|
||||
top="56px"
|
||||
right="8px"
|
||||
zIndex={1000}
|
||||
colorPalette="blue"
|
||||
size="sm"
|
||||
onClick={showEmailModal}
|
||||
variant="solid"
|
||||
borderRadius="full"
|
||||
>
|
||||
<LuMail />
|
||||
</IconButton>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -14,6 +14,7 @@ import MeetingSelection from "../MeetingSelection";
|
||||
import useRoomDefaultMeeting from "../useRoomDefaultMeeting";
|
||||
import WherebyRoom from "./WherebyRoom";
|
||||
import DailyRoom from "./DailyRoom";
|
||||
import LiveKitRoom from "./LiveKitRoom";
|
||||
import { useAuth } from "../../lib/AuthProvider";
|
||||
import { useError } from "../../(errors)/errorContext";
|
||||
import { parseNonEmptyString } from "../../lib/utils";
|
||||
@@ -199,8 +200,9 @@ export default function RoomContainer(details: RoomDetails) {
|
||||
return <DailyRoom meeting={meeting} room={room} />;
|
||||
case "whereby":
|
||||
return <WherebyRoom meeting={meeting} room={room} />;
|
||||
default: {
|
||||
const _exhaustive: never = platform;
|
||||
case "livekit":
|
||||
return <LiveKitRoom meeting={meeting} room={room} />;
|
||||
default:
|
||||
return (
|
||||
<Box
|
||||
display="flex"
|
||||
@@ -213,6 +215,5 @@ export default function RoomContainer(details: RoomDetails) {
|
||||
<Text fontSize="lg">Unknown platform: {platform}</Text>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import "./styles/globals.scss";
|
||||
import "@livekit/components-styles";
|
||||
import { Metadata, Viewport } from "next";
|
||||
import { Poppins } from "next/font/google";
|
||||
import { ErrorProvider } from "./(errors)/errorContext";
|
||||
|
||||
@@ -136,6 +136,7 @@ export function UserEventsProvider({
|
||||
switch (msg.event) {
|
||||
case "TRANSCRIPT_CREATED":
|
||||
case "TRANSCRIPT_DELETED":
|
||||
case "TRANSCRIPT_RESTORED":
|
||||
case "TRANSCRIPT_STATUS":
|
||||
case "TRANSCRIPT_FINAL_TITLE":
|
||||
case "TRANSCRIPT_DURATION":
|
||||
|
||||
@@ -57,6 +57,7 @@ export function useTranscriptsSearch(
|
||||
offset?: number;
|
||||
room_id?: string;
|
||||
source_kind?: SourceKind;
|
||||
include_deleted?: boolean;
|
||||
} = {},
|
||||
) {
|
||||
return $api.useQuery(
|
||||
@@ -70,6 +71,7 @@ export function useTranscriptsSearch(
|
||||
offset: options.offset,
|
||||
room_id: options.room_id,
|
||||
source_kind: options.source_kind,
|
||||
include_deleted: options.include_deleted,
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -105,6 +107,38 @@ export function useTranscriptProcess() {
|
||||
});
|
||||
}
|
||||
|
||||
export function useTranscriptRestore() {
|
||||
const { setError } = useError();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return $api.useMutation("post", "/v1/transcripts/{transcript_id}/restore", {
|
||||
onSuccess: () => {
|
||||
return queryClient.invalidateQueries({
|
||||
queryKey: ["get", TRANSCRIPT_SEARCH_URL],
|
||||
});
|
||||
},
|
||||
onError: (error) => {
|
||||
setError(error as Error, "There was an error restoring the transcript");
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export function useTranscriptDestroy() {
|
||||
const { setError } = useError();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return $api.useMutation("delete", "/v1/transcripts/{transcript_id}/destroy", {
|
||||
onSuccess: () => {
|
||||
return queryClient.invalidateQueries({
|
||||
queryKey: ["get", TRANSCRIPT_SEARCH_URL],
|
||||
});
|
||||
},
|
||||
onError: (error) => {
|
||||
setError(error as Error, "There was an error destroying the transcript");
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const ACTIVE_TRANSCRIPT_STATUSES = new Set<TranscriptStatus>([
|
||||
"processing",
|
||||
"uploaded",
|
||||
|
||||
@@ -6,13 +6,15 @@ import { Box, Button, Input, Text, VStack, HStack } from "@chakra-ui/react";
|
||||
interface EmailTranscriptDialogProps {
|
||||
onSubmit: (email: string) => void;
|
||||
onDismiss: () => void;
|
||||
initialEmail?: string;
|
||||
}
|
||||
|
||||
export function EmailTranscriptDialog({
|
||||
onSubmit,
|
||||
onDismiss,
|
||||
initialEmail,
|
||||
}: EmailTranscriptDialogProps) {
|
||||
const [email, setEmail] = useState("");
|
||||
const [email, setEmail] = useState(initialEmail ?? "");
|
||||
const [inputEl, setInputEl] = useState<HTMLInputElement | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@@ -11,10 +11,12 @@ const TOAST_CHECK_INTERVAL_MS = 100;
|
||||
|
||||
type UseEmailTranscriptDialogParams = {
|
||||
meetingId: MeetingId;
|
||||
userEmail?: string | null;
|
||||
};
|
||||
|
||||
export function useEmailTranscriptDialog({
|
||||
meetingId,
|
||||
userEmail,
|
||||
}: UseEmailTranscriptDialogParams) {
|
||||
const [modalOpen, setModalOpen] = useState(false);
|
||||
const addEmailMutation = useMeetingAddEmailRecipient();
|
||||
@@ -83,6 +85,7 @@ export function useEmailTranscriptDialog({
|
||||
duration: null,
|
||||
render: ({ dismiss }) => (
|
||||
<EmailTranscriptDialog
|
||||
initialEmail={userEmail ?? undefined}
|
||||
onSubmit={(email) => {
|
||||
handleSubmitEmail(email);
|
||||
dismiss();
|
||||
@@ -120,7 +123,7 @@ export function useEmailTranscriptDialog({
|
||||
}
|
||||
}, TOAST_CHECK_INTERVAL_MS);
|
||||
});
|
||||
}, [handleSubmitEmail, modalOpen]);
|
||||
}, [handleSubmitEmail, modalOpen, userEmail]);
|
||||
|
||||
return {
|
||||
showEmailModal,
|
||||
|
||||
185
www/app/reflector-api.d.ts
vendored
185
www/app/reflector-api.d.ts
vendored
@@ -388,6 +388,46 @@ export interface paths {
|
||||
patch: operations["v1_transcript_update"];
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/transcripts/{transcript_id}/restore": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
get?: never;
|
||||
put?: never;
|
||||
/**
|
||||
* Transcript Restore
|
||||
* @description Restore a soft-deleted transcript.
|
||||
*/
|
||||
post: operations["v1_transcript_restore"];
|
||||
delete?: never;
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/transcripts/{transcript_id}/destroy": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
get?: never;
|
||||
put?: never;
|
||||
post?: never;
|
||||
/**
|
||||
* Transcript Destroy
|
||||
* @description Permanently delete a transcript and all associated files.
|
||||
*/
|
||||
delete: operations["v1_transcript_destroy"];
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/transcripts/{transcript_id}/topics": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -871,6 +911,32 @@ export interface paths {
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/livekit/webhook": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
get?: never;
|
||||
put?: never;
|
||||
/**
|
||||
* Livekit Webhook
|
||||
* @description Handle LiveKit webhook events.
|
||||
*
|
||||
* LiveKit webhook events include:
|
||||
* - participant_joined / participant_left
|
||||
* - egress_started / egress_updated / egress_ended
|
||||
* - room_started / room_finished
|
||||
* - track_published / track_unpublished
|
||||
*/
|
||||
post: operations["v1_livekit_webhook"];
|
||||
delete?: never;
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/auth/login": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -1060,7 +1126,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -1781,7 +1847,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/** Daily Composed Video S3 Key */
|
||||
daily_composed_video_s3_key?: string | null;
|
||||
/** Daily Composed Video Duration */
|
||||
@@ -1881,7 +1947,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -1939,7 +2005,7 @@ export interface components {
|
||||
* Platform
|
||||
* @enum {string}
|
||||
*/
|
||||
platform: "whereby" | "daily";
|
||||
platform: "whereby" | "daily" | "livekit";
|
||||
/**
|
||||
* Skip Consent
|
||||
* @default false
|
||||
@@ -2318,7 +2384,7 @@ export interface components {
|
||||
/** Ics Enabled */
|
||||
ics_enabled?: boolean | null;
|
||||
/** Platform */
|
||||
platform?: ("whereby" | "daily") | null;
|
||||
platform?: ("whereby" | "daily" | "livekit") | null;
|
||||
/** Skip Consent */
|
||||
skip_consent?: boolean | null;
|
||||
/** Email Transcript To */
|
||||
@@ -2391,6 +2457,14 @@ export interface components {
|
||||
*/
|
||||
title: string;
|
||||
};
|
||||
/** UserTranscriptRestoredData */
|
||||
UserTranscriptRestoredData: {
|
||||
/**
|
||||
* Id
|
||||
* @description A non-empty string
|
||||
*/
|
||||
id: string;
|
||||
};
|
||||
/** UserTranscriptStatusData */
|
||||
UserTranscriptStatusData: {
|
||||
/**
|
||||
@@ -2446,6 +2520,15 @@ export interface components {
|
||||
event: "TRANSCRIPT_FINAL_TITLE";
|
||||
data: components["schemas"]["UserTranscriptFinalTitleData"];
|
||||
};
|
||||
/** UserWsTranscriptRestored */
|
||||
UserWsTranscriptRestored: {
|
||||
/**
|
||||
* @description discriminator enum property added by openapi-typescript
|
||||
* @enum {string}
|
||||
*/
|
||||
event: "TRANSCRIPT_RESTORED";
|
||||
data: components["schemas"]["UserTranscriptRestoredData"];
|
||||
};
|
||||
/** UserWsTranscriptStatus */
|
||||
UserWsTranscriptStatus: {
|
||||
/**
|
||||
@@ -3176,7 +3259,9 @@ export interface operations {
|
||||
};
|
||||
v1_rooms_join_meeting: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
query?: {
|
||||
display_name?: string | null;
|
||||
};
|
||||
header?: never;
|
||||
path: {
|
||||
room_name: string;
|
||||
@@ -3293,6 +3378,7 @@ export interface operations {
|
||||
from?: string | null;
|
||||
/** @description Filter transcripts created on or before this datetime (ISO 8601 with timezone) */
|
||||
to?: string | null;
|
||||
include_deleted?: boolean;
|
||||
};
|
||||
header?: never;
|
||||
path?: never;
|
||||
@@ -3427,6 +3513,68 @@ export interface operations {
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_transcript_restore: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path: {
|
||||
transcript_id: string;
|
||||
};
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["DeletionStatus"];
|
||||
};
|
||||
};
|
||||
/** @description Validation Error */
|
||||
422: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["HTTPValidationError"];
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_transcript_destroy: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path: {
|
||||
transcript_id: string;
|
||||
};
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["DeletionStatus"];
|
||||
};
|
||||
};
|
||||
/** @description Validation Error */
|
||||
422: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["HTTPValidationError"];
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_transcript_get_topics: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -3995,9 +4143,7 @@ export interface operations {
|
||||
};
|
||||
v1_transcript_get_video_url: {
|
||||
parameters: {
|
||||
query?: {
|
||||
token?: string | null;
|
||||
};
|
||||
query?: never;
|
||||
header?: never;
|
||||
path: {
|
||||
transcript_id: string;
|
||||
@@ -4254,6 +4400,7 @@ export interface operations {
|
||||
"application/json":
|
||||
| components["schemas"]["UserWsTranscriptCreated"]
|
||||
| components["schemas"]["UserWsTranscriptDeleted"]
|
||||
| components["schemas"]["UserWsTranscriptRestored"]
|
||||
| components["schemas"]["UserWsTranscriptStatus"]
|
||||
| components["schemas"]["UserWsTranscriptFinalTitle"]
|
||||
| components["schemas"]["UserWsTranscriptDuration"];
|
||||
@@ -4385,6 +4532,26 @@ export interface operations {
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_livekit_webhook: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": unknown;
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_login: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
|
||||
@@ -20,6 +20,8 @@
|
||||
"@fortawesome/fontawesome-svg-core": "^7.2.0",
|
||||
"@fortawesome/free-solid-svg-icons": "^7.2.0",
|
||||
"@fortawesome/react-fontawesome": "^3.2.0",
|
||||
"@livekit/components-react": "2.9.20",
|
||||
"@livekit/components-styles": "1.2.0",
|
||||
"@sentry/nextjs": "^10.40.0",
|
||||
"@tanstack/react-query": "^5.90.21",
|
||||
"@whereby.com/browser-sdk": "^3.18.21",
|
||||
@@ -30,6 +32,7 @@
|
||||
"fontawesome": "^5.6.3",
|
||||
"ioredis": "^5.10.0",
|
||||
"jest-worker": "^30.2.0",
|
||||
"livekit-client": "2.18.0",
|
||||
"lucide-react": "^0.575.0",
|
||||
"next": "16.1.7",
|
||||
"next-auth": "^4.24.13",
|
||||
|
||||
150
www/pnpm-lock.yaml
generated
150
www/pnpm-lock.yaml
generated
@@ -34,6 +34,12 @@ importers:
|
||||
'@fortawesome/react-fontawesome':
|
||||
specifier: ^3.2.0
|
||||
version: 3.2.0(@fortawesome/fontawesome-svg-core@7.2.0)(react@19.2.4)
|
||||
'@livekit/components-react':
|
||||
specifier: 2.9.20
|
||||
version: 2.9.20(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tslib@2.8.1)
|
||||
'@livekit/components-styles':
|
||||
specifier: 1.2.0
|
||||
version: 1.2.0
|
||||
'@sentry/nextjs':
|
||||
specifier: ^10.40.0
|
||||
version: 10.40.0(@opentelemetry/context-async-hooks@2.5.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.5.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.5.1(@opentelemetry/api@1.9.0))(next@16.1.7(@babel/core@7.29.0)(@opentelemetry/api@1.9.0)(babel-plugin-macros@3.1.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.97.3))(react@19.2.4)(webpack@5.105.3)
|
||||
@@ -64,6 +70,9 @@ importers:
|
||||
jest-worker:
|
||||
specifier: ^30.2.0
|
||||
version: 30.2.0
|
||||
livekit-client:
|
||||
specifier: 2.18.0
|
||||
version: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
lucide-react:
|
||||
specifier: ^0.575.0
|
||||
version: 0.575.0(react@19.2.4)
|
||||
@@ -343,6 +352,9 @@ packages:
|
||||
'@bcoe/v8-coverage@0.2.3':
|
||||
resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
|
||||
|
||||
'@bufbuild/protobuf@1.10.1':
|
||||
resolution: {integrity: sha512-wJ8ReQbHxsAfXhrf9ixl0aYbZorRuOWpBNzm8pL8ftmSxQx/wnJD5Eg861NwJU/czy2VXFIebCeZnZrI9rktIQ==}
|
||||
|
||||
'@chakra-ui/react@3.33.0':
|
||||
resolution: {integrity: sha512-HNbUFsFABjVL5IHBxsqtuT+AH/vQT1+xsEWrxnG0GBM2VjlzlMqlqCxNiDyQOsjLZXQC1ciCMbzPNcSCc63Y9w==}
|
||||
peerDependencies:
|
||||
@@ -445,6 +457,9 @@ packages:
|
||||
'@floating-ui/core@1.7.4':
|
||||
resolution: {integrity: sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==}
|
||||
|
||||
'@floating-ui/dom@1.7.4':
|
||||
resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==}
|
||||
|
||||
'@floating-ui/dom@1.7.5':
|
||||
resolution: {integrity: sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==}
|
||||
|
||||
@@ -767,6 +782,36 @@ packages:
|
||||
'@jridgewell/trace-mapping@0.3.31':
|
||||
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
|
||||
|
||||
'@livekit/components-core@0.12.13':
|
||||
resolution: {integrity: sha512-DQmi84afHoHjZ62wm8y+XPNIDHTwFHAltjd3lmyXj8UZHOY7wcza4vFt1xnghJOD5wLRY58L1dkAgAw59MgWvw==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
livekit-client: ^2.17.2
|
||||
tslib: ^2.6.2
|
||||
|
||||
'@livekit/components-react@2.9.20':
|
||||
resolution: {integrity: sha512-hjkYOsJj9Jbghb7wM5cI8HoVisKeL6Zcy1VnRWTLm0sqVbto8GJp/17T4Udx85mCPY6Jgh8I1Cv0yVzgz7CQtg==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
'@livekit/krisp-noise-filter': ^0.2.12 || ^0.3.0
|
||||
livekit-client: ^2.17.2
|
||||
react: '>=18'
|
||||
react-dom: '>=18'
|
||||
tslib: ^2.6.2
|
||||
peerDependenciesMeta:
|
||||
'@livekit/krisp-noise-filter':
|
||||
optional: true
|
||||
|
||||
'@livekit/components-styles@1.2.0':
|
||||
resolution: {integrity: sha512-74/rt0lDh6aHmOPmWAeDE9C4OrNW9RIdmhX/YRbovQBVNGNVWojRjl3FgQZ5LPFXO6l1maKB4JhXcBFENVxVvw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@livekit/mutex@1.1.1':
|
||||
resolution: {integrity: sha512-EsshAucklmpuUAfkABPxJNhzj9v2sG7JuzFDL4ML1oJQSV14sqrpTYnsaOudMAw9yOaW53NU3QQTlUQoRs4czw==}
|
||||
|
||||
'@livekit/protocol@1.44.0':
|
||||
resolution: {integrity: sha512-/vfhDUGcUKO8Q43r6i+5FrDhl5oZjm/X3U4x2Iciqvgn5C8qbj+57YPcWSJ1kyIZm5Cm6AV2nAPjMm3ETD/iyg==}
|
||||
|
||||
'@lukeed/csprng@1.1.0':
|
||||
resolution: {integrity: sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -1886,6 +1931,9 @@ packages:
|
||||
'@types/debug@4.1.12':
|
||||
resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
|
||||
|
||||
'@types/dom-mediacapture-record@1.0.22':
|
||||
resolution: {integrity: sha512-mUMZLK3NvwRLcAAT9qmcK+9p7tpU2FHdDsntR3YI4+GY88XrgG4XiE7u1Q2LAN2/FZOz/tdMDC3GQCR4T8nFuw==}
|
||||
|
||||
'@types/eslint-scope@3.7.7':
|
||||
resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==}
|
||||
|
||||
@@ -3839,6 +3887,9 @@ packages:
|
||||
jose@4.15.9:
|
||||
resolution: {integrity: sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==}
|
||||
|
||||
jose@6.2.2:
|
||||
resolution: {integrity: sha512-d7kPDd34KO/YnzaDOlikGpOurfF0ByC2sEV4cANCtdqLlTfBlw2p14O/5d/zv40gJPbIQxfES3nSx1/oYNyuZQ==}
|
||||
|
||||
js-levenshtein@1.1.6:
|
||||
resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
@@ -3984,6 +4035,11 @@ packages:
|
||||
lines-and-columns@1.2.4:
|
||||
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
||||
|
||||
livekit-client@2.18.0:
|
||||
resolution: {integrity: sha512-wjH4y0rw5fnkPmmaxutPhD4XcAq6goQszS8lw9PEpGXVwiRE6sI/ZH+mOT/s8AHJnEC3tjmfiMZ4MQt8BlaWew==}
|
||||
peerDependencies:
|
||||
'@types/dom-mediacapture-record': ^1
|
||||
|
||||
loader-runner@4.3.1:
|
||||
resolution: {integrity: sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==}
|
||||
engines: {node: '>=6.11.5'}
|
||||
@@ -3996,6 +4052,9 @@ packages:
|
||||
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
lodash.debounce@4.0.8:
|
||||
resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
|
||||
|
||||
lodash.defaults@4.2.0:
|
||||
resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==}
|
||||
|
||||
@@ -4005,6 +4064,14 @@ packages:
|
||||
lodash.memoize@4.1.2:
|
||||
resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==}
|
||||
|
||||
loglevel@1.9.1:
|
||||
resolution: {integrity: sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
|
||||
loglevel@1.9.2:
|
||||
resolution: {integrity: sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
|
||||
longest-streak@3.1.0:
|
||||
resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
|
||||
|
||||
@@ -4752,6 +4819,9 @@ packages:
|
||||
resolution: {integrity: sha512-K6p9y4ZyL9wPzA+PMDloNQPfoDGTiFYDvdlXznyGKgD10BJpcAosvATKrExRKOrNLgD8E7Um7WGW0lxsnOuNLg==}
|
||||
engines: {node: '>=4.0.0'}
|
||||
|
||||
rxjs@7.8.2:
|
||||
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
|
||||
|
||||
safe-array-concat@1.1.3:
|
||||
resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==}
|
||||
engines: {node: '>=0.4'}
|
||||
@@ -5133,6 +5203,9 @@ packages:
|
||||
resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
typed-emitter@2.1.0:
|
||||
resolution: {integrity: sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA==}
|
||||
|
||||
typescript-eslint@8.56.1:
|
||||
resolution: {integrity: sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
@@ -5228,6 +5301,12 @@ packages:
|
||||
'@types/react':
|
||||
optional: true
|
||||
|
||||
usehooks-ts@3.1.1:
|
||||
resolution: {integrity: sha512-I4diPp9Cq6ieSUH2wu+fDAVQO43xwtulo+fKEidHUwZPnYImbtkTjzIJYcDcJqxgmX31GVqNFURodvcgHcW0pA==}
|
||||
engines: {node: '>=16.15.0'}
|
||||
peerDependencies:
|
||||
react: ^16.8.0 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
||||
|
||||
util-deprecate@1.0.2:
|
||||
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
||||
|
||||
@@ -5662,6 +5741,8 @@ snapshots:
|
||||
|
||||
'@bcoe/v8-coverage@0.2.3': {}
|
||||
|
||||
'@bufbuild/protobuf@1.10.1': {}
|
||||
|
||||
'@chakra-ui/react@3.33.0(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
|
||||
dependencies:
|
||||
'@ark-ui/react': 5.34.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
@@ -5811,6 +5892,11 @@ snapshots:
|
||||
dependencies:
|
||||
'@floating-ui/utils': 0.2.10
|
||||
|
||||
'@floating-ui/dom@1.7.4':
|
||||
dependencies:
|
||||
'@floating-ui/core': 1.7.4
|
||||
'@floating-ui/utils': 0.2.10
|
||||
|
||||
'@floating-ui/dom@1.7.5':
|
||||
dependencies:
|
||||
'@floating-ui/core': 1.7.4
|
||||
@@ -6179,6 +6265,34 @@ snapshots:
|
||||
'@jridgewell/resolve-uri': 3.1.2
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
'@livekit/components-core@0.12.13(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(tslib@2.8.1)':
|
||||
dependencies:
|
||||
'@floating-ui/dom': 1.7.4
|
||||
livekit-client: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
loglevel: 1.9.1
|
||||
rxjs: 7.8.2
|
||||
tslib: 2.8.1
|
||||
|
||||
'@livekit/components-react@2.9.20(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tslib@2.8.1)':
|
||||
dependencies:
|
||||
'@livekit/components-core': 0.12.13(livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22))(tslib@2.8.1)
|
||||
clsx: 2.1.1
|
||||
events: 3.3.0
|
||||
jose: 6.2.2
|
||||
livekit-client: 2.18.0(@types/dom-mediacapture-record@1.0.22)
|
||||
react: 19.2.4
|
||||
react-dom: 19.2.4(react@19.2.4)
|
||||
tslib: 2.8.1
|
||||
usehooks-ts: 3.1.1(react@19.2.4)
|
||||
|
||||
'@livekit/components-styles@1.2.0': {}
|
||||
|
||||
'@livekit/mutex@1.1.1': {}
|
||||
|
||||
'@livekit/protocol@1.44.0':
|
||||
dependencies:
|
||||
'@bufbuild/protobuf': 1.10.1
|
||||
|
||||
'@lukeed/csprng@1.1.0': {}
|
||||
|
||||
'@lukeed/uuid@2.0.1':
|
||||
@@ -7259,6 +7373,8 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/ms': 2.1.0
|
||||
|
||||
'@types/dom-mediacapture-record@1.0.22': {}
|
||||
|
||||
'@types/eslint-scope@3.7.7':
|
||||
dependencies:
|
||||
'@types/eslint': 9.6.1
|
||||
@@ -9986,6 +10102,8 @@ snapshots:
|
||||
|
||||
jose@4.15.9: {}
|
||||
|
||||
jose@6.2.2: {}
|
||||
|
||||
js-levenshtein@1.1.6: {}
|
||||
|
||||
js-tokens@4.0.0: {}
|
||||
@@ -10101,6 +10219,19 @@ snapshots:
|
||||
|
||||
lines-and-columns@1.2.4: {}
|
||||
|
||||
livekit-client@2.18.0(@types/dom-mediacapture-record@1.0.22):
|
||||
dependencies:
|
||||
'@livekit/mutex': 1.1.1
|
||||
'@livekit/protocol': 1.44.0
|
||||
'@types/dom-mediacapture-record': 1.0.22
|
||||
events: 3.3.0
|
||||
jose: 6.2.2
|
||||
loglevel: 1.9.2
|
||||
sdp-transform: 2.15.0
|
||||
tslib: 2.8.1
|
||||
typed-emitter: 2.1.0
|
||||
webrtc-adapter: 9.0.4
|
||||
|
||||
loader-runner@4.3.1: {}
|
||||
|
||||
locate-path@5.0.0:
|
||||
@@ -10111,12 +10242,18 @@ snapshots:
|
||||
dependencies:
|
||||
p-locate: 5.0.0
|
||||
|
||||
lodash.debounce@4.0.8: {}
|
||||
|
||||
lodash.defaults@4.2.0: {}
|
||||
|
||||
lodash.isarguments@3.1.0: {}
|
||||
|
||||
lodash.memoize@4.1.2: {}
|
||||
|
||||
loglevel@1.9.1: {}
|
||||
|
||||
loglevel@1.9.2: {}
|
||||
|
||||
longest-streak@3.1.0: {}
|
||||
|
||||
loose-envify@1.4.0:
|
||||
@@ -11009,6 +11146,10 @@ snapshots:
|
||||
|
||||
runes@0.4.3: {}
|
||||
|
||||
rxjs@7.8.2:
|
||||
dependencies:
|
||||
tslib: 2.8.1
|
||||
|
||||
safe-array-concat@1.1.3:
|
||||
dependencies:
|
||||
call-bind: 1.0.8
|
||||
@@ -11462,6 +11603,10 @@ snapshots:
|
||||
possible-typed-array-names: 1.1.0
|
||||
reflect.getprototypeof: 1.0.10
|
||||
|
||||
typed-emitter@2.1.0:
|
||||
optionalDependencies:
|
||||
rxjs: 7.8.2
|
||||
|
||||
typescript-eslint@8.56.1(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3):
|
||||
dependencies:
|
||||
'@typescript-eslint/eslint-plugin': 8.56.1(@typescript-eslint/parser@8.56.1(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3))(eslint@10.0.2(jiti@2.6.1))(typescript@5.9.3)
|
||||
@@ -11585,6 +11730,11 @@ snapshots:
|
||||
optionalDependencies:
|
||||
'@types/react': 19.2.14
|
||||
|
||||
usehooks-ts@3.1.1(react@19.2.4):
|
||||
dependencies:
|
||||
lodash.debounce: 4.0.8
|
||||
react: 19.2.4
|
||||
|
||||
util-deprecate@1.0.2: {}
|
||||
|
||||
uuid-validate@0.0.3: {}
|
||||
|
||||
4
www/public/email-icon-dark.svg
Normal file
4
www/public/email-icon-dark.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="2" y="4" width="20" height="16" rx="2"/>
|
||||
<path d="m22 7-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 7"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 267 B |
@@ -1,4 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="2" y="4" width="20" height="16" rx="2"/>
|
||||
<path d="m22 7-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 7"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 274 B After Width: | Height: | Size: 267 B |
Reference in New Issue
Block a user