Mirror of https://github.com/Monadical-SAS/reflector.git
Synced 2026-02-04 09:56:47 +00:00

Compare commits: v0.15.0...dependabot (99 commits)
Commits (SHA1):
afc65df749
7fde64e252
2ca624f052
fc3ef6c893
5d26461477
6c175a11d8
6e786b7631
8fc8d8bf4a
c723752b7e
4dc49e5b25
23d2bc283d
c8743fdf1c
8a293882ad
d83c4a30b4
3b6540eae5
7ca9cad937
3be7fc0b9a
1f2aeff8cc
407c15299f
e644d6497b
7f42ef6d17
5f7b1ff1a6
2d0df48767
5baa6dd92e
bab1e2d537
e886153ae1
7b352f465e
3cf9757ac2
d9d3938192
8598707c1c
594bcc09e0
7c2d0698ed
1dac999b56
f580b996ee
225783496f
f0ee7b531a
37a454f283
964cd78bb6
5f458aa4a7
5f7dfadabd
0bc971ba96
c62e3c0753
16284e1ac3
443982617d
23023b3cdb
90c3ecc9c3
d7f140b7d1
a47a5f5781
0eba147018
18a27f7b45
32a049c134
91650ec65f
61f0e29d4c
ec17ed7b58
00549f153a
3ad78be762
d3a5cd12d2
af921ce927
bd5df1ce2e
c8024484b3
28f87c09dc
dabf7251db
b51b7aa917
a8983b4e7e
fe47c46489
a2bb6a27d6
7f0b728991
692895c859
d63040e2fd
8d696aa775
f6ca07505f
3aef926203
0b2c82227d
689c8075cc
201671368a
86d5e26224
9bec39808f
86ac23868b
c442a62787
8e438ca285
11731c9d38
4287f8b8ae
3e47c2c057
616092a9bb
18ed713369
2801ab3643
b20cad76e6
28a7258e45
a9a4f32324
857e035562
34a3f5618c
1473fd82dc
372202b0e1
d20aac66c4
dc4b737daa
0baff7abf7
962c40e2b6
3c4b9f2103
c6c035aacf
.github/workflows/deploy.yml (vendored, 90 lines)
@@ -1,90 +0,0 @@ (file deleted)
name: Deploy to Amazon ECS

on: [workflow_dispatch]

env:
  # 950402358378.dkr.ecr.us-east-1.amazonaws.com/reflector
  AWS_REGION: us-east-1
  ECR_REPOSITORY: reflector

jobs:
  build:
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runner: linux-amd64
            arch: amd64
          - platform: linux/arm64
            runner: linux-arm64
            arch: arm64

    runs-on: ${{ matrix.runner }}

    permissions:
      contents: read

    outputs:
      registry: ${{ steps.login-ecr.outputs.registry }}

    steps:
      - uses: actions/checkout@v4

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}

      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push ${{ matrix.arch }}
        uses: docker/build-push-action@v5
        with:
          context: server
          platforms: ${{ matrix.platform }}
          push: true
          tags: ${{ steps.login-ecr.outputs.registry }}/${{ env.ECR_REPOSITORY }}:latest-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          github-token: ${{ secrets.GHA_CACHE_TOKEN }}
          provenance: false

  create-manifest:
    runs-on: ubuntu-latest
    needs: [build]

    permissions:
      deployments: write
      contents: read

    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}

      - name: Login to Amazon ECR
        uses: aws-actions/amazon-ecr-login@v2

      - name: Create and push multi-arch manifest
        run: |
          # Get the registry URL (since we can't easily access job outputs in matrix)
          ECR_REGISTRY=$(aws ecr describe-registry --query 'registryId' --output text).dkr.ecr.${{ env.AWS_REGION }}.amazonaws.com

          docker manifest create \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest-amd64 \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest-arm64

          docker manifest push $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest

          echo "✅ Multi-arch manifest pushed: $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest"
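This deleted workflow built one image per architecture and stitched them together with `docker manifest create`/`docker manifest push`. A quick way to confirm that a pushed manifest really advertises both architectures is `docker manifest inspect`; a minimal sketch, assuming you are logged in to the same ECR registry (the registry host below is taken from the workflow's own comment):

```bash
# List the architectures carried by the multi-arch manifest
ECR_REGISTRY=950402358378.dkr.ecr.us-east-1.amazonaws.com
docker manifest inspect "$ECR_REGISTRY/reflector:latest" | grep '"architecture"'
```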
@@ -1,35 +1,31 @@
name: Build and Push Frontend Docker Image
name: Build and Push Backend Docker Image (Docker Hub)

on:
  push:
    branches:
      - main
    paths:
      - 'www/**'
      - '.github/workflows/docker-frontend.yml'
    tags:
      - "v*"
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}-frontend
  REGISTRY: docker.io
  IMAGE_NAME: monadicalsas/reflector-backend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to GitHub Container Registry
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          username: monadicalsas
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata
        id: meta

@@ -38,7 +34,7 @@ jobs:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=sha,prefix={{branch}}-
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Set up Docker Buildx

@@ -47,11 +43,11 @@ jobs:
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./www
          file: ./www/Dockerfile
          context: ./server
          file: ./server/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64
.github/workflows/dockerhub-frontend.yml (vendored, new file, 70 lines)
@@ -0,0 +1,70 @@ (new file)
name: Build and Push Frontend Docker Image

on:
  push:
    tags:
      - "v*"
  workflow_dispatch:

env:
  REGISTRY: docker.io
  IMAGE_NAME: monadicalsas/reflector-frontend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: monadicalsas
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./www
          file: ./www/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64

  deploy:
    needs: build-and-push
    runs-on: ubuntu-latest
    if: success()
    strategy:
      matrix:
        environment: [reflector-monadical, reflector-media]
    environment: ${{ matrix.environment }}
    steps:
      - name: Trigger Coolify deployment
        run: |
          curl -X POST "${{ secrets.COOLIFY_WEBHOOK_URL }}" \
            -H "Content-Type: application/json" \
            -H "Authorization: Bearer ${{ secrets.COOLIFY_WEBHOOK_TOKEN }}" \
            -f || (echo "Failed to trigger Coolify deployment for ${{ matrix.environment }}" && exit 1)
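Since this workflow also listens for `workflow_dispatch`, the whole build/push/deploy chain can be fired by hand; a minimal sketch using the GitHub CLI, assuming `gh` is installed and authenticated against this repository:

```bash
# Trigger the frontend workflow manually, then check the latest run
gh workflow run dockerhub-frontend.yml
gh run list --workflow=dockerhub-frontend.yml --limit 1
```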
.gitignore (vendored, 5 lines)
@@ -1,6 +1,8 @@
.DS_Store
server/.env
server/.env.production
.env
Caddyfile
server/exportdanswer
.vercel
.env*.local

@@ -18,3 +20,6 @@ CLAUDE.local.md
www/.env.development
www/.env.production
.playwright-mcp
docs/pnpm-lock.yaml
.secrets
opencode.json
@@ -1 +1,6 @@
b9d891d3424f371642cb032ecfd0e2564470a72c:server/tests/test_transcripts_recording_deletion.py:generic-api-key:15
docs/docs/installation/auth-setup.md:curl-auth-header:250
docs/docs/installation/daily-setup.md:curl-auth-header:277
gpu/self_hosted/DEV_SETUP.md:curl-auth-header:74
gpu/self_hosted/DEV_SETUP.md:curl-auth-header:83
server/reflector/worker/process.py:generic-api-key:465
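Assuming this is a gitleaks-style ignore list (the entries match its `commit:file:rule:line` fingerprint format; the hunk itself does not name the file), a scan that honors such an allowlist would look like:

```bash
# Scan the working tree for leaked secrets, redacting any matches in the output
gitleaks detect --redact
```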
.secrets.example (new file, 24 lines)
@@ -0,0 +1,24 @@ (new file)
# Example secrets file for GitHub Actions workflows
# Copy this to .secrets and fill in your values
# These secrets should be configured in GitHub repository settings:
# Settings > Secrets and variables > Actions

# DockerHub Configuration (required for frontend and backend deployment)
# Create a Docker Hub access token at https://hub.docker.com/settings/security
# Username: monadicalsas
DOCKERHUB_TOKEN=your-dockerhub-access-token

# GitHub Token (required for frontend and backend deployment)
# Used by docker/metadata-action for extracting image metadata
# Can use the default GITHUB_TOKEN or create a personal access token
GITHUB_TOKEN=your-github-token-or-use-default-GITHUB_TOKEN

# Coolify Deployment Webhook (required for frontend deployment)
# Used to trigger automatic deployment after image push
# Configure these secrets in GitHub Environments:
# Each environment should have:
# - COOLIFY_WEBHOOK_URL: The webhook URL for that specific deployment
# - COOLIFY_WEBHOOK_TOKEN: The webhook token (can be the same for both if using same token)

# Optional: GitHub Actions Cache Token (for local testing with act)
GHA_CACHE_TOKEN=your-github-token-or-empty
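The last comment points at local testing with act; a minimal sketch of feeding this file to act when replaying the `workflow_dispatch` event (assuming act is installed and Docker is running):

```bash
# Run the dispatch-triggered workflow locally, loading secrets from .secrets
act workflow_dispatch --secret-file .secrets -W .github/workflows/dockerhub-frontend.yml
```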
CHANGELOG.md (208 lines)
@@ -1,5 +1,213 @@
# Changelog

## [0.31.0](https://github.com/Monadical-SAS/reflector/compare/v0.30.0...v0.31.0) (2026-01-23)

### Features

* mixdown optional ([#834](https://github.com/Monadical-SAS/reflector/issues/834)) ([fc3ef6c](https://github.com/Monadical-SAS/reflector/commit/fc3ef6c8933231c731fad84e7477a476a6220a5e))

## [0.30.0](https://github.com/Monadical-SAS/reflector/compare/v0.29.0...v0.30.0) (2026-01-23)

### Features

* brady bunch ([#816](https://github.com/Monadical-SAS/reflector/issues/816)) ([6c175a1](https://github.com/Monadical-SAS/reflector/commit/6c175a11d8a3745095bfad06a4ad3ccdfd278433))

## [0.29.0](https://github.com/Monadical-SAS/reflector/compare/v0.28.1...v0.29.0) (2026-01-21)

### Features

* set hatchet as default for multitracks ([#822](https://github.com/Monadical-SAS/reflector/issues/822)) ([c723752](https://github.com/Monadical-SAS/reflector/commit/c723752b7e15aa48a41ad22856f147a5517d3f46))

## [0.28.1](https://github.com/Monadical-SAS/reflector/compare/v0.28.0...v0.28.1) (2026-01-21)

### Bug Fixes

* ics non-sync bugfix ([#823](https://github.com/Monadical-SAS/reflector/issues/823)) ([23d2bc2](https://github.com/Monadical-SAS/reflector/commit/23d2bc283d4d02187b250d2055103e0374ee93d6))

## [0.28.0](https://github.com/Monadical-SAS/reflector/compare/v0.27.0...v0.28.0) (2026-01-20)

### Features

* worker affinity ([#819](https://github.com/Monadical-SAS/reflector/issues/819)) ([3b6540e](https://github.com/Monadical-SAS/reflector/commit/3b6540eae5b597449f98661bdf15483b77be3268))

## [0.27.0](https://github.com/Monadical-SAS/reflector/compare/v0.26.0...v0.27.0) (2025-12-26)

### Features

* devex/hatchet log progress track ([#813](https://github.com/Monadical-SAS/reflector/issues/813)) ([2d0df48](https://github.com/Monadical-SAS/reflector/commit/2d0df487674e5486208cd599e3338ebff8b6e470))

### Bug Fixes

* webhook parity, pipeline rename, waveform constant fix ([#806](https://github.com/Monadical-SAS/reflector/issues/806)) ([5f7b1ff](https://github.com/Monadical-SAS/reflector/commit/5f7b1ff1a68ebbb907684c7c5f55c1f82dac8550))

## [0.26.0](https://github.com/Monadical-SAS/reflector/compare/v0.25.0...v0.26.0) (2025-12-23)

### Features

* parallelize hatchet ([#804](https://github.com/Monadical-SAS/reflector/issues/804)) ([594bcc0](https://github.com/Monadical-SAS/reflector/commit/594bcc09e0ca744163de2f1525ebbf7c52a68448))

## [0.25.0](https://github.com/Monadical-SAS/reflector/compare/v0.24.0...v0.25.0) (2025-12-22)

### Features

* consent disable feature ([#799](https://github.com/Monadical-SAS/reflector/issues/799)) ([2257834](https://github.com/Monadical-SAS/reflector/commit/225783496f2e265d5cb58e3539a20bf6b55589b8))
* durable ([#794](https://github.com/Monadical-SAS/reflector/issues/794)) ([1dac999](https://github.com/Monadical-SAS/reflector/commit/1dac999b56997582ce400e7d56e915adc1e4728d))
* increase daily recording max duration ([#801](https://github.com/Monadical-SAS/reflector/issues/801)) ([f580b99](https://github.com/Monadical-SAS/reflector/commit/f580b996eef49cce16433c505abfc6454dd45de1))

### Bug Fixes

* logout redirect ([#802](https://github.com/Monadical-SAS/reflector/issues/802)) ([f0ee7b5](https://github.com/Monadical-SAS/reflector/commit/f0ee7b531a0911f214ccbb84d399e9a6c9b700c0))

## [0.24.0](https://github.com/Monadical-SAS/reflector/compare/v0.23.2...v0.24.0) (2025-12-18)

### Features

* identify action items ([#790](https://github.com/Monadical-SAS/reflector/issues/790)) ([964cd78](https://github.com/Monadical-SAS/reflector/commit/964cd78bb699d83d012ae4b8c96565df25b90a5d))

### Bug Fixes

* automatically reprocess daily recordings ([#797](https://github.com/Monadical-SAS/reflector/issues/797)) ([5f458aa](https://github.com/Monadical-SAS/reflector/commit/5f458aa4a7ec3d00ca5ec49d62fcc8ad232b138e))
* daily video optimisation ([#789](https://github.com/Monadical-SAS/reflector/issues/789)) ([16284e1](https://github.com/Monadical-SAS/reflector/commit/16284e1ac3faede2b74f0d91b50c0b5612af2c35))
* main menu login ([#800](https://github.com/Monadical-SAS/reflector/issues/800)) ([0bc971b](https://github.com/Monadical-SAS/reflector/commit/0bc971ba966a52d719c8c240b47dc7b3bdea4391))
* retry on workflow timeout ([#798](https://github.com/Monadical-SAS/reflector/issues/798)) ([5f7dfad](https://github.com/Monadical-SAS/reflector/commit/5f7dfadabd3e8017406ad3720ba495a59963ee34))

## [0.23.2](https://github.com/Monadical-SAS/reflector/compare/v0.23.1...v0.23.2) (2025-12-11)

### Bug Fixes

* build on push tags ([#785](https://github.com/Monadical-SAS/reflector/issues/785)) ([d7f140b](https://github.com/Monadical-SAS/reflector/commit/d7f140b7d1f4660d5da7a0da1357f68869e0b5cd))

## [0.23.1](https://github.com/Monadical-SAS/reflector/compare/v0.23.0...v0.23.1) (2025-12-11)

### Bug Fixes

* populate room_name in transcript GET endpoint ([#783](https://github.com/Monadical-SAS/reflector/issues/783)) ([0eba147](https://github.com/Monadical-SAS/reflector/commit/0eba1470181c7b9e0a79964a1ef28c09bcbdd9d7))

## [0.23.0](https://github.com/Monadical-SAS/reflector/compare/v0.22.4...v0.23.0) (2025-12-10)

### Features

* dockerhub ci ([#772](https://github.com/Monadical-SAS/reflector/issues/772)) ([00549f1](https://github.com/Monadical-SAS/reflector/commit/00549f153ade922cf4cb6c5358a7d11a39c426d2))
* llm retries ([#739](https://github.com/Monadical-SAS/reflector/issues/739)) ([61f0e29](https://github.com/Monadical-SAS/reflector/commit/61f0e29d4c51eab54ee67af92141fbb171e8ccaa))

### Bug Fixes

* celery inspect bug sidestep in restart script ([#766](https://github.com/Monadical-SAS/reflector/issues/766)) ([ec17ed7](https://github.com/Monadical-SAS/reflector/commit/ec17ed7b587cf6ee143646baaee67a7c017044d4))
* deploy frontend to coolify ([#779](https://github.com/Monadical-SAS/reflector/issues/779)) ([91650ec](https://github.com/Monadical-SAS/reflector/commit/91650ec65f65713faa7ee0dcfb75af427b7c4ba0))
* hide rooms settings instead of disabling ([#763](https://github.com/Monadical-SAS/reflector/issues/763)) ([3ad78be](https://github.com/Monadical-SAS/reflector/commit/3ad78be7628c0d029296b301a0e87236c76b7598))
* return participant emails from transcript endpoint ([#769](https://github.com/Monadical-SAS/reflector/issues/769)) ([d3a5cd1](https://github.com/Monadical-SAS/reflector/commit/d3a5cd12d2d0d9c32af2d5bd9322e030ef69b85d))

## [0.22.4](https://github.com/Monadical-SAS/reflector/compare/v0.22.3...v0.22.4) (2025-12-02)

### Bug Fixes

* Multitrack mixdown optimisation 2 ([#764](https://github.com/Monadical-SAS/reflector/issues/764)) ([bd5df1c](https://github.com/Monadical-SAS/reflector/commit/bd5df1ce2ebf35d7f3413b295e56937a9a28ef7b))

## [0.22.3](https://github.com/Monadical-SAS/reflector/compare/v0.22.2...v0.22.3) (2025-12-02)

### Bug Fixes

* align daily room settings ([#759](https://github.com/Monadical-SAS/reflector/issues/759)) ([28f87c0](https://github.com/Monadical-SAS/reflector/commit/28f87c09dc459846873d0dde65b03e3d7b2b9399))

## [0.22.2](https://github.com/Monadical-SAS/reflector/compare/v0.22.1...v0.22.2) (2025-12-02)

### Bug Fixes

* daily auto refresh fix ([#755](https://github.com/Monadical-SAS/reflector/issues/755)) ([fe47c46](https://github.com/Monadical-SAS/reflector/commit/fe47c46489c5aa0cc538109f7559cc9accb35c01))
* Skip mixdown for multitrack ([#760](https://github.com/Monadical-SAS/reflector/issues/760)) ([b51b7aa](https://github.com/Monadical-SAS/reflector/commit/b51b7aa9176c1a53ba57ad99f5e976c804a1e80c))

## [0.22.1](https://github.com/Monadical-SAS/reflector/compare/v0.22.0...v0.22.1) (2025-11-27)

### Bug Fixes

* participants update from daily ([#749](https://github.com/Monadical-SAS/reflector/issues/749)) ([7f0b728](https://github.com/Monadical-SAS/reflector/commit/7f0b728991c1b9f9aae702c96297eae63b561ef5))

## [0.22.0](https://github.com/Monadical-SAS/reflector/compare/v0.21.0...v0.22.0) (2025-11-26)

### Features

* Multitrack segmentation ([#747](https://github.com/Monadical-SAS/reflector/issues/747)) ([d63040e](https://github.com/Monadical-SAS/reflector/commit/d63040e2fdc07e7b272e85a39eb2411cd6a14798))

## [0.21.0](https://github.com/Monadical-SAS/reflector/compare/v0.20.0...v0.21.0) (2025-11-26)

### Features

* add transcript format parameter to GET endpoint ([#709](https://github.com/Monadical-SAS/reflector/issues/709)) ([f6ca075](https://github.com/Monadical-SAS/reflector/commit/f6ca07505f34483b02270a2ef3bd809e9d2e1045))

## [0.20.0](https://github.com/Monadical-SAS/reflector/compare/v0.19.0...v0.20.0) (2025-11-25)

### Features

* link transcript participants ([#737](https://github.com/Monadical-SAS/reflector/issues/737)) ([9bec398](https://github.com/Monadical-SAS/reflector/commit/9bec39808fc6322612d8b87e922a6f7901fc01c1))
* transcript restart script ([#742](https://github.com/Monadical-SAS/reflector/issues/742)) ([86d5e26](https://github.com/Monadical-SAS/reflector/commit/86d5e26224bb55a0f1cc785aeda52065bb92ee6f))

## [0.19.0](https://github.com/Monadical-SAS/reflector/compare/v0.18.0...v0.19.0) (2025-11-25)

### Features

* dailyco api module ([#725](https://github.com/Monadical-SAS/reflector/issues/725)) ([4287f8b](https://github.com/Monadical-SAS/reflector/commit/4287f8b8aeee60e51db7539f4dcbda5f6e696bd8))
* dailyco poll ([#730](https://github.com/Monadical-SAS/reflector/issues/730)) ([8e438ca](https://github.com/Monadical-SAS/reflector/commit/8e438ca285152bd48fdc42767e706fb448d3525c))
* multitrack cli ([#735](https://github.com/Monadical-SAS/reflector/issues/735)) ([11731c9](https://github.com/Monadical-SAS/reflector/commit/11731c9d38439b04e93b1c3afbd7090bad11a11f))

### Bug Fixes

* default platform fix ([#736](https://github.com/Monadical-SAS/reflector/issues/736)) ([c442a62](https://github.com/Monadical-SAS/reflector/commit/c442a627873ca667656eeaefb63e54ab10b8d19e))
* parakeet vad not getting the end timestamp ([#728](https://github.com/Monadical-SAS/reflector/issues/728)) ([18ed713](https://github.com/Monadical-SAS/reflector/commit/18ed7133693653ef4ddac6c659a8c14b320d1657))
* start raw tracks recording ([#729](https://github.com/Monadical-SAS/reflector/issues/729)) ([3e47c2c](https://github.com/Monadical-SAS/reflector/commit/3e47c2c0573504858e0d2e1798b6ed31f16b4a5d))

## [0.18.0](https://github.com/Monadical-SAS/reflector/compare/v0.17.0...v0.18.0) (2025-11-14)

### Features

* daily QOL: participants dictionary ([#721](https://github.com/Monadical-SAS/reflector/issues/721)) ([b20cad7](https://github.com/Monadical-SAS/reflector/commit/b20cad76e69fb6a76405af299a005f1ddcf60eae))

### Bug Fixes

* add proccessing page to file upload and reprocessing ([#650](https://github.com/Monadical-SAS/reflector/issues/650)) ([28a7258](https://github.com/Monadical-SAS/reflector/commit/28a7258e45317b78e60e6397be2bc503647eaace))
* copy transcript ([#674](https://github.com/Monadical-SAS/reflector/issues/674)) ([a9a4f32](https://github.com/Monadical-SAS/reflector/commit/a9a4f32324f66c838e081eee42bb9502f38c1db1))

## [0.17.0](https://github.com/Monadical-SAS/reflector/compare/v0.16.0...v0.17.0) (2025-11-13)

### Features

* add API key management UI ([#716](https://github.com/Monadical-SAS/reflector/issues/716)) ([372202b](https://github.com/Monadical-SAS/reflector/commit/372202b0e1a86823900b0aa77be1bfbc2893d8a1))
* daily.co support as alternative to whereby ([#691](https://github.com/Monadical-SAS/reflector/issues/691)) ([1473fd8](https://github.com/Monadical-SAS/reflector/commit/1473fd82dc472c394cbaa2987212ad662a74bcac))

## [0.16.0](https://github.com/Monadical-SAS/reflector/compare/v0.15.0...v0.16.0) (2025-10-24)

### Features

* search date filter ([#710](https://github.com/Monadical-SAS/reflector/issues/710)) ([962c40e](https://github.com/Monadical-SAS/reflector/commit/962c40e2b6428ac42fd10aea926782d7a6f3f902))

## [0.15.0](https://github.com/Monadical-SAS/reflector/compare/v0.14.0...v0.15.0) (2025-10-20)
Caddyfile.example (new file, 22 lines)
@@ -0,0 +1,22 @@ (new file)
# Reflector Caddyfile
# Replace example.com with your actual domains
# CORS is handled by the backend - Caddy just proxies
#
# For environment variable substitution, set:
#   FRONTEND_DOMAIN=app.example.com
#   API_DOMAIN=api.example.com
#   AUTHENTIK_DOMAIN=authentik.example.com (optional, for authentication)
# Or edit this file directly with your domains.

{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}

# Uncomment if using Authentik for authentication (see auth-setup.md)
# {$AUTHENTIK_DOMAIN:authentik.example.com} {
#     reverse_proxy authentik-server-1:9000
# }
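Caddy resolves the `{$VAR:default}` placeholders from its own process environment, so the variables have to be visible to the caddy binary itself; a minimal sketch for a bare-metal run (inside the compose stack you would instead pass them through an `environment:` entry on the caddy service):

```bash
# Export the domains, then start Caddy against this file
export FRONTEND_DOMAIN=app.example.com
export API_DOMAIN=api.example.com
caddy run --config Caddyfile
```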
@@ -168,6 +168,12 @@ You can manually process an audio file by calling the process tool:
uv run python -m reflector.tools.process path/to/audio.wav
```

## Reprocessing any transcription

```bash
uv run -m reflector.tools.process_transcript 81ec38d1-9dd7-43d2-b3f8-51f4d34a07cd --sync
```

## Build-time env variables

Next.js projects typically use NEXT_PUBLIC_-prefixed build-time variables. We don't use those, because we need to serve a customizable, prebuilt Docker container whose configuration is supplied at runtime.
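Concretely, the same prebuilt frontend image can serve different deployments purely through runtime environment; a sketch using variable names taken from docker-compose.prod.yml (the URLs are placeholders):

```bash
# Run the prebuilt image with deployment-specific config injected at runtime
docker run -d -p 3000:3000 \
  -e API_URL=https://api.example.com \
  -e WEBSOCKET_URL=wss://api.example.com \
  -e SITE_URL=https://app.example.com \
  monadicalsas/reflector-frontend:latest
```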
@@ -1,28 +1,61 @@
# Production Docker Compose configuration for Frontend
# Production Docker Compose configuration
# Usage: docker compose -f docker-compose.prod.yml up -d
#
# Prerequisites:
# 1. Copy .env.example to .env and configure for both server/ and www/
# 2. Copy Caddyfile.example to Caddyfile and edit with your domains
# 3. Deploy Modal GPU functions (see gpu/modal_deployments/deploy-all.sh)

services:
  web:
    build:
      context: ./www
      dockerfile: Dockerfile
    image: reflector-frontend:latest
    image: monadicalsas/reflector-frontend:latest
    restart: unless-stopped
    env_file:
      - ./www/.env
    pull_policy: always
    environment:
      - KV_URL=${KV_URL:-redis://redis:6379}
      - SITE_URL=${SITE_URL}
      - API_URL=${API_URL}
      - WEBSOCKET_URL=${WEBSOCKET_URL}
      - NEXTAUTH_URL=${NEXTAUTH_URL:-http://localhost:3000}
      - NEXTAUTH_SECRET=${NEXTAUTH_SECRET:-changeme-in-production}
      - AUTHENTIK_ISSUER=${AUTHENTIK_ISSUER}
      - AUTHENTIK_CLIENT_ID=${AUTHENTIK_CLIENT_ID}
      - AUTHENTIK_CLIENT_SECRET=${AUTHENTIK_CLIENT_SECRET}
      - AUTHENTIK_REFRESH_TOKEN_URL=${AUTHENTIK_REFRESH_TOKEN_URL}
      - SENTRY_DSN=${SENTRY_DSN}
      - SENTRY_IGNORE_API_RESOLUTION_ERROR=${SENTRY_IGNORE_API_RESOLUTION_ERROR:-1}
      - KV_URL=redis://redis:6379
    depends_on:
      - redis

  server:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: server
    depends_on:
      - postgres
      - redis
    volumes:
      - server_data:/app/data
      - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

  worker:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: worker
    depends_on:
      - postgres
      - redis
    volumes:
      - server_data:/app/data
      - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

  beat:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: beat
    depends_on:
      - postgres
      - redis

  redis:
    image: redis:7.2-alpine

@@ -35,5 +68,46 @@ services:
    volumes:
      - redis_data:/data

  postgres:
    image: postgres:17-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: reflector
      POSTGRES_PASSWORD: reflector
      POSTGRES_DB: reflector
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U reflector"]
      interval: 30s
      timeout: 3s
      retries: 3

  caddy:
    image: caddy:2-alpine
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy_data:/data
      - caddy_config:/config
    depends_on:
      - web
      - server

  docs:
    build: ./docs
    restart: unless-stopped

volumes:
  redis_data:
  postgres_data:
  server_data:
  caddy_data:
  caddy_config:

networks:
  default:
    attachable: true
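With the healthchecks above in place, a first smoke test of the production stack is just compose itself; a minimal sketch:

```bash
# Bring the stack up, confirm postgres reports "healthy", then follow the API logs
docker compose -f docker-compose.prod.yml up -d
docker compose -f docker-compose.prod.yml ps
docker compose -f docker-compose.prod.yml logs -f server
```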
@@ -34,6 +34,33 @@ services:
    environment:
      ENTRYPOINT: beat

  hatchet-worker-cpu:
    build:
      context: server
    volumes:
      - ./server/:/app/
      - /app/.venv
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: hatchet-worker-cpu
    depends_on:
      hatchet:
        condition: service_healthy

  hatchet-worker-llm:
    build:
      context: server
    volumes:
      - ./server/:/app/
      - /app/.venv
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: hatchet-worker-llm
    depends_on:
      hatchet:
        condition: service_healthy

  redis:
    image: redis:7.2
    ports:

@@ -55,6 +82,7 @@ services:

  postgres:
    image: postgres:17
    command: postgres -c 'max_connections=200'
    ports:
      - 5432:5432
    environment:

@@ -63,6 +91,42 @@ services:
      POSTGRES_DB: reflector
    volumes:
      - ./data/postgres:/var/lib/postgresql/data
      - ./server/docker/init-hatchet-db.sql:/docker-entrypoint-initdb.d/init-hatchet-db.sql:ro
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d reflector -U reflector"]
      interval: 10s
      timeout: 10s
      retries: 5
      start_period: 10s

  hatchet:
    image: ghcr.io/hatchet-dev/hatchet/hatchet-lite:latest
    ports:
      - "8889:8888"
      - "7078:7077"
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      DATABASE_URL: "postgresql://reflector:reflector@postgres:5432/hatchet?sslmode=disable"
      SERVER_AUTH_COOKIE_DOMAIN: localhost
      SERVER_AUTH_COOKIE_INSECURE: "t"
      SERVER_GRPC_BIND_ADDRESS: "0.0.0.0"
      SERVER_GRPC_INSECURE: "t"
      SERVER_GRPC_BROADCAST_ADDRESS: hatchet:7077
      SERVER_GRPC_PORT: "7077"
      SERVER_URL: http://localhost:8889
      SERVER_AUTH_SET_EMAIL_VERIFIED: "t"
      # SERVER_DEFAULT_ENGINE_VERSION: "V1" # default
      SERVER_INTERNAL_CLIENT_INTERNAL_GRPC_BROADCAST_ADDRESS: hatchet:7077
    volumes:
      - ./data/hatchet-config:/config
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8888/api/live"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s

networks:
  default:
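Because the hatchet-lite container maps its port 8888 to 8889 on the host, the same liveness probe the healthcheck runs inside the container can be replayed from the host; a minimal sketch:

```bash
# Should return HTTP 200 once the Hatchet engine is up
curl -f http://localhost:8889/api/live
```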
docs/.gitignore (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@ (new file)
# Dependencies
/node_modules

# Production
/build

# Generated files
.docusaurus
.cache-loader

# Misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
docs/Dockerfile (new file, 39 lines)
@@ -0,0 +1,39 @@ (new file)
FROM node:18-alpine AS builder
WORKDIR /app

# Install curl for fetching OpenAPI spec
RUN apk add --no-cache curl

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci

# Copy source
COPY . .

# Fetch OpenAPI spec from production API
ARG OPENAPI_URL=https://api-reflector.monadical.com/openapi.json
RUN mkdir -p ./static && curl -sf "${OPENAPI_URL}" -o ./static/openapi.json || echo '{}' > ./static/openapi.json

# Fix docusaurus config: change onBrokenLinks to 'warn' for Docker build
RUN sed -i "s/onBrokenLinks: 'throw'/onBrokenLinks: 'warn'/g" docusaurus.config.ts

# Build static site (skip prebuild hook by calling docusaurus directly)
RUN npx docusaurus build

# Production image
FROM nginx:alpine

# Copy built static files
COPY --from=builder /app/build /usr/share/nginx/html

# Healthcheck for container orchestration
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
  CMD wget --no-verbose --tries=1 --spider http://localhost/ || exit 1

# Expose port
EXPOSE 80

CMD ["nginx", "-g", "daemon off;"]
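The build argument makes the OpenAPI source swappable; a minimal sketch of building and serving this docs image locally (the `reflector-docs` tag and host port 8080 are arbitrary choices for illustration):

```bash
# Build with the default spec URL made explicit, then serve on localhost:8080
docker build -t reflector-docs \
  --build-arg OPENAPI_URL=https://api-reflector.monadical.com/openapi.json ./docs
docker run --rm -p 8080:80 reflector-docs
```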
docs/README.md (new file, 41 lines)
@@ -0,0 +1,41 @@ (new file)
# Website

This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.

### Installation

```
$ yarn
```

### Local Development

```
$ yarn start
```

This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.

### Build

```
$ yarn build
```

This command generates static content into the `build` directory and can be served using any static content hosting service.

### Deployment

Using SSH:

```
$ USE_SSH=true yarn deploy
```

Not using SSH:

```
$ GIT_USER=<Your GitHub username> yarn deploy
```

If you are using GitHub Pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
docs/TODO.md (new file, 48 lines)
@@ -0,0 +1,48 @@ (new file)
# Documentation TODO - PR #778 Review Comments

Remaining items from Tito's review. See CHANGES.md for completed items.

---

## Remaining Items

| File | Issue | Priority | Notes |
|------|-------|----------|-------|
| ~~`intro.md:10`~~ | ~~Add screenshots~~ | ~~Low~~ | ✅ **DONE** - Added transcript view screenshot |
| `file-pipeline.md:47` | chunk_size example shows 30s | Low | Unclear what example config should show (~16s actual) |
| ~~`self-hosted-gpu-setup.md:235`~~ | ~~systemd template in repo~~ | ~~Medium~~ | ✅ **REMOVED** - Systemd support removed entirely |
| ~~`installation/overview.md:85`~~ | ~~uv tool install~~ | ~~Low~~ | ✅ **DONE** - Changed to `uv tool install modal` |
| ~~`installation/overview.md:101`~~ | ~~"Why systemd?"~~ | ~~Low~~ | ✅ **REMOVED** - Systemd support removed entirely |
| `installation/overview.md:271` | Caddyfile copy removal | Low | Keeping for clarity |

---

## Skipped (Decided Not To Fix)

| File | Issue | Reason |
|------|-------|--------|
| `installation/overview.md:40` | Model size requirements | Uncertain about exact requirements |
| `installation/overview.md:136` | WebRTC ports | Handled by Daily/Whereby, not us |
| `installation/overview.md:136` | Security section | Risk of incomplete/misleading docs |
| `installation/overview.md:179` | AWS setup order | Low priority, works as-is |
| `installation/overview.md:410` | Redundant next steps | Issue doesn't exist (file ends at 401) |

---

## Completed

See CHANGES.md for full list. Summary:

### Removals (9)
- Encrypted data storage, session management, analytics claims
- "coming soon" GPU, 30-second segments, CPU optimization
- Encryption at rest, manual migrations, modprobe commands

### Fixes (9)
- WebRTC + Daily/Whereby, 4 API endpoints, Docker docs link
- NVIDIA steps merged, compose.yml referenced, cross-reference duplicate
- tee→nano, MOV format, troubleshooting link

### Previously Fixed (7)
- Blog removal, Daily.co added, rate limiting removed (x2)
- PII claim removed, python→yaml, LUFS removed
docs/create-docs.sh (executable, 777 lines)
@@ -0,0 +1,777 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Create directory structure
|
||||
mkdir -p docs/concepts
|
||||
mkdir -p docs/installation
|
||||
mkdir -p docs/pipelines
|
||||
mkdir -p docs/reference/architecture
|
||||
mkdir -p docs/reference/processors
|
||||
mkdir -p docs/reference/api
|
||||
|
||||
# Create all documentation files with content
|
||||
echo "Creating documentation files..."
|
||||
|
||||
# Concepts - Modes
|
||||
cat > docs/concepts/modes.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 2
|
||||
title: Operating Modes
|
||||
---
|
||||
|
||||
# Operating Modes
|
||||
|
||||
Reflector operates in two distinct modes to accommodate different use cases and security requirements.
|
||||
|
||||
## Public Mode
|
||||
|
||||
Public mode provides immediate access to core transcription features without requiring authentication.
|
||||
|
||||
### Features Available
|
||||
- **File Upload**: Process audio files up to 2GB
|
||||
- **Live Transcription**: Stream audio from microphone
|
||||
- **Basic Processing**: Transcription and diarization
|
||||
- **Temporary Storage**: Results available for 24 hours
|
||||
|
||||
### Limitations
|
||||
- No persistent storage
|
||||
- No meeting rooms
|
||||
- Limited to single-user sessions
|
||||
- No team collaboration features
|
||||
|
||||
### Use Cases
|
||||
- Quick transcription needs
|
||||
- Testing and evaluation
|
||||
- Individual users
|
||||
- Public demonstrations
|
||||
|
||||
## Private Mode
|
||||
|
||||
Private mode unlocks the full potential of Reflector with authentication and persistent storage.
|
||||
|
||||
### Additional Features
|
||||
- **Virtual Meeting Rooms**: Whereby integration
|
||||
- **Team Collaboration**: Share transcripts with team
|
||||
- **Persistent Storage**: Long-term transcript archive
|
||||
- **Advanced Analytics**: Meeting insights and trends
|
||||
- **Custom Integration**: Webhooks and API access
|
||||
- **User Management**: Role-based access control
|
||||
|
||||
### Authentication Options
|
||||
|
||||
#### Authentik Integration
|
||||
Enterprise-grade SSO with support for:
|
||||
- SAML 2.0
|
||||
- OAuth 2.0 / OIDC
|
||||
- LDAP / Active Directory
|
||||
- Multi-factor authentication
|
||||
|
||||
#### JWT Authentication
|
||||
Stateless token-based auth for:
|
||||
- API access
|
||||
- Service-to-service communication
|
||||
- Mobile applications
|
||||
|
||||
### Room Management
|
||||
|
||||
Virtual rooms provide dedicated spaces for meetings:
|
||||
- **Persistent URLs**: Same link for recurring meetings
|
||||
- **Access Control**: Invite-only or open rooms
|
||||
- **Recording Consent**: Automatic consent management
|
||||
- **Custom Settings**: Per-room configuration
|
||||
|
||||
## Mode Selection
|
||||
|
||||
The mode is determined by your deployment configuration:
|
||||
|
||||
```yaml
|
||||
# Public Mode (no authentication)
|
||||
REFLECTOR_AUTH_BACKEND=none
|
||||
|
||||
# Private Mode (with authentication)
|
||||
REFLECTOR_AUTH_BACKEND=jwt
|
||||
# or
|
||||
REFLECTOR_AUTH_BACKEND=authentik
|
||||
```
|
||||
|
||||
## Feature Comparison
|
||||
|
||||
| Feature | Public Mode | Private Mode |
|
||||
|---------|------------|--------------|
|
||||
| File Upload | ✅ | ✅ |
|
||||
| Live Transcription | ✅ | ✅ |
|
||||
| Speaker Diarization | ✅ | ✅ |
|
||||
| Translation | ✅ | ✅ |
|
||||
| Summarization | ✅ | ✅ |
|
||||
| Meeting Rooms | ❌ | ✅ |
|
||||
| Persistent Storage | ❌ | ✅ |
|
||||
| Team Collaboration | ❌ | ✅ |
|
||||
| API Access | Limited | Full |
|
||||
| User Management | ❌ | ✅ |
|
||||
| Custom Branding | ❌ | ✅ |
|
||||
| Analytics | ❌ | ✅ |
|
||||
| Webhooks | ❌ | ✅ |
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### Public Mode Security
|
||||
- Rate limiting to prevent abuse
|
||||
- File size restrictions
|
||||
- Automatic cleanup of old data
|
||||
- No PII storage
|
||||
|
||||
### Private Mode Security
|
||||
- Encrypted data storage
|
||||
- Audit logging
|
||||
- Session management
|
||||
- Access control lists
|
||||
- Data retention policies
|
||||
|
||||
## Choosing the Right Mode
|
||||
|
||||
### Choose Public Mode if:
|
||||
- You need quick, one-time transcriptions
|
||||
- You're evaluating Reflector
|
||||
- You don't need persistent storage
|
||||
- You're processing non-sensitive content
|
||||
|
||||
### Choose Private Mode if:
|
||||
- You need team collaboration
|
||||
- You require persistent storage
|
||||
- You're processing sensitive content
|
||||
- You need meeting room functionality
|
||||
- You want advanced analytics
|
||||
EOF
|
||||
|
||||
# Concepts - Independence
|
||||
cat > docs/concepts/independence.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 3
|
||||
title: Data Independence
|
||||
---
|
||||
|
||||
# Data Independence & Privacy
|
||||
|
||||
Reflector is designed with privacy and data independence as core principles, giving you complete control over your data and processing.
|
||||
|
||||
## Privacy by Design
|
||||
|
||||
### No Third-Party Data Sharing
|
||||
|
||||
Your audio and transcripts are never shared with third parties:
|
||||
- **Local Processing**: All ML models can run on your infrastructure
|
||||
- **No Training on User Data**: Your content is never used to improve models
|
||||
- **Isolated Processing**: Each transcript is processed in isolation
|
||||
- **No Analytics Tracking**: No usage analytics sent to external services
|
||||
|
||||
### Data Ownership
|
||||
|
||||
You maintain complete ownership of all data:
|
||||
- **Export Anytime**: Download all your transcripts and audio
|
||||
- **Delete on Demand**: Permanent deletion with no recovery
|
||||
- **API Access**: Full programmatic access to your data
|
||||
- **No Vendor Lock-in**: Standard formats for easy migration
|
||||
|
||||
## Processing Transparency
|
||||
|
||||
### What Happens to Your Audio
|
||||
|
||||
1. **Upload/Stream**: Audio received by your server
|
||||
2. **Temporary Storage**: Stored only for processing duration
|
||||
3. **Processing**: ML models process audio locally or on Modal
|
||||
4. **Results Storage**: Transcripts stored in your database
|
||||
5. **Cleanup**: Original audio deleted (unless configured otherwise)
|
||||
|
||||
### Local vs Cloud Processing
|
||||
|
||||
#### Local Processing
|
||||
When configured for local processing:
|
||||
- All models run on your hardware
|
||||
- No data leaves your infrastructure
|
||||
- Complete air-gap capability
|
||||
- Higher hardware requirements
|
||||
|
||||
#### Modal.com Processing
|
||||
When using Modal for GPU acceleration:
|
||||
- Audio chunks sent to Modal for processing
|
||||
- Processed immediately and deleted
|
||||
- No long-term storage on Modal
|
||||
- Modal's security: SOC 2 Type II compliant
|
||||
|
||||
### Data Retention
|
||||
|
||||
Default retention policies:
|
||||
- **Public Mode**: 24 hours then automatic deletion
|
||||
- **Private Mode**: Configurable (default: indefinite)
|
||||
- **Audio Files**: Deleted after processing (configurable)
|
||||
- **Transcripts**: Retained based on policy
|
||||
|
||||
## Compliance Features
|
||||
|
||||
### GDPR Compliance
|
||||
|
||||
- **Right to Access**: Export all user data
|
||||
- **Right to Deletion**: Permanent data removal
|
||||
- **Data Portability**: Standard export formats
|
||||
- **Privacy by Default**: Minimal data collection
|
||||
|
||||
### HIPAA Considerations
|
||||
|
||||
For healthcare deployments:
|
||||
- **Self-hosted Option**: Complete infrastructure control
|
||||
- **Encryption**: At rest and in transit
|
||||
- **Audit Logging**: Complete access trail
|
||||
- **Access Controls**: Role-based permissions
|
||||
|
||||
### Industry Standards
|
||||
|
||||
- **TLS 1.3**: Modern encryption for data in transit
|
||||
- **AES-256**: Encryption for data at rest
|
||||
- **JWT Tokens**: Secure, stateless authentication
|
||||
- **OWASP Guidelines**: Security best practices
|
||||
|
||||
## Self-Hosted Deployment
|
||||
|
||||
### Complete Independence
|
||||
|
||||
Self-hosting provides maximum control:
|
||||
- **Your Infrastructure**: Run on your servers
|
||||
- **Your Network**: No external connections required
|
||||
- **Your Policies**: Implement custom retention
|
||||
- **Your Compliance**: Meet specific requirements
|
||||
|
||||
### Air-Gap Capability
|
||||
|
||||
Reflector can run completely offline:
|
||||
1. Download all models during setup
|
||||
2. Configure for local processing only
|
||||
3. Disable all external integrations
|
||||
4. Run in isolated network environment
|
||||
|
||||
## Data Flow Control
|
||||
|
||||
### Configurable Processing
|
||||
|
||||
Control where each step happens:
|
||||
|
||||
```yaml
|
||||
# All local processing
|
||||
TRANSCRIPT_BACKEND=local
|
||||
DIARIZATION_BACKEND=local
|
||||
TRANSLATION_BACKEND=local
|
||||
|
||||
# Hybrid approach
|
||||
TRANSCRIPT_BACKEND=modal # Fast GPU processing
|
||||
DIARIZATION_BACKEND=local # Sensitive speaker data
|
||||
TRANSLATION_BACKEND=modal # Non-sensitive translation
|
||||
```
|
||||
|
||||
### Storage Options
|
||||
|
||||
Choose where data is stored:
|
||||
- **Local Filesystem**: Complete control
|
||||
- **PostgreSQL**: Self-hosted database
|
||||
- **S3-Compatible**: MinIO or AWS with encryption
|
||||
- **Hybrid**: Different storage for different data types
|
||||
|
||||
## Security Architecture
|
||||
|
||||
### Defense in Depth
|
||||
|
||||
Multiple layers of security:
|
||||
1. **Network Security**: Firewalls and VPNs
|
||||
2. **Application Security**: Input validation and sanitization
|
||||
3. **Data Security**: Encryption and access controls
|
||||
4. **Operational Security**: Logging and monitoring
|
||||
|
||||
### Zero Trust Principles
|
||||
|
||||
- **Verify Everything**: All requests authenticated
|
||||
- **Least Privilege**: Minimal permissions granted
|
||||
- **Assume Breach**: Design for compromise containment
|
||||
- **Encrypt Everything**: No plaintext transmission
|
||||
|
||||
## Audit and Compliance
|
||||
|
||||
### Audit Logging
|
||||
|
||||
Comprehensive logging of:
|
||||
- **Access Events**: Who accessed what and when
|
||||
- **Processing Events**: What was processed and how
|
||||
- **Configuration Changes**: System modifications
|
||||
- **Security Events**: Failed authentication attempts
|
||||
|
||||
### Compliance Reporting
|
||||
|
||||
Generate reports for:
|
||||
- **Data Processing**: What data was processed
|
||||
- **Data Access**: Who accessed the data
|
||||
- **Data Retention**: What was retained or deleted
|
||||
- **Security Events**: Security-related incidents
|
||||
|
||||
## Best Practices
|
||||
|
||||
### For Maximum Privacy
|
||||
|
||||
1. **Self-host** all components
|
||||
2. **Use local processing** for all models
|
||||
3. **Implement short retention** periods
|
||||
4. **Encrypt all storage** at rest
|
||||
5. **Use VPN** for all connections
|
||||
6. **Regular audits** of access logs
|
||||
|
||||
### For Balanced Approach
|
||||
|
||||
1. **Self-host core services** (database, API)
|
||||
2. **Use Modal for processing** (faster, cost-effective)
|
||||
3. **Implement encryption** everywhere
|
||||
4. **Regular backups** with encryption
|
||||
5. **Monitor access** patterns
|
||||
EOF
|
||||
|
||||
# Concepts - Pipeline
|
||||
cat > docs/concepts/pipeline.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 4
|
||||
title: Processing Pipeline
|
||||
---
|
||||
|
||||
# Processing Pipeline
|
||||
|
||||
Reflector uses a sophisticated pipeline architecture to process audio efficiently and accurately.
|
||||
|
||||
## Pipeline Overview
|
||||
|
||||
The processing pipeline consists of modular components that can be combined and configured based on your needs:
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
A[Audio Input] --> B[Pre-processing]
|
||||
B --> C[Chunking]
|
||||
C --> D[Transcription]
|
||||
D --> E[Diarization]
|
||||
E --> F[Alignment]
|
||||
F --> G[Post-processing]
|
||||
G --> H[Output]
|
||||
```
|
||||
|
||||
## Pipeline Components
|
||||
|
||||
### Audio Input
|
||||
|
||||
Accepts various input sources:
|
||||
- **File Upload**: MP3, WAV, M4A, WebM, MP4
|
||||
- **WebRTC Stream**: Live browser audio
|
||||
- **Recording Integration**: Whereby recordings
|
||||
- **API Upload**: Direct API submission
|
||||
|
||||
### Pre-processing
|
||||
|
||||
Prepares audio for optimal processing:
|
||||
- **Format Conversion**: Convert to 16kHz mono WAV
|
||||
- **Normalization**: Adjust volume to -23 LUFS
|
||||
- **Noise Reduction**: Optional background noise removal
|
||||
- **Validation**: Check duration and quality
|
||||
|
||||
### Chunking
|
||||
|
||||
Splits audio for parallel processing:
|
||||
- **Fixed Size**: 30-second chunks by default
|
||||
- **Overlap**: 1-second overlap for continuity
|
||||
- **Smart Boundaries**: Attempt to split at silence
|
||||
- **Metadata**: Track chunk positions
|
||||
|
||||
### Transcription
|
||||
|
||||
Converts speech to text:
|
||||
- **Model Selection**: Whisper or Parakeet
|
||||
- **Language Detection**: Automatic or specified
|
||||
- **Timestamp Generation**: Word-level timing
|
||||
- **Confidence Scores**: Quality indicators
|
||||
|
||||
### Diarization
|
||||
|
||||
Identifies different speakers:
|
||||
- **Voice Activity Detection**: Find speech segments
|
||||
- **Speaker Embedding**: Extract voice characteristics
|
||||
- **Clustering**: Group similar voices
|
||||
- **Label Assignment**: Assign speaker IDs
|
||||
|
||||
### Alignment
|
||||
|
||||
Merges all processing results:
|
||||
- **Chunk Assembly**: Combine transcription chunks
|
||||
- **Speaker Mapping**: Align speakers with text
|
||||
- **Overlap Resolution**: Handle chunk boundaries
|
||||
- **Timeline Creation**: Build unified timeline
|
||||
|
||||
### Post-processing
|
||||
|
||||
Enhances the final output:
|
||||
- **Formatting**: Apply punctuation and capitalization
|
||||
- **Translation**: Convert to target languages
|
||||
- **Summarization**: Generate concise summaries
|
||||
- **Topic Extraction**: Identify key themes
|
||||
- **Action Items**: Extract tasks and decisions
|
||||
|
||||
## Processing Modes
|
||||
|
||||
### Batch Processing
|
||||
|
||||
For uploaded files:
|
||||
- Optimized for throughput
|
||||
- Parallel chunk processing
|
||||
- Higher accuracy models
|
||||
- Complete file analysis
|
||||
|
||||
### Stream Processing
|
||||
|
||||
For live audio:
|
||||
- Optimized for latency
|
||||
- Sequential processing
|
||||
- Real-time feedback
|
||||
- Progressive results
|
||||
|
||||
### Hybrid Processing
|
||||
|
||||
For meetings:
|
||||
- Stream during meeting
|
||||
- Batch after completion
|
||||
- Best of both modes
|
||||
- Maximum accuracy
|
||||
|
||||
## Pipeline Configuration
|
||||
|
||||
### Model Selection
|
||||
|
||||
Choose models based on requirements:
|
||||
|
||||
```python
|
||||
# High accuracy (slower)
|
||||
config = {
|
||||
"transcription_model": "whisper-large-v3",
|
||||
"diarization_model": "pyannote-3.1",
|
||||
"translation_model": "seamless-m4t-large"
|
||||
}
|
||||
|
||||
# Balanced (default)
|
||||
config = {
|
||||
"transcription_model": "whisper-base",
|
||||
"diarization_model": "pyannote-3.1",
|
||||
"translation_model": "seamless-m4t-medium"
|
||||
}
|
||||
|
||||
# Fast processing
|
||||
config = {
|
||||
"transcription_model": "whisper-tiny",
|
||||
"diarization_model": "pyannote-3.1-fast",
|
||||
"translation_model": "seamless-m4t-small"
|
||||
}
|
||||
```
|
||||
|
||||
### Processing Options
|
||||
|
||||
Customize pipeline behavior:
|
||||
|
||||
```yaml
|
||||
# Parallel processing
|
||||
max_parallel_chunks: 10
|
||||
chunk_size_seconds: 30
|
||||
chunk_overlap_seconds: 1
|
||||
|
||||
# Quality settings
|
||||
enable_noise_reduction: true
|
||||
enable_normalization: true
|
||||
min_speech_confidence: 0.5
|
||||
|
||||
# Post-processing
|
||||
enable_translation: true
|
||||
target_languages: ["es", "fr", "de"]
|
||||
enable_summarization: true
|
||||
summary_length: "medium"
|
||||
```
|
||||
|
||||
## Performance Characteristics
|
||||
|
||||
### Processing Times
|
||||
|
||||
For 1 hour of audio:
|
||||
|
||||
| Pipeline Config | Processing Time | Accuracy |
|
||||
|----------------|-----------------|----------|
|
||||
| Fast | 2-3 minutes | 85-90% |
|
||||
| Balanced | 5-8 minutes | 92-95% |
|
||||
| High Accuracy | 15-20 minutes | 95-98% |
|
||||
|
||||
### Resource Usage
|
||||
|
||||
| Component | CPU Usage | Memory | GPU |
|
||||
|-----------|-----------|---------|-----|
|
||||
| Transcription | Medium | 2-4 GB | Required |
|
||||
| Diarization | High | 4-8 GB | Required |
|
||||
| Translation | Low | 2-3 GB | Optional |
|
||||
| Post-processing | Low | 1-2 GB | Not needed |
|
||||
|
||||
## Pipeline Orchestration
|
||||
|
||||
### Celery Task Chain
|
||||
|
||||
The pipeline is orchestrated using Celery:
|
||||
|
||||
```python
|
||||
chain = (
|
||||
chunk_audio.s(audio_id) |
|
||||
group(transcribe_chunk.s(chunk) for chunk in chunks) |
|
||||
merge_transcriptions.s() |
|
||||
diarize_audio.s() |
|
||||
align_speakers.s() |
|
||||
post_process.s()
|
||||
)
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Robust error recovery:
|
||||
- **Automatic Retry**: Failed tasks retry up to 3 times
|
||||
- **Partial Recovery**: Continue with successful chunks
|
||||
- **Fallback Models**: Use alternative models on failure
|
||||
- **Error Reporting**: Detailed error messages
|
||||
|
||||
### Progress Tracking
|
||||
|
||||
Real-time progress updates:
|
||||
- **Chunk Progress**: Track individual chunk processing
|
||||
- **Overall Progress**: Percentage completion
|
||||
- **ETA Calculation**: Estimated completion time
|
||||
- **WebSocket Updates**: Live progress to clients
|
||||
|
||||
## Optimization Strategies
|
||||
|
||||
### GPU Utilization
|
||||
|
||||
Maximize GPU efficiency:
|
||||
- **Batch Processing**: Process multiple chunks together
|
||||
- **Model Caching**: Keep models loaded in memory
|
||||
- **Dynamic Batching**: Adjust batch size based on GPU memory
|
||||
- **Multi-GPU Support**: Distribute across available GPUs

### Memory Management

Efficient memory usage:
- **Streaming Processing**: Process large files in chunks
- **Garbage Collection**: Clean up after each chunk
- **Memory Limits**: Prevent out-of-memory errors
- **Disk Caching**: Use disk for large intermediate results

### Network Optimization

Minimize network overhead:
- **Compression**: Compress audio before transfer
- **CDN Integration**: Use CDN for static assets
- **Connection Pooling**: Reuse network connections
- **Parallel Uploads**: Multiple concurrent uploads

## Quality Assurance

### Accuracy Metrics

Monitor processing quality:
- **Word Error Rate (WER)**: Transcription accuracy
- **Diarization Error Rate (DER)**: Speaker identification accuracy
- **Translation BLEU Score**: Translation quality
- **Summary Coherence**: Summary quality metrics

### Validation Steps

Ensure output quality:
- **Confidence Thresholds**: Filter low-confidence segments (see the sketch below)
- **Consistency Checks**: Verify timeline consistency
- **Language Validation**: Ensure correct language detection
- **Format Validation**: Check output format compliance
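
Filtering on confidence is a one-liner; this sketch assumes segments carry a `confidence` field and reuses the `min_speech_confidence` default from the configuration example above:

```python
def filter_low_confidence(segments: list[dict], min_conf: float = 0.5) -> list[dict]:
    # Segments without a score are kept rather than silently dropped.
    return [s for s in segments if s.get("confidence", 1.0) >= min_conf]
```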

## Advanced Features

### Custom Models

Use your own models:
- **Fine-tuned Whisper**: Domain-specific models
- **Custom Diarization**: Trained on your speakers
- **Specialized Post-processing**: Industry-specific formatting

### Pipeline Extensions

Add custom processing steps:
- **Sentiment Analysis**: Analyze emotional tone
- **Entity Extraction**: Identify people, places, organizations
- **Custom Metrics**: Calculate domain-specific metrics
- **Integration Hooks**: Call external services
EOF

# Create installation documentation
cat > docs/installation/overview.md << 'EOF'
---
sidebar_position: 1
title: Installation Overview
---

# Installation Overview

Reflector is designed for self-hosted deployment, giving you complete control over your infrastructure and data.

## Deployment Options

### Docker Deployment (Recommended)

The easiest way to deploy Reflector:
- Pre-configured containers
- Automated dependency management
- Consistent environment
- Easy updates

### Manual Installation

For custom deployments:
- Greater control over configuration
- Integration with existing infrastructure
- Custom optimization options
- Development environments

## Requirements

### System Requirements

**Minimum Requirements:**
- CPU: 4 cores
- RAM: 8 GB
- Storage: 50 GB
- OS: Ubuntu 20.04+ or similar Linux

**Recommended Requirements:**
- CPU: 8+ cores
- RAM: 16 GB
- Storage: 100 GB SSD
- GPU: NVIDIA GPU with 8GB+ VRAM (for local processing)

### Network Requirements

- Public IP address (for WebRTC)
- Ports: 80, 443, 8000, 3000
- Domain name (for SSL)
- SSL certificate (Let's Encrypt supported)

## Required Services

### Core Services

These services are required for basic operation:

1. **PostgreSQL** - Primary database
2. **Redis** - Message broker and cache
3. **Docker** - Container runtime

### GPU Processing

Choose one:
- **Modal.com** - Serverless GPU (recommended)
- **Local GPU** - Self-hosted GPU processing

### Optional Services

Enhance functionality with:
- **AWS S3** - Long-term storage
- **Whereby** - Video conferencing rooms
- **Authentik** - Enterprise authentication
- **Zulip** - Chat integration

## Quick Start

### Using Docker Compose

1. Clone the repository:
```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector
```

2. Navigate to the docker directory:
```bash
cd docker
```

3. Copy and configure the environment:
```bash
cp .env.example .env
# Edit .env with your settings
```

4. Start services:
```bash
docker compose up -d
```

5. Access Reflector:
- Frontend: https://your-domain.com
- API: https://your-domain.com/api

## Configuration Overview

### Essential Configuration

```env
# Database
DATABASE_URL=postgresql://user:pass@localhost/reflector

# Redis
REDIS_URL=redis://localhost:6379

# Modal.com (for GPU processing)
TRANSCRIPT_MODAL_API_KEY=your-key
DIARIZATION_MODAL_API_KEY=your-key

# Domain
DOMAIN=your-domain.com
```

### Security Configuration

```env
# Authentication
REFLECTOR_AUTH_BACKEND=jwt
NEXTAUTH_SECRET=generate-strong-secret

# SSL (handled by Caddy)
# Automatic with Let's Encrypt
```
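
A strong secret can come from any CSPRNG; `openssl rand -hex 32` is the usual choice, and the Python equivalent below works just as well:

```python
import secrets

# 32 random bytes, hex-encoded: suitable for NEXTAUTH_SECRET.
print(secrets.token_hex(32))
```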

## Service Architecture

```mermaid
graph TD
    A[Caddy Reverse Proxy] --> B[Frontend - Next.js]
    A --> C[Backend - FastAPI]
    C --> D[PostgreSQL]
    C --> E[Redis]
    C --> F[Celery Workers]
    F --> G[Modal.com GPU]
```

## Next Steps

1. **Review Requirements**: [System Requirements](./requirements)
2. **Docker Setup**: [Docker Deployment Guide](./docker-setup)
3. **Configure Services**:
   - [Modal.com Setup](./modal-setup)
   - [Whereby Setup](./whereby-setup)
   - [AWS S3 Setup](./aws-setup)
4. **Optional Services**:
   - [Authentik Setup](./authentik-setup)
   - [Zulip Setup](./zulip-setup)

## Getting Help

- [Troubleshooting Guide](../reference/troubleshooting)
- [GitHub Issues](https://github.com/monadical-sas/reflector/issues)
- [Community Discord](#)
EOF

chmod +x create-docs.sh
echo "Documentation creation script ready. Run ./create-docs.sh to generate all docs."

115 docs/docs/concepts/modes.md Normal file
@@ -0,0 +1,115 @@

---
sidebar_position: 2
title: Operating Modes
---

# Operating Modes

Reflector operates in two distinct modes to accommodate different use cases and security requirements.

## Public Mode

Public mode provides immediate access to core transcription features without requiring authentication.

### Features Available
- **File Upload**: Process audio files
- **Live Transcription**: Stream audio from microphone
- **Basic Processing**: Transcription and diarization
- **Temporary Storage**: Data retained only temporarily (retention period configurable)

### Limitations
- No persistent storage
- No meeting rooms
- Limited to single-user sessions
- No team collaboration features

### Use Cases
- Quick transcription needs
- Testing and evaluation
- Individual users
- Public demonstrations

## Private Mode

Private mode unlocks the full potential of Reflector with authentication and persistent storage.

### Additional Features
- **Virtual Meeting Rooms**: Whereby and Daily.co integration
- **Team Collaboration**: Share transcripts with team
- **Persistent Storage**: Long-term transcript archive
- **Meeting History**: Search and browse past transcripts
- **Custom Integration**: Webhooks and API access
- **User Management**: Role-based access control

### Authentication Options

#### Authentik Integration
Enterprise-grade SSO with support for:
- SAML 2.0
- OAuth 2.0 / OIDC
- LDAP / Active Directory
- Multi-factor authentication

### Room Management

Virtual rooms provide dedicated spaces for meetings:
- **Persistent URLs**: Same link for recurring meetings
- **Access Control**: Invite-only or open rooms
- **Recording Consent**: Automatic consent management
- **Custom Settings**: Per-room configuration

## Mode Selection

The mode is determined by your deployment configuration:

```env
# Public Mode (no authentication)
AUTH_BACKEND=none

# Private Mode (with authentication)
AUTH_BACKEND=jwt
```

See [Authentication Setup](../installation/auth-setup) for configuring JWT authentication.

## Feature Comparison

| Feature | Public Mode | Private Mode |
|---------|-------------|--------------|
| File Upload | ✅ | ✅ |
| Live Transcription | ✅ | ✅ |
| Speaker Diarization | ✅ | ✅ |
| Summarization | ✅ | ✅ |
| Meeting Rooms | ❌ | ✅ |
| Persistent Storage | ❌ | ✅ |
| Team Collaboration | ❌ | ✅ |
| API Access | Limited | Full |
| User Management | ❌ | ✅ |
| Custom Branding | ❌ | ✅ |
| Meeting History | ❌ | ✅ |
| Webhooks | ❌ | ✅ |

## Security Considerations

### Public Mode Security
- File size restrictions
- Automatic cleanup of old data

### Private Mode Security
- Access control lists
- Data retention policies

## Choosing the Right Mode

### Choose Public Mode if:
- You need quick, one-time transcriptions
- You're evaluating Reflector
- You don't need persistent storage
- You're processing non-sensitive content

### Choose Private Mode if:
- You need team collaboration
- You require persistent storage
- You're processing sensitive content
- You need meeting room functionality
- You want searchable meeting history

201 docs/docs/concepts/overview.md Normal file
@@ -0,0 +1,201 @@

---
sidebar_position: 1
title: Architecture Overview
---

# Architecture Overview

Reflector is built as a modern, scalable, microservices-based application designed to handle audio processing workloads efficiently while maintaining data privacy and control.

## System Components

### Frontend Application

The user interface is built with **Next.js 15** using the App Router pattern, providing:

- Server-side rendering for optimal performance
- Real-time WebSocket connections for live transcription
- WebRTC support for audio streaming and live meetings (via Daily.co or Whereby)
- Responsive design with Chakra UI components

### Backend API Server

The core API is powered by **FastAPI**, a modern Python framework that provides:

- High-performance async request handling
- Automatic OpenAPI documentation generation
- Type safety with Pydantic models
- WebSocket support for real-time updates

### Processing Pipeline

Audio processing is handled through a modular pipeline architecture:

```
Audio Input → Chunking → Transcription → Diarization → Post-Processing → Storage
```

Each step can run independently and in parallel, allowing for:
- Scalable processing of large files
- Real-time streaming capabilities
- Fault tolerance and retry mechanisms

### Worker Architecture

Background tasks are managed by **Celery** workers with **Redis** as the message broker:

- Distributed task processing
- Priority queues for time-sensitive operations
- Automatic retry on failure
- Progress tracking and notifications

### GPU Acceleration

ML models run on GPU-accelerated infrastructure:

- **Modal.com** for serverless GPU processing
- **Self-hosted GPU** with Docker deployment
- Automatic scaling based on demand
- Cost-effective pay-per-use model

## Data Flow

### Daily.co Meeting Recording Flow

1. **Recording**: Daily.co captures separate audio tracks per participant
2. **Webhook**: Daily.co notifies Reflector when the recording is ready
3. **Track Download**: Individual participant tracks fetched from S3
4. **Padding**: Tracks padded with silence based on join time for synchronization (see the sketch after this list)
5. **Transcription**: Each track transcribed independently (speaker = track index)
6. **Merge**: Transcriptions sorted by timestamp and combined
7. **Mixdown**: Tracks mixed to a single MP3 for playback
8. **Post-Processing**: Topics, title, and summaries generated via LLM
9. **Delivery**: Results stored and user notified via WebSocket
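
Step 4 amounts to prepending silence to each track. A minimal sketch using pydub; the library choice and function name are assumptions, not Reflector's actual implementation:

```python
from pydub import AudioSegment

def pad_track(path: str, join_offset_ms: int) -> AudioSegment:
    # Prepend silence equal to how long after meeting start this
    # participant joined, so all tracks share a single timeline.
    silence = AudioSegment.silent(duration=join_offset_ms)
    return silence + AudioSegment.from_file(path)
```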

### File Upload Flow

1. **Upload**: User uploads audio file through web interface
2. **Storage**: File stored temporarily
3. **Transcription**: Full file transcribed via Whisper
4. **Diarization**: ML-based speaker identification (Pyannote)
5. **Post-Processing**: Topics, title, summaries
6. **Delivery**: Results stored and user notified

### Live Streaming Flow

1. **WebRTC Connection**: Browser establishes peer connection via Daily.co or Whereby
2. **Audio Capture**: Microphone audio streamed to server
3. **Buffering**: Audio buffered for processing
4. **Real-time Processing**: Segments transcribed as they arrive
5. **WebSocket Updates**: Results streamed back to client
6. **Continuous Assembly**: Full transcript built progressively

## Deployment Architecture

### Container-Based Deployment

All components are containerized for consistent deployment:

```yaml
services:
  web:      # Next.js application
  server:   # FastAPI server
  worker:   # Celery workers
  redis:    # Message broker
  postgres: # Database
  caddy:    # Reverse proxy
```

### Networking

- **Host Network Mode**: Required for WebRTC/ICE compatibility
- **Caddy Reverse Proxy**: Handles SSL termination and routing
- **WebSocket Upgrade**: Supports real-time connections

## Scalability Considerations

### Horizontal Scaling

- **Stateless Backend**: Multiple API server instances
- **Worker Pools**: Add workers based on queue depth
- **Database Pooling**: Connection management for concurrent access

### Vertical Scaling

- **GPU Workers**: Scale up for faster model inference
- **Memory Optimization**: Efficient audio buffering

## Security Architecture

### Authentication & Authorization

- **JWT Tokens**: Stateless authentication
- **Authentik Integration**: Enterprise SSO support
- **Role-Based Access**: Granular permissions

### Data Protection

- **Encryption in Transit**: TLS for all connections
- **Temporary Storage**: Automatic cleanup of processed files

### Privacy by Design

- **Local Processing**: Option to process entirely on-premises
- **No Training on User Data**: Models are pre-trained
- **Data Isolation**: Multi-tenant data separation

## Integration Points

### External Services

- **Modal.com**: GPU processing
- **AWS S3**: Long-term storage
- **Whereby**: Video conferencing rooms
- **Zulip**: Chat integration (optional)

### APIs and Webhooks

- **RESTful API**: Standard CRUD operations
- **WebSocket API**: Real-time updates
- **Webhook Notifications**: Processing completion events
- **OpenAPI Specification**: Machine-readable API definition

## Performance Optimization

### Caching Strategy

- **Redis Cache**: Frequently accessed data
- **CDN**: Static asset delivery
- **Browser Cache**: Client-side optimization

### Database Optimization

- **Indexed Queries**: Fast search and retrieval
- **Connection Pooling**: Efficient resource usage
- **Query Optimization**: N+1 query prevention

### Processing Optimization

- **Batch Processing**: Efficient GPU utilization
- **Parallel Execution**: Multi-core CPU usage
- **Stream Processing**: Reduced memory footprint

## Monitoring and Observability

### Metrics Collection

- **Application Metrics**: Request rates, response times
- **System Metrics**: CPU, memory, disk usage
- **Business Metrics**: Transcription accuracy, processing times

### Logging

- **Structured Logging**: JSON format for analysis
- **Log Aggregation**: Centralized log management
- **Error Tracking**: Sentry integration

### Health Checks

- **Liveness Probes**: Component availability (see the sketch below)
- **Readiness Probes**: Service readiness
- **Dependency Checks**: External service status
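
A liveness endpoint can be as small as the FastAPI sketch below; the route path is an assumption, not necessarily the one Reflector exposes:

```python
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health() -> dict:
    # Liveness only: the process is up and serving requests.
    # A readiness probe would additionally ping PostgreSQL and Redis.
    return {"status": "ok"}
```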

183 docs/docs/concepts/pipeline.md Normal file
@@ -0,0 +1,183 @@

---
sidebar_position: 4
title: Processing Pipeline
---

# Processing Pipeline

Reflector uses a modular pipeline architecture to process audio efficiently and accurately.

## Pipeline Overview

The processing pipeline consists of modular components that can be combined and configured based on your needs:

```mermaid
graph LR
    A[Audio Input] --> B[Pre-processing]
    B --> C[Chunking]
    C --> D[Transcription]
    D --> E[Diarization]
    E --> F[Alignment]
    F --> G[Post-processing]
    G --> H[Output]
```

## Pipeline Components

### Audio Input

Accepts various input sources:
- **File Upload**: MP3, WAV, M4A, WebM, MP4
- **WebRTC Stream**: Live browser audio
- **Recording Integration**: Daily.co and Whereby recordings
- **API Upload**: Direct API submission

### Pre-processing

Prepares audio for optimal processing:
- **Format Conversion**: Convert to 16kHz mono WAV
- **Noise Reduction**: Optional background noise removal
- **Validation**: Check duration and quality

### Chunking

Splits audio for parallel processing:
- **Configurable Size**: Audio split into processable segments (see the sketch below)
- **Silence Detection**: Optional splitting at natural pauses
- **Metadata**: Track chunk positions
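
Computing overlapping chunk windows is straightforward. A sketch using the 30-second chunks and 1-second overlap from the configuration example; the function is illustrative, not Reflector's actual code:

```python
def chunk_bounds(duration_s: float, size_s: float = 30.0,
                 overlap_s: float = 1.0) -> list[tuple[float, float]]:
    # Windows overlap by `overlap_s` so words landing on a boundary
    # appear in both chunks and can be reconciled during alignment.
    bounds, start = [], 0.0
    while start < duration_s:
        end = min(start + size_s, duration_s)
        bounds.append((start, end))
        start = end - overlap_s if end < duration_s else end
    return bounds
```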

### Transcription

Converts speech to text:
- **Model Selection**: Whisper or Parakeet
- **Language Detection**: Automatic or specified
- **Timestamp Generation**: Word-level timing
- **Confidence Scores**: Quality indicators

### Diarization

Identifies different speakers:
- **Voice Activity Detection**: Find speech segments
- **Speaker Embedding**: Extract voice characteristics
- **Clustering**: Group similar voices
- **Label Assignment**: Assign speaker IDs

### Alignment

Merges all processing results:
- **Chunk Assembly**: Combine transcription chunks (see the sketch below)
- **Speaker Mapping**: Align speakers with text
- **Overlap Resolution**: Handle chunk boundaries
- **Timeline Creation**: Build unified timeline
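
Assembly reduces to sorting segments by start time and dropping duplicates produced by the chunk overlap. A simplified sketch; the segment schema is an assumption:

```python
def merge_segments(chunks: list[list[dict]]) -> list[dict]:
    # Each segment: {"start": float, "end": float, "speaker": str, "text": str}
    segments = sorted((s for chunk in chunks for s in chunk),
                      key=lambda s: s["start"])
    merged: list[dict] = []
    for seg in segments:
        prev = merged[-1] if merged else None
        # Skip a segment fully contained in the previous one: it is the
        # same speech re-transcribed inside the overlap window.
        if prev and seg["end"] <= prev["end"] and seg["speaker"] == prev["speaker"]:
            continue
        merged.append(seg)
    return merged
```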

### Post-processing

Enhances the final output:
- **Formatting**: Apply punctuation and capitalization
- **Summarization**: Generate concise summaries
- **Topic Extraction**: Identify key themes
- **Action Items**: Extract tasks and decisions

## Processing Modes

### Batch Processing

For uploaded files:
- Optimized for throughput
- Parallel chunk processing
- Higher accuracy models
- Complete file analysis

### Stream Processing

For live audio:
- Optimized for latency
- Sequential processing
- Real-time feedback
- Progressive results

### Hybrid Processing

For meetings:
- Stream during meeting
- Batch after completion
- Best of both modes
- Maximum accuracy

## Pipeline Orchestration

### Error Handling

Error recovery:
- **Automatic Retry**: Failed tasks retry up to 3 times
- **Partial Recovery**: Continue with successful chunks
- **Fallback Models**: Use alternative models on failure
- **Error Reporting**: Detailed error messages

### Progress Tracking

Real-time progress updates:
- **Chunk Progress**: Track individual chunk processing
- **Overall Progress**: Percentage completion
- **ETA Calculation**: Estimated completion time
- **WebSocket Updates**: Live progress to clients

## Optimization Strategies

### GPU Utilization

Maximize GPU efficiency:
- **Batch Processing**: Process multiple chunks together
- **Model Caching**: Keep models loaded in memory
- **Dynamic Batching**: Adjust batch size based on GPU memory
- **Multi-GPU Support**: Distribute across available GPUs

### Memory Management

Efficient memory usage:
- **Streaming Processing**: Process large files in chunks
- **Garbage Collection**: Clean up after each chunk
- **Memory Limits**: Prevent out-of-memory errors
- **Disk Caching**: Use disk for large intermediate results

### Network Optimization

Minimize network overhead:
- **Compression**: Compress audio before transfer
- **CDN Integration**: Use CDN for static assets
- **Connection Pooling**: Reuse network connections
- **Parallel Uploads**: Multiple concurrent uploads

## Quality Assurance

### Accuracy Metrics

Monitor processing quality:
- **Word Error Rate (WER)**: Transcription accuracy
- **Diarization Error Rate (DER)**: Speaker identification accuracy
- **Summary Coherence**: Summary quality metrics

### Validation Steps

Ensure output quality:
- **Confidence Thresholds**: Filter low-confidence segments
- **Consistency Checks**: Verify timeline consistency
- **Language Validation**: Ensure correct language detection
- **Format Validation**: Check output format compliance

## Advanced Features

### Custom Models

Use your own models:
- **Fine-tuned Whisper**: Domain-specific models
- **Custom Diarization**: Trained on your speakers
- **Specialized Post-processing**: Industry-specific formatting

### Pipeline Extensions

Add custom processing steps:
- **Sentiment Analysis**: Analyze emotional tone
- **Entity Extraction**: Identify people, places, organizations
- **Custom Metrics**: Calculate domain-specific metrics
- **Integration Hooks**: Call external services

285 docs/docs/installation/auth-setup.md Normal file
@@ -0,0 +1,285 @@

---
sidebar_position: 5
title: Authentication Setup
---

# Authentication Setup

This page covers authentication setup in detail. For the complete deployment guide, see [Deployment Guide](./overview).

Reflector uses [Authentik](https://goauthentik.io/) for OAuth/OIDC authentication. This guide walks you through setting up Authentik and connecting it to Reflector.

For simplicity, this guide runs Authentik on the same server as Reflector. You can use your own Authentik instance instead.

## Overview

Reflector's authentication flow:
1. User clicks "Sign In" on frontend
2. Frontend redirects to Authentik login page
3. User authenticates with Authentik
4. Authentik redirects back with OAuth tokens
5. Frontend stores tokens, backends verify JWT signature (see the sketch below)
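
On the backend, step 5 boils down to one JWT check against Authentik's public key and the OAuth Client ID. A minimal sketch assuming PyJWT; it mirrors the `AUTH_JWT_PUBLIC_KEY` and `AUTH_JWT_AUDIENCE` settings configured later in this guide, but is not Reflector's actual code:

```python
import jwt  # PyJWT

def verify_token(token: str, public_key_pem: str, client_id: str) -> dict:
    # Rejects tokens with a bad signature, wrong audience, or past expiry.
    return jwt.decode(token, public_key_pem,
                      algorithms=["RS256"], audience=client_id)
```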

## Option 1: Self-Hosted Authentik (Same Server)

This setup runs Authentik on the same server as Reflector, with Caddy proxying to both.

### Deploy Authentik

```bash
# Create directory for Authentik
mkdir -p ~/authentik && cd ~/authentik

# Download docker-compose file
curl -O https://goauthentik.io/docker-compose.yml

# Generate secrets and bootstrap credentials
# (unquoted EOF so the $(openssl ...) substitutions expand here)
cat > .env << EOF
PG_PASS=$(openssl rand -base64 36 | tr -d '\n')
AUTHENTIK_SECRET_KEY=$(openssl rand -base64 60 | tr -d '\n')
# Privacy-focused choice for self-hosted deployments
AUTHENTIK_ERROR_REPORTING__ENABLED=false
AUTHENTIK_BOOTSTRAP_PASSWORD=YourSecurePassword123
AUTHENTIK_BOOTSTRAP_EMAIL=admin@example.com
EOF

# Start Authentik
sudo docker compose up -d
```

Authentik takes ~2 minutes to run migrations and apply blueprints on first start.

### Connect Authentik to Reflector's Network

If Authentik runs in a separate Docker Compose project, connect it to Reflector's network so Caddy can proxy to it:

```bash
# Wait for Authentik to be healthy, then connect its server
# container to Reflector's network
sudo docker network connect reflector_default authentik-server-1
```

**Important:** This step must be repeated if you restart Authentik with `docker compose down`. Add it to your deployment scripts, or use `docker compose up -d` (which preserves containers) instead of down/up.

### Add Authentik to Caddy

Uncomment the Authentik section in your `Caddyfile` and set your domain:

```bash
nano Caddyfile
```

Uncomment and edit:
```
{$AUTHENTIK_DOMAIN:authentik.example.com} {
    reverse_proxy authentik-server-1:9000
}
```

Reload Caddy:
```bash
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```

### Create OAuth2 Provider in Authentik

**Option A: Automated Setup (Recommended)**

**Location: Reflector server**

Run the setup script from the Reflector repository:

```bash
ssh user@your-server-ip
cd ~/reflector
./scripts/setup-authentik-oauth.sh https://authentik.example.com YourSecurePassword123 https://app.example.com
```

**Important:** The script must be run from the `~/reflector` directory on your server, as it creates files using relative paths.

The script will output the configuration values to add to your `.env` files. Skip to "Update docker-compose.prod.yml".

**Option B: Manual Setup**

1. **Login to Authentik Admin** at `https://authentik.example.com/`
   - Username: `akadmin`
   - Password: The `AUTHENTIK_BOOTSTRAP_PASSWORD` you set in `.env`

2. **Create OAuth2 Provider:**
   - Go to **Applications > Providers > Create**
   - Select **OAuth2/OpenID Provider**
   - Configure:
     - **Name**: `Reflector`
     - **Authorization flow**: `default-provider-authorization-implicit-consent`
     - **Client type**: `Confidential`
     - **Client ID**: Note this value (auto-generated)
     - **Client Secret**: Note this value (auto-generated)
     - **Redirect URIs**: Add entry with:
       ```
       https://app.example.com/api/auth/callback/authentik
       ```
   - Scroll down to **Advanced protocol settings**
   - In **Scopes**, add these three mappings:
     - `authentik default OAuth Mapping: OpenID 'email'`
     - `authentik default OAuth Mapping: OpenID 'openid'`
     - `authentik default OAuth Mapping: OpenID 'profile'`
   - Click **Finish**

3. **Create Application:**
   - Go to **Applications > Applications > Create**
   - Configure:
     - **Name**: `Reflector`
     - **Slug**: `reflector` (auto-filled)
     - **Provider**: Select the `Reflector` provider you just created
   - Click **Create**

### Get Public Key for JWT Verification

**Location: Reflector server**

Extract the public key from Authentik's JWKS endpoint:

```bash
mkdir -p ~/reflector/server/reflector/auth/jwt/keys
curl -s https://authentik.example.com/application/o/reflector/jwks/ | \
  jq -r '.keys[0].x5c[0]' | base64 -d | openssl x509 -pubkey -noout \
  > ~/reflector/server/reflector/auth/jwt/keys/authentik_public.pem
```

### Update docker-compose.prod.yml

**Location: Reflector server**

**Note:** This step is already done in the current `docker-compose.prod.yml`. Verify the volume mounts exist:

```yaml
server:
  image: monadicalsas/reflector-backend:latest
  # ... other config ...
  volumes:
    - server_data:/app/data
    - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

worker:
  image: monadicalsas/reflector-backend:latest
  # ... other config ...
  volumes:
    - server_data:/app/data
    - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro
```

### Configure Reflector Backend

**Location: Reflector server**

Update `server/.env`:
```env
# Authentication
AUTH_BACKEND=jwt
AUTH_JWT_PUBLIC_KEY=authentik_public.pem
AUTH_JWT_AUDIENCE=<your-client-id>
CORS_ALLOW_CREDENTIALS=true
```

Replace `<your-client-id>` with the Client ID from the previous steps.

### Configure Reflector Frontend

**Location: Reflector server**

Update `www/.env`:
```env
# Authentication
FEATURE_REQUIRE_LOGIN=true

# Authentik OAuth
AUTHENTIK_ISSUER=https://authentik.example.com/application/o/reflector
AUTHENTIK_REFRESH_TOKEN_URL=https://authentik.example.com/application/o/token/
AUTHENTIK_CLIENT_ID=<your-client-id>
AUTHENTIK_CLIENT_SECRET=<your-client-secret>

# NextAuth
NEXTAUTH_SECRET=<generate-with-openssl-rand-hex-32>
```

### Restart Services

**Location: Reflector server**

```bash
cd ~/reflector
sudo docker compose -f docker-compose.prod.yml up -d --force-recreate server worker web
```

### Verify Authentication

1. Visit `https://app.example.com`
2. Click "Log in" or navigate to `/api/auth/signin`
3. Click "Sign in with Authentik"
4. Login with your Authentik credentials
5. You should be redirected back and see "Log out" in the header

## Option 2: Disable Authentication

For testing or internal deployments where authentication isn't needed:

**Backend `server/.env`:**
```env
AUTH_BACKEND=none
```

**Frontend `www/.env`:**
```env
FEATURE_REQUIRE_LOGIN=false
```

**Note:** The pre-built Docker images have `FEATURE_REQUIRE_LOGIN=true` baked in. To disable auth, you'll need to rebuild the frontend image with the env var set at build time, or set up Authentik.

## Troubleshooting

### "Invalid redirect URI" error
- Verify the redirect URI in Authentik matches exactly:
  ```
  https://app.example.com/api/auth/callback/authentik
  ```
- Check for trailing slashes: they must match exactly

### "Invalid audience" JWT error
- Ensure `AUTH_JWT_AUDIENCE` in `server/.env` matches the Client ID from Authentik
- The audience value is the OAuth Client ID, not the issuer URL

### "JWT verification failed" error
- Verify the public key file is mounted in the container
- Check `AUTH_JWT_PUBLIC_KEY` points to the correct filename
- Ensure the key was extracted from the correct provider's JWKS endpoint

### Caddy returns 503 for Authentik
- Verify the Authentik container is connected to Reflector's network:
  ```bash
  sudo docker network connect reflector_default authentik-server-1
  ```
- Check Authentik is healthy: `cd ~/authentik && sudo docker compose ps`

### Users can't access protected pages
- Verify `FEATURE_REQUIRE_LOGIN=true` in frontend
- Check `AUTH_BACKEND=jwt` in backend
- Verify CORS settings allow credentials

### Token refresh errors
- Ensure Redis is running (frontend uses Redis for token caching)
- Verify `KV_URL` is set correctly in frontend env
- Check `AUTHENTIK_REFRESH_TOKEN_URL` is correct

## API Key Authentication

For programmatic access (scripts, integrations), users can generate API keys:

1. Login to Reflector
2. Go to Settings > API Keys
3. Click "Generate New Key"
4. Use the key in requests:
   ```bash
   curl -H "X-API-Key: your-api-key" https://api.example.com/v1/transcripts
   ```

API keys are stored hashed and can be revoked at any time (the idea is sketched below).
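
Storing only a hash means a database leak doesn't expose usable keys. A sketch of the scheme, not Reflector's actual implementation:

```python
import hashlib
import secrets

def new_api_key() -> tuple[str, str]:
    # Return (plaintext shown to the user once, digest stored in the DB).
    key = secrets.token_urlsafe(32)
    return key, hashlib.sha256(key.encode()).hexdigest()

def check_api_key(presented: str, stored_digest: str) -> bool:
    candidate = hashlib.sha256(presented.encode()).hexdigest()
    # Constant-time comparison avoids timing side channels.
    return secrets.compare_digest(candidate, stored_digest)
```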

165 docs/docs/installation/daily-setup.md Normal file
@@ -0,0 +1,165 @@

---
sidebar_position: 6
title: Daily.co Setup
---

# Daily.co Setup

This page covers Daily.co video platform setup for live meeting rooms. For the complete deployment guide, see [Deployment Guide](./overview).

Daily.co enables live video meetings with automatic recording and transcription.

## What You'll Set Up

```
User joins meeting → Daily.co video room → Recording to S3 → [Webhook] → Reflector transcribes
```

## Prerequisites

- [ ] **Daily.co account** - Free tier at https://dashboard.daily.co
- [ ] **AWS account** - For S3 storage
- [ ] **Reflector deployed** - Complete steps from [Deployment Guide](./overview)

---

## Create Daily.co Account

1. Visit https://dashboard.daily.co and sign up
2. Verify your email
3. Note your subdomain (e.g., `yourname.daily.co` → subdomain is `yourname`)

---

## Get Daily.co API Key

1. In the Daily.co dashboard, go to **Developers**
2. Click **API Keys**
3. Click **Create API Key**
4. Copy the key (a long random string)

Save this for later.

---

## Create AWS S3 Bucket

Daily.co needs somewhere to store recordings before Reflector processes them.

```bash
# Choose a unique bucket name (the suffix is optional; any unique name works)
BUCKET_NAME="reflector-dailyco-yourname"
AWS_REGION="us-east-1"

# Create bucket
aws s3 mb s3://$BUCKET_NAME --region $AWS_REGION

# Enable versioning (required)
aws s3api put-bucket-versioning \
  --bucket $BUCKET_NAME \
  --versioning-configuration Status=Enabled
```

---

## Create IAM Role for Daily.co

Daily.co needs permission to write recordings to your S3 bucket. You'll create an IAM role and grant it access to the bucket.

Follow the guide at https://docs.daily.co/guides/products/live-streaming-recording/storing-recordings-in-a-custom-s3-bucket

Save the role ARN - you'll need it soon. It looks like: `arn:aws:iam::123456789012:role/DailyCo`

No additional setup is required on the Daily.co settings site: the app code takes care of telling Daily where to save the recordings (a sketch of the idea follows).
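
For illustration, "telling Daily where to save recordings" uses Daily's documented `recordings_bucket` room property. A hedged sketch with the `requests` library; Reflector's actual code may structure this differently:

```python
import requests

def create_room_with_s3(api_key: str, bucket: str, region: str,
                        role_arn: str) -> dict:
    resp = requests.post(
        "https://api.daily.co/v1/rooms",
        headers={"Authorization": f"Bearer {api_key}"},
        json={
            "properties": {
                # One audio track per participant, as Reflector expects.
                "enable_recording": "raw-tracks",
                "recordings_bucket": {
                    "bucket_name": bucket,
                    "bucket_region": region,
                    "assume_role_arn": role_arn,
                    "allow_api_access": True,
                },
            }
        },
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()
```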

---

## Configure Reflector

**Location: Reflector server**

Add to `server/.env`:

```env
# Daily.co Configuration
DEFAULT_VIDEO_PLATFORM=daily
DAILY_API_KEY=<your-api-key-from-daily-setup>
DAILY_SUBDOMAIN=<your-subdomain-from-daily-setup>

# S3 Storage for Daily.co recordings
DAILYCO_STORAGE_AWS_BUCKET_NAME=<your-bucket-from-daily-setup>
DAILYCO_STORAGE_AWS_REGION=us-east-1
DAILYCO_STORAGE_AWS_ROLE_ARN=<your-role-arn-from-daily-setup>

# Transcript storage (should already be configured from main setup)
# TRANSCRIPT_STORAGE_BACKEND=aws
# TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=<your-key>
# TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=<your-secret>
# TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=<your-bucket-name>
# TRANSCRIPT_STORAGE_AWS_REGION=<your-bucket-region>
```

---

## Restart Services

After changing `.env` files, reload with `up -d`:

```bash
sudo docker compose -f docker-compose.prod.yml up -d server worker
```

**Note**: `docker compose up -d` detects env changes and recreates containers automatically.

---

## Test Live Room

1. Visit your Reflector frontend: `https://app.example.com`
2. Go to **Rooms**
3. Click **Create Room**
4. Select **Daily** as the platform
5. Allow camera/microphone access
6. You should see the Daily.co video interface
7. Speak for 10-20 seconds
8. Leave the meeting
9. Recording should appear in **Transcripts** within 5 minutes (if webhooks aren't set up yet, see [Webhook Configuration](#webhook-configuration-optional) below)

---

## Troubleshooting

### Recording doesn't appear in S3

1. Check Daily.co dashboard → **Logs** for errors
2. Verify the IAM role trust policy has the correct Daily.co account ID and your Daily.co subdomain
3. Verify that the bucket has versioning enabled

### Recording in S3 but not transcribed

1. Check the webhook is configured (Reflector should auto-create it)
2. Check worker logs:
   ```bash
   docker compose -f docker-compose.prod.yml logs worker --tail 50
   ```
3. Verify the `DAILYCO_STORAGE_AWS_*` vars in `server/.env`

### "Access Denied" when Daily.co tries to write to S3

1. Double-check the IAM role ARN in Daily.co settings
2. Verify the bucket name matches exactly
3. Check the IAM policy has `s3:PutObject` permission

---

## Webhook Configuration [optional]

The `manage_daily_webhook.py` script guides you through creating a webhook for Daily recordings.

The webhook isn't required: polling is the default and runs automatically.

This guide won't go deep into webhook setup.

192 docs/docs/installation/docker-setup.md Normal file
@@ -0,0 +1,192 @@

---
sidebar_position: 3
title: Docker Reference
---

# Docker Reference

This page documents the Docker Compose configuration for Reflector. For the complete deployment guide, see [Deployment Guide](./overview).

## Services

The `docker-compose.prod.yml` includes these services:

| Service | Image | Purpose |
|---------|-------|---------|
| `web` | `monadicalsas/reflector-frontend` | Next.js frontend |
| `server` | `monadicalsas/reflector-backend` | FastAPI backend |
| `worker` | `monadicalsas/reflector-backend` | Celery worker for background tasks |
| `beat` | `monadicalsas/reflector-backend` | Celery beat scheduler |
| `redis` | `redis:7.2-alpine` | Message broker and cache |
| `postgres` | `postgres:17-alpine` | Primary database |
| `caddy` | `caddy:2-alpine` | Reverse proxy with auto-SSL |

## Environment Files

Reflector uses two separate environment files:

### Backend (`server/.env`)

Used by: `server`, `worker`, `beat`

Key variables:
```env
# Database connection
DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

# Redis
REDIS_HOST=redis
CELERY_BROKER_URL=redis://redis:6379/1
CELERY_RESULT_BACKEND=redis://redis:6379/1

# API domain and CORS
BASE_URL=https://api.example.com
CORS_ORIGIN=https://app.example.com

# Modal GPU processing
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://...
TRANSCRIPT_MODAL_API_KEY=...
```

### Frontend (`www/.env`)

Used by: `web`

Key variables:
```env
# Domain configuration
SITE_URL=https://app.example.com
API_URL=https://api.example.com
WEBSOCKET_URL=wss://api.example.com
SERVER_API_URL=http://server:1250

# Authentication
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=...
```

Note: `API_URL` is used client-side (browser), `SERVER_API_URL` is used server-side (SSR).

## Volumes

| Volume | Purpose |
|--------|---------|
| `redis_data` | Redis persistence |
| `postgres_data` | PostgreSQL data |
| `server_data` | Uploaded files, local storage |
| `caddy_data` | SSL certificates |
| `caddy_config` | Caddy configuration |

## Network

All services share the default network. The network is marked `attachable: true` to allow external containers (like Authentik) to join.

## Common Commands

### Start all services
```bash
docker compose -f docker-compose.prod.yml up -d
```

### View logs
```bash
# All services
docker compose -f docker-compose.prod.yml logs -f

# Specific service
docker compose -f docker-compose.prod.yml logs server --tail 50
```

### Restart a service
```bash
# Quick restart (doesn't reload .env changes)
docker compose -f docker-compose.prod.yml restart server

# Reload .env and restart
docker compose -f docker-compose.prod.yml up -d server
```

### Run database migrations
```bash
docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head
```

### Access database
```bash
docker compose -f docker-compose.prod.yml exec postgres psql -U reflector
```

### Pull latest images
```bash
docker compose -f docker-compose.prod.yml pull
docker compose -f docker-compose.prod.yml up -d
```

### Stop all services
```bash
docker compose -f docker-compose.prod.yml down
```

### Full reset (WARNING: deletes data)
```bash
docker compose -f docker-compose.prod.yml down -v
```

## Customization

### Using a different database

To use an external PostgreSQL:

1. Remove the `postgres` service from the compose file
2. Update `DATABASE_URL` in `server/.env`:
   ```env
   DATABASE_URL=postgresql+asyncpg://user:pass@external-host:5432/reflector
   ```

### Using external Redis

1. Remove the `redis` service from the compose file
2. Update Redis settings in `server/.env`:
   ```env
   REDIS_HOST=external-redis-host
   CELERY_BROKER_URL=redis://external-redis-host:6379/1
   ```

### Adding Authentik

To add Authentik for authentication, see [Authentication Setup](./auth-setup). Quick steps:

1. Deploy Authentik separately
2. Connect to Reflector's network:
   ```bash
   docker network connect reflector_default authentik-server-1
   ```
3. Add to Caddyfile:
   ```
   authentik.example.com {
       reverse_proxy authentik-server-1:9000
   }
   ```

## Caddyfile Reference

The Caddyfile supports environment variable substitution:

```
{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}
```

Set the `FRONTEND_DOMAIN` and `API_DOMAIN` environment variables, or edit the file directly.

### Reload Caddy after changes
```bash
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```

139 docs/docs/installation/docs-deployment.md Normal file
@@ -0,0 +1,139 @@

---
sidebar_position: 10
title: Docs Website Deployment
---

# Docs Website Deployment

This guide covers deploying the Reflector documentation website. **This is optional and intended for internal/experimental use only.**

## Overview

The documentation is built using Docusaurus and deployed as a static nginx-served site.

## Prerequisites

- Reflector already deployed (Steps 1-7 from [Deployment Guide](./overview))
- DNS A record for the docs subdomain (e.g., `docs.example.com`)

## Deployment Steps

### Step 1: Pre-fetch OpenAPI Spec

The docs site includes the API reference from your running backend. Fetch it before building:

```bash
cd ~/reflector
docker compose -f docker-compose.prod.yml exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json
```

This creates `docs/static/openapi.json` (should be ~70KB), which will be copied during the Docker build.

**Why not fetch during build?** Docker build containers are network-isolated and can't access the running backend services.

### Step 2: Verify Dockerfile

The Dockerfile is already in `docs/Dockerfile`:

```dockerfile
FROM node:18-alpine AS builder
WORKDIR /app

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci

# Copy source (includes static/openapi.json if pre-fetched)
COPY . .

# Fix docusaurus config: change onBrokenLinks to 'warn' for Docker build
RUN sed -i "s/onBrokenLinks: 'throw'/onBrokenLinks: 'warn'/g" docusaurus.config.ts

# Build static site
RUN npx docusaurus build

FROM nginx:alpine
COPY --from=builder /app/build /usr/share/nginx/html
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
```

### Step 3: Add Docs Service to docker-compose.prod.yml

Add this service to `docker-compose.prod.yml`:

```yaml
docs:
  build: ./docs
  restart: unless-stopped
  networks:
    - default
```

### Step 4: Add Caddy Route

Add to `Caddyfile`:

```
{$DOCS_DOMAIN:docs.example.com} {
    reverse_proxy docs:80
}
```

### Step 5: Build and Deploy

```bash
cd ~/reflector
docker compose -f docker-compose.prod.yml up -d --build docs
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```

### Step 6: Verify

```bash
# Check container status
docker compose -f docker-compose.prod.yml ps docs
# Should show "Up"

# Test URL
curl -I https://docs.example.com
# Should return HTTP/2 200
```

Visit `https://docs.example.com` in your browser.

## Updating Documentation

When docs are updated:

```bash
cd ~/reflector
git pull

# Refresh OpenAPI spec from backend
docker compose -f docker-compose.prod.yml exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json

# Rebuild docs
docker compose -f docker-compose.prod.yml up -d --build docs
```

## Troubleshooting

### Missing openapi.json during build
- Make sure you ran the pre-fetch step first (Step 1)
- Verify `docs/static/openapi.json` exists and is ~70KB
- Re-run: `docker compose exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json`

### Build fails with "Docusaurus found broken links"
- This happens if `onBrokenLinks: 'throw'` is set in docusaurus.config.ts
- The solution is already in the Dockerfile: it uses `sed` to change the setting to `'warn'` during build

### 404 on all pages
- The Docusaurus baseUrl might be wrong; it should be `/` for a custom domain
- Check `docs/docusaurus.config.ts`: `baseUrl: '/'`

### Docs not updating after rebuild
- Force rebuild: `docker compose -f docker-compose.prod.yml build --no-cache docs`
- Then: `docker compose -f docker-compose.prod.yml up -d docs`

171 docs/docs/installation/modal-setup.md Normal file
@@ -0,0 +1,171 @@

---
sidebar_position: 4
title: Modal.com Setup
---

# Modal.com Setup

This page covers Modal.com GPU setup in detail. For the complete deployment guide, see [Deployment Guide](./overview).

Reflector uses [Modal.com](https://modal.com) for GPU-accelerated audio processing. This guide walks you through deploying the required GPU functions.

## What is Modal.com?

Modal is a serverless GPU platform. You deploy Python code that runs on their GPUs, and pay only for actual compute time. Reflector uses Modal for:

- **Transcription**: Whisper model for speech-to-text
- **Diarization**: Pyannote model for speaker identification

## Prerequisites

1. **Modal.com account** - Sign up at https://modal.com (free tier available)
2. **HuggingFace account** - Required for Pyannote diarization models:
   - Create an account at https://huggingface.co
   - Accept **both** Pyannote licenses:
     - https://huggingface.co/pyannote/speaker-diarization-3.1
     - https://huggingface.co/pyannote/segmentation-3.0
   - Generate an access token at https://huggingface.co/settings/tokens

## Deployment

**Location: YOUR LOCAL COMPUTER (laptop/desktop)**

The Modal CLI requires browser authentication, so this must run on a machine with a browser - not on a headless server.

### Install Modal CLI

```bash
uv tool install modal
```

### Authenticate with Modal

```bash
modal setup
```

This opens your browser for authentication. Complete the login flow.

### Clone Repository and Deploy

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/modal_deployments
./deploy-all.sh --hf-token YOUR_HUGGINGFACE_TOKEN
```

Or run interactively (the script will prompt for the token):
```bash
./deploy-all.sh
```

### What the Script Does

1. **Prompts for HuggingFace token** - Needed to download the Pyannote diarization model
2. **Generates API key** - Creates a secure random key for authenticating requests to GPU functions
3. **Creates Modal secrets**:
   - `hf_token` - Your HuggingFace token
   - `reflector-gpu` - The generated API key
4. **Deploys GPU functions** - Transcriber (Whisper) and Diarizer (Pyannote)
5. **Outputs configuration** - Prints URLs and API key to console

### Example Output

```
==========================================
Reflector GPU Functions Deployment
==========================================

Generating API key for GPU services...
Creating Modal secrets...
-> Creating secret: hf_token
-> Creating secret: reflector-gpu

Deploying transcriber (Whisper)...
-> https://yourname--reflector-transcriber-web.modal.run

Deploying diarizer (Pyannote)...
-> https://yourname--reflector-diarizer-web.modal.run

==========================================
Deployment complete!
==========================================

Copy these values to your server's server/.env file:

# --- Modal GPU Configuration ---
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://yourname--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=abc123...

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://yourname--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=abc123...
# --- End Modal Configuration ---
```

Copy the output and paste it into the `server/.env` file on your server.

## Costs

Modal charges based on GPU compute time:
- Functions scale to zero when not in use (no cost when idle)
- You only pay for actual processing time
- Free tier includes $30/month of credits

Typical costs for audio processing:
- Transcription: ~$0.01-0.05 per minute of audio
- Diarization: ~$0.02-0.10 per minute of audio

## Troubleshooting

### "Modal CLI not installed"
```bash
uv tool install modal
```

### "Not authenticated with Modal"
```bash
modal setup
# Complete browser authentication
```

### "Failed to create secret hf_token"
- Verify your HuggingFace token is valid
- Ensure you've accepted both Pyannote licenses
- The token needs `read` permission

### Deployment fails
Check the Modal dashboard for detailed error logs:
- Visit https://modal.com/apps
- Click on the failed function
- View build and runtime logs

### Re-running deployment
The script is safe to re-run. It will:
- Update existing secrets if they exist
- Redeploy functions with the latest code
- Output new configuration (the API key stays the same if the secret exists)

## Manual Deployment (Advanced)

If you prefer to deploy functions individually:

```bash
cd gpu/modal_deployments

# Create secrets manually
modal secret create hf_token HF_TOKEN=your-hf-token
modal secret create reflector-gpu REFLECTOR_GPU_APIKEY=$(openssl rand -hex 32)

# Deploy each function
modal deploy reflector_transcriber.py
modal deploy reflector_diarizer.py
```

## Monitoring

View your deployed functions and their usage:
- **Modal Dashboard**: https://modal.com/apps
- **Function logs**: Click on any function to view logs
- **Usage**: View compute time and costs in the dashboard

411 docs/docs/installation/overview.md Normal file
@@ -0,0 +1,411 @@

---
sidebar_position: 1
title: Deployment Guide
---

# Deployment Guide

This guide walks you through deploying Reflector from scratch. Follow these steps in order.

## What You'll Set Up

```mermaid
flowchart LR
    User --> Caddy["Caddy (auto-SSL)"]
    Caddy --> Frontend["Frontend (Next.js)"]
    Caddy --> Backend["Backend (FastAPI)"]
    Backend --> PostgreSQL
    Backend --> Redis
    Backend --> Workers["Celery Workers"]
    Workers --> PostgreSQL
    Workers --> Redis
    Workers --> GPU["GPU Processing<br/>(Modal.com OR Self-hosted)"]
```

## Prerequisites

Before starting, you need:

- **Production server** - 4+ cores, 8GB+ RAM, public IP
- **Two domain names** - e.g., `app.example.com` (frontend) and `api.example.com` (backend)
- **GPU processing** - Choose one:
  - Modal.com account, OR
  - GPU server with NVIDIA GPU (8GB+ VRAM)
- **HuggingFace account** - Free at https://huggingface.co
  - Accept both Pyannote licenses (required for speaker diarization):
    - https://huggingface.co/pyannote/speaker-diarization-3.1
    - https://huggingface.co/pyannote/segmentation-3.0
- **LLM API** - For summaries and topic detection. Choose one:
  - OpenAI API key at https://platform.openai.com/account/api-keys, OR
  - Any OpenAI-compatible endpoint (vLLM, LiteLLM, Ollama, etc.)
- **AWS S3 bucket** - For storing audio files and transcripts (see [S3 Setup](#create-s3-bucket-for-transcript-storage) below)

### Optional (for live meeting rooms)

- [ ] **Daily.co account** - Free tier at https://dashboard.daily.co
- [ ] **AWS S3 bucket + IAM Role** - For Daily.co recording storage (separate from transcript storage)

---

## Configure DNS

```
Type: A    Name: app    Value: <your-server-ip>
Type: A    Name: api    Value: <your-server-ip>
```

---

## Deploy GPU Processing

Reflector requires GPU processing for transcription and speaker diarization. Choose one option:

| | **Modal.com (Cloud)** | **Self-Hosted GPU** |
|---|---|---|
| **Best for** | No GPU hardware, zero maintenance | Own GPU server, full control |
| **Pricing** | Pay-per-use | Fixed infrastructure cost |
|
||||
|
||||
### Option A: Modal.com (Serverless Cloud GPU)
|
||||
|
||||
#### Accept HuggingFace Licenses
|
||||
|
||||
Visit both pages and click "Accept":
|
||||
- https://huggingface.co/pyannote/speaker-diarization-3.1
|
||||
- https://huggingface.co/pyannote/segmentation-3.0
|
||||
|
||||
Generate a token at https://huggingface.co/settings/tokens
|
||||
|
||||
#### Deploy to Modal
|
||||
|
||||
There's an install script to help with this setup. It's using modal API to set all necessary moving parts.
|
||||
|
||||
As an alternative, all those operations that script does could be performed in modal settings in modal UI.
|
||||
|
||||
```bash
|
||||
uv tool install modal
|
||||
modal setup # opens browser for authentication
|
||||
|
||||
git clone https://github.com/monadical-sas/reflector.git
|
||||
cd reflector/gpu/modal_deployments
|
||||
./deploy-all.sh --hf-token YOUR_HUGGINGFACE_TOKEN
|
||||
```
|
||||
|
||||
**Save the output** - copy the configuration block, you'll need it soon.
|
||||
|
||||
See [Modal Setup](./modal-setup) for troubleshooting and details.
|
||||
|
||||
### Option B: Self-Hosted GPU
|
||||
|
||||
**Location: YOUR GPU SERVER**
|
||||
|
||||
Requires: NVIDIA GPU with 8GB+ VRAM, Ubuntu 22.04+, 40-50GB disk.
|
||||
|
||||
See [Self-Hosted GPU Setup](./self-hosted-gpu-setup) for complete instructions. Quick summary:
|
||||
|
||||
1. Install NVIDIA drivers and Docker
|
||||
2. Clone repository: `git clone https://github.com/monadical-sas/reflector.git`
|
||||
3. Configure `.env` with HuggingFace token
|
||||
4. Start service with Docker compose
|
||||
5. Set up Caddy reverse proxy for HTTPS
|
||||
|
||||
**Save your API key and HTTPS URL** - you'll need them soon.
|
||||
|
||||
---
|
||||
|
||||
## Prepare Server
|
||||
|
||||
**Location: dedicated reflector server**
|
||||
|
||||
### Install Docker
|
||||
|
||||
```bash
|
||||
ssh user@your-server-ip
|
||||
|
||||
curl -fsSL https://get.docker.com | sh
|
||||
sudo usermod -aG docker $USER
|
||||
|
||||
# Log out and back in for group changes
|
||||
exit
|
||||
ssh user@your-server-ip
|
||||
|
||||
docker --version # verify
|
||||
```
|
||||
|
||||
### Firewall
|
||||
|
||||
Ensure ports 80 (HTTP) and 443 (HTTPS) are open for inbound traffic. The method varies by cloud provider and OS configuration.
|
||||
|
||||
**For live transcription without Daily/Whereby rooms**: WebRTC requires UDP port range 49152-65535 for media traffic.
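
For example, with `ufw` (shown as an illustration - use your provider's security groups or your firewall tool of choice instead if that's how your server is managed):

```bash
# Allow HTTP/HTTPS for Caddy
sudo ufw allow 80/tcp
sudo ufw allow 443/tcp

# Only needed for direct WebRTC live transcription (no Daily/Whereby)
sudo ufw allow 49152:65535/udp
```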

### Clone Repository

The Docker images contain all application code. You clone the repository for its configuration files and the compose definition:

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector
```

---

## Create S3 Bucket for Transcript Storage

Reflector requires AWS S3 to store audio files during processing.

### Create Bucket

```bash
# Choose a unique bucket name
BUCKET_NAME="reflector-transcripts-yourname"
AWS_REGION="us-east-1"

# Create bucket
aws s3 mb s3://$BUCKET_NAME --region $AWS_REGION
```

### Create IAM User

Create an IAM user with S3 access for Reflector:

1. Go to AWS IAM Console → Users → Create User
2. Name: `reflector-transcripts`
3. Attach policy: `AmazonS3FullAccess` (or create a custom policy scoped to just your bucket - see the sketch below)
4. Create an access key (Access key ID + Secret access key)

Save these credentials - you'll need them in the next step.
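
A least-privilege custom policy might look like this (a sketch - substitute your bucket name; the exact set of actions Reflector needs may be narrower):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["s3:ListBucket"],
      "Resource": "arn:aws:s3:::reflector-transcripts-yourname"
    },
    {
      "Effect": "Allow",
      "Action": ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"],
      "Resource": "arn:aws:s3:::reflector-transcripts-yourname/*"
    }
  ]
}
```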

---

## Configure Environment

Reflector has two env files:
- `server/.env` - Backend configuration
- `www/.env` - Frontend configuration

### Backend Configuration

```bash
cp server/.env.example server/.env
nano server/.env
```

**Required settings:**
```env
# Database (defaults work with docker-compose.prod.yml)
DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

# Redis
REDIS_HOST=redis
CELERY_BROKER_URL=redis://redis:6379/1
CELERY_RESULT_BACKEND=redis://redis:6379/1

# Your domains
BASE_URL=https://api.example.com
CORS_ORIGIN=https://app.example.com
CORS_ALLOW_CREDENTIALS=true

# Secret key - generate with: openssl rand -hex 32
SECRET_KEY=<your-generated-secret>

# GPU Processing - choose ONE option:

# Option A: Modal.com (paste from deploy-all.sh output)
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://yourname--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=<from-deploy-all.sh-output>
DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://yourname--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=<from-deploy-all.sh-output>

# Option B: Self-hosted GPU (use your GPU server URL and API key;
# the backend stays "modal" because the self-hosted service speaks the same API)
# TRANSCRIPT_BACKEND=modal
# TRANSCRIPT_URL=https://gpu.example.com
# TRANSCRIPT_MODAL_API_KEY=<your-generated-api-key>
# DIARIZATION_BACKEND=modal
# DIARIZATION_URL=https://gpu.example.com
# DIARIZATION_MODAL_API_KEY=<your-generated-api-key>

# Storage - where to store audio files and transcripts (requires AWS S3)
TRANSCRIPT_STORAGE_BACKEND=aws
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=your-aws-access-key
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret-key
TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=reflector-transcripts-yourname
TRANSCRIPT_STORAGE_AWS_REGION=us-east-1

# LLM - for generating titles, summaries, and topics
LLM_API_KEY=sk-your-openai-api-key
LLM_MODEL=gpt-4o-mini
# LLM_URL=https://api.openai.com/v1  # Optional: custom endpoint (vLLM, LiteLLM, Ollama, etc.)

# Auth - disabled for initial setup (see the authentication step below)
AUTH_BACKEND=none
```

### Frontend Configuration

```bash
cp www/.env.example www/.env
nano www/.env
```

**Required settings:**
```env
# Your domains
SITE_URL=https://app.example.com
API_URL=https://api.example.com
WEBSOCKET_URL=wss://api.example.com
SERVER_API_URL=http://server:1250

# NextAuth
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=<generate-with-openssl-rand-hex-32>

# Disable login requirement for initial setup
FEATURE_REQUIRE_LOGIN=false
```

---

## Configure Caddy

```bash
cp Caddyfile.example Caddyfile
nano Caddyfile
```

Replace `example.com` with your domains. The `{$VAR:default}` syntax uses Caddy's environment variable substitution - you can either edit the file directly or set the `FRONTEND_DOMAIN` and `API_DOMAIN` environment variables.

```
{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}
```
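
For example, to rely on the substitution instead of editing the file (this assumes your `docker-compose.prod.yml` forwards these variables to the Caddy container - check the compose file before depending on it):

```bash
FRONTEND_DOMAIN=app.example.com API_DOMAIN=api.example.com \
  docker compose -f docker-compose.prod.yml up -d caddy
```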

---

## Start Services

```bash
docker compose -f docker-compose.prod.yml up -d
```

Wait for the containers to start (the first run may take 1-2 minutes to pull images and initialize).

---

## Verify Deployment

### Check services
```bash
docker compose -f docker-compose.prod.yml ps
# All should show "Up"
```

### Test API
```bash
curl https://api.example.com/health
# Should return: {"status":"healthy"}
```

### Test Frontend
- Visit https://app.example.com
- You should see the Reflector interface
- Try uploading an audio file to test transcription

If any verification fails, see [Troubleshooting](#troubleshooting) below.

---

## Enable Authentication (Required for Live Rooms)

By default, Reflector is open (no login required). **Authentication is required if you want to use Live Meeting Rooms.**

See [Authentication Setup](./auth-setup) for the full Authentik OAuth configuration.

Quick summary (env sketch below):
1. Deploy Authentik on your server
2. Create an OAuth provider in Authentik
3. Extract the public key for JWT verification
4. Update `server/.env`: `AUTH_BACKEND=jwt` + `AUTH_JWT_AUDIENCE`
5. Update `www/.env`: `FEATURE_REQUIRE_LOGIN=true` + Authentik credentials
6. Mount the JWT keys volume and restart services
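
A minimal sketch of the env changes from steps 4-5 (values are placeholders; the full variable list lives in [Authentication Setup](./auth-setup)):

```env
# server/.env
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=<oauth-client-id-from-authentik>
```

```env
# www/.env
FEATURE_REQUIRE_LOGIN=true
```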

---

## Enable Live Meeting Rooms

**Requires: the Authentication step above**

Live rooms require Daily.co and AWS S3. See [Daily.co Setup](./daily-setup) for complete S3/IAM configuration instructions.

Note that Reflector also supports Whereby as a call provider - this doc doesn't cover its setup yet.

Quick config - add to `server/.env`:

```env
DEFAULT_VIDEO_PLATFORM=daily
DAILY_API_KEY=<from-daily.co-dashboard>
DAILY_SUBDOMAIN=<your-daily-subdomain>

# S3 for recording storage
DAILYCO_STORAGE_AWS_BUCKET_NAME=<your-bucket>
DAILYCO_STORAGE_AWS_REGION=us-east-1
DAILYCO_STORAGE_AWS_ROLE_ARN=<arn:aws:iam::ACCOUNT:role/DailyCo>
```

Reload the env and restart:
```bash
docker compose -f docker-compose.prod.yml up -d server worker
```

---

## Troubleshooting

### Check logs for errors
```bash
docker compose -f docker-compose.prod.yml logs server --tail 20
docker compose -f docker-compose.prod.yml logs worker --tail 20
```

### Services won't start
```bash
docker compose -f docker-compose.prod.yml logs
```

### CORS errors in browser
- Verify `CORS_ORIGIN` in `server/.env` matches your frontend domain exactly (including `https://`)
- Reload env: `docker compose -f docker-compose.prod.yml up -d server`

### SSL certificate errors
- Caddy auto-provisions Let's Encrypt certificates
- Ensure ports 80 and 443 are open
- Check: `docker compose -f docker-compose.prod.yml logs caddy`

### Transcription not working
- Check the Modal dashboard: https://modal.com/apps
- Verify the URLs in `server/.env` match the deployed functions
- Check worker logs: `docker compose -f docker-compose.prod.yml logs worker`

### "Login required" but auth not configured
- Set `FEATURE_REQUIRE_LOGIN=false` in `www/.env`
- Rebuild the frontend: `docker compose -f docker-compose.prod.yml up -d --force-recreate web`

### Database migrations or connectivity issues
Migrations run automatically on server startup. To check database connectivity or debug migration failures:

```bash
# Check server logs for migration errors
docker compose -f docker-compose.prod.yml logs server | grep -i -E "(alembic|migration|database|postgres)"

# Verify database connectivity
docker compose -f docker-compose.prod.yml exec server uv run python -c "from reflector.db import engine; print('DB connected')"

# Manually run migrations (if needed)
docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head
```

`docs/docs/installation/requirements.md` (new file, 63 lines)

---
sidebar_position: 2
title: System Requirements
---

# System Requirements

This page lists hardware and software requirements. For the complete deployment guide, see [Deployment Guide](./overview).

## Server Requirements

### Minimum Requirements

- **CPU**: 4 cores
- **RAM**: 8 GB
- **Storage**: 50 GB SSD
- **OS**: Ubuntu 22.04+ or compatible Linux
- **Network**: Public IP address

### Recommended Requirements

- **CPU**: 8+ cores
- **RAM**: 16 GB
- **Storage**: 100 GB SSD
- **Network**: 1 Gbps connection

## Software Requirements

- Docker Engine 20.10+
- Docker Compose 2.0+

## External Services

### Required

- **Two domain names** - One for the frontend (e.g., `app.example.com`), one for the API (e.g., `api.example.com`)
- **Modal.com account** - For GPU-accelerated transcription and diarization (free tier available), unless you run a [self-hosted GPU](./self-hosted-gpu-setup)
- **HuggingFace account** - For Pyannote diarization model access
- **LLM API** - For generating summaries and topic detection. Options:
  - OpenAI API (https://platform.openai.com/account/api-keys)
  - Any OpenAI-compatible endpoint (vLLM, LiteLLM, Ollama)
  - Self-hosted: Phi-4 14B 4-bit recommended (~8GB VRAM)

### Required for Live Meeting Rooms

- **Daily.co account** - For video conferencing (free tier available at https://dashboard.daily.co)
- **AWS S3 bucket + IAM Role** - For Daily.co to store recordings
- **Another AWS S3 bucket (optional, can reuse the one above)** - For Reflector to store "compiled" MP3 files and temporary files from the diarization process

### Optional

- **AWS S3** - For cloud storage of recordings and transcripts
- **Authentik** - For SSO/OIDC authentication
- **Sentry** - For error tracking

## Development Requirements

For local development only (not required for production deployment):

- Node.js 22+ (for frontend development)
- Python 3.12+ (for backend development)
- pnpm (for frontend package management)
- uv (for Python package management)

`docs/docs/installation/self-hosted-gpu-setup.md` (new file, 307 lines)

---
sidebar_position: 5
title: Self-Hosted GPU Setup
---

# Self-Hosted GPU Setup

This guide covers deploying Reflector's GPU processing on your own server instead of Modal.com. For the complete deployment guide, see [Deployment Guide](./overview).

## When to Use Self-Hosted GPU

**Choose self-hosted GPU if you:**
- Have GPU hardware available (NVIDIA required)
- Want full control over processing
- Prefer fixed infrastructure costs over pay-per-use
- Have privacy or data locality requirements
- Need to process audio without external API calls

**Choose Modal.com instead if you:**
- Don't have GPU hardware
- Want zero infrastructure management
- Prefer pay-per-use pricing
- Need instant scaling for variable workloads

See [Modal.com Setup](./modal-setup) for cloud GPU deployment.

## What Gets Deployed

The self-hosted GPU service provides the same API endpoints as Modal:
- `POST /v1/audio/transcriptions` - Whisper transcription
- `POST /v1/audio/transcriptions-from-url` - Transcribe from URL
- `POST /diarize` - Pyannote speaker diarization
- `POST /translate` - Audio translation

Your main Reflector server connects to this service exactly like it connects to Modal - only the URL changes.
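
Once the service is up, a quick smoke test might look like this (a sketch only - the multipart field name and auth header shown here are assumptions, since the endpoint follows the OpenAI audio API shape; check `gpu/self_hosted` for the exact request contract):

```bash
curl -X POST https://gpu.example.com/v1/audio/transcriptions \
  -H "Authorization: Bearer $REFLECTOR_GPU_APIKEY" \
  -F "file=@sample.wav"
```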

## Prerequisites

### Hardware
- **GPU**: NVIDIA GPU with 8GB+ VRAM (tested on a Tesla T4 with 15GB)
- **CPU**: 4+ cores recommended
- **RAM**: 8GB minimum, 16GB recommended
- **Disk**: 40-50GB minimum

### Network
- Public IP address
- Domain name with a DNS A record pointing to the server

### Accounts
- **HuggingFace account** with accepted Pyannote licenses:
  - https://huggingface.co/pyannote/speaker-diarization-3.1
  - https://huggingface.co/pyannote/segmentation-3.0
- **HuggingFace access token** from https://huggingface.co/settings/tokens

## Docker Deployment

### Step 1: Install NVIDIA Driver

```bash
sudo apt update
sudo apt install -y nvidia-driver-535
sudo reboot

# After reboot, verify installation
nvidia-smi
```

Expected output: GPU details with driver version and CUDA version.

### Step 2: Install Docker

Follow the [official Docker installation guide](https://docs.docker.com/engine/install/ubuntu/) for your distribution.

After installation, add your user to the docker group:

```bash
sudo usermod -aG docker $USER

# Log out and back in for group changes
exit
# SSH back in
```

### Step 3: Install NVIDIA Container Toolkit

```bash
# Add NVIDIA repository and install toolkit
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | \
  sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg

curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
  sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
  sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list

sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit
sudo nvidia-ctk runtime configure --runtime=docker
sudo systemctl restart docker
```

### Step 4: Clone Repository and Configure

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/self_hosted

# Create environment file
cat > .env << EOF
REFLECTOR_GPU_APIKEY=$(openssl rand -hex 16)
HF_TOKEN=your_huggingface_token_here
EOF

# Note the generated API key - you'll need it for the main server config
cat .env
```

### Step 5: Build and Start

The repository includes a `compose.yml` file. Build and start:

```bash
# Build image (takes ~5 minutes, downloads ~10GB)
sudo docker compose build

# Start service
sudo docker compose up -d

# Wait for startup and verify
sleep 30
sudo docker compose logs
```

Look for: `INFO: Application startup complete. Uvicorn running on http://0.0.0.0:8000`

### Step 6: Verify GPU Access

```bash
# Check the GPU is accessible from the container
sudo docker exec $(sudo docker ps -q) nvidia-smi
```

This should show the GPU with ~3GB of VRAM in use (models loaded).

---

## Configure HTTPS with Caddy

Caddy handles SSL automatically.

### Install Caddy

```bash
sudo apt install -y debian-keyring debian-archive-keyring apt-transport-https curl

curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | \
  sudo gpg --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg

curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | \
  sudo tee /etc/apt/sources.list.d/caddy-stable.list

sudo apt update
sudo apt install -y caddy
```

### Configure Reverse Proxy

Edit the Caddyfile with your domain:

```bash
sudo nano /etc/caddy/Caddyfile
```

Add (replace `gpu.example.com` with your domain):

```
gpu.example.com {
    reverse_proxy localhost:8000
}
```

Reload Caddy (auto-provisions the SSL certificate):

```bash
sudo systemctl reload caddy
```

### Verify HTTPS

```bash
curl -I https://gpu.example.com/docs
# Should return HTTP/2 200
```

---

## Configure Main Reflector Server

On your main Reflector server, update `server/.env`:

```env
# GPU Processing - Self-hosted
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://gpu.example.com
TRANSCRIPT_MODAL_API_KEY=<your-generated-api-key>

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://gpu.example.com
DIARIZATION_MODAL_API_KEY=<your-generated-api-key>
```

**Note:** The backend type is `modal` because the self-hosted GPU service implements the same API contract as Modal.com. This lets you switch between cloud and self-hosted GPU processing by changing only the URL and API key.

Restart services to apply:

```bash
docker compose -f docker-compose.prod.yml restart server worker
```

---

## Service Management

All commands in this section assume you're in `~/reflector/gpu/self_hosted/`.

```bash
# View logs
sudo docker compose logs -f

# Restart service
sudo docker compose restart

# Stop service
sudo docker compose down

# Check status
sudo docker compose ps
```

### Monitor GPU

```bash
# Check GPU usage
nvidia-smi

# Watch in real-time
watch -n 1 nvidia-smi
```

**Typical GPU memory usage:**
- Idle (models loaded): ~3GB VRAM
- During transcription: ~4-5GB VRAM

---

## Troubleshooting

### nvidia-smi fails after driver install

```bash
# Manually load kernel modules
sudo modprobe nvidia
nvidia-smi
```

### Service fails with "Could not download pyannote pipeline"

1. Verify `HF_TOKEN` is valid: `echo $HF_TOKEN`
2. Check model access at https://huggingface.co/pyannote/speaker-diarization-3.1
3. Update `.env` with the correct token
4. Restart the service: `sudo docker compose restart`

### Cannot connect to HTTPS endpoint

1. Verify DNS resolves: `dig +short gpu.example.com`
2. Check the firewall: `sudo ufw status` (ports 80 and 443 must be open)
3. Check Caddy: `sudo systemctl status caddy`
4. View Caddy logs: `sudo journalctl -u caddy -n 50`

### SSL certificate not provisioning

Requirements for Let's Encrypt:
- Ports 80 and 443 publicly accessible
- DNS resolves to the server's public IP
- A valid domain (not localhost or a private IP)

### Docker container won't start

```bash
# Check logs
sudo docker compose logs

# Common issues:
# - Port 8000 already in use
# - GPU not accessible (nvidia-ctk not configured)
# - Missing .env file
```

---

## Updating

```bash
cd ~/reflector/gpu/self_hosted
git pull
sudo docker compose build
sudo docker compose up -d
```

`docs/docs/intro.md` (new file, 61 lines)

---
sidebar_position: 1
title: Introduction
---

# Welcome to Reflector

Reflector is a privacy-focused, self-hosted, AI-powered audio transcription and meeting analysis platform. It provides real-time transcription, speaker diarization, and summarization for audio content and live meetings, with complete control over your data and infrastructure: GPU processing can run on Modal.com or on your own hardware.

## What is Reflector?

Reflector is a web application that uses AI to process audio content, providing:

- **Real-time Transcription**: Convert speech to text using [Whisper](https://github.com/openai/whisper) (multi-language) or [Parakeet](https://github.com/NVIDIA/NeMo) (English) models
- **Speaker Diarization**: Identify and label different speakers using [Pyannote](https://github.com/pyannote/pyannote-audio) 3.1
- **Topic Detection & Summarization**: Extract key topics and generate concise summaries using LLMs
- **Meeting Recording**: Create permanent records of meetings with searchable transcripts

![Reflector Transcript View](/img/reflector-transcript-view.png)

## Features

| Feature | Public Mode | Private Mode |
|---------|-------------|--------------|
| **Authentication** | None required | Required |
| **Audio Upload** | ✅ | ✅ |
| **Live Microphone Streaming** | ✅ | ✅ |
| **Transcription** | ✅ | ✅ |
| **Speaker Diarization** | ✅ | ✅ |
| **Topic Detection** | ✅ | ✅ |
| **Summarization** | ✅ | ✅ |
| **Virtual Meeting Rooms (Whereby, Daily)** | ❌ | ✅ |
| **Browse Transcripts Page** | ❌ | ✅ |
| **Search Functionality** | ❌ | ✅ |
| **Persistent Storage** | ❌ | ✅ |

## Architecture Overview

Reflector consists of three main components:

- **Frontend**: React application built with Next.js
- **Backend**: Python server using FastAPI
- **Processing**: Scalable GPU workers for ML inference (Modal.com or local)

## Getting Started

Ready to deploy Reflector? Head over to our [Installation Guide](./installation/overview) to set up your own instance.

For a quick overview of how Reflector processes audio, check out our [Pipeline Documentation](./pipelines/overview).

## Open Source

Reflector is open source software developed by [Monadical](https://monadical.com) and licensed under the **MIT License**. We welcome contributions from the community!

- [GitHub Repository](https://github.com/monadical-sas/reflector)
- [Issue Tracker](https://github.com/monadical-sas/reflector/issues)
- [Pull Requests](https://github.com/monadical-sas/reflector/pulls)

## Support

Need help? Reach out to the community through GitHub Discussions.

`docs/docs/pipelines/file-pipeline.md` (new file, 83 lines)

---
sidebar_position: 2
title: File Processing Pipeline
---

# File Processing Pipeline

The file processing pipeline handles uploaded audio files, optimizing for accuracy and throughput.

## Pipeline Stages

### 1. Input Stage

**Accepted Formats:**
- MP3 (most common)
- WAV (uncompressed)
- M4A (Apple format)
- WebM (browser recordings)
- MP4 (video with audio track)

**File Validation:**
- Sample rate: any (audio is resampled to 16kHz)

### 2. Pre-processing

**Audio Normalization:**
```yaml
# Convert to standard format
- Sample rate: 16kHz (Whisper requirement)
- Channels: Mono
- Bit depth: 16-bit
- Format: WAV
```
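
For reference, an equivalent conversion with `ffmpeg` looks like this (illustrative only - the pipeline performs this normalization internally):

```bash
ffmpeg -i input.mp3 -ar 16000 -ac 1 -c:a pcm_s16le output.wav
```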

**Noise Reduction (Optional):**
- Background noise removal
- Echo cancellation
- High-pass filter for rumble

### 3. Chunking Strategy

Audio is split into segments for processing (see the sketch below):
- Configurable chunk sizes
- Optional silence detection for natural breaks
- Parallel processing of chunks
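
A minimal sketch of silence-based chunking with `pydub` (illustrative only - the threshold and length values are assumptions, and the actual chunking code lives in the server pipeline):

```python
from pydub import AudioSegment
from pydub.silence import split_on_silence

# Normalize to the pipeline's target format: 16kHz mono
audio = AudioSegment.from_file("meeting.mp3").set_frame_rate(16000).set_channels(1)

chunks = split_on_silence(
    audio,
    min_silence_len=700,   # ms of silence treated as a natural break (assumed value)
    silence_thresh=-40,    # dBFS level considered "silence" (assumed value)
    keep_silence=200,      # keep a little padding so words aren't clipped
)

# Each chunk can now be transcribed in parallel
for i, chunk in enumerate(chunks):
    chunk.export(f"chunk_{i:03d}.wav", format="wav")
```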

### 4. Transcription Processing

Transcription uses OpenAI Whisper models via Modal.com or a self-hosted GPU:
- Automatic language detection
- Word-level timestamps

### 5. Diarization (Speaker Identification)

Speaker diarization uses Pyannote 3.1:

1. **Voice Activity Detection (VAD)** - Identifies speech segments
2. **Speaker Embedding** - Extracts voice characteristics
3. **Clustering** - Groups similar voices
4. **Segmentation** - Assigns speaker labels to time segments

### 6. Alignment & Merging

- Combines transcription with speaker diarization (see the sketch below)
- Maps speaker labels to transcript segments
- Resolves timing overlaps
- Validates timeline consistency
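
A toy version of the speaker-assignment step - each transcript segment takes the diarization turn it overlaps the most (the data shapes here are assumptions, not Reflector's actual schema):

```python
def overlap(a_start: float, a_end: float, b_start: float, b_end: float) -> float:
    """Length of the intersection of two time intervals, in seconds."""
    return max(0.0, min(a_end, b_end) - max(a_start, b_start))

def assign_speakers(segments: list[dict], turns: list[dict]) -> list[dict]:
    # segments: [{"start": 0.0, "end": 4.2, "text": "..."}]  (from transcription)
    # turns:    [{"start": 0.0, "end": 5.0, "speaker": "SPEAKER_00"}]  (from diarization)
    for seg in segments:
        scores = [
            (overlap(seg["start"], seg["end"], t["start"], t["end"]), t["speaker"])
            for t in turns
        ]
        best_score, best_speaker = max(scores, default=(0.0, None))
        # A segment with no overlapping turn keeps an unknown speaker
        seg["speaker"] = best_speaker if best_score > 0 else None
    return segments
```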

### 7. Post-processing Chain

- **Text Formatting**: Punctuation, capitalization
- **Topic Detection**: LLM-based topic extraction
- **Summarization**: AI-generated summaries and action items

### 8. Storage & Delivery

**File Storage:**
- Original audio: S3 (optional)
- Transcript exports: JSON, VTT, TXT

**Notifications:**
- WebSocket updates during processing
- Webhook notifications on completion (optional)

`docs/docs/reference/api.md` (new file, 28 lines)

---
title: API Reference
---

# API Reference

The complete API documentation is auto-generated from the OpenAPI specification.

## Interactive Documentation

When running Reflector, interactive API docs are available at:

- **Swagger UI**: `https://your-api-domain/docs`
- **ReDoc**: `https://your-api-domain/redoc`

## OpenAPI Specification

The raw OpenAPI 3.0 specification can be downloaded from:

```
https://your-api-domain/openapi.json
```
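
For example, to save a local copy:

```bash
curl -o openapi.json https://your-api-domain/openapi.json
```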

A static copy is also available: [openapi.json](/openapi.json)

## Authentication

See [Authentication Setup](../installation/auth-setup) for configuring API authentication.

`docs/docs/roadmap.md` (new file, 112 lines)

---
sidebar_position: 100
title: Roadmap
---

# Product Roadmap

Our development roadmap for Reflector, focusing on expanding capabilities while maintaining privacy and performance.

## Planned Features

### 🌍 Multi-Language Support Enhancement

**Current State:**
- Whisper supports multi-language transcription
- Parakeet supports English only, with high accuracy

**Planned Improvements:**
- Default language selection per room/user
- Automatic language detection improvements
- Multi-language diarization support
- RTL (right-to-left) language UI support
- Language-specific post-processing rules

### 🏠 Self-Hosted Room Providers

**Jitsi Integration**

Moving beyond Whereby to support self-hosted video conferencing:

- No API keys required
- Complete control over video infrastructure
- Custom branding and configuration
- Lower operational costs
- Enhanced privacy with self-hosted video

**Implementation Plan:**
- WebRTC bridge for Jitsi Meet
- Room management API integration
- Recording synchronization
- Participant tracking

### 📅 Calendar Integration

**Planned Capabilities:**
- Google Calendar synchronization
- Microsoft Outlook integration
- Automatic meeting room creation
- Pre-meeting document preparation
- Post-meeting transcript delivery
- Recurring meeting support

**Features:**
- Auto-join scheduled meetings
- Calendar-based access control
- Meeting agenda import
- Action item export to calendar

## Future Considerations

### Enhanced Analytics
- Meeting insights dashboard
- Speaker participation metrics
- Topic trends over time
- Team collaboration patterns

### Advanced AI Features
- Real-time sentiment analysis
- Emotion detection
- Meeting quality scores
- Automated coaching suggestions

### Integration Ecosystem
- Slack/Teams notifications
- CRM integration (Salesforce, HubSpot)
- Project management tools (Jira, Asana)
- Knowledge bases (Notion, Confluence)

### Performance Improvements
- WebAssembly for client-side processing
- Edge computing support
- 5G network optimization
- Blockchain for transcript verification

## Contributing

We welcome community contributions! Areas where you can help:

1. **Language Support**: Add support for your language
2. **Integrations**: Connect with your favorite tools
3. **Models**: Fine-tune models for specific domains
4. **Documentation**: Improve guides and examples

See our [Contributing Guide](https://github.com/monadical-sas/reflector/blob/main/CONTRIBUTING.md) for details.

## Timeline

We don't provide specific dates, as development depends on community contributions and priorities. Features are generally released when they're ready and properly tested.

## Feature Requests

Have an idea for Reflector? We'd love to hear it!

- [Open a GitHub Issue](https://github.com/monadical-sas/reflector/issues/new)
- [Join our Discord](#)
- [Email us](mailto:reflector@monadical.com)

## Stay Updated

- Watch our [GitHub repository](https://github.com/monadical-sas/reflector)
- Follow our [blog](#)
- Subscribe to our [newsletter](#)

`docs/docusaurus.config.ts` (new file, 163 lines)

import {themes as prismThemes} from 'prism-react-renderer';
import type {Config} from '@docusaurus/types';
import type * as Preset from '@docusaurus/preset-classic';
import type * as OpenApiPlugin from 'docusaurus-plugin-openapi-docs';

const config: Config = {
  title: 'Reflector',
  tagline: 'AI-powered audio transcription and meeting analysis platform',
  favicon: 'img/favicon.ico',

  url: 'https://monadical-sas.github.io',
  baseUrl: '/',

  organizationName: 'monadical-sas',
  projectName: 'reflector',

  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'warn',

  markdown: {
    mermaid: true,
  },

  i18n: {
    defaultLocale: 'en',
    locales: ['en'],
  },

  presets: [
    [
      'classic',
      {
        docs: {
          sidebarPath: './sidebars.ts',
          editUrl: 'https://github.com/monadical-sas/reflector/tree/main/docs/',
        },
        blog: false,
        theme: {
          customCss: './src/css/custom.css',
        },
      } satisfies Preset.Options,
    ],
  ],

  plugins: [
    [
      'docusaurus-plugin-openapi-docs',
      {
        id: 'openapi',
        docsPluginId: 'classic',
        config: {
          reflectorapi: {
            specPath: 'static/openapi.json', // Use local file fetched by script
            outputDir: 'docs/reference/api-generated',
            sidebarOptions: {
              groupPathsBy: 'tag',
              categoryLinkSource: 'tag',
            },
            downloadUrl: '/openapi.json',
            hideSendButton: false,
            showExtensions: true,
          },
        } satisfies OpenApiPlugin.Options,
      },
    ],
  ],

  themes: ['docusaurus-theme-openapi-docs', '@docusaurus/theme-mermaid'],

  themeConfig: {
    image: 'img/reflector-social-card.jpg',
    colorMode: {
      defaultMode: 'light',
      disableSwitch: false,
      respectPrefersColorScheme: true,
    },
    navbar: {
      title: 'Reflector',
      logo: {
        alt: 'Reflector Logo',
        src: 'img/reflector-logo.svg',
      },
      items: [
        {
          type: 'docSidebar',
          sidebarId: 'tutorialSidebar',
          position: 'left',
          label: 'Documentation',
        },
        {
          to: '/docs/reference/api',
          label: 'API',
          position: 'left',
        },
        {
          href: 'https://github.com/monadical-sas/reflector',
          label: 'GitHub',
          position: 'right',
        },
      ],
    },
    footer: {
      style: 'dark',
      links: [
        {
          title: 'Documentation',
          items: [
            {
              label: 'Introduction',
              to: '/docs/intro',
            },
            {
              label: 'Installation',
              to: '/docs/installation/overview',
            },
            {
              label: 'API Reference',
              to: '/docs/reference/api',
            },
          ],
        },
        {
          title: 'Resources',
          items: [
            {
              label: 'Architecture',
              to: '/docs/reference/architecture/overview',
            },
            {
              label: 'Pipelines',
              to: '/docs/pipelines/overview',
            },
            {
              label: 'Roadmap',
              to: '/docs/roadmap',
            },
          ],
        },
        {
          title: 'More',
          items: [
            {
              label: 'GitHub',
              href: 'https://github.com/monadical-sas/reflector',
            },
            {
              label: 'Docker Hub',
              href: 'https://hub.docker.com/r/reflector/backend',
            },
          ],
        },
      ],
      copyright: `Copyright © ${new Date().getFullYear()} <a href="https://monadical.com" target="_blank" rel="noopener noreferrer">Monadical</a>. Licensed under MIT. Built with Docusaurus.`,
    },
    prism: {
      theme: prismThemes.github,
      darkTheme: prismThemes.dracula,
      additionalLanguages: ['python', 'bash', 'docker', 'yaml'],
    },
  } satisfies Preset.ThemeConfig,
};

export default config;

`docs/package-lock.json` (generated, 23526 lines; diff suppressed as too large)

`docs/package.json` (new file, 53 lines)

{
  "name": "docs",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "docusaurus": "docusaurus",
    "start": "docusaurus start",
    "build": "docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "docusaurus deploy",
    "clear": "docusaurus clear",
    "serve": "docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids",
    "typecheck": "tsc",
    "fetch-openapi": "./scripts/fetch-openapi.sh",
    "gen-api-docs": "npm run fetch-openapi && docusaurus gen-api-docs reflectorapi",
    "prebuild": "npm run fetch-openapi"
  },
  "dependencies": {
    "@docusaurus/core": "3.6.3",
    "@docusaurus/preset-classic": "3.6.3",
    "@docusaurus/theme-mermaid": "3.6.3",
    "@mdx-js/react": "^3.0.0",
    "clsx": "^2.0.0",
    "docusaurus-plugin-openapi-docs": "^4.5.1",
    "docusaurus-theme-openapi-docs": "^4.5.1",
    "prism-react-renderer": "^2.3.0",
    "react": "^18.0.0",
    "react-dom": "^18.0.0"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "3.6.3",
    "@docusaurus/tsconfig": "3.6.3",
    "@docusaurus/types": "3.6.3",
    "typescript": "~5.6.2"
  },
  "browserslist": {
    "production": [
      ">0.5%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 3 chrome version",
      "last 3 firefox version",
      "last 5 safari version"
    ]
  },
  "engines": {
    "node": ">=18.0"
  }
}

`docs/scripts/fetch-openapi.sh` (new executable file, 115 lines)

#!/bin/bash

# Script to fetch the OpenAPI specification from the FastAPI backend
# Used during the documentation build process

set -e

echo "📡 Fetching OpenAPI specification from FastAPI backend..."

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration
BACKEND_DIR="../server"
OPENAPI_OUTPUT="./static/openapi.json"
SERVER_PORT=1250 # Reflector uses port 1250 by default
MAX_WAIT=30

# Check if the backend directory exists
if [ ! -d "$BACKEND_DIR" ]; then
    echo -e "${RED}Error: Backend directory not found at $BACKEND_DIR${NC}"
    exit 1
fi

# Function to check if the server is running
check_server() {
    curl -s -o /dev/null -w "%{http_code}" "http://localhost:${SERVER_PORT}/openapi.json" 2>/dev/null
}

# Function to clean up on exit
cleanup() {
    if [ ! -z "$SERVER_PID" ]; then
        echo -e "\n${YELLOW}Stopping FastAPI server (PID: $SERVER_PID)...${NC}"
        kill $SERVER_PID 2>/dev/null || true
        wait $SERVER_PID 2>/dev/null || true
    fi
}

# Set trap to clean up on exit
trap cleanup EXIT INT TERM

# Change to the backend directory
cd "$BACKEND_DIR"

# Check if uv is installed
if ! command -v uv &> /dev/null; then
    echo -e "${YELLOW}uv not found, checking for python...${NC}"
    if command -v python3 &> /dev/null; then
        PYTHON_CMD="python3"
    elif command -v python &> /dev/null; then
        PYTHON_CMD="python"
    else
        echo -e "${RED}Error: Neither uv nor python found${NC}"
        exit 1
    fi
    RUN_CMD="$PYTHON_CMD -m"
else
    RUN_CMD="uv run -m"
fi

# Start the FastAPI server in the background (let it use the default port 1250)
echo -e "${YELLOW}Starting FastAPI server...${NC}"
$RUN_CMD reflector.app > /dev/null 2>&1 &
SERVER_PID=$!

# Wait for the server to be ready
echo -n "Waiting for server to be ready"
WAITED=0
while [ $WAITED -lt $MAX_WAIT ]; do
    if [ "$(check_server)" = "200" ]; then
        echo -e " ${GREEN}✓${NC}"
        break
    fi
    echo -n "."
    sleep 1
    WAITED=$((WAITED + 1))
done

if [ $WAITED -ge $MAX_WAIT ]; then
    echo -e " ${RED}✗${NC}"
    echo -e "${RED}Error: Server failed to start within ${MAX_WAIT} seconds${NC}"
    exit 1
fi

# Change back to the docs directory
cd - > /dev/null

# Create the static directory if it doesn't exist
mkdir -p "$(dirname "$OPENAPI_OUTPUT")"

# Fetch the OpenAPI specification
echo -e "${YELLOW}Fetching OpenAPI specification...${NC}"
if curl -s "http://localhost:${SERVER_PORT}/openapi.json" -o "$OPENAPI_OUTPUT"; then
    echo -e "${GREEN}✓ OpenAPI specification saved to $OPENAPI_OUTPUT${NC}"

    # Validate JSON
    if command -v jq &> /dev/null; then
        if jq empty "$OPENAPI_OUTPUT" 2>/dev/null; then
            echo -e "${GREEN}✓ OpenAPI specification is valid JSON${NC}"
            # Pretty-print the JSON
            jq . "$OPENAPI_OUTPUT" > "${OPENAPI_OUTPUT}.tmp" && mv "${OPENAPI_OUTPUT}.tmp" "$OPENAPI_OUTPUT"
        else
            echo -e "${RED}Error: Invalid JSON in OpenAPI specification${NC}"
            exit 1
        fi
    fi
else
    echo -e "${RED}Error: Failed to fetch OpenAPI specification${NC}"
    exit 1
fi

echo -e "${GREEN}✅ OpenAPI specification successfully fetched!${NC}"

`docs/sidebars.ts` (new file, 58 lines)

import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';

const sidebars: SidebarsConfig = {
  tutorialSidebar: [
    'intro',
    {
      type: 'category',
      label: 'Concepts',
      collapsed: false,
      items: [
        'concepts/overview',
        'concepts/modes',
        'concepts/pipeline',
      ],
    },
    {
      type: 'category',
      label: 'Installation',
      collapsed: false,
      items: [
        'installation/overview',
        'installation/requirements',
        'installation/docker-setup',
        'installation/modal-setup',
        'installation/self-hosted-gpu-setup',
        'installation/auth-setup',
        'installation/daily-setup',
      ],
    },
    {
      type: 'category',
      label: 'Pipelines',
      items: [
        'pipelines/file-pipeline',
      ],
    },
    {
      type: 'category',
      label: 'Reference',
      items: [
        {
          type: 'category',
          label: 'API',
          items: [
            {
              type: 'link',
              label: 'OpenAPI Reference',
              href: '/docs/reference/api',
            },
          ],
        },
      ],
    },
    'roadmap',
  ],
};

export default sidebars;

`docs/src/components/HomepageFeatures/index.tsx` (new file, 70 lines)

import clsx from 'clsx';
import Heading from '@theme/Heading';
import styles from './styles.module.css';

type FeatureItem = {
  title: string;
  Svg: React.ComponentType<React.ComponentProps<'svg'>>;
  description: JSX.Element;
};

const FeatureList: FeatureItem[] = [
  {
    title: 'Easy to Use',
    Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
    description: (
      <>
        Docusaurus was designed from the ground up to be easily installed and
        used to get your website up and running quickly.
      </>
    ),
  },
  {
    title: 'Focus on What Matters',
    Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
    description: (
      <>
        Docusaurus lets you focus on your docs, and we'll do the chores. Go
        ahead and move your docs into the <code>docs</code> directory.
      </>
    ),
  },
  {
    title: 'Powered by React',
    Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
    description: (
      <>
        Extend or customize your website layout by reusing React. Docusaurus can
        be extended while reusing the same header and footer.
      </>
    ),
  },
];

function Feature({title, Svg, description}: FeatureItem) {
  return (
    <div className={clsx('col col--4')}>
      <div className="text--center">
        <Svg className={styles.featureSvg} role="img" />
      </div>
      <div className="text--center padding-horiz--md">
        <Heading as="h3">{title}</Heading>
        <p>{description}</p>
      </div>
    </div>
  );
}

export default function HomepageFeatures(): JSX.Element {
  return (
    <section className={styles.features}>
      <div className="container">
        <div className="row">
          {FeatureList.map((props, idx) => (
            <Feature key={idx} {...props} />
          ))}
        </div>
      </div>
    </section>
  );
}

`docs/src/components/HomepageFeatures/styles.module.css` (new file, 11 lines)

.features {
  display: flex;
  align-items: center;
  padding: 2rem 0;
  width: 100%;
}

.featureSvg {
  height: 200px;
  width: 200px;
}

`docs/src/css/custom.css` (new file, 46 lines)

/**
 * Reflector Documentation Theme
 * Based on frontend colors from www/app/styles/theme.ts
 */

@import url('https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;500;600;700&display=swap');

:root {
  --ifm-color-primary: #3158E2;
  --ifm-color-primary-dark: #2847C9;
  --ifm-color-primary-darker: #2442BF;
  --ifm-color-primary-darkest: #1D369C;
  --ifm-color-primary-light: #4A6FE5;
  --ifm-color-primary-lighter: #5F81E8;
  --ifm-color-primary-lightest: #8DA6F0;

  --ifm-background-color: #FFFFFF;
  --ifm-background-surface-color: #F4F4F4;
  --ifm-font-color-base: #1A202C;
  --ifm-font-color-secondary: #838383;

  --ifm-code-font-size: 95%;
  --docusaurus-highlighted-code-line-bg: rgba(49, 88, 226, 0.1);

  --ifm-font-family-base: 'Poppins', system-ui, -apple-system, sans-serif;
  --ifm-font-family-monospace: 'Fira Code', 'Monaco', 'Consolas', monospace;
  --ifm-navbar-background-color: #FFFFFF;
  --ifm-heading-font-weight: 600;
}

[data-theme='dark'] {
  --ifm-color-primary: #B1CBFF;
  --ifm-color-primary-dark: #91B3FF;
  --ifm-color-primary-darker: #81A7FF;
  --ifm-color-primary-darkest: #5189FF;
  --ifm-color-primary-light: #D1DFFF;
  --ifm-color-primary-lighter: #E1EBFF;
  --ifm-color-primary-lightest: #F0F5FF;

  --ifm-background-color: #0C0D0E;
  --ifm-background-surface-color: #1A202C;
  --ifm-font-color-base: #E2E8F0;
  --ifm-font-color-secondary: #A0AEC0;
  --docusaurus-highlighted-code-line-bg: rgba(177, 203, 255, 0.1);
  --ifm-navbar-background-color: #1A202C;
}

`docs/src/pages/index.module.css` (new file, 23 lines)

/**
 * CSS files with the .module.css suffix will be treated as CSS modules
 * and scoped locally.
 */

.heroBanner {
  padding: 4rem 0;
  text-align: center;
  position: relative;
  overflow: hidden;
}

@media screen and (max-width: 996px) {
  .heroBanner {
    padding: 2rem;
  }
}

.buttons {
  display: flex;
  align-items: center;
  justify-content: center;
}

`docs/src/pages/index.tsx` (new file, 7 lines)

import React from 'react';
import { Redirect } from '@docusaurus/router';
import useBaseUrl from '@docusaurus/useBaseUrl';

export default function Home(): JSX.Element {
  return <Redirect to={useBaseUrl('/docs/intro')} />;
}

`docs/src/pages/markdown-page.md` (new file, 7 lines)

---
title: Markdown page example
---

# Markdown page example

You don't need React to write simple standalone pages.

`docs/static/.nojekyll` (vendored, empty file)

`docs/static/img/docusaurus-social-card.jpg` (vendored)

`docs/static/img/docusaurus.png` (vendored)

`docs/static/img/favicon.ico` (vendored)

`docs/static/img/logo.svg` (vendored, new file, 17 lines, 965 B)

<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 27.9.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
	 viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
	.st0{fill:#B6B6B6;}
	.st1{fill:#4A4A4A;}
</style>
<g>
	<polygon class="st0" points="227.5,51.5 86.5,150.1 100.8,383.9 244.3,249.8 "/>
	<polygon class="st1" points="305.4,421.4 423.9,286 244.3,249.8 100.8,383.9 "/>
</g>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-12843059d855efa50c3a12db8586ced7.jpg" transform="matrix(1 0 0 1 1857.8739 723.9433)">
</image>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-f72ce8039f760337a51b47d045b477b8.jpg" transform="matrix(1 0 0 1 1857.8739 -512.4843)">
</image>
</svg>

`docs/static/img/reflector-logo.svg` (vendored, new file, 17 lines, 965 B - identical content to `docs/static/img/logo.svg` above)

`docs/static/img/reflector-transcript-view.png` (vendored binary, 525 KiB; not shown)
171
docs/static/img/undraw_docusaurus_mountain.svg
vendored
Normal file
171
docs/static/img/undraw_docusaurus_mountain.svg
vendored
Normal file
@@ -0,0 +1,171 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1088" height="687.962" viewBox="0 0 1088 687.962">
<title>Easy to Use</title>
<!-- illustration path data -->
</svg>
After Width: | Height: | Size: 31 KiB |
170
docs/static/img/undraw_docusaurus_react.svg
vendored
Normal file
@@ -0,0 +1,170 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1041.277" height="554.141" viewBox="0 0 1041.277 554.141">
<title>Powered by React</title>
<!-- illustration path data -->
|
||||
<path id="Path_60" data-name="Path 60" d="M83,113.389h20.779V103H83Z" transform="translate(-41.443 -58.444)" fill="#3ecc5f" fill-rule="evenodd"/>
|
||||
<path id="Path_61" data-name="Path 61" d="M123.389,108.944a1.3,1.3,0,1,0,0-2.6,1.338,1.338,0,0,0-.166.017c-.01-.039-.019-.078-.03-.117a1.3,1.3,0,0,0-.5-2.5,1.285,1.285,0,0,0-.783.269q-.043-.044-.087-.087a1.285,1.285,0,0,0,.263-.776,1.3,1.3,0,0,0-2.493-.509,5.195,5.195,0,1,0,0,10,1.3,1.3,0,0,0,2.493-.509,1.285,1.285,0,0,0-.263-.776q.044-.043.087-.087a1.285,1.285,0,0,0,.783.269,1.3,1.3,0,0,0,.5-2.5c.011-.038.02-.078.03-.117a1.335,1.335,0,0,0,.166.017" transform="translate(-55.859 -57.894)" fill="#44d860" fill-rule="evenodd"/>
|
||||
<path id="Path_62" data-name="Path 62" d="M141.8,38.745a1.41,1.41,0,0,1-.255-.026,1.309,1.309,0,0,1-.244-.073,1.349,1.349,0,0,1-.224-.119,1.967,1.967,0,0,1-.2-.161,1.52,1.52,0,0,1-.161-.2,1.282,1.282,0,0,1-.218-.722,1.41,1.41,0,0,1,.026-.255,1.5,1.5,0,0,1,.072-.244,1.364,1.364,0,0,1,.12-.223,1.252,1.252,0,0,1,.358-.358,1.349,1.349,0,0,1,.224-.119,1.309,1.309,0,0,1,.244-.073,1.2,1.2,0,0,1,.509,0,1.262,1.262,0,0,1,.468.192,1.968,1.968,0,0,1,.2.161,1.908,1.908,0,0,1,.161.2,1.322,1.322,0,0,1,.12.223,1.361,1.361,0,0,1,.1.5,1.317,1.317,0,0,1-.379.919,1.968,1.968,0,0,1-.2.161,1.346,1.346,0,0,1-.223.119,1.332,1.332,0,0,1-.5.1m10.389-.649a1.326,1.326,0,0,1-.92-.379,1.979,1.979,0,0,1-.161-.2,1.282,1.282,0,0,1-.218-.722,1.326,1.326,0,0,1,.379-.919,1.967,1.967,0,0,1,.2-.161,1.351,1.351,0,0,1,.224-.119,1.308,1.308,0,0,1,.244-.073,1.2,1.2,0,0,1,.509,0,1.262,1.262,0,0,1,.468.192,1.967,1.967,0,0,1,.2.161,1.326,1.326,0,0,1,.379.919,1.461,1.461,0,0,1-.026.255,1.323,1.323,0,0,1-.073.244,1.847,1.847,0,0,1-.119.223,1.911,1.911,0,0,1-.161.2,1.967,1.967,0,0,1-.2.161,1.294,1.294,0,0,1-.722.218" transform="translate(-69.074 -26.006)" fill-rule="evenodd"/>
|
||||
</g>
|
||||
<g id="React-icon" transform="translate(906.3 541.56)">
|
||||
<path id="Path_330" data-name="Path 330" d="M263.668,117.179c0-5.827-7.3-11.35-18.487-14.775,2.582-11.4,1.434-20.477-3.622-23.382a7.861,7.861,0,0,0-4.016-1v4a4.152,4.152,0,0,1,2.044.466c2.439,1.4,3.5,6.724,2.672,13.574-.2,1.685-.52,3.461-.914,5.272a86.9,86.9,0,0,0-11.386-1.954,87.469,87.469,0,0,0-7.459-8.965c5.845-5.433,11.332-8.41,15.062-8.41V78h0c-4.931,0-11.386,3.514-17.913,9.611-6.527-6.061-12.982-9.539-17.913-9.539v4c3.712,0,9.216,2.959,15.062,8.356a84.687,84.687,0,0,0-7.405,8.947,83.732,83.732,0,0,0-11.4,1.972c-.412-1.793-.717-3.532-.932-5.2-.843-6.85.2-12.175,2.618-13.592a3.991,3.991,0,0,1,2.062-.466v-4h0a8,8,0,0,0-4.052,1c-5.039,2.9-6.168,11.96-3.568,23.328-11.153,3.443-18.415,8.947-18.415,14.757,0,5.828,7.3,11.35,18.487,14.775-2.582,11.4-1.434,20.477,3.622,23.382a7.882,7.882,0,0,0,4.034,1c4.931,0,11.386-3.514,17.913-9.611,6.527,6.061,12.982,9.539,17.913,9.539a8,8,0,0,0,4.052-1c5.039-2.9,6.168-11.96,3.568-23.328C256.406,128.511,263.668,122.988,263.668,117.179Zm-23.346-11.96c-.663,2.313-1.488,4.7-2.421,7.083-.735-1.434-1.506-2.869-2.349-4.3-.825-1.434-1.7-2.833-2.582-4.2C235.517,104.179,237.974,104.645,240.323,105.219Zm-8.212,19.1c-1.4,2.421-2.833,4.716-4.321,6.85-2.672.233-5.379.359-8.1.359-2.708,0-5.415-.126-8.069-.341q-2.232-3.2-4.339-6.814-2.044-3.523-3.73-7.136c1.112-2.4,2.367-4.805,3.712-7.154,1.4-2.421,2.833-4.716,4.321-6.85,2.672-.233,5.379-.359,8.1-.359,2.708,0,5.415.126,8.069.341q2.232,3.2,4.339,6.814,2.044,3.523,3.73,7.136C234.692,119.564,233.455,121.966,232.11,124.315Zm5.792-2.331c.968,2.4,1.793,4.805,2.474,7.136-2.349.574-4.823,1.058-7.387,1.434.879-1.381,1.757-2.8,2.582-4.25C236.4,124.871,237.167,123.419,237.9,121.984ZM219.72,141.116a73.921,73.921,0,0,1-4.985-5.738c1.614.072,3.263.126,4.931.126,1.685,0,3.353-.036,4.985-.126A69.993,69.993,0,0,1,219.72,141.116ZM206.38,130.555c-2.546-.377-5-.843-7.352-1.417.663-2.313,1.488-4.7,2.421-7.083.735,1.434,1.506,2.869,2.349,4.3S205.5,129.192,206.38,130.555ZM219.63,93.241a73.924,73.924,0,0,1,4.985,5.738c-1.614-.072-3.263-.126-4.931-.126-1.686,0-3.353.036-4.985.126A69.993,69.993,0,0,1,219.63,93.241ZM206.362,103.8c-.879,1.381-1.757,2.8-2.582,4.25-.825,1.434-1.6,2.869-2.331,4.3-.968-2.4-1.793-4.805-2.474-7.136C201.323,104.663,203.8,104.179,206.362,103.8Zm-16.227,22.449c-6.348-2.708-10.454-6.258-10.454-9.073s4.106-6.383,10.454-9.073c1.542-.663,3.228-1.255,4.967-1.811a86.122,86.122,0,0,0,4.034,10.92,84.9,84.9,0,0,0-3.981,10.866C193.38,127.525,191.694,126.915,190.134,126.252Zm9.647,25.623c-2.439-1.4-3.5-6.724-2.672-13.574.2-1.686.52-3.461.914-5.272a86.9,86.9,0,0,0,11.386,1.954,87.465,87.465,0,0,0,7.459,8.965c-5.845,5.433-11.332,8.41-15.062,8.41A4.279,4.279,0,0,1,199.781,151.875Zm42.532-13.663c.843,6.85-.2,12.175-2.618,13.592a3.99,3.99,0,0,1-2.062.466c-3.712,0-9.216-2.959-15.062-8.356a84.689,84.689,0,0,0,7.405-8.947,83.731,83.731,0,0,0,11.4-1.972A50.194,50.194,0,0,1,242.313,138.212Zm6.9-11.96c-1.542.663-3.228,1.255-4.967,1.811a86.12,86.12,0,0,0-4.034-10.92,84.9,84.9,0,0,0,3.981-10.866c1.775.556,3.461,1.165,5.039,1.829,6.348,2.708,10.454,6.258,10.454,9.073C259.67,119.994,255.564,123.562,249.216,126.252Z" fill="#61dafb"/>
|
||||
<path id="Path_331" data-name="Path 331" d="M320.8,78.4Z" transform="translate(-119.082 -0.328)" fill="#61dafb"/>
|
||||
<circle id="Ellipse_112" data-name="Ellipse 112" cx="8.194" cy="8.194" r="8.194" transform="translate(211.472 108.984)" fill="#61dafb"/>
|
||||
<path id="Path_332" data-name="Path 332" d="M520.5,78.1Z" transform="translate(-282.975 -0.082)" fill="#61dafb"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 35 KiB |
40 docs/static/img/undraw_docusaurus_tree.svg vendored Normal file
@@ -0,0 +1,40 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1129" height="663" viewBox="0 0 1129 663">
  <title>Focus on What Matters</title>
  (decorative illustration markup omitted — 40 lines, renders at 12 KiB)
</svg>
3456 docs/static/openapi.json vendored Normal file
File diff suppressed because it is too large
241 docs/transcript.md Normal file
@@ -0,0 +1,241 @@
# Transcript Formats

The Reflector API provides multiple output formats for transcript data through the `transcript_format` query parameter on the GET `/v1/transcripts/{id}` endpoint.

## Overview

When retrieving a transcript, you can specify the desired format using the `transcript_format` query parameter. The API supports four formats optimized for different use cases:

- **text** - Plain text with speaker names (default)
- **text-timestamped** - Timestamped text with speaker names
- **webvtt-named** - WebVTT subtitle format with participant names
- **json** - Structured JSON segments with full metadata

All formats include participant information when available, resolving speaker IDs to actual names.

## Query Parameter Usage

```
GET /v1/transcripts/{id}?transcript_format={format}
```

### Parameters

- `transcript_format` (optional): The desired output format
  - Type: `"text" | "text-timestamped" | "webvtt-named" | "json"`
  - Default: `"text"`

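As a quick sanity check, here is a minimal Python sketch that requests each format in turn. `BASE_URL`, `TOKEN`, and the transcript id are placeholders for your own deployment, not values shipped with the API:

```python
# Minimal sketch — BASE_URL, TOKEN, and TRANSCRIPT_ID are placeholders.
import requests

BASE_URL = "https://reflector.example.com"
TOKEN = "your-api-token"
TRANSCRIPT_ID = "transcript_123"

for fmt in ("text", "text-timestamped", "webvtt-named", "json"):
    resp = requests.get(
        f"{BASE_URL}/v1/transcripts/{TRANSCRIPT_ID}",
        params={"transcript_format": fmt},
        headers={"Authorization": f"Bearer {TOKEN}"},
        timeout=30,
    )
    resp.raise_for_status()
    body = resp.json()
    # `transcript` is a string for text/webvtt formats, a list for json
    print(fmt, type(body["transcript"]).__name__)
```
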
## Format Descriptions

### Text Format (`text`)

**Use case:** Simple, human-readable transcript for display or export.

**Format:** Speaker names followed by their dialogue, one line per segment.

**Example:**
```
John Smith: Hello everyone
Jane Doe: Hi there
John Smith: How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=text
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "text",
  "transcript": "John Smith: Hello everyone\nJane Doe: Hi there\nJohn Smith: How are you today?",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```

### Text Timestamped Format (`text-timestamped`)

**Use case:** Transcript with timing information for navigation or reference.

**Format:** `[MM:SS]` timestamp prefix before each speaker and dialogue.

**Example:**
```
[00:00] John Smith: Hello everyone
[00:05] Jane Doe: Hi there
[00:12] John Smith: How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=text-timestamped
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "text-timestamped",
  "transcript": "[00:00] John Smith: Hello everyone\n[00:05] Jane Doe: Hi there\n[00:12] John Smith: How are you today?",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```

### WebVTT Named Format (`webvtt-named`)

**Use case:** Subtitle files for video players, accessibility tools, or video editing.

**Format:** Standard WebVTT subtitle format with voice tags using participant names.

**Example:**
```
WEBVTT

00:00:00.000 --> 00:00:05.000
<v John Smith>Hello everyone

00:00:05.000 --> 00:00:12.000
<v Jane Doe>Hi there

00:00:12.000 --> 00:00:18.000
<v John Smith>How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=webvtt-named
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "webvtt-named",
  "transcript": "WEBVTT\n\n00:00:00.000 --> 00:00:05.000\n<v John Smith>Hello everyone\n\n...",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```

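If you consume this output programmatically rather than handing it to a player, cue parsing takes only a few lines. A sketch using just the standard library — the regex assumes the exact single-line cue shape shown above, so a real integration should prefer a WebVTT library:

```python
# Sketch: parse the voice-tagged cues produced above (simple single-line cues only).
import re

CUE_RE = re.compile(
    r"(\d{2}:\d{2}:\d{2}\.\d{3}) --> (\d{2}:\d{2}:\d{2}\.\d{3})\n<v ([^>]+)>(.*)"
)

def parse_cues(vtt: str):
    for start, end, speaker, text in CUE_RE.findall(vtt):
        yield {"start": start, "end": end, "speaker": speaker, "text": text}

vtt = "WEBVTT\n\n00:00:00.000 --> 00:00:05.000\n<v John Smith>Hello everyone\n"
print(list(parse_cues(vtt)))
# [{'start': '00:00:00.000', 'end': '00:00:05.000', 'speaker': 'John Smith', 'text': 'Hello everyone'}]
```
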
### JSON Format (`json`)

**Use case:** Programmatic access with full timing and speaker metadata.

**Format:** Array of segment objects with speaker information, text content, and precise timing.

**Example:**
```json
[
  {
    "speaker": 0,
    "speaker_name": "John Smith",
    "text": "Hello everyone",
    "start": 0.0,
    "end": 5.0
  },
  {
    "speaker": 1,
    "speaker_name": "Jane Doe",
    "text": "Hi there",
    "start": 5.0,
    "end": 12.0
  },
  {
    "speaker": 0,
    "speaker_name": "John Smith",
    "text": "How are you today?",
    "start": 12.0,
    "end": 18.0
  }
]
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=json
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "json",
  "transcript": [
    {
      "speaker": 0,
      "speaker_name": "John Smith",
      "text": "Hello everyone",
      "start": 0.0,
      "end": 5.0
    },
    {
      "speaker": 1,
      "speaker_name": "Jane Doe",
      "text": "Hi there",
      "start": 5.0,
      "end": 12.0
    }
  ],
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```

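Because the `json` format carries start/end times per segment, derived metrics are straightforward. A sketch computing per-speaker talk time over the segment shape shown above:

```python
# Sketch: total speaking time per speaker from json-format segments.
from collections import defaultdict

segments = [
    {"speaker": 0, "speaker_name": "John Smith", "text": "Hello everyone", "start": 0.0, "end": 5.0},
    {"speaker": 1, "speaker_name": "Jane Doe", "text": "Hi there", "start": 5.0, "end": 12.0},
]

talk_time = defaultdict(float)
for seg in segments:
    talk_time[seg["speaker_name"]] += seg["end"] - seg["start"]

print(dict(talk_time))  # {'John Smith': 5.0, 'Jane Doe': 7.0}
```
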
## Response Structure

All formats return the same base transcript metadata with an additional `transcript_format` field and a format-specific `transcript` field:

### Common Fields

- `id`: Transcript identifier
- `user_id`: Owner user ID (if authenticated)
- `name`: Transcript name
- `status`: Processing status
- `locked`: Whether the transcript is locked for editing
- `duration`: Total duration in seconds
- `title`: Auto-generated or custom title
- `short_summary`: Brief summary
- `long_summary`: Detailed summary
- `created_at`: Creation timestamp
- `share_mode`: Access control setting
- `source_language`: Original audio language
- `target_language`: Translation target language
- `reviewed`: Whether the transcript has been reviewed
- `meeting_id`: Associated meeting ID (if applicable)
- `source_kind`: Source type (live, file, room)
- `room_id`: Associated room ID (if applicable)
- `audio_deleted`: Whether the audio has been deleted
- `participants`: Array of participant objects with speaker mappings

### Format-Specific Fields

- `transcript_format`: The format identifier (discriminator field)
- `transcript`: The formatted transcript content (string for text/webvtt formats, array for json format) — see the union sketch below

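Since `transcript_format` acts as a discriminator, clients can model the response as a tagged union. A sketch with Pydantic v2 — the model names and the trimmed field set are illustrative assumptions, not the server's actual schema classes:

```python
# Sketch: modeling the format-specific response as a discriminated union.
# Model names and the reduced field set are illustrative, not the real schema.
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter

class Segment(BaseModel):
    speaker: int
    speaker_name: str
    text: str
    start: float
    end: float

class TextTranscript(BaseModel):
    id: str
    name: str
    transcript_format: Literal["text", "text-timestamped", "webvtt-named"]
    transcript: str

class JsonTranscript(BaseModel):
    id: str
    name: str
    transcript_format: Literal["json"]
    transcript: list[Segment]

Transcript = Annotated[
    Union[TextTranscript, JsonTranscript],
    Field(discriminator="transcript_format"),
]

parsed = TypeAdapter(Transcript).validate_python({
    "id": "transcript_123",
    "name": "Meeting Recording",
    "transcript_format": "json",
    "transcript": [{"speaker": 0, "speaker_name": "John Smith",
                    "text": "Hello everyone", "start": 0.0, "end": 5.0}],
})
print(type(parsed).__name__)  # JsonTranscript
```
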
## Speaker Name Resolution

All formats resolve speaker IDs to participant names when available:

- If a participant exists for the speaker ID, their name is used
- If no participant exists, a default name like "Speaker 0" is generated
- Speaker IDs are integers (0, 1, 2, etc.) assigned during diarization

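The resolution rule amounts to a small lookup — a sketch of the behavior described above, not the server's actual implementation:

```python
# Sketch of the resolution rule (illustrative only).
def resolve_speaker_name(speaker: int, participants: list[dict]) -> str:
    for p in participants:
        if p["speaker"] == speaker:
            return p["name"]
    return f"Speaker {speaker}"

participants = [{"id": "p1", "speaker": 0, "name": "John Smith"}]
print(resolve_speaker_name(0, participants))  # John Smith
print(resolve_speaker_name(2, participants))  # Speaker 2
```
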
8 docs/tsconfig.json Normal file
@@ -0,0 +1,8 @@
{
  // This file is not used in compilation. It is here just for a nice editor experience.
  "extends": "@docusaurus/tsconfig",
  "compilerOptions": {
    "baseUrl": "."
  },
  "exclude": [".docusaurus", "build"]
}
161 gpu/modal_deployments/deploy-all.sh Executable file
@@ -0,0 +1,161 @@
#!/bin/bash
set -e

# --- Usage ---
usage() {
    echo "Usage: $0 [OPTIONS]"
    echo ""
    echo "Options:"
    echo "  --hf-token TOKEN    HuggingFace token"
    echo "  --help              Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0                        # Interactive mode"
    echo "  $0 --hf-token hf_xxxxx    # Non-interactive mode"
    echo ""
    exit 0
}

# --- Parse Arguments ---
HF_TOKEN=""
while [[ $# -gt 0 ]]; do
    case $1 in
        --hf-token)
            HF_TOKEN="$2"
            shift 2
            ;;
        --help)
            usage
            ;;
        *)
            echo "Unknown option: $1"
            usage
            ;;
    esac
done

echo "=========================================="
echo "Reflector GPU Functions Deployment"
echo "=========================================="
echo ""

# --- Check Dependencies ---
if ! command -v modal &> /dev/null; then
    echo "Error: Modal CLI not installed."
    echo "  Install with: pip install modal"
    exit 1
fi

if ! command -v openssl &> /dev/null; then
    echo "Error: openssl not found."
    echo "  Mac: brew install openssl"
    echo "  Ubuntu: sudo apt-get install openssl"
    exit 1
fi

# Check Modal authentication
if ! modal profile current &> /dev/null; then
    echo "Error: Not authenticated with Modal."
    echo "  Run: modal setup"
    exit 1
fi

# --- HuggingFace Token Setup ---
if [ -z "$HF_TOKEN" ]; then
    echo "HuggingFace token required for Pyannote diarization model."
    echo "1. Create account at https://huggingface.co"
    echo "2. Accept license at https://huggingface.co/pyannote/speaker-diarization-3.1"
    echo "3. Generate token at https://huggingface.co/settings/tokens"
    echo ""
    read -p "Enter your HuggingFace token: " HF_TOKEN
fi

if [ -z "$HF_TOKEN" ]; then
    echo "Error: HuggingFace token is required for diarization"
    exit 1
fi

# Basic token format validation
if [[ ! "$HF_TOKEN" =~ ^hf_ ]]; then
    echo "Warning: HuggingFace tokens usually start with 'hf_'"
    if [ -t 0 ]; then
        read -p "Continue anyway? (y/n): " confirm
        if [ "$confirm" != "y" ]; then
            exit 1
        fi
    else
        echo "Non-interactive mode: proceeding anyway"
    fi
fi

# --- Auto-generate reflector<->GPU API Key ---
echo ""
echo "Generating API key for GPU services..."
API_KEY=$(openssl rand -hex 32)

# --- Create Modal Secrets ---
echo "Creating Modal secrets..."

# Create or update hf_token secret (delete first if exists)
if modal secret list 2>/dev/null | grep -q "hf_token"; then
    echo "  -> Recreating secret: hf_token"
    modal secret delete hf_token --yes 2>/dev/null || true
fi
echo "  -> Creating secret: hf_token"
modal secret create hf_token HF_TOKEN="$HF_TOKEN"

# Create or update reflector-gpu secret (delete first if exists)
if modal secret list 2>/dev/null | grep -q "reflector-gpu"; then
    echo "  -> Recreating secret: reflector-gpu"
    modal secret delete reflector-gpu --yes 2>/dev/null || true
fi
echo "  -> Creating secret: reflector-gpu"
modal secret create reflector-gpu REFLECTOR_GPU_APIKEY="$API_KEY"

# --- Deploy Functions ---
echo ""
echo "Deploying transcriber (Whisper)..."
TRANSCRIBER_URL=$(modal deploy reflector_transcriber.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
if [ -z "$TRANSCRIBER_URL" ]; then
    echo "Error: Failed to deploy transcriber. Check Modal dashboard for details."
    exit 1
fi
echo "  -> $TRANSCRIBER_URL"

echo ""
echo "Deploying diarizer (Pyannote)..."
DIARIZER_URL=$(modal deploy reflector_diarizer.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
if [ -z "$DIARIZER_URL" ]; then
    echo "Error: Failed to deploy diarizer. Check Modal dashboard for details."
    exit 1
fi
echo "  -> $DIARIZER_URL"

echo ""
echo "Deploying padding (CPU audio processing via Modal SDK)..."
# `set -e` would abort before a separate `$?` check ever ran, so test the
# command directly.
if ! modal deploy reflector_padding.py; then
    echo "Error: Failed to deploy padding. Check Modal dashboard for details."
    exit 1
fi
echo "  -> reflector-padding.pad_track (Modal SDK function)"

# --- Output Configuration ---
echo ""
echo "=========================================="
echo "Deployment complete!"
echo "=========================================="
echo ""
echo "Copy these values to your server's server/.env file:"
echo ""
echo "# --- Modal GPU Configuration ---"
echo "TRANSCRIPT_BACKEND=modal"
echo "TRANSCRIPT_URL=$TRANSCRIBER_URL"
echo "TRANSCRIPT_MODAL_API_KEY=$API_KEY"
echo ""
echo "DIARIZATION_BACKEND=modal"
echo "DIARIZATION_URL=$DIARIZER_URL"
echo "DIARIZATION_MODAL_API_KEY=$API_KEY"
echo ""
echo "# Padding uses Modal SDK (requires MODAL_TOKEN_ID/SECRET in worker containers)"
echo "# --- End Modal Configuration ---"
@@ -24,6 +24,12 @@ app = modal.App(name="reflector-diarizer")
 upload_volume = modal.Volume.from_name("diarizer-uploads", create_if_missing=True)
 
 
+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py (this file)
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -39,6 +45,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")
 
     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -105,7 +113,7 @@ def download_pyannote_audio():
 
 
 diarizer_image = (
-    modal.Image.debian_slim(python_version="3.10.8")
+    modal.Image.debian_slim(python_version="3.10")
     .pip_install(
         "pyannote.audio==3.1.0",
         "requests",
@@ -116,7 +124,7 @@ diarizer_image = (
         "transformers==4.34.0",
         "sentencepiece",
         "protobuf",
-        "numpy",
+        "numpy<2",
         "huggingface_hub",
         "hf-transfer",
     )
277 gpu/modal_deployments/reflector_padding.py Normal file
@@ -0,0 +1,277 @@
"""
Reflector GPU backend - audio padding
======================================

CPU-intensive audio padding service for adding silence to audio tracks.
Uses PyAV filter graph (adelay) for precise track synchronization.

IMPORTANT: This padding logic is duplicated from server/reflector/utils/audio_padding.py
for Modal deployment isolation (Modal can't import from server/reflector/). If you modify
the PyAV filter graph or padding algorithm, you MUST update both:
- gpu/modal_deployments/reflector_padding.py (this file)
- server/reflector/utils/audio_padding.py

Constants duplicated from server/reflector/utils/audio_constants.py for same reason.
"""

import asyncio
import math
import os
import tempfile
from fractions import Fraction

import modal

S3_TIMEOUT = 60  # happens 2 times
PADDING_TIMEOUT = 600 + (S3_TIMEOUT * 2)
SCALEDOWN_WINDOW = 60  # The maximum duration (in seconds) that individual containers can remain idle when scaling down.
DISCONNECT_CHECK_INTERVAL = 2  # Check for client disconnect


app = modal.App("reflector-padding")

# CPU-based image
image = (
    modal.Image.debian_slim(python_version="3.12")
    .apt_install("ffmpeg")  # Required by PyAV
    .pip_install(
        "av==13.1.0",  # PyAV for audio processing
        "requests==2.32.3",  # HTTP for presigned URL downloads/uploads
        "fastapi==0.115.12",  # API framework
    )
)

# ref B0F71CE8-FC59-4AA5-8414-DAFB836DB711
OPUS_STANDARD_SAMPLE_RATE = 48000
# ref B0F71CE8-FC59-4AA5-8414-DAFB836DB711
OPUS_DEFAULT_BIT_RATE = 128000


@app.function(
    cpu=2.0,
    timeout=PADDING_TIMEOUT,
    scaledown_window=SCALEDOWN_WINDOW,
    image=image,
)
@modal.asgi_app()
def web():
    from fastapi import FastAPI, HTTPException, Request
    from pydantic import BaseModel

    class PaddingRequest(BaseModel):
        track_url: str
        output_url: str
        start_time_seconds: float
        track_index: int

    class PaddingResponse(BaseModel):
        size: int
        cancelled: bool = False

    web_app = FastAPI()

    @web_app.post("/pad")
    async def pad_track_endpoint(
        request: Request, req: PaddingRequest
    ) -> PaddingResponse:
        """Modal web endpoint for padding audio tracks with disconnect detection."""
        import logging

        logging.basicConfig(
            level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
        )
        logger = logging.getLogger(__name__)

        if not req.track_url:
            raise HTTPException(status_code=400, detail="track_url cannot be empty")
        if not req.output_url:
            raise HTTPException(status_code=400, detail="output_url cannot be empty")
        if req.start_time_seconds <= 0:
            raise HTTPException(
                status_code=400,
                detail=f"start_time_seconds must be positive, got {req.start_time_seconds}",
            )
        if req.start_time_seconds > 18000:
            raise HTTPException(
                status_code=400,
                detail="start_time_seconds exceeds maximum 18000s (5 hours)",
            )

        logger.info(
            f"Padding request: track {req.track_index}, delay={req.start_time_seconds}s"
        )

        # Thread-safe cancellation flag shared between async disconnect checker
        # and blocking thread
        import threading

        cancelled = threading.Event()

        async def check_disconnect():
            """Background task to check for client disconnect every 2 seconds."""
            while not cancelled.is_set():
                await asyncio.sleep(DISCONNECT_CHECK_INTERVAL)
                if await request.is_disconnected():
                    logger.warning("Client disconnected, setting cancellation flag")
                    cancelled.set()
                    break

        # Start disconnect checker in background
        disconnect_task = asyncio.create_task(check_disconnect())

        try:
            result = await asyncio.get_event_loop().run_in_executor(
                None, _pad_track_blocking, req, cancelled, logger
            )
            return PaddingResponse(**result)
        finally:
            cancelled.set()
            disconnect_task.cancel()
            try:
                await disconnect_task
            except asyncio.CancelledError:
                pass

    def _pad_track_blocking(req, cancelled, logger) -> dict:
        """Blocking CPU-bound padding work with periodic cancellation checks.

        Args:
            cancelled: threading.Event for thread-safe cancellation signaling
        """
        import time

        import av
        import requests
        from av.audio.resampler import AudioResampler

        temp_dir = tempfile.mkdtemp()
        input_path = None
        output_path = None
        last_check = time.time()

        try:
            logger.info("Downloading track for padding")
            response = requests.get(req.track_url, stream=True, timeout=S3_TIMEOUT)
            response.raise_for_status()

            input_path = os.path.join(temp_dir, "track.webm")
            total_bytes = 0
            chunk_count = 0
            with open(input_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
                        total_bytes += len(chunk)
                        chunk_count += 1

                        # Check for cancellation every arbitrary amount of chunks
                        if chunk_count % 12 == 0:
                            now = time.time()
                            if now - last_check >= DISCONNECT_CHECK_INTERVAL:
                                if cancelled.is_set():
                                    logger.info("Cancelled during download, exiting early")
                                    return {"size": 0, "cancelled": True}
                                last_check = now
            logger.info(f"Track downloaded: {total_bytes} bytes")

            if cancelled.is_set():
                logger.info("Cancelled after download, exiting early")
                return {"size": 0, "cancelled": True}

            # Apply padding using PyAV
            output_path = os.path.join(temp_dir, "padded.webm")
            delay_ms = math.floor(req.start_time_seconds * 1000)
            logger.info(f"Padding track {req.track_index} with {delay_ms}ms delay using PyAV")

            in_container = av.open(input_path)
            in_stream = next((s for s in in_container.streams if s.type == "audio"), None)
            if in_stream is None:
                raise ValueError("No audio stream in input")

            with av.open(output_path, "w", format="webm") as out_container:
                out_stream = out_container.add_stream("libopus", rate=OPUS_STANDARD_SAMPLE_RATE)
                out_stream.bit_rate = OPUS_DEFAULT_BIT_RATE
                graph = av.filter.Graph()

                abuf_args = (
                    f"time_base=1/{OPUS_STANDARD_SAMPLE_RATE}:"
                    f"sample_rate={OPUS_STANDARD_SAMPLE_RATE}:"
                    f"sample_fmt=s16:"
                    f"channel_layout=stereo"
                )
                src = graph.add("abuffer", args=abuf_args, name="src")
                aresample_f = graph.add("aresample", args="async=1", name="ares")
                delays_arg = f"{delay_ms}|{delay_ms}"
                adelay_f = graph.add("adelay", args=f"delays={delays_arg}:all=1", name="delay")
                sink = graph.add("abuffersink", name="sink")

                src.link_to(aresample_f)
                aresample_f.link_to(adelay_f)
                adelay_f.link_to(sink)
                graph.configure()

                resampler = AudioResampler(
                    format="s16", layout="stereo", rate=OPUS_STANDARD_SAMPLE_RATE
                )

                for frame in in_container.decode(in_stream):
                    # Check for cancellation periodically
                    now = time.time()
                    if now - last_check >= DISCONNECT_CHECK_INTERVAL:
                        if cancelled.is_set():
                            logger.info("Cancelled during processing, exiting early")
                            in_container.close()
                            return {"size": 0, "cancelled": True}
                        last_check = now

                    out_frames = resampler.resample(frame) or []
                    for rframe in out_frames:
                        rframe.sample_rate = OPUS_STANDARD_SAMPLE_RATE
                        rframe.time_base = Fraction(1, OPUS_STANDARD_SAMPLE_RATE)
                        src.push(rframe)

                    while True:
                        try:
                            f_out = sink.pull()
                        except Exception:
                            break
                        f_out.sample_rate = OPUS_STANDARD_SAMPLE_RATE
                        f_out.time_base = Fraction(1, OPUS_STANDARD_SAMPLE_RATE)
                        for packet in out_stream.encode(f_out):
                            out_container.mux(packet)

                # Flush filter graph
                src.push(None)
                while True:
                    try:
                        f_out = sink.pull()
                    except Exception:
                        break
                    f_out.sample_rate = OPUS_STANDARD_SAMPLE_RATE
                    f_out.time_base = Fraction(1, OPUS_STANDARD_SAMPLE_RATE)
                    for packet in out_stream.encode(f_out):
                        out_container.mux(packet)

                # Flush encoder
                for packet in out_stream.encode(None):
                    out_container.mux(packet)

            in_container.close()

            file_size = os.path.getsize(output_path)
            logger.info(f"Padding complete: {file_size} bytes")

            logger.info("Uploading padded track to S3")

            with open(output_path, "rb") as f:
                upload_response = requests.put(req.output_url, data=f, timeout=S3_TIMEOUT)

            upload_response.raise_for_status()
            logger.info(f"Upload complete: {file_size} bytes")

            return {"size": file_size}

        finally:
            if input_path and os.path.exists(input_path):
                try:
                    os.unlink(input_path)
                except Exception as e:
                    logger.warning(f"Failed to cleanup input file: {e}")
            if output_path and os.path.exists(output_path):
                try:
                    os.unlink(output_path)
                except Exception as e:
                    logger.warning(f"Failed to cleanup output file: {e}")
            try:
                os.rmdir(temp_dir)
            except Exception as e:
                logger.warning(f"Failed to cleanup temp directory: {e}")

    return web_app
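For reference, a sketch of how a caller might exercise the `/pad` endpoint defined above once deployed. The web URL and presigned S3 links are placeholders, not values produced by this repository:

```python
# Sketch: invoking the /pad endpoint. URL and presigned links are placeholders.
import requests

PAD_URL = "https://example--reflector-padding-web.modal.run/pad"

resp = requests.post(
    PAD_URL,
    json={
        "track_url": "https://s3.example.com/raw-track.webm?X-Amz-...",      # presigned GET
        "output_url": "https://s3.example.com/padded-track.webm?X-Amz-...",  # presigned PUT
        "start_time_seconds": 12.5,
        "track_index": 0,
    },
    timeout=700,  # padding can run up to PADDING_TIMEOUT server-side
)
resp.raise_for_status()
print(resp.json())  # {"size": <bytes>, "cancelled": false}
```
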
@@ -89,6 +89,7 @@ image = (
         "torch==2.5.1",
         "faster-whisper==1.1.1",
         "fastapi==0.115.12",
+        "python-multipart",
         "requests",
         "librosa==0.10.1",
         "numpy<2",
@@ -98,6 +99,12 @@ image = (
 )
 
 
+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (this file - 2 copies!)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -113,6 +120,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")
 
     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -315,6 +324,11 @@ class TranscriberWhisperFile:
         import numpy as np
         from silero_vad import VADIterator
 
+        # IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
+        # If you modify this function, you MUST update all copies:
+        # - gpu/modal_deployments/reflector_transcriber.py (this file)
+        # - gpu/modal_deployments/reflector_transcriber_parakeet.py
+        # - gpu/self_hosted/app/services/transcriber.py
         def vad_segments(
             audio_array,
             sample_rate: int = SAMPLERATE,
@@ -322,6 +336,7 @@ class TranscriberWhisperFile:
         ) -> Generator[TimeSegment, None, None]:
             """Generate speech segments as TimeSegment using Silero VAD."""
             iterator = VADIterator(self.vad_model, sampling_rate=sample_rate)
+            audio_duration = len(audio_array) / float(SAMPLERATE)
             start = None
             for i in range(0, len(audio_array), window_size):
                 chunk = audio_array[i : i + window_size]
@@ -341,6 +356,9 @@ class TranscriberWhisperFile:
                         start / float(SAMPLERATE), end / float(SAMPLERATE)
                     )
                     start = None
+            # Handle case where audio ends while speech is still active
+            if start is not None:
+                yield TimeSegment(start / float(SAMPLERATE), audio_duration)
             iterator.reset_states()
 
         upload_volume.reload()
@@ -406,6 +424,12 @@ class TranscriberWhisperFile:
         return {"text": " ".join(all_text), "words": all_words}
 
 
+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (this file - 2 copies!)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: dict) -> str:
     from urllib.parse import urlparse
 
@@ -423,6 +447,8 @@ def detect_audio_format(url: str, headers: dict) -> str:
         return "wav"
     if "audio/mp4" in content_type:
         return "mp4"
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return "webm"
 
     raise HTTPException(
         status_code=400,

@@ -81,15 +81,21 @@ image = (
         "cuda-python==12.8.0",
         "fastapi==0.115.12",
         "numpy<2",
-        "librosa==0.10.1",
+        "librosa==0.11.0",
         "requests",
-        "silero-vad==5.1.0",
+        "silero-vad==6.2.0",
         "torch",
     )
     .entrypoint([])  # silence chatty logs by container on start
 )
 
 
+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py (this file)
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -105,6 +111,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")
 
     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -301,11 +309,17 @@ class TranscriberParakeetFile:
         audio_array, sample_rate = librosa.load(file_path, sr=SAMPLERATE, mono=True)
         return audio_array
 
+    # IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
+    # If you modify this function, you MUST update all copies:
+    # - gpu/modal_deployments/reflector_transcriber.py
+    # - gpu/modal_deployments/reflector_transcriber_parakeet.py (this file)
+    # - gpu/self_hosted/app/services/transcriber.py
     def vad_segment_generator(
         audio_array,
     ) -> Generator[TimeSegment, None, None]:
         """Generate speech segments using VAD with start/end sample indices"""
         vad_iterator = VADIterator(self.vad_model, sampling_rate=SAMPLERATE)
+        audio_duration = len(audio_array) / float(SAMPLERATE)
         window_size = VAD_CONFIG["window_size"]
         start = None
 
@@ -332,6 +346,10 @@ class TranscriberParakeetFile:
             yield TimeSegment(start_time, end_time)
             start = None
 
+        if start is not None:
+            start_time = start / float(SAMPLERATE)
+            yield TimeSegment(start_time, audio_duration)
+
         vad_iterator.reset_states()
 
     def batch_speech_segments(

@@ -103,7 +103,7 @@ def configure_seamless_m4t():
 
 
 transcriber_image = (
-    Image.debian_slim(python_version="3.10.8")
+    Image.debian_slim(python_version="3.10")
    .apt_install("git")
    .apt_install("wget")
    .apt_install("libsndfile-dev")
@@ -119,6 +119,7 @@ transcriber_image = (
         "fairseq2",
         "pyyaml",
         "hf-transfer~=0.1",
+        "pydantic",
     )
     .run_function(install_seamless_communication)
     .run_function(download_seamlessm4t_model)

137 gpu/self_hosted/DEV_SETUP.md Normal file
@@ -0,0 +1,137 @@
# Local Development GPU Setup

Run transcription and diarization locally for development/testing.

> **For production deployment**, see the [Self-Hosted GPU Setup Guide](../../docs/docs/installation/self-hosted-gpu-setup.md).

## Prerequisites

1. **Python 3.12+** and **uv** package manager
2. **FFmpeg** installed and on PATH
3. **HuggingFace account** with access to pyannote models

### Accept Pyannote Licenses (Required)

Before first run, accept licenses for these gated models (logged into HuggingFace):
- https://hf.co/pyannote/speaker-diarization-3.1
- https://hf.co/pyannote/segmentation-3.0

## Quick Start

### 1. Install dependencies

```bash
cd gpu/self_hosted
uv sync
```

### 2. Start the GPU service

```bash
cd gpu/self_hosted
HF_TOKEN=<your-huggingface-token> \
REFLECTOR_GPU_APIKEY=dev-key-12345 \
.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

Note: The `.env` file is NOT auto-loaded. Pass env vars explicitly or use:
```bash
export HF_TOKEN=<your-token>
export REFLECTOR_GPU_APIKEY=dev-key-12345
.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

### 3. Configure Reflector to use local GPU

Edit `server/.env`:

```bash
# Transcription - local GPU service
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=http://host.docker.internal:8000
TRANSCRIPT_MODAL_API_KEY=dev-key-12345

# Diarization - local GPU service
DIARIZATION_BACKEND=modal
DIARIZATION_URL=http://host.docker.internal:8000
DIARIZATION_MODAL_API_KEY=dev-key-12345
```

Note: Use `host.docker.internal` because the Reflector server runs in Docker.

### 4. Restart Reflector server

```bash
cd server
docker compose restart server worker
```

## Testing

### Test transcription

```bash
curl -s -X POST http://localhost:8000/v1/audio/transcriptions \
  -H "Authorization: Bearer dev-key-12345" \
  -F "file=@/path/to/audio.wav" \
  -F "language=en"
```

### Test diarization

```bash
curl -s -X POST "http://localhost:8000/diarize?audio_file_url=<audio-url>" \
  -H "Authorization: Bearer dev-key-12345"
```

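The same check can be scripted. A Python sketch mirroring the transcription curl above — the audio file path is a placeholder, and the key is the dev value from step 2:

```python
# Sketch: exercise the local GPU service with the dev API key from step 2.
import requests

BASE = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer dev-key-12345"}

# Transcription: multipart upload of a local audio file (path is a placeholder)
with open("/path/to/audio.wav", "rb") as f:
    r = requests.post(
        f"{BASE}/v1/audio/transcriptions",
        headers=HEADERS,
        files={"file": f},
        data={"language": "en"},
        timeout=300,
    )
r.raise_for_status()
print(r.json())
```
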
## Platform Notes

### macOS (ARM)

The Docker build fails because the CUDA packages are x86_64-only. Use local Python instead:
```bash
uv sync
HF_TOKEN=xxx REFLECTOR_GPU_APIKEY=xxx .venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

### Linux with NVIDIA GPU

Docker works with CUDA acceleration:
```bash
docker compose up -d
```

### CPU-only

Works on any platform, just slower. PyTorch auto-detects the hardware and falls back to CPU (see the check below).
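To confirm which device the service will actually use, a minimal check (assuming PyTorch is installed in the same environment):

```python
# Minimal device check: mirrors the fallback behaviour described above.
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"PyTorch will run on: {device}")
if device == "cuda":
    print(f"GPU: {torch.cuda.get_device_name(0)}")
```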
## Switching Back to Modal.com

Edit `server/.env`:

```bash
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
TRANSCRIPT_MODAL_API_KEY=<modal-api-key>

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=<modal-api-key>
```
## Troubleshooting

### "Could not download pyannote pipeline"
- Accept the model licenses at HuggingFace (see Prerequisites)
- Verify that HF_TOKEN is set and valid

### Service won't start
- Check that port 8000 is free: `lsof -i :8000`
- Kill orphan processes if needed

### Transcription returns empty text
- Ensure the audio contains speech (not just tones or silence)
- Check that the audio format is supported (wav, mp3, etc.)

### Deprecation warnings from torchaudio/pyannote
- Safe to ignore; they don't affect functionality
@@ -56,9 +56,13 @@ Docker

- Not yet provided in this directory. A Dockerfile will be added later. For now, use Local run above

Conformance tests
# Setup

# From this directory
[SETUP.md](SETUP.md)

# Conformance tests

## From this directory

TRANSCRIPT_URL=http://localhost:8000 \
TRANSCRIPT_API_KEY=dev-key \
@@ -129,6 +129,11 @@ class WhisperService:
    audio = np.frombuffer(proc.stdout, dtype=np.float32)
    return audio

# IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
# If you modify this function, you MUST update all copies:
# - gpu/modal_deployments/reflector_transcriber.py
# - gpu/modal_deployments/reflector_transcriber_parakeet.py
# - gpu/self_hosted/app/services/transcriber.py (this file)
def vad_segments(
    audio_array,
    sample_rate: int = SAMPLE_RATE,
@@ -153,6 +158,10 @@ class WhisperService:
            end = speech["end"]
            yield (start / float(SAMPLE_RATE), end / float(SAMPLE_RATE))
            start = None
    # Handle case where audio ends while speech is still active
    if start is not None:
        audio_duration = len(audio_array) / float(sample_rate)
        yield (start / float(SAMPLE_RATE), audio_duration)
    iterator.reset_states()

    audio_array = load_audio_via_ffmpeg(file_path, SAMPLE_RATE)
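The hunk above adds tail handling: if the audio ends while speech is still active, the open segment is closed at the audio's end. A self-contained sketch of that generator shape, with a hypothetical `events` list standing in for the real VAD iterator's output:

```python
# Sketch of the tail-handling pattern from the hunk above. `events` is a
# hypothetical stand-in for the VAD iterator: dicts carrying either a
# segment "start" or "end" sample index.
SAMPLE_RATE = 16000

def vad_segments(audio_array, events, sample_rate=SAMPLE_RATE):
    start = None
    for speech in events:
        if "start" in speech:
            start = speech["start"]
        elif "end" in speech and start is not None:
            yield (start / float(sample_rate), speech["end"] / float(sample_rate))
            start = None
    # Audio ended while speech was still active: close the open segment
    if start is not None:
        audio_duration = len(audio_array) / float(sample_rate)
        yield (start / float(sample_rate), audio_duration)
```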
@@ -34,6 +34,12 @@ def ensure_dirs():
    UPLOADS_PATH.mkdir(parents=True, exist_ok=True)


# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
# If you modify the audio format detection logic, you MUST update all copies:
# - gpu/self_hosted/app/utils.py (this file)
# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
# - gpu/modal_deployments/reflector_transcriber_parakeet.py
# - gpu/modal_deployments/reflector_diarizer.py
def detect_audio_format(url: str, headers: Mapping[str, str]) -> str:
    url_path = urlparse(url).path
    for ext in SUPPORTED_FILE_EXTENSIONS:
@@ -47,6 +53,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> str:
        return "wav"
    if "audio/mp4" in content_type:
        return "mp4"
    if "audio/webm" in content_type or "video/webm" in content_type:
        return "webm"

    raise HTTPException(
        status_code=400,
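For orientation, a self-contained sketch of the two-step strategy this hunk extends: try the URL path extension first, then fall back to the response Content-Type. Names mirror the diff but this is not the repo's exact code; it raises ValueError where the real function raises HTTPException(400):

```python
# Sketch of the detection strategy: URL extension first, Content-Type second.
from urllib.parse import urlparse

SUPPORTED_FILE_EXTENSIONS = ("wav", "mp3", "mp4", "webm")

def detect_audio_format(url: str, headers: dict) -> str:
    url_path = urlparse(url).path
    for ext in SUPPORTED_FILE_EXTENSIONS:
        if url_path.endswith(f".{ext}"):
            return ext
    content_type = headers.get("content-type", "")
    if "audio/wav" in content_type or "audio/x-wav" in content_type:
        return "wav"
    if "audio/mp4" in content_type:
        return "mp4"
    if "audio/webm" in content_type or "video/webm" in content_type:
        return "webm"
    raise ValueError(f"Unsupported audio format for {url!r}")
```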
@@ -8,3 +8,11 @@ services:
      - .env
    volumes:
      - ./cache:/root/.cache
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
264
gpu/self_hosted/uv.lock
generated
@@ -1,5 +1,5 @@
version = 1
revision = 2
revision = 3
requires-python = ">=3.12"

[[package]]
@@ -13,7 +13,7 @@ wheels = [

[[package]]
name = "aiohttp"
version = "3.12.15"
version = "3.13.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "aiohappyeyeballs" },
@@ -24,42 +24,76 @@ dependencies = [
    { name = "propcache" },
    { name = "yarl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" }
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" },
    { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" },
    { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" },
    { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" },
    { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" },
    { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" },
    { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" },
    { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" },
    { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" },
    { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" },
    { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" },
    { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" },
    { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" },
    { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" },
    { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" },
    { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" },
    { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" },
    { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" },
    { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" },
    { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" },
    { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" },
    { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" },
    { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" },
    { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" },
    { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" },
    { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" },
    { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" },
    { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" },
    { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" },
    { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" },
    { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" },
    { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" },
    { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" },
    { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" },
{ url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
||||
]

[[package]]
@@ -89,6 +123,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" },
]

[[package]]
name = "annotated-doc"
version = "0.0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
]

[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -460,16 +503,17 @@ wheels = [

[[package]]
name = "fastapi"
version = "0.116.1"
version = "0.128.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "annotated-doc" },
    { name = "pydantic" },
    { name = "starlette" },
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" },
    { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" },
]

[package.optional-dependencies]
@@ -478,6 +522,8 @@ standard = [
    { name = "fastapi-cli", extra = ["standard"] },
    { name = "httpx" },
    { name = "jinja2" },
    { name = "pydantic-extra-types" },
    { name = "pydantic-settings" },
    { name = "python-multipart" },
    { name = "uvicorn", extra = ["standard"] },
]
@@ -539,11 +585,11 @@ wheels = [

[[package]]
name = "filelock"
version = "3.19.1"
version = "3.20.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
    { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" },
]

[[package]]
@@ -557,43 +603,43 @@ wheels = [

[[package]]
name = "fonttools"
version = "4.59.2"
version = "4.60.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0d/a5/fba25f9fbdab96e26dedcaeeba125e5f05a09043bf888e0305326e55685b/fonttools-4.59.2.tar.gz", hash = "sha256:e72c0749b06113f50bcb80332364c6be83a9582d6e3db3fe0b280f996dc2ef22", size = 3540889, upload-time = "2025-08-27T16:40:30.97Z" }
sdist = { url = "https://files.pythonhosted.org/packages/3e/c4/db6a7b5eb0656534c3aa2596c2c5e18830d74f1b9aa5aa8a7dff63a0b11d/fonttools-4.60.2.tar.gz", hash = "sha256:d29552e6b155ebfc685b0aecf8d429cb76c14ab734c22ef5d3dea6fdf800c92c", size = 3562254, upload-time = "2025-12-09T13:38:11.835Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ba/3d/1f45db2df51e7bfa55492e8f23f383d372200be3a0ded4bf56a92753dd1f/fonttools-4.59.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82906d002c349cad647a7634b004825a7335f8159d0d035ae89253b4abf6f3ea", size = 2769711, upload-time = "2025-08-27T16:39:04.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/df/cd236ab32a8abfd11558f296e064424258db5edefd1279ffdbcfd4fd8b76/fonttools-4.59.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a10c1bd7644dc58f8862d8ba0cf9fb7fef0af01ea184ba6ce3f50ab7dfe74d5a", size = 2340225, upload-time = "2025-08-27T16:39:06.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/12/b6f9f964fe6d4b4dd4406bcbd3328821c3de1f909ffc3ffa558fe72af48c/fonttools-4.59.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:738f31f23e0339785fd67652a94bc69ea49e413dfdb14dcb8c8ff383d249464e", size = 4912766, upload-time = "2025-08-27T16:39:08.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/78/82bde2f2d2c306ef3909b927363170b83df96171f74e0ccb47ad344563cd/fonttools-4.59.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ec99f9bdfee9cdb4a9172f9e8fd578cce5feb231f598909e0aecf5418da4f25", size = 4955178, upload-time = "2025-08-27T16:39:10.094Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/77/7de766afe2d31dda8ee46d7e479f35c7d48747e558961489a2d6e3a02bd4/fonttools-4.59.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0476ea74161322e08c7a982f83558a2b81b491509984523a1a540baf8611cc31", size = 4897898, upload-time = "2025-08-27T16:39:12.087Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/77/ce0e0b905d62a06415fda9f2b2e109a24a5db54a59502b769e9e297d2242/fonttools-4.59.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:95922a922daa1f77cc72611747c156cfb38030ead72436a2c551d30ecef519b9", size = 5049144, upload-time = "2025-08-27T16:39:13.84Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/ea/870d93aefd23fff2e07cbeebdc332527868422a433c64062c09d4d5e7fe6/fonttools-4.59.2-cp312-cp312-win32.whl", hash = "sha256:39ad9612c6a622726a6a130e8ab15794558591f999673f1ee7d2f3d30f6a3e1c", size = 2206473, upload-time = "2025-08-27T16:39:15.854Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/c4/e44bad000c4a4bb2e9ca11491d266e857df98ab6d7428441b173f0fe2517/fonttools-4.59.2-cp312-cp312-win_amd64.whl", hash = "sha256:980fd7388e461b19a881d35013fec32c713ffea1fc37aef2f77d11f332dfd7da", size = 2254706, upload-time = "2025-08-27T16:39:17.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/7b/d0d3b9431642947b5805201fbbbe938a47b70c76685ef1f0cb5f5d7140d6/fonttools-4.59.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:381bde13216ba09489864467f6bc0c57997bd729abfbb1ce6f807ba42c06cceb", size = 2761563, upload-time = "2025-08-27T16:39:20.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/be/fc5fe58dd76af7127b769b68071dbc32d4b95adc8b58d1d28d42d93c90f2/fonttools-4.59.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f33839aa091f7eef4e9078f5b7ab1b8ea4b1d8a50aeaef9fdb3611bba80869ec", size = 2335671, upload-time = "2025-08-27T16:39:22.027Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/9f/bf231c2a3fac99d1d7f1d89c76594f158693f981a4aa02be406e9f036832/fonttools-4.59.2-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6235fc06bcbdb40186f483ba9d5d68f888ea68aa3c8dac347e05a7c54346fbc8", size = 4893967, upload-time = "2025-08-27T16:39:23.664Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/a9/d46d2ad4fcb915198504d6727f83aa07f46764c64f425a861aa38756c9fd/fonttools-4.59.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83ad6e5d06ef3a2884c4fa6384a20d6367b5cfe560e3b53b07c9dc65a7020e73", size = 4951986, upload-time = "2025-08-27T16:39:25.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/90/1cc8d7dd8f707dfeeca472b82b898d3add0ebe85b1f645690dcd128ee63f/fonttools-4.59.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d029804c70fddf90be46ed5305c136cae15800a2300cb0f6bba96d48e770dde0", size = 4891630, upload-time = "2025-08-27T16:39:27.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/04/f0345b0d9fe67d65aa8d3f2d4cbf91d06f111bc7b8d802e65914eb06194d/fonttools-4.59.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:95807a3b5e78f2714acaa26a33bc2143005cc05c0217b322361a772e59f32b89", size = 5035116, upload-time = "2025-08-27T16:39:29.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/7d/5ba5eefffd243182fbd067cdbfeb12addd4e5aec45011b724c98a344ea33/fonttools-4.59.2-cp313-cp313-win32.whl", hash = "sha256:b3ebda00c3bb8f32a740b72ec38537d54c7c09f383a4cfefb0b315860f825b08", size = 2204907, upload-time = "2025-08-27T16:39:31.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/a9/be7219fc64a6026cc0aded17fa3720f9277001c185434230bd351bf678e6/fonttools-4.59.2-cp313-cp313-win_amd64.whl", hash = "sha256:a72155928d7053bbde499d32a9c77d3f0f3d29ae72b5a121752481bcbd71e50f", size = 2253742, upload-time = "2025-08-27T16:39:33.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/c7/486580d00be6fa5d45e41682e5ffa5c809f3d25773c6f39628d60f333521/fonttools-4.59.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d09e487d6bfbe21195801323ba95c91cb3523f0fcc34016454d4d9ae9eaa57fe", size = 2762444, upload-time = "2025-08-27T16:39:34.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/9b/950ea9b7b764ceb8d18645c62191e14ce62124d8e05cb32a4dc5e65fde0b/fonttools-4.59.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:dec2f22486d7781087b173799567cffdcc75e9fb2f1c045f05f8317ccce76a3e", size = 2333256, upload-time = "2025-08-27T16:39:40.777Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/4d/8ee9d563126de9002eede950cde0051be86cc4e8c07c63eca0c9fc95734a/fonttools-4.59.2-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1647201af10993090120da2e66e9526c4e20e88859f3e34aa05b8c24ded2a564", size = 4834846, upload-time = "2025-08-27T16:39:42.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/26/f26d947b0712dce3d118e92ce30ca88f98938b066498f60d0ee000a892ae/fonttools-4.59.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47742c33fe65f41eabed36eec2d7313a8082704b7b808752406452f766c573fc", size = 4930871, upload-time = "2025-08-27T16:39:44.818Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/7f/ebe878061a5a5e6b6502f0548489e01100f7e6c0049846e6546ba19a3ab4/fonttools-4.59.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:92ac2d45794f95d1ad4cb43fa07e7e3776d86c83dc4b9918cf82831518165b4b", size = 4876971, upload-time = "2025-08-27T16:39:47.027Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/0d/0d22e3a20ac566836098d30718092351935487e3271fd57385db1adb2fde/fonttools-4.59.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fa9ecaf2dcef8941fb5719e16322345d730f4c40599bbf47c9753de40eb03882", size = 4987478, upload-time = "2025-08-27T16:39:48.774Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/a3/960cc83182a408ffacc795e61b5f698c6f7b0cfccf23da4451c39973f3c8/fonttools-4.59.2-cp314-cp314-win32.whl", hash = "sha256:a8d40594982ed858780e18a7e4c80415af65af0f22efa7de26bdd30bf24e1e14", size = 2208640, upload-time = "2025-08-27T16:39:50.592Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/74/55e5c57c414fa3965fee5fc036ed23f26a5c4e9e10f7f078a54ff9c7dfb7/fonttools-4.59.2-cp314-cp314-win_amd64.whl", hash = "sha256:9cde8b6a6b05f68516573523f2013a3574cb2c75299d7d500f44de82ba947b80", size = 2258457, upload-time = "2025-08-27T16:39:52.611Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/dc/8e4261dc591c5cfee68fecff3ffee2a9b29e1edc4c4d9cbafdc5aefe74ee/fonttools-4.59.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:036cd87a2dbd7ef72f7b68df8314ced00b8d9973aee296f2464d06a836aeb9a9", size = 2829901, upload-time = "2025-08-27T16:39:55.014Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/05/331538dcf21fd6331579cd628268150e85210d0d2bdae20f7598c2b36c05/fonttools-4.59.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:14870930181493b1d740b6f25483e20185e5aea58aec7d266d16da7be822b4bb", size = 2362717, upload-time = "2025-08-27T16:39:56.843Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/ae/d26428ca9ede809c0a93f0af91f44c87433dc0251e2aec333da5ed00d38f/fonttools-4.59.2-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7ff58ea1eb8fc7e05e9a949419f031890023f8785c925b44d6da17a6a7d6e85d", size = 4835120, upload-time = "2025-08-27T16:39:59.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/c4/0f6ac15895de509e07688cb1d45f1ae583adbaa0fa5a5699d73f3bd58ca0/fonttools-4.59.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dee142b8b3096514c96ad9e2106bf039e2fe34a704c587585b569a36df08c3c", size = 5071115, upload-time = "2025-08-27T16:40:01.009Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/b6/147a711b7ecf7ea39f9da9422a55866f6dd5747c2f36b3b0a7a7e0c6820b/fonttools-4.59.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8991bdbae39cf78bcc9cd3d81f6528df1f83f2e7c23ccf6f990fa1f0b6e19708", size = 4943905, upload-time = "2025-08-27T16:40:03.179Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/4e/2ab19006646b753855e2b02200fa1cabb75faa4eeca4ef289f269a936974/fonttools-4.59.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:53c1a411b7690042535a4f0edf2120096a39a506adeb6c51484a232e59f2aa0c", size = 4960313, upload-time = "2025-08-27T16:40:05.45Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/3d/df77907e5be88adcca93cc2cee00646d039da220164be12bee028401e1cf/fonttools-4.59.2-cp314-cp314t-win32.whl", hash = "sha256:59d85088e29fa7a8f87d19e97a1beae2a35821ee48d8ef6d2c4f965f26cb9f8a", size = 2269719, upload-time = "2025-08-27T16:40:07.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/a0/d4c4bc5b50275449a9a908283b567caa032a94505fe1976e17f994faa6be/fonttools-4.59.2-cp314-cp314t-win_amd64.whl", hash = "sha256:7ad5d8d8cc9e43cb438b3eb4a0094dd6d4088daa767b0a24d52529361fd4c199", size = 2333169, upload-time = "2025-08-27T16:40:09.656Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/a4/d2f7be3c86708912c02571db0b550121caab8cd88a3c0aacb9cfa15ea66e/fonttools-4.59.2-py3-none-any.whl", hash = "sha256:8bd0f759020e87bb5d323e6283914d9bf4ae35a7307dafb2cbd1e379e720ad37", size = 1132315, upload-time = "2025-08-27T16:40:28.984Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/30/530c9eddcd1c39219dc0aaede2b5a4c8ab80e0bb88d1b3ffc12944c4aac3/fonttools-4.60.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e0164b7609d2b5c5dd4e044b8085b7bd7ca7363ef8c269a4ab5b5d4885a426b2", size = 2847196, upload-time = "2025-12-09T13:36:33.262Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/2f/4077a482836d5bbe3bc9dac1c004d02ee227cf04ed62b0a2dfc41d4f0dfd/fonttools-4.60.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1dd3d9574fc595c1e97faccae0f264dc88784ddf7fbf54c939528378bacc0033", size = 2395842, upload-time = "2025-12-09T13:36:35.47Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/05/aae5bb99c5398f8ed4a8b784f023fd9dd3568f0bd5d5b21e35b282550f11/fonttools-4.60.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:98d0719f1b11c2817307d2da2e94296a3b2a3503f8d6252a101dca3ee663b917", size = 4949713, upload-time = "2025-12-09T13:36:37.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/37/49067349fc78ff0efbf09fadefe80ddf41473ca8f8a25400e3770da38328/fonttools-4.60.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d3ea26957dd07209f207b4fff64c702efe5496de153a54d3b91007ec28904dd", size = 4999907, upload-time = "2025-12-09T13:36:39.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/31/d0f11c758bd0db36b664c92a0f9dfdcc2d7313749aa7d6629805c6946f21/fonttools-4.60.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ee301273b0850f3a515299f212898f37421f42ff9adfc341702582ca5073c13", size = 4939717, upload-time = "2025-12-09T13:36:43.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/bc/1cff0d69522e561bf1b99bee7c3911c08c25e919584827c3454a64651ce9/fonttools-4.60.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6eb4694cc3b9c03b7c01d65a9cf35b577f21aa6abdbeeb08d3114b842a58153", size = 5089205, upload-time = "2025-12-09T13:36:45.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/e6/fb174f0069b7122e19828c551298bfd34fdf9480535d2a6ac2ed37afacd3/fonttools-4.60.2-cp312-cp312-win32.whl", hash = "sha256:57f07b616c69c244cc1a5a51072eeef07dddda5ebef9ca5c6e9cf6d59ae65b70", size = 2264674, upload-time = "2025-12-09T13:36:49.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/57/6552ffd6b582d3e6a9f01780c5275e6dfff1e70ca146101733aa1c12a129/fonttools-4.60.2-cp312-cp312-win_amd64.whl", hash = "sha256:310035802392f1fe5a7cf43d76f6ff4a24c919e4c72c0352e7b8176e2584b8a0", size = 2314701, upload-time = "2025-12-09T13:36:51.09Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/e4/8381d0ca6b6c6c484660b03517ec5b5b81feeefca3808726dece36c652a9/fonttools-4.60.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2bb5fd231e56ccd7403212636dcccffc96c5ae0d6f9e4721fa0a32cb2e3ca432", size = 2842063, upload-time = "2025-12-09T13:36:53.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/2c/4367117ee8ff4f4374787a1222da0bd413d80cf3522111f727a7b8f80d1d/fonttools-4.60.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:536b5fab7b6fec78ccf59b5c59489189d9d0a8b0d3a77ed1858be59afb096696", size = 2393792, upload-time = "2025-12-09T13:36:55.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/b7/a76b6dffa193869e54e32ca2f9abb0d0e66784bc8a24e6f86eb093015481/fonttools-4.60.2-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6b9288fc38252ac86a9570f19313ecbc9ff678982e0f27c757a85f1f284d3400", size = 4924020, upload-time = "2025-12-09T13:36:58.229Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/4e/0078200e2259f0061c86a74075f507d64c43dd2ab38971956a5c0012d344/fonttools-4.60.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93fcb420791d839ef592eada2b69997c445d0ce9c969b5190f2e16828ec10607", size = 4980070, upload-time = "2025-12-09T13:37:00.311Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/1f/d87c85a11cb84852c975251581862681e4a0c1c3bd456c648792203f311b/fonttools-4.60.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7916a381b094db4052ac284255186aebf74c5440248b78860cb41e300036f598", size = 4921411, upload-time = "2025-12-09T13:37:02.345Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/c0/7efad650f5ed8e317c2633133ef3c64917e7adf2e4e2940c798f5d57ec6e/fonttools-4.60.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58c8c393d5e16b15662cfc2d988491940458aa87894c662154f50c7b49440bef", size = 5063465, upload-time = "2025-12-09T13:37:04.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/a8/750518c4f8cdd79393b386bc81226047ade80239e58c6c9f5dbe1fdd8ea1/fonttools-4.60.2-cp313-cp313-win32.whl", hash = "sha256:19c6e0afd8b02008caa0aa08ab896dfce5d0bcb510c49b2c499541d5cb95a963", size = 2263443, upload-time = "2025-12-09T13:37:06.762Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/22/026c60376f165981f80a0e90bd98a79ae3334e9d89a3d046c4d2e265c724/fonttools-4.60.2-cp313-cp313-win_amd64.whl", hash = "sha256:6a500dc59e11b2338c2dba1f8cf11a4ae8be35ec24af8b2628b8759a61457b76", size = 2313800, upload-time = "2025-12-09T13:37:08.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/ab/7cf1f5204e1366ddf9dc5cdc2789b571feb9eebcee0e3463c3f457df5f52/fonttools-4.60.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9387c532acbe323bbf2a920f132bce3c408a609d5f9dcfc6532fbc7e37f8ccbb", size = 2841690, upload-time = "2025-12-09T13:37:10.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/3c/0bf83c6f863cc8b934952567fa2bf737cfcec8fc4ffb59b3f93820095f89/fonttools-4.60.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e6f1c824185b5b8fb681297f315f26ae55abb0d560c2579242feea8236b1cfef", size = 2392191, upload-time = "2025-12-09T13:37:12.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/f0/40090d148b8907fbea12e9bdf1ff149f30cdf1769e3b2c3e0dbf5106b88d/fonttools-4.60.2-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:55a3129d1e4030b1a30260f1b32fe76781b585fb2111d04a988e141c09eb6403", size = 4873503, upload-time = "2025-12-09T13:37:15.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/e0/d8b13f99e58b8c293781288ba62fe634f1f0697c9c4c0ae104d3215f3a10/fonttools-4.60.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b196e63753abc33b3b97a6fd6de4b7c4fef5552c0a5ba5e562be214d1e9668e0", size = 4968493, upload-time = "2025-12-09T13:37:18.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/c5/960764d12c92bc225f02401d3067048cb7b282293d9e48e39fe2b0ec38a9/fonttools-4.60.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de76c8d740fb55745f3b154f0470c56db92ae3be27af8ad6c2e88f1458260c9a", size = 4920015, upload-time = "2025-12-09T13:37:20.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/ab/839d8caf253d1eef3653ef4d34427d0326d17a53efaec9eb04056b670fff/fonttools-4.60.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6ba6303225c95998c9fda2d410aa792c3d2c1390a09df58d194b03e17583fa25", size = 5031165, upload-time = "2025-12-09T13:37:23.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/bf/3bc862796a6841cbe0725bb5512d272239b809dba631a4b0301df885e62d/fonttools-4.60.2-cp314-cp314-win32.whl", hash = "sha256:0a89728ce10d7c816fedaa5380c06d2793e7a8a634d7ce16810e536c22047384", size = 2267526, upload-time = "2025-12-09T13:37:25.821Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/a1/c1909cacf00c76dc37b4743451561fbaaf7db4172c22a6d9394081d114c3/fonttools-4.60.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa8446e6ab8bd778b82cb1077058a2addba86f30de27ab9cc18ed32b34bc8667", size = 2319096, upload-time = "2025-12-09T13:37:28.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/b3/f66e71433f08e3a931b2b31a665aeed17fcc5e6911fc73529c70a232e421/fonttools-4.60.2-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:4063bc81ac5a4137642865cb63dd270e37b3cd1f55a07c0d6e41d072699ccca2", size = 2925167, upload-time = "2025-12-09T13:37:30.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/13/eeb491ff743594bbd0bee6e49422c03a59fe9c49002d3cc60eeb77414285/fonttools-4.60.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:ebfdb66fa69732ed604ab8e2a0431e6deff35e933a11d73418cbc7823d03b8e1", size = 2430923, upload-time = "2025-12-09T13:37:32.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/e5/db609f785e460796e53c4dbc3874a5f4948477f27beceb5e2d24b2537666/fonttools-4.60.2-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50b10b3b1a72d1d54c61b0e59239e1a94c0958f4a06a1febf97ce75388dd91a4", size = 4877729, upload-time = "2025-12-09T13:37:35.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/d6/85e4484dd4bfb03fee7bd370d65888cccbd3dee2681ee48c869dd5ccb23f/fonttools-4.60.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:beae16891a13b4a2ddec9b39b4de76092a3025e4d1c82362e3042b62295d5e4d", size = 5096003, upload-time = "2025-12-09T13:37:37.862Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/49/1a98e44b71030b83d2046f981373b80571868259d98e6dae7bc20099dac6/fonttools-4.60.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:522f017fdb3766fd5d2d321774ef351cc6ce88ad4e6ac9efe643e4a2b9d528db", size = 4974410, upload-time = "2025-12-09T13:37:40.166Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/07/d6f775d950ee8a841012472c7303f8819423d8cc3b4530915de7265ebfa2/fonttools-4.60.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82cceceaf9c09a965a75b84a4b240dd3768e596ffb65ef53852681606fe7c9ba", size = 5002036, upload-time = "2025-12-09T13:37:42.639Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/f6/ba6458f83ce1a9f8c3b17bd8f7b8a2205a126aac1055796b7e7cfebbd38f/fonttools-4.60.2-cp314-cp314t-win32.whl", hash = "sha256:bbfbc918a75437fe7e6d64d1b1e1f713237df1cf00f3a36dedae910b2ba01cee", size = 2330985, upload-time = "2025-12-09T13:37:45.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/24/fea0ba4d3a32d4ed1103a1098bfd99dc78b5fe3bb97202920744a37b73dc/fonttools-4.60.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0e5cd9b0830f6550d58c84f3ab151a9892b50c4f9d538c5603c0ce6fff2eb3f1", size = 2396226, upload-time = "2025-12-09T13:37:47.355Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/6c/10280af05b44fafd1dff69422805061fa1af29270bc52dce031ac69540bf/fonttools-4.60.2-py3-none-any.whl", hash = "sha256:73cf92eeda67cf6ff10c8af56fc8f4f07c1647d989a979be9e388a49be26552a", size = 1144610, upload-time = "2025-12-09T13:38:09.5Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -684,6 +730,8 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" },
|
||||
@@ -693,6 +741,8 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" },
|
||||
@@ -700,6 +750,8 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" },
|
||||
]
|
||||
|
||||
@@ -1674,16 +1726,17 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "6.32.0"
|
||||
version = "6.33.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614, upload-time = "2025-08-14T21:21:25.015Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409, upload-time = "2025-08-14T21:21:12.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735, upload-time = "2025-08-14T21:21:15.046Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449, upload-time = "2025-08-14T21:21:16.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869, upload-time = "2025-08-14T21:21:18.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009, upload-time = "2025-08-14T21:21:19.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287, upload-time = "2025-08-14T21:21:23.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1857,6 +1910,33 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-extra-types"
|
||||
version = "2.11.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fd/35/2fee58b1316a73e025728583d3b1447218a97e621933fc776fb8c0f2ebdd/pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e", size = 157226, upload-time = "2025-12-31T16:18:27.944Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/17/fabd56da47096d240dd45ba627bead0333b0cf0ee8ada9bec579287dadf3/pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6", size = 74296, upload-time = "2025-12-31T16:18:26.38Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
@@ -2515,15 +2595,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.47.3"
|
||||
version = "0.49.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1b/3f/507c21db33b66fb027a332f2cb3abbbe924cc3a79ced12f01ed8645955c9/starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb", size = 2654703, upload-time = "2025-10-28T17:34:10.928Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2798,11 +2878,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.5.0"
|
||||
version = "2.6.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
258
scripts/setup-authentik-oauth.sh
Executable file
@@ -0,0 +1,258 @@
#!/bin/bash
set -e

# Setup Authentik OAuth provider for Reflector
#
# IMPORTANT: Run this script from your Reflector repository directory (cd ~/reflector)
# The script creates files using relative paths: server/reflector/auth/jwt/keys/
#
# Usage: ./setup-authentik-oauth.sh <authentik-url> <admin-password> <frontend-url>
# Example: ./setup-authentik-oauth.sh https://authentik.example.com MyPassword123 https://app.example.com

AUTHENTIK_URL="${1:-}"
ADMIN_PASSWORD="${2:-}"
FRONTEND_URL="${3:-}"

if [ -z "$AUTHENTIK_URL" ] || [ -z "$ADMIN_PASSWORD" ] || [ -z "$FRONTEND_URL" ]; then
    echo "Usage: $0 <authentik-url> <admin-password> <frontend-url>"
    echo "Example: $0 https://authentik.example.com MyPassword123 https://app.example.com"
    exit 1
fi

# Remove trailing slash from URLs
AUTHENTIK_URL="${AUTHENTIK_URL%/}"
FRONTEND_URL="${FRONTEND_URL%/}"

echo "==========================================="
echo "Authentik OAuth Setup for Reflector"
echo "==========================================="
echo ""
echo "Authentik URL: $AUTHENTIK_URL"
echo "Frontend URL: $FRONTEND_URL"
echo ""

# Step 1: Create API token via Django shell
echo "Creating API token..."
cd ~/authentik || { echo "Error: ~/authentik directory not found"; exit 1; }

API_TOKEN=$(sudo docker compose exec -T server python -m manage shell 2>&1 << 'PYTHON' | grep "^TOKEN:" | cut -d: -f2
from authentik.core.models import User, Token, TokenIntents

user = User.objects.get(username='akadmin')
token, created = Token.objects.update_or_create(
    identifier='reflector-setup',
    defaults={
        'user': user,
        'intent': TokenIntents.INTENT_API,
        'description': 'Reflector setup token',
        'expiring': False
    }
)
print(f"TOKEN:{token.key}")
PYTHON
)

cd - > /dev/null

if [ -z "$API_TOKEN" ] || [ "$API_TOKEN" = "null" ]; then
    echo "Error: Failed to create API token"
    echo "Make sure Authentik is fully started and akadmin user exists"
    exit 1
fi
echo " -> Got API token"

# Step 2: Get authorization flow UUID
echo "Getting authorization flow..."
FLOW_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/flows/instances/?slug=default-provider-authorization-implicit-consent" \
    -H "Authorization: Bearer $API_TOKEN")

FLOW_UUID=$(echo "$FLOW_RESPONSE" | jq -r '.results[0].pk')
if [ -z "$FLOW_UUID" ] || [ "$FLOW_UUID" = "null" ]; then
    echo "Error: Could not find authorization flow"
    echo "Response: $FLOW_RESPONSE"
    exit 1
fi
echo " -> Flow UUID: $FLOW_UUID"

# Step 3: Get invalidation flow UUID
echo "Getting invalidation flow..."
INVALIDATION_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/flows/instances/?slug=default-provider-invalidation-flow" \
    -H "Authorization: Bearer $API_TOKEN")

INVALIDATION_UUID=$(echo "$INVALIDATION_RESPONSE" | jq -r '.results[0].pk')
if [ -z "$INVALIDATION_UUID" ] || [ "$INVALIDATION_UUID" = "null" ]; then
    echo "Warning: Could not find invalidation flow, using authorization flow"
    INVALIDATION_UUID="$FLOW_UUID"
fi
echo " -> Invalidation UUID: $INVALIDATION_UUID"

# Step 4: Get scope mappings (email, openid, profile)
echo "Getting scope mappings..."
SCOPE_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/propertymappings/all/" \
    -H "Authorization: Bearer $API_TOKEN")

EMAIL_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''email'\''") | .pk')
OPENID_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''openid'\''") | .pk')
PROFILE_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''profile'\''") | .pk')
echo " -> email: $EMAIL_SCOPE"
echo " -> openid: $OPENID_SCOPE"
echo " -> profile: $PROFILE_SCOPE"

# Step 5: Get signing key
echo "Getting signing key..."
CERT_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/crypto/certificatekeypairs/" \
    -H "Authorization: Bearer $API_TOKEN")
SIGNING_KEY=$(echo "$CERT_RESPONSE" | jq -r '.results[0].pk')
echo " -> Signing key: $SIGNING_KEY"

# Step 6: Generate client credentials
CLIENT_ID="reflector"
CLIENT_SECRET=$(openssl rand -hex 32)

# Step 7: Create OAuth2 provider
echo "Creating OAuth2 provider..."
PROVIDER_RESPONSE=$(curl -s -X POST "$AUTHENTIK_URL/api/v3/providers/oauth2/" \
    -H "Authorization: Bearer $API_TOKEN" \
    -H "Content-Type: application/json" \
    -d "{
        \"name\": \"Reflector\",
        \"authorization_flow\": \"$FLOW_UUID\",
        \"invalidation_flow\": \"$INVALIDATION_UUID\",
        \"client_type\": \"confidential\",
        \"client_id\": \"$CLIENT_ID\",
        \"client_secret\": \"$CLIENT_SECRET\",
        \"redirect_uris\": [{
            \"matching_mode\": \"strict\",
            \"url\": \"$FRONTEND_URL/api/auth/callback/authentik\"
        }],
        \"property_mappings\": [\"$EMAIL_SCOPE\", \"$OPENID_SCOPE\", \"$PROFILE_SCOPE\"],
        \"signing_key\": \"$SIGNING_KEY\",
        \"access_token_validity\": \"hours=1\",
        \"refresh_token_validity\": \"days=30\"
    }")

PROVIDER_ID=$(echo "$PROVIDER_RESPONSE" | jq -r '.pk')
if [ -z "$PROVIDER_ID" ] || [ "$PROVIDER_ID" = "null" ]; then
    # Check if provider already exists
    if echo "$PROVIDER_RESPONSE" | grep -q "already exists"; then
        echo " -> Provider already exists, updating..."
        EXISTING=$(curl -s "$AUTHENTIK_URL/api/v3/providers/oauth2/?name=Reflector" \
            -H "Authorization: Bearer $API_TOKEN")
        PROVIDER_ID=$(echo "$EXISTING" | jq -r '.results[0].pk')
        CLIENT_ID=$(echo "$EXISTING" | jq -r '.results[0].client_id')
        # Update secret and scopes
        curl -s -X PATCH "$AUTHENTIK_URL/api/v3/providers/oauth2/$PROVIDER_ID/" \
            -H "Authorization: Bearer $API_TOKEN" \
            -H "Content-Type: application/json" \
            -d "{
                \"client_secret\": \"$CLIENT_SECRET\",
                \"property_mappings\": [\"$EMAIL_SCOPE\", \"$OPENID_SCOPE\", \"$PROFILE_SCOPE\"],
                \"signing_key\": \"$SIGNING_KEY\"
            }" > /dev/null
    else
        echo "Error: Failed to create provider"
        echo "Response: $PROVIDER_RESPONSE"
        exit 1
    fi
fi
echo " -> Provider ID: $PROVIDER_ID"

# Step 8: Create application
echo "Creating application..."
APP_RESPONSE=$(curl -s -X POST "$AUTHENTIK_URL/api/v3/core/applications/" \
    -H "Authorization: Bearer $API_TOKEN" \
    -H "Content-Type: application/json" \
    -d "{
        \"name\": \"Reflector\",
        \"slug\": \"reflector\",
        \"provider\": $PROVIDER_ID
    }")

if echo "$APP_RESPONSE" | grep -q "already exists"; then
    echo " -> Application already exists"
else
    APP_SLUG=$(echo "$APP_RESPONSE" | jq -r '.slug')
    if [ -z "$APP_SLUG" ] || [ "$APP_SLUG" = "null" ]; then
        echo "Error: Failed to create application"
        echo "Response: $APP_RESPONSE"
        exit 1
    fi
    echo " -> Application created: $APP_SLUG"
fi

# Step 9: Extract public key for JWT verification
echo "Extracting public key for JWT verification..."
mkdir -p server/reflector/auth/jwt/keys
curl -s "$AUTHENTIK_URL/application/o/reflector/jwks/" | \
    jq -r '.keys[0].x5c[0]' | \
    base64 -d | \
    openssl x509 -pubkey -noout > server/reflector/auth/jwt/keys/authentik_public.pem

if [ ! -s server/reflector/auth/jwt/keys/authentik_public.pem ]; then
    echo "Error: Failed to extract public key"
    exit 1
fi
echo " -> Saved to server/reflector/auth/jwt/keys/authentik_public.pem"

# Step 10: Update environment files automatically
echo "Updating environment files..."

# Update server/.env
cat >> server/.env << EOF

# --- Authentik OAuth (added by setup script) ---
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=$CLIENT_ID
AUTH_JWT_PUBLIC_KEY=authentik_public.pem
# --- End JWT Configuration ---
EOF
echo " -> Updated server/.env"

# Update www/.env
cat >> www/.env << EOF

# --- Authentik OAuth (added by setup script) ---
FEATURE_REQUIRE_LOGIN=true
AUTHENTIK_ISSUER=$AUTHENTIK_URL/application/o/reflector
AUTHENTIK_REFRESH_TOKEN_URL=$AUTHENTIK_URL/application/o/token/
AUTHENTIK_CLIENT_ID=$CLIENT_ID
AUTHENTIK_CLIENT_SECRET=$CLIENT_SECRET
# --- End Authentik Configuration ---
EOF
echo " -> Updated www/.env"

# Step 11: Restart Reflector services
echo "Restarting Reflector services..."
docker compose -f docker-compose.prod.yml up -d server worker web

echo ""
echo "==========================================="
echo "Setup complete!"
echo "==========================================="
echo ""
echo "Authentik admin: $AUTHENTIK_URL"
echo "  Username: akadmin"
echo "  Password: (provided as argument)"
echo ""
echo "Frontend: $FRONTEND_URL"
echo "  Authentication is now required"
echo ""
echo "Note: Public key saved to server/reflector/auth/jwt/keys/authentik_public.pem"
echo "      and mounted via docker-compose volume."
echo ""
echo "==========================================="
echo "Configuration values (for reference):"
echo "==========================================="
echo ""
echo "# server/.env"
echo "AUTH_BACKEND=jwt"
echo "AUTH_JWT_AUDIENCE=$CLIENT_ID"
echo "AUTH_JWT_PUBLIC_KEY=authentik_public.pem"
echo ""
echo "# www/.env"
echo "FEATURE_REQUIRE_LOGIN=true"
echo "AUTHENTIK_ISSUER=$AUTHENTIK_URL/application/o/reflector"
echo "AUTHENTIK_REFRESH_TOKEN_URL=$AUTHENTIK_URL/application/o/token/"
echo "AUTHENTIK_CLIENT_ID=$CLIENT_ID"
echo "AUTHENTIK_CLIENT_SECRET=$CLIENT_SECRET"
echo ""
139
server/.env.example
Normal file
@@ -0,0 +1,139 @@
#
# This file serves as an example of possible configuration
# All the settings are described here: reflector/settings.py
#

## =======================================================
## Core Configuration (Required for Production)
## =======================================================

## Database (for docker-compose.prod.yml, use postgres hostname)
#DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

## Redis (for docker-compose.prod.yml, use redis hostname)
#REDIS_HOST=redis
#REDIS_PORT=6379
#CELERY_BROKER_URL=redis://redis:6379/1
#CELERY_RESULT_BACKEND=redis://redis:6379/1

## Base URL - your API domain with https
#BASE_URL=https://api.example.com

## CORS - required when frontend and API are on different domains
#CORS_ORIGIN=https://app.example.com
#CORS_ALLOW_CREDENTIALS=true

## Secret key - generate with: openssl rand -hex 32
#SECRET_KEY=changeme-generate-a-secure-random-string

## =======================================================
## User authentication
## =======================================================

## Using jwt/authentik
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=

## =======================================================
## Transcription backend
##
## Check reflector/processors/audio_transcript_* for the
## full list of available transcription backends
## =======================================================

## Using local whisper
#TRANSCRIPT_BACKEND=whisper

## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
#TRANSCRIPT_BACKEND=modal
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
#TRANSCRIPT_MODAL_API_KEY=xxxxx

TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
TRANSCRIPT_MODAL_API_KEY=

## =======================================================
## Translation backend
##
## Only available on modal at the moment
## =======================================================
TRANSLATION_BACKEND=modal
TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
#TRANSLATION_MODAL_API_KEY=xxxxx

## =======================================================
## LLM backend (Required)
##
## Responsible for generating titles, summaries, and topic detection
## Requires OpenAI API key
## =======================================================

## OpenAI API key - get from https://platform.openai.com/account/api-keys
LLM_API_KEY=sk-your-openai-api-key
LLM_MODEL=gpt-4o-mini

## Optional: Custom endpoint (defaults to OpenAI)
# LLM_URL=https://api.openai.com/v1

## Context size for summary generation (tokens)
LLM_CONTEXT_WINDOW=16000

## =======================================================
## Diarization
##
## Only available on modal
## To allow diarization, you need to expose the files to be downloaded by the pipeline
## =======================================================
DIARIZATION_ENABLED=false
DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
#DIARIZATION_MODAL_API_KEY=xxxxx


## =======================================================
## Transcript Storage
##
## Where to store audio files and transcripts
## AWS S3 is required for production
## =======================================================
TRANSCRIPT_STORAGE_BACKEND=aws
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=your-aws-access-key
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret-key
TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=reflector-media
TRANSCRIPT_STORAGE_AWS_REGION=us-east-1


## =======================================================
## Sentry
## =======================================================

## Sentry DSN configuration
#SENTRY_DSN=

## =======================================================
## Video Platform Configuration
## =======================================================

## Whereby
#WHEREBY_API_KEY=your-whereby-api-key
#WHEREBY_WEBHOOK_SECRET=your-whereby-webhook-secret
#WHEREBY_STORAGE_AWS_ACCESS_KEY_ID=your-aws-key
#WHEREBY_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret
#AWS_PROCESS_RECORDING_QUEUE_URL=https://sqs.us-west-2.amazonaws.com/...

## Daily.co
#DAILY_API_KEY=your-daily-api-key
#DAILY_WEBHOOK_SECRET=your-daily-webhook-secret
#DAILY_SUBDOMAIN=your-subdomain
#DAILY_WEBHOOK_UUID= # Auto-populated by recreate_daily_webhook.py script
#DAILYCO_STORAGE_AWS_ROLE_ARN=... # IAM role ARN for Daily.co S3 access
#DAILYCO_STORAGE_AWS_BUCKET_NAME=reflector-dailyco
#DAILYCO_STORAGE_AWS_REGION=us-west-2

## Whereby (optional separate bucket)
#WHEREBY_STORAGE_AWS_BUCKET_NAME=reflector-whereby
#WHEREBY_STORAGE_AWS_REGION=us-east-1

## Platform Configuration
#DEFAULT_VIDEO_PLATFORM=whereby # Default platform for new rooms
@@ -53,6 +53,36 @@ response = sqs.receive_message(QueueUrl=queue_url, ...)
uv run /app/requeue_uploaded_file.py TRANSCRIPT_ID
```

## Hatchet Setup (Fresh DB)

After resetting the Hatchet database:

### Option A: Automatic (CLI)

```bash
# Get default tenant ID and create token in one command
TENANT_ID=$(docker compose exec -T postgres psql -U reflector -d hatchet -t -c \
  "SELECT id FROM \"Tenant\" WHERE slug = 'default';" | tr -d ' \n') && \
TOKEN=$(docker compose exec -T hatchet /hatchet-admin token create \
  --config /config --tenant-id "$TENANT_ID" 2>/dev/null | tr -d '\n') && \
echo "HATCHET_CLIENT_TOKEN=$TOKEN"
```

Copy the output to `server/.env`.

### Option B: Manual (UI)

1. Create API token at http://localhost:8889 → Settings → API Tokens
2. Update `server/.env`: `HATCHET_CLIENT_TOKEN=<new-token>`

### Then restart workers

```bash
docker compose restart server hatchet-worker
```

Workflows register automatically when hatchet-worker starts.

## Pipeline Management

### Continue stuck pipeline from final summaries (identify_participants) step:
2
server/docker/init-hatchet-db.sql
Normal file
@@ -0,0 +1,2 @@
-- Create hatchet database for Hatchet workflow engine
CREATE DATABASE hatchet;
496
server/docs/DAILY_REFLECTOR_DATA_MODEL.md
Normal file
@@ -0,0 +1,496 @@
# Daily.co and Reflector Data Model

This document explains the data model relationships between Daily.co's API concepts and Reflector's database schema, clarifying common sources of confusion.

---

## Table of Contents

1. [Core Entities Overview](#core-entities-overview)
2. [Daily.co vs Reflector Terminology](#dailyco-vs-reflector-terminology)
3. [Entity Relationships](#entity-relationships)
4. [Recording Multiplicity](#recording-multiplicity)
5. [Session Identifiers Explained](#session-identifiers-explained)
6. [Time-Based Matching](#time-based-matching)
7. [Multitrack Recording Details](#multitrack-recording-details)
8. [Verified Example](#verified-example)

---

## Core Entities Overview

### Reflector's Four Primary Entities

```
┌─────────────────────────────────────────────────────────────────┐
│ Room (Reflector)                                                │
│ - Persistent meeting template                                   │
│ - User-created configuration                                    │
│ - Example: "team-standup"                                       │
└────────────────────┬────────────────────────────────────────────┘
                     │ 1:N
                     ▼
┌─────────────────────────────────────────────────────────────────┐
│ Meeting (Reflector)                                             │
│ - Single session instance                                       │
│ - Creates NEW Daily.co room with timestamp                      │
│ - Example: "team-standup-20260115120000"                        │
└────────────────────┬────────────────────────────────────────────┘
                     │ 1:N
                     ▼
┌─────────────────────────────────────────────────────────────────┐
│ Recording (Reflector + Daily.co)                                │
│ - One segment of audio/video                                    │
│ - New recording created on stop/restart                         │
│ - track_keys: JSON array of S3 file paths                       │
└────────────────────┬────────────────────────────────────────────┘
                     │ 1:1
                     ▼
┌─────────────────────────────────────────────────────────────────┐
│ Transcript (Reflector)                                          │
│ - Processed audio with transcription                            │
│ - Diarization, summaries, topics                                │
│ - One transcript per recording                                  │
└─────────────────────────────────────────────────────────────────┘
```

---

## Daily.co vs Reflector Terminology

### Room

| Aspect | Daily.co | Reflector |
|--------|----------|-----------|
| **Definition** | Virtual meeting space on Daily.co platform | User-created meeting template/configuration |
| **Lifetime** | Configurable expiration | Persistent until user deletes |
| **Creation** | API call for each meeting | Pre-created by user once |
| **Reuse** | Can host multiple sessions | Generates new Daily.co room per meeting |
| **Name Format** | `room-name` (reusable) | `room-name` (base identifier) |
| **Timestamping** | Not required | Meeting adds timestamp: `{name}-YYYYMMDDHHMMSS` |

**Example:**
```
Reflector Room: "daily-private-igor" (persistent config)
    ↓ starts meeting
Daily.co Room: "daily-private-igor-20260110042117"
```

### Meeting

| Aspect | Daily.co | Reflector |
|--------|----------|-----------|
| **Definition** | Session that starts when first participant joins | Explicit database record of a session |
| **Identifier** | `mtgSessionId` (generated by Daily.co) | `meeting.id` (UUID, generated by Reflector) |
| **Creation** | Implicit (first participant join) | Explicit API call before participants join |
| **Purpose** | Tracks active session state | Links recordings, transcripts, participants |
| **Scope** | Per room instance | Per Reflector room + timestamp |

**Critical Limitation:** Daily.co's recordings API often does NOT return `mtgSessionId`, requiring time-based matching (see [Time-Based Matching](#time-based-matching)).

### Recording

| Aspect | Daily.co | Reflector |
|--------|----------|-----------|
| **Definition** | Audio/video files on S3 | Metadata + processing status |
| **Types** | `cloud` (composed video), `raw-tracks` (multitrack) | Stores references + `track_keys` array |
| **Multiplicity** | One recording object per start/stop cycle | One DB row per Daily.co recording object |
| **Identifier** | Daily.co `recording_id` | Same `recording_id` (stored in DB) |
| **Multitrack** | Array of `.webm` files (one per participant) | `track_keys` JSON array with S3 paths |
| **Linkage** | Via `room_name` + `start_ts` | FK `meeting_id` (set via time-based match) |

**Critical Behavior:** Recording **stops/restarts** create **separate recording objects** with unique IDs.

---

## Entity Relationships

### Database Schema Relationships

```sql
-- Simplified schema showing key relationships

TABLE room (
    id VARCHAR PRIMARY KEY,
    name VARCHAR UNIQUE,
    platform VARCHAR  -- 'whereby' | 'daily'
)

TABLE meeting (
    id VARCHAR PRIMARY KEY,
    room_id VARCHAR REFERENCES room(id) ON DELETE CASCADE,  -- nullable
    room_name VARCHAR,  -- Daily.co room name (timestamped)
    start_date TIMESTAMP,
    platform VARCHAR
)

TABLE recording (
    id VARCHAR PRIMARY KEY,  -- Daily.co recording_id
    meeting_id VARCHAR,  -- FK to meeting (set via time-based match)
    bucket_name VARCHAR,
    object_key VARCHAR,  -- S3 prefix
    track_keys JSON,  -- Array of S3 keys for multitrack
    recorded_at TIMESTAMP
)

TABLE transcript (
    id VARCHAR PRIMARY KEY,
    recording_id VARCHAR,  -- nullable FK
    meeting_id VARCHAR,  -- nullable FK
    room_id VARCHAR,  -- nullable FK
    participants JSON,  -- [{id, speaker, name, user_id}, ...]
    title VARCHAR,
    long_summary VARCHAR,
    webvtt TEXT
)
```

**Relationship Cardinalities:**
```
1 Room → N Meetings
1 Meeting → N Recordings (common: 1-21 recordings per meeting)
1 Recording → 1 Transcript
1 Meeting → N Transcripts (via recordings)
```

---

## Recording Multiplicity

### Why Multiple Recordings Per Meeting?

Daily.co creates a **new recording object** (new ID, new files) whenever recording stops and restarts. This happens due to:

1. **Manual stop/start** - User clicks stop, then start recording again
2. **Network reconnection** - Participant drops, reconnects → triggers restart
3. **Participant rejoin** - Last participant leaves, new one joins → new session
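Because each stop/restart yields a separate recording object, downstream code has to treat a meeting's recordings as an ordered list of segments. A minimal sketch of that ordering step, assuming each recording is a dict shaped like the Daily.co API responses shown in the Verified Example section below:

```python
# Hedged sketch: order a meeting's recording segments chronologically before
# stitching or processing them. Assumes recordings are dicts shaped like the
# Daily.co API responses shown later in this document.
def order_segments(recordings: list[dict]) -> list[dict]:
    finished = [r for r in recordings if r.get("status") == "finished"]
    # start_ts is a unix timestamp in seconds, so sorting on it yields the
    # segments in the order they were recorded.
    return sorted(finished, key=lambda r: r["start_ts"])
```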
---

## Session Identifiers Explained

### The Hidden Entity: Daily.co Meeting Session

Daily.co has an **implicit ephemeral entity** that sits between Room and Recording:

```
Daily.co Room: "daily-private-igor-20260110042117"
    │
    ├─ Daily.co Meeting Session #1 (mtgSessionId: c04334de...)
    │    └─ Recording #3 (f4a50f94) - 4s, 1 track
    │
    └─ Daily.co Meeting Session #2 (mtgSessionId: 4cdae3c0...)
         ├─ Recording #2 (b0fa94da) - 80s, 2 tracks ← recording stopped
         └─ Recording #1 (05edf519) - 62s, 1 track  ← then restarted
```

**Daily.co Meeting Session:**
- **Lifecycle:** Starts when first participant joins, ends when last participant leaves
- **Identifier:** `mtgSessionId` (generated by Daily.co)
- **Persistence:** Ephemeral - new ID if everyone leaves and someone rejoins
- **Relationship:** 1 Session → N Recordings (if recording stops/restarts during session)

**Key Insight:** Multiple recordings can share the same `mtgSessionId` if recording was stopped and restarted while participants remained connected.

### mtgSessionId (Meeting Session Identifier)

`mtgSessionId` identifies a **Daily.co meeting session** (not individual participants, not a room).

### session_id (Per-Participant)

**Different concept:** Per-participant connection identifier from webhooks.

**Reflector Tracking:** `daily_participant_session` table
```sql
TABLE daily_participant_session (
    id VARCHAR PRIMARY KEY,  -- {meeting_id}:{user_id}:{joined_at_ms}
    meeting_id VARCHAR,
    session_id VARCHAR,  -- From webhook (per-participant)
    user_id VARCHAR,
    user_name VARCHAR,
    joined_at TIMESTAMP,
    left_at TIMESTAMP
)
```
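The composite primary key keeps a participant's repeated joins distinguishable. A minimal sketch of composing it from the documented `{meeting_id}:{user_id}:{joined_at_ms}` format (the helper name is hypothetical; the real code may differ):

```python
from datetime import datetime

# Hypothetical helper composing the documented primary key format
# {meeting_id}:{user_id}:{joined_at_ms}; illustrative only.
def participant_session_id(meeting_id: str, user_id: str, joined_at: datetime) -> str:
    # joined_at is expected to be a timezone-aware UTC datetime.
    joined_at_ms = int(joined_at.timestamp() * 1000)
    return f"{meeting_id}:{user_id}:{joined_at_ms}"
```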
---

## Time-Based Matching

### Problem Statement

Daily.co's recordings API does not reliably return `mtgSessionId`, making it impossible to directly link recordings to meetings via Daily.co's identifiers.

**Example API response:**
```json
{
  "id": "recording-uuid",
  "room_name": "daily-private-igor-20260110042117",
  "start_ts": 1768018896,
  "mtgSessionId": null   ← Missing!
}
```

### Solution: Time-Based Matching

**Implementation:** `reflector/db/meetings.py:get_by_room_name_and_time()`
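The document names the implementation without showing it; a minimal sketch of the idea follows, assuming a `databases`-style async connection, timezone-aware `start_date` values (as in the schema above), and an illustrative 24-hour search window. The real `get_by_room_name_and_time()` may differ:

```python
from datetime import datetime, timedelta, timezone

# Sketch of time-based matching, not the actual implementation in
# reflector/db/meetings.py. Query shape and the 24h window are assumptions.
async def get_by_room_name_and_time(db, room_name: str, start_ts: int):
    recorded_at = datetime.fromtimestamp(start_ts, tz=timezone.utc)
    # A recording can only belong to a meeting that started at or before the
    # recording itself, so search a bounded window ending at recorded_at.
    rows = await db.fetch_all(
        "SELECT id, start_date FROM meeting"
        " WHERE room_name = :room_name"
        " AND start_date BETWEEN :lo AND :hi",
        {
            "room_name": room_name,
            "lo": recorded_at - timedelta(hours=24),
            "hi": recorded_at,
        },
    )
    if not rows:
        return None
    # Room names are timestamped per meeting, so at most one row is expected;
    # fall back to the meeting that started closest to the recording.
    return min(rows, key=lambda r: abs(r["start_date"] - recorded_at))
```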
---

## Multitrack Recording Details

### track_keys JSON Array

**Schema:** `recording.track_keys` (JSON, nullable)
```sql
-- Example recording with 2 audio tracks
{
  "id": "b0fa94da-73b5-4f95-9239-5216a682a505",
  "track_keys": [
    "igormonadical/daily-private-igor-20260110042117/1768018896877-890c0eae-e186-4534-a7bd-7c794b7d6d7f-cam-audio-1768018914565",
    "igormonadical/daily-private-igor-20260110042117/1768018896877-9660e8e9-4297-4f17-951d-0b2bf2401803-cam-audio-1768018899286"
  ]
}
```

**Semantics:**
- `track_keys = null` → Not multitrack (cloud recording)
- `track_keys = []` → Multitrack recording with no audio captured (silence/muted)
- `track_keys = [...]` → Multitrack with N audio tracks

**Property:** `recording.is_multitrack` (Python)
```python
@property
def is_multitrack(self) -> bool:
    return self.track_keys is not None and len(self.track_keys) > 0
```

### Track Filename Format

Daily.co multitrack filenames encode timing and participant information:

**Format:** `{recording_start_ts}-{participant_id}-cam-audio-{track_start_ts}`

**Example:** `1768018896877-890c0eae-e186-4534-a7bd-7c794b7d6d7f-cam-audio-1768018914565`

**Parsed Components:**
```python
# reflector/utils/daily.py:25-60
class DailyRecordingFilename(NamedTuple):
    recording_start_ts: int  # 1768018896877 (milliseconds)
    participant_id: str      # 890c0eae-e186-4534-a7bd-7c794b7d6d7f
    track_start_ts: int      # 1768018914565 (milliseconds)
```

**Note:** Browser downloads from S3 add `.webm` extension due to MIME headers, but S3 object keys have no extension.
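A hedged sketch of parsing one of these keys into the tuple above (the real parser lives in `reflector/utils/daily.py`; the split logic here is an illustration that relies on the `-cam-audio-` marker so the UUID's own dashes survive):

```python
from typing import NamedTuple

class DailyRecordingFilename(NamedTuple):  # mirrors the tuple shown above
    recording_start_ts: int
    participant_id: str
    track_start_ts: int

def parse_track_key(s3_key: str) -> DailyRecordingFilename:
    # Drop the S3 prefix, keep the bare filename.
    basename = s3_key.rsplit("/", 1)[-1]
    # Split on the "-cam-audio-" marker rather than on every dash.
    head, _, track_ts = basename.rpartition("-cam-audio-")
    # The leading timestamp ends at the first dash; the rest is the UUID.
    recording_ts, _, participant_id = head.partition("-")
    return DailyRecordingFilename(int(recording_ts), participant_id, int(track_ts))

# Example, using the key shown above:
# parse_track_key("igormonadical/daily-private-igor-20260110042117/"
#                 "1768018896877-890c0eae-e186-4534-a7bd-7c794b7d6d7f-cam-audio-1768018914565")
# -> DailyRecordingFilename(1768018896877,
#                           "890c0eae-e186-4534-a7bd-7c794b7d6d7f",
#                           1768018914565)
```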
### Video Track Filtering
|
||||
|
||||
Daily.co API returns both audio and video tracks, but Reflector only processes audio.
|
||||
|
||||
**Filtering Logic:** `reflector/worker/process.py:660`
|
||||
```python
|
||||
track_keys = [t.s3Key for t in recording.tracks if t.type == "audio"]
|
||||
```
|
||||
|
||||
**Example API Response:**
|
||||
```json
|
||||
{
|
||||
"tracks": [
|
||||
{"type": "audio", "s3Key": "...cam-audio-1768018914565"},
|
||||
{"type": "audio", "s3Key": "...cam-audio-1768018899286"},
|
||||
{"type": "video", "s3Key": "...cam-video-1768018897095"} ← Filtered out
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Result:** Only 2 audio tracks stored in `recording.track_keys`, video track discarded.
|
||||
|
||||
**Rationale:** Reflector is audio transcription system; video not needed for processing.
|
||||
|
||||
### Track-to-Participant Mapping
|
||||
|
||||
**Flow:**
|
||||
1. Daily.co webhook/polling provides `track_keys` array
|
||||
2. Each track filename contains `participant_id`
|
||||
3. Reflector queries Daily.co API: `GET /meetings/{mtgSessionId}/participants`
|
||||
4. Maps `participant_id` → `user_name` (see the sketch below)
|
||||
5. Stores in `transcript.participants` JSON:
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": "890c0eae-e186-4534-a7bd-7c794b7d6d7f",
|
||||
"speaker": 0,
|
||||
"name": "test2",
|
||||
"user_id": "907f2cc1-eaab-435f-8ee2-09185f416b22"
|
||||
},
|
||||
{
|
||||
"id": "9660e8e9-4297-4f17-951d-0b2bf2401803",
|
||||
"speaker": 1,
|
||||
"name": "test",
|
||||
"user_id": "907f2cc1-eaab-435f-8ee2-09185f416b22"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
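Putting steps 2 through 5 together, a hedged sketch of the mapping, reusing the filename parser sketched earlier (field names follow the participants response shown in this document):

```python
def build_participants(track_keys: list[str], daily_participants: list[dict]) -> list[dict]:
    # daily_participants come from GET /meetings/{mtgSessionId}/participants
    by_id = {p["participant_id"]: p for p in daily_participants}
    result = []
    for speaker, key in enumerate(track_keys):
        pid = parse_track_filename(key).participant_id
        info = by_id.get(pid, {})
        result.append({
            "id": pid,
            "speaker": speaker,  # speaker index == track index
            "name": info.get("user_name"),
            "user_id": info.get("user_id"),
        })
    return result
```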
**Diarization:** Multitrack recordings don't need speaker diarization AI — speaker identity comes from separate audio tracks.
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
### Meeting: daily-private-igor-20260110042117
|
||||
|
||||
**Context:** User conducted test recording with start/stop cycles, producing 3 recordings.
|
||||
|
||||
#### Database State
|
||||
|
||||
```sql
|
||||
-- Meeting
|
||||
id: 034804b8-cee2-4fb4-94d7-122f6f068a61
|
||||
room_name: daily-private-igor-20260110042117
|
||||
start_date: 2026-01-10 04:21:17+00
|
||||
```
|
||||
|
||||
#### Daily.co API Response
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": "f4a50f94-053c-4f9d-bda6-78ad051fbc36",
|
||||
"room_name": "daily-private-igor-20260110042117",
|
||||
"start_ts": 1768018885,
|
||||
"duration": 4,
|
||||
"status": "finished",
|
||||
"mtgSessionId": "c04334de-42a0-4c2a-96be-a49b068dca85",
|
||||
"tracks": [
|
||||
{"type": "audio", "s3Key": "...62e8f3ae...cam-audio-1768018885417"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "b0fa94da-73b5-4f95-9239-5216a682a505",
|
||||
"room_name": "daily-private-igor-20260110042117",
|
||||
"start_ts": 1768018896,
|
||||
"duration": 80,
|
||||
"status": "finished",
|
||||
"mtgSessionId": "4cdae3c0-86cb-4578-8a6d-3a228bb48345",
|
||||
"tracks": [
|
||||
{"type": "audio", "s3Key": "...890c0eae...cam-audio-1768018914565"},
|
||||
{"type": "audio", "s3Key": "...9660e8e9...cam-audio-1768018899286"},
|
||||
{"type": "video", "s3Key": "...9660e8e9...cam-video-1768018897095"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "05edf519-9048-4b49-9a75-73e9826fd950",
|
||||
"room_name": "daily-private-igor-20260110042117",
|
||||
"start_ts": 1768018914,
|
||||
"duration": 62,
|
||||
"status": "finished",
|
||||
"mtgSessionId": "4cdae3c0-86cb-4578-8a6d-3a228bb48345",
|
||||
"tracks": [
|
||||
{"type": "audio", "s3Key": "...890c0eae...cam-audio-1768018914948"}
|
||||
]
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
**Key Observations:**
|
||||
- 3 recording objects returned by Daily.co
|
||||
- 2 different `mtgSessionId` values (2 different meeting instances)
|
||||
- Recording #2 has 3 tracks (2 audio + 1 video)
|
||||
- Timestamps: 1768018885 → 1768018896 (+11s) → 1768018914 (+18s)
|
||||
|
||||
#### Reflector Database
|
||||
|
||||
**Recordings:**
|
||||
```
|
||||
┌──────────────────────────────────────┬──────────────┬────────────┬──────────────────────────────────────┐
|
||||
│ id │ track_count │ duration │ mtgSessionId │
|
||||
├──────────────────────────────────────┼──────────────┼────────────┼──────────────────────────────────────┤
|
||||
│ f4a50f94-053c-4f9d-bda6-78ad051fbc36 │ 1 │ 4s │ c04334de-42a0-4c2a-96be-a49b068dca85 │
|
||||
│ b0fa94da-73b5-4f95-9239-5216a682a505 │ 2 (video=0) │ 80s │ 4cdae3c0-86cb-4578-8a6d-3a228bb48345 │
|
||||
│ 05edf519-9048-4b49-9a75-73e9826fd950 │ 1 │ 62s │ 4cdae3c0-86cb-4578-8a6d-3a228bb48345 │
|
||||
└──────────────────────────────────────┴──────────────┴────────────┴──────────────────────────────────────┘
|
||||
```
|
||||
**Note:** Recording #2 has 2 audio tracks (video filtered out), not 3.
|
||||
|
||||
**Transcripts:**
|
||||
```
|
||||
┌──────────────────────────────────────┬──────────────────────────────────────┬──────────────┬──────────────────────────────────────────────┐
|
||||
│ id │ recording_id │ participants │ title │
|
||||
├──────────────────────────────────────┼──────────────────────────────────────┼──────────────┼──────────────────────────────────────────────┤
|
||||
│ 17149b1f-546c-4837-80a0-f8140bd16592 │ f4a50f94-053c-4f9d-bda6-78ad051fbc36 │ 1 (test) │ (empty - no speech) │
|
||||
│ 49801332-3222-4c11-bdb2-375479fc87f2 │ b0fa94da-73b5-4f95-9239-5216a682a505 │ 2 (test, │ "Examination and Validation Procedures │
|
||||
│ │ │ test2) │ Review" │
|
||||
│ e5271e12-20fb-42d2-b5a8-21438abadef9 │ 05edf519-9048-4b49-9a75-73e9826fd950 │ 1 (test2) │ "Technical Sound Check Procedure Review" │
|
||||
└──────────────────────────────────────┴──────────────────────────────────────┴──────────────┴──────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
**Transcript Content:**
|
||||
|
||||
*Transcript #1* (17149b1f): Empty WebVTT (no audio captured)
|
||||
|
||||
*Transcript #2* (49801332):
|
||||
```webvtt
|
||||
WEBVTT
|
||||
|
||||
00:00:03.109 --> 00:00:05.589
|
||||
<v Speaker1>Test, test, test. Test, test, test, test, test.
|
||||
|
||||
00:00:19.829 --> 00:00:22.710
|
||||
<v Speaker0>Test test test test test test test test test test test.
|
||||
```
|
||||
**AI-Generated Summary:**
|
||||
> "The meeting focused on the critical importance of rigorous testing for ensuring reliability and quality, with test and test2 emphasizing the need for a structured testing framework and meticulous documentation..."
|
||||
|
||||
*Transcript #3* (e5271e12):
|
||||
```webvtt
|
||||
WEBVTT
|
||||
|
||||
00:00:02.029 --> 00:00:04.910
|
||||
<v Speaker0>Test, test, test, test, test, test, test, test, test, test, test.
|
||||
```
|
||||
|
||||
#### Validation: track_keys → participants
|
||||
|
||||
**Recording #2 (b0fa94da) tracks:**
|
||||
```json
|
||||
[
|
||||
".../890c0eae-e186-4534-a7bd-7c794b7d6d7f-cam-audio-...",
|
||||
".../9660e8e9-4297-4f17-951d-0b2bf2401803-cam-audio-..."
|
||||
]
|
||||
```
|
||||
|
||||
**Transcript #2 (49801332) participants:**
|
||||
```json
|
||||
[
|
||||
{"id": "890c0eae-e186-4534-a7bd-7c794b7d6d7f", "speaker": 0, "name": "test2"},
|
||||
{"id": "9660e8e9-4297-4f17-951d-0b2bf2401803", "speaker": 1, "name": "test"}
|
||||
]
|
||||
```
|
||||
|
||||
### Data Flow
|
||||
|
||||
```
|
||||
Daily.co API: 3 recordings
|
||||
↓
|
||||
Polling: _poll_raw_tracks_recordings()
|
||||
↓
|
||||
Worker: process_multitrack_recording.delay() × 3
|
||||
↓
|
||||
DB: 3 recording rows created
|
||||
↓
|
||||
Pipeline: Audio processing + transcription × 3
|
||||
↓
|
||||
DB: 3 transcript rows created (1:1 with recordings)
|
||||
↓
|
||||
UI: User sees 3 separate transcripts
|
||||
```
|
||||
|
||||
**Result:** ✅ 1:1 Recording → Transcript relationship maintained.
|
||||
|
||||
|
||||
---
|
||||
**Document Version:** 1.0
|
||||
**Last Verified:** 2026-01-15
|
||||
**Data Source:** Production database + Daily.co API inspection
|
||||
421
server/docs/daily_pipeline.md
Normal file
@@ -0,0 +1,421 @@
|
||||
# Daily.co pipeline
|
||||
|
||||
This document details every external call, storage operation, and database write that occurs when a new Daily.co recording is discovered.
|
||||
It also covers common logic shared with other pipelines, so not everything here is Daily-specific.
|
||||
|
||||
**This document was generated on 2025-12-12; details may have changed since then.**
|
||||
|
||||
## Trigger
|
||||
|
||||
Two entry points, both converging to the same handler:
|
||||
|
||||
1. **Webhook**: Daily.co sends `POST /v1/daily/webhook` with `recording.ready-to-download`
|
||||
2. **Polling**: `GET /recordings` (paginated, max 100/call) → filter new → convert to same payload format
|
||||
|
||||
Both produce `RecordingReadyPayload` and call `handleRecordingReady(payload)`.
|
||||
|
||||
```
|
||||
┌─────────────────┐ ┌──────────────────────────┐
|
||||
│ Daily Webhook │────▶│ RecordingReadyPayload │
|
||||
│ (push) │ │ {room_name, recording_id│
|
||||
└─────────────────┘ │ tracks[], ...} │
|
||||
└────────────┬─────────────┘
|
||||
┌─────────────────┐ │
|
||||
│ GET /recordings│ ▼
|
||||
│ (poll) │────▶ convert ──▶ handleRecordingReady()
|
||||
└─────────────────┘ │
|
||||
▼
|
||||
┌────────────────────────┐
|
||||
│ process_multitrack_ │
|
||||
│ recording pipeline │
|
||||
└────────────────────────┘
|
||||
```
|
||||
|
||||
**Polling API**: `GET https://api.daily.co/v1/recordings`
|
||||
- Pagination: `limit` (max 100), `starting_after`, `ending_before`
|
||||
- Rate limit: ~2 req/sec
|
||||
- Response: `{total_count, data: Recording[]}`
|
||||
|
||||
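A hedged sketch of the pagination loop, using `httpx` for illustration (the production poller is `poll_daily_recordings` and differs in structure):

```python
import asyncio

import httpx

async def list_all_recordings(api_key: str) -> list[dict]:
    recordings: list[dict] = []
    starting_after: str | None = None
    async with httpx.AsyncClient(
        base_url="https://api.daily.co/v1",
        headers={"Authorization": f"Bearer {api_key}"},
    ) as client:
        while True:
            params: dict = {"limit": 100}
            if starting_after:
                params["starting_after"] = starting_after
            resp = await client.get("/recordings", params=params)
            resp.raise_for_status()
            page = resp.json()["data"]
            recordings.extend(page)
            if len(page) < 100:  # short page means we reached the end
                return recordings
            starting_after = page[-1]["id"]  # cursor = last recording id
            await asyncio.sleep(0.5)  # stay under the ~2 req/sec limit
```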
```mermaid
|
||||
flowchart TB
|
||||
subgraph Trigger["1. Recording Discovery - Daily.co Webhook"]
|
||||
DAILY_WEBHOOK["Daily.co sends POST /v1/daily/webhook<br/>type: recording.ready-to-download"]
|
||||
VERIFY["Verify X-Webhook-Signature (HMAC)"]
|
||||
PARSE["Parse DailyWebhookEvent<br/>Extract tracks[], room_name, recording_id"]
|
||||
FILTER["Filter audio tracks only<br/>track_keys = [t.s3Key for t in tracks if t.type == 'audio']"]
|
||||
DISPATCH["process_multitrack_recording.delay()"]
|
||||
|
||||
DAILY_WEBHOOK --> VERIFY --> PARSE --> FILTER --> DISPATCH
|
||||
end
|
||||
|
||||
subgraph Init["2. Recording Initialization"]
|
||||
FETCH_MEETING[DB READ: meetings_controller.get_by_room_name]
|
||||
FETCH_ROOM[DB READ: rooms_controller.get_by_name]
|
||||
DAILY_API_REC[Daily API: GET /recordings/recording_id]
|
||||
DAILY_API_PART[Daily API: GET /meetings/mtgSessionId/participants]
|
||||
CREATE_RECORDING[DB WRITE: recordings_controller.create]
|
||||
CREATE_TRANSCRIPT[DB WRITE: transcripts_controller.add]
|
||||
MAP_PARTICIPANTS[DB WRITE: transcript.participants upsert]
|
||||
end
|
||||
|
||||
subgraph Pipeline["3. Processing Pipeline"]
|
||||
direction TB
|
||||
PAD[Track Padding & Mixdown]
|
||||
TRANSCRIBE[GPU: Transcription per track]
|
||||
TOPICS[LLM: Topic Detection]
|
||||
TITLE[LLM: Title Generation]
|
||||
SUMMARY[LLM: Summary Generation]
|
||||
end
|
||||
|
||||
subgraph Storage["4. S3 Operations"]
|
||||
S3_PRESIGN[S3: generate_presigned_url for tracks]
|
||||
S3_UPLOAD_PADDED[S3 UPLOAD: padded tracks temp]
|
||||
S3_UPLOAD_MP3[S3 UPLOAD: audio.mp3]
|
||||
S3_DELETE_TEMP[S3 DELETE: cleanup temp files]
|
||||
end
|
||||
|
||||
subgraph PostProcess["5. Post-Processing"]
|
||||
CONSENT[Consent check & cleanup]
|
||||
ZULIP[Zulip: send/update message]
|
||||
WEBHOOK_OUT[Webhook: POST to room.webhook_url]
|
||||
end
|
||||
|
||||
Trigger --> Init --> Pipeline
|
||||
Pipeline --> Storage
|
||||
Pipeline --> PostProcess
|
||||
```
|
||||
|
||||
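The VERIFY step in the trigger subgraph checks `X-Webhook-Signature` against the raw request body. A sketch of a typical timestamp-plus-body HMAC check follows; the exact message format Daily.co signs is an assumption here, so confirm it against their webhook docs before relying on it:

```python
import hashlib
import hmac

def verify_webhook_signature(secret: str, timestamp: str, raw_body: bytes, signature: str) -> bool:
    # ASSUMPTION: signature = hex(HMAC-SHA256(secret, "{timestamp}.{body}"))
    message = timestamp.encode() + b"." + raw_body
    expected = hmac.new(secret.encode(), message, hashlib.sha256).hexdigest()
    # constant-time comparison avoids timing side channels
    return hmac.compare_digest(expected, signature)
```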
## Detailed Sequence: Daily.co Multitrack Recording
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant DailyCo as Daily.co
|
||||
participant API as FastAPI /v1/daily/webhook
|
||||
participant Worker as Celery Worker
|
||||
participant DB as PostgreSQL
|
||||
participant DailyAPI as Daily.co REST API
|
||||
participant S3 as AWS S3
|
||||
participant GPU as Modal.com GPU
|
||||
participant LLM as LLM Service
|
||||
participant WS as WebSocket
|
||||
participant Zulip as Zulip
|
||||
participant ExtWH as External Webhook
|
||||
|
||||
Note over DailyCo,API: Phase 0: Webhook Receipt
|
||||
DailyCo->>API: POST /v1/daily/webhook
|
||||
Note right of DailyCo: X-Webhook-Signature, X-Webhook-Timestamp
|
||||
API->>API: verify_webhook_signature()
|
||||
API->>API: Extract audio track s3Keys from payload.tracks[]
|
||||
API->>Worker: process_multitrack_recording.delay()
|
||||
API-->>DailyCo: 200 OK
|
||||
|
||||
Note over Worker,DailyAPI: Phase 1: Recording Initialization
|
||||
Worker->>DB: SELECT meeting WHERE room_name=?
|
||||
Worker->>DB: SELECT room WHERE name=?
|
||||
Worker->>DailyAPI: GET /recordings/{recording_id}
|
||||
DailyAPI-->>Worker: {mtgSessionId, ...}
|
||||
Worker->>DailyAPI: GET /meetings/{mtgSessionId}/participants
|
||||
DailyAPI-->>Worker: [{participant_id, user_name}, ...]
|
||||
Worker->>DB: INSERT INTO recording
|
||||
Worker->>DB: INSERT INTO transcript (status='idle')
|
||||
loop For each track_key (parse participant_id from filename)
|
||||
Worker->>DB: UPSERT transcript.participants[speaker=idx, name=X]
|
||||
end
|
||||
|
||||
Note over Worker,S3: Phase 2: Track Padding
|
||||
Worker->>DB: UPDATE transcript SET status='processing'
|
||||
Worker->>WS: broadcast STATUS='processing'
|
||||
loop For each track in track_keys (N tracks)
|
||||
Worker->>S3: generate_presigned_url(track_key, DAILYCO_BUCKET)
|
||||
S3-->>Worker: presigned_url (2hr)
|
||||
Note over Worker: PyAV: read WebM, extract start_time
|
||||
Note over Worker: PyAV: adelay filter (pad silence)
|
||||
Worker->>S3: PUT file_pipeline/{id}/tracks/padded_{idx}.webm
|
||||
Worker->>S3: generate_presigned_url(padded_{idx}.webm)
|
||||
end
|
||||
|
||||
Note over Worker,S3: Phase 3: Audio Mixdown
|
||||
Note over Worker: PyAV: amix filter → stereo MP3
|
||||
Worker->>DB: UPDATE transcript SET duration=X
|
||||
Worker->>WS: broadcast DURATION
|
||||
Worker->>S3: PUT {transcript_id}/audio.mp3
|
||||
Worker->>DB: UPDATE transcript SET audio_location='storage'
|
||||
|
||||
Note over Worker: Phase 4: Waveform
|
||||
Note over Worker: Generate peaks from MP3
|
||||
Worker->>DB: UPDATE events+=WAVEFORM
|
||||
Worker->>WS: broadcast WAVEFORM
|
||||
|
||||
Note over Worker,GPU: Phase 5: Transcription (N GPU calls)
|
||||
loop For each padded track URL (N tracks)
|
||||
Worker->>GPU: POST /v1/audio/transcriptions-from-url
|
||||
Note right of GPU: {audio_file_url, language, batch:true}
|
||||
GPU-->>Worker: {words: [{word, start, end}, ...]}
|
||||
Note over Worker: Assign speaker=track_idx to words
|
||||
end
|
||||
Note over Worker: Merge all words, sort by start time
|
||||
Worker->>DB: UPDATE events+=TRANSCRIPT
|
||||
Worker->>WS: broadcast TRANSCRIPT
|
||||
|
||||
Note over Worker,S3: Cleanup temp files
|
||||
loop For each padded file
|
||||
Worker->>S3: DELETE padded_{idx}.webm
|
||||
end
|
||||
|
||||
Note over Worker,LLM: Phase 6: Topic Detection (C LLM calls)
|
||||
Note over Worker: C = ceil(total_words / 300)
|
||||
loop For each 300-word chunk (C chunks)
|
||||
Worker->>LLM: TOPIC_PROMPT + words[i:i+300]
|
||||
Note right of LLM: "Extract main topic title + 2-sentence summary"
|
||||
LLM-->>Worker: TitleSummary{title, summary}
|
||||
Worker->>DB: UPSERT topics[]
|
||||
Worker->>DB: UPDATE events+=TOPIC
|
||||
Worker->>WS: broadcast TOPIC
|
||||
end
|
||||
|
||||
Note over Worker,LLM: Phase 7a: Title Generation (1 LLM call)
|
||||
Note over Worker: Input: all TitleSummary[].title joined
|
||||
Worker->>LLM: TITLE_PROMPT
|
||||
Note right of LLM: "Generate concise title from topic titles"
|
||||
LLM-->>Worker: "Meeting Title"
|
||||
Worker->>DB: UPDATE transcript SET title=X
|
||||
Worker->>DB: UPDATE events+=FINAL_TITLE
|
||||
Worker->>WS: broadcast FINAL_TITLE
|
||||
|
||||
Note over Worker,LLM: Phase 7b: Summary Generation (2+2M LLM calls)
|
||||
Note over Worker: Reconstruct full transcript from TitleSummary[].transcript
|
||||
opt If participants unknown
|
||||
Worker->>LLM: PARTICIPANTS_PROMPT
|
||||
LLM-->>Worker: ParticipantsResponse
|
||||
end
|
||||
Worker->>LLM: SUBJECTS_PROMPT (call #1)
|
||||
Note right of LLM: "Main high-level topics? Max 6"
|
||||
LLM-->>Worker: SubjectsResponse{subjects: ["A", "B", ...]}
|
||||
|
||||
loop For each subject (M subjects, max 6)
|
||||
Worker->>LLM: DETAILED_SUBJECT_PROMPT (call #2..#1+M)
|
||||
Note right of LLM: "Info about 'A': decisions, actions, deadlines"
|
||||
LLM-->>Worker: detailed_response (discarded after next call)
|
||||
Worker->>LLM: PARAGRAPH_SUMMARY_PROMPT (call #2+M..#1+2M)
|
||||
Note right of LLM: "Summarize in 1 paragraph"
|
||||
LLM-->>Worker: paragraph → summaries[]
|
||||
end
|
||||
|
||||
Worker->>LLM: RECAP_PROMPT (call #2+2M)
|
||||
Note right of LLM: "High-level quick recap, 1 paragraph"
|
||||
LLM-->>Worker: recap
|
||||
Note over Worker: long_summary = "# Quick recap\n{recap}\n# Summary\n**A**\n{para1}..."
|
||||
Note over Worker: short_summary = recap only
|
||||
Worker->>DB: UPDATE long_summary, short_summary
|
||||
Worker->>DB: UPDATE events+=FINAL_LONG_SUMMARY
|
||||
Worker->>WS: broadcast FINAL_LONG_SUMMARY
|
||||
Worker->>DB: UPDATE events+=FINAL_SHORT_SUMMARY
|
||||
Worker->>WS: broadcast FINAL_SHORT_SUMMARY
|
||||
|
||||
Note over Worker,DB: Phase 8: Finalize
|
||||
Worker->>DB: UPDATE transcript SET status='ended'
|
||||
Worker->>DB: UPDATE events+=STATUS
|
||||
Worker->>WS: broadcast STATUS='ended'
|
||||
|
||||
Note over Worker,ExtWH: Phase 9: Post-Processing Chain
|
||||
Worker->>DB: SELECT meeting_consent WHERE meeting_id=?
|
||||
alt Any consent denied
|
||||
Worker->>S3: DELETE tracks from DAILYCO_BUCKET
|
||||
Worker->>S3: DELETE audio.mp3 from TRANSCRIPT_BUCKET
|
||||
Worker->>DB: UPDATE transcript SET audio_deleted=true
|
||||
end
|
||||
|
||||
opt Room has zulip_auto_post=true
|
||||
alt Existing zulip_message_id
|
||||
Worker->>Zulip: PATCH /api/v1/messages/{id}
|
||||
else New
|
||||
Worker->>Zulip: POST /api/v1/messages
|
||||
Zulip-->>Worker: {id}
|
||||
Worker->>DB: UPDATE transcript SET zulip_message_id=X
|
||||
end
|
||||
end
|
||||
|
||||
opt Room has webhook_url
|
||||
Worker->>ExtWH: POST {webhook_url}
|
||||
Note right of ExtWH: X-Webhook-Signature: HMAC-SHA256
|
||||
Note right of ExtWH: Body: {transcript_id, room_id, ...}
|
||||
end
|
||||
```
|
||||
|
||||
## Title & Summary Generation Data Flow
|
||||
|
||||
```mermaid
|
||||
flowchart TB
|
||||
subgraph Input["Input: TitleSummary[] from Topic Detection"]
|
||||
TS1["TitleSummary 1<br/>title: 'Q1 Budget'<br/>transcript: words[0:300]"]
|
||||
TS2["TitleSummary 2<br/>title: 'Product Launch'<br/>transcript: words[300:600]"]
|
||||
TS3["TitleSummary N..."]
|
||||
end
|
||||
|
||||
subgraph TitleGen["Title Generation"]
|
||||
T1["Extract .title from each TitleSummary"]
|
||||
T2["Concatenate: '- Q1 Budget\n- Product Launch\n...'"]
|
||||
T3["LLM: TITLE_PROMPT\n'Generate concise title from topic titles'"]
|
||||
T4["Output: FinalTitle"]
|
||||
|
||||
T1 --> T2 --> T3 --> T4
|
||||
end
|
||||
|
||||
subgraph SummaryGen["Summary Generation"]
|
||||
direction TB
|
||||
|
||||
subgraph Reconstruct["1. Reconstruct Full Transcript"]
|
||||
S1["For each TitleSummary.transcript.as_segments()"]
|
||||
S2["Map speaker ID → name"]
|
||||
S3["Build: 'Alice: hello\nBob: hi\n...'"]
|
||||
S1 --> S2 --> S3
|
||||
end
|
||||
|
||||
subgraph Subjects["2. Extract Subjects - LLM call #1"]
|
||||
S4["LLM: SUBJECTS_PROMPT\n'Main high-level topics? Max 6'"]
|
||||
S5["subjects[] = ['Budget Review', ...]"]
|
||||
S4 --> S5
|
||||
end
|
||||
|
||||
subgraph DetailedSum["3. Per-Subject Summary - LLM calls #2 to #(1+2M)"]
|
||||
S6["For each subject:"]
|
||||
S7["LLM: DETAILED_SUBJECT_PROMPT\n'Info about subject: decisions, actions...'"]
|
||||
S8["detailed_response - NOT STORED"]
|
||||
S9["LLM: PARAGRAPH_SUMMARY_PROMPT\n'Summarize in 1 paragraph'"]
|
||||
S10["paragraph → summaries[]"]
|
||||
|
||||
S6 --> S7 --> S8 --> S9 --> S10
|
||||
end
|
||||
|
||||
subgraph Recap["4. Generate Recap - LLM call #(2+2M)"]
|
||||
S11["Concatenate paragraph summaries"]
|
||||
S12["LLM: RECAP_PROMPT\n'High-level recap, 1 paragraph'"]
|
||||
S13["recap"]
|
||||
S11 --> S12 --> S13
|
||||
end
|
||||
|
||||
subgraph Output["5. Output"]
|
||||
S14["long_summary = markdown:\n# Quick recap\n[recap]\n# Summary\n**Subject 1**\n[para1]..."]
|
||||
S15["short_summary = recap only"]
|
||||
S14 --> S15
|
||||
end
|
||||
|
||||
Reconstruct --> Subjects --> DetailedSum --> Recap --> Output
|
||||
end
|
||||
|
||||
Input --> TitleGen
|
||||
Input --> SummaryGen
|
||||
```
|
||||
|
||||
### topics[] vs subjects[]
|
||||
|
||||
| | topics[] | subjects[] |
|
||||
|-|----------|------------|
|
||||
| **Source** | 300-word chunk splitting | LLM extraction from full text |
|
||||
| **Count** | Variable (words / 300) | Max 6 |
|
||||
| **Purpose** | Timeline segmentation | Summary structure |
|
||||
| **Has timestamp?** | Yes | No |
|
||||
|
||||
## External API Calls Summary
|
||||
|
||||
### 1. Daily.co REST API (called during initialization)
|
||||
|
||||
| Endpoint | Method | When | Purpose |
|
||||
|----------|--------|------|---------|
|
||||
| `GET /recordings/{recording_id}` | GET | After webhook | Get mtgSessionId for participant lookup |
|
||||
| `GET /meetings/{mtgSessionId}/participants` | GET | After above | Map participant_id → user_name |
|
||||
|
||||
### 2. GPU Service (Modal.com or Self-Hosted)
|
||||
|
||||
| Endpoint | Method | Count | Request |
|
||||
|----------|--------|-------|---------|
|
||||
| `{TRANSCRIPT_URL}/v1/audio/transcriptions-from-url` | POST | **N** (N = num tracks) | `{audio_file_url, language, batch: true}` |
|
||||
|
||||
**Note**: Diarization is NOT called for multitrack - speaker identification comes from separate tracks.
|
||||
|
||||
### 3. LLM Service (OpenAI-compatible via LlamaIndex)
|
||||
|
||||
| Phase | Operation | Input | LLM Calls | Output |
|
||||
|-------|-----------|-------|-----------|--------|
|
||||
| Topic Detection | TOPIC_PROMPT per 300-word chunk | words[i:i+300] | **C** = ceil(words/300) | TitleSummary{title, summary, timestamp} |
|
||||
| Title Generation | TITLE_PROMPT | All topic titles joined | **1** | FinalTitle |
|
||||
| Participant ID | PARTICIPANTS_PROMPT | Full transcript | **0-1** (skipped if known) | ParticipantsResponse |
|
||||
| Subject Extraction | SUBJECTS_PROMPT | Full transcript | **1** | SubjectsResponse{subjects[]} |
|
||||
| Subject Detail | DETAILED_SUBJECT_PROMPT | Full transcript + subject name | **M** (M = subjects, max 6) | detailed text (discarded) |
|
||||
| Subject Paragraph | PARAGRAPH_SUMMARY_PROMPT | Detailed text | **M** | paragraph text → summaries[] |
|
||||
| Recap | RECAP_PROMPT | All paragraph summaries | **1** | recap text |
|
||||
|
||||
**Total LLM calls**: C + 2M + 3 (+ 1 if participants unknown)
|
||||
- Short meeting (1000 words, 3 subjects): ~4 + 6 + 3 = **13 calls**
|
||||
- Long meeting (5000 words, 6 subjects): ~17 + 12 + 3 = **32 calls**
|
||||
|
||||
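The two examples above can be reproduced with a small helper:

```python
import math

def total_llm_calls(words: int, subjects: int, participants_known: bool = True) -> int:
    chunks = math.ceil(words / 300)  # C: topic-detection chunks
    return chunks + 2 * subjects + 3 + (0 if participants_known else 1)

assert total_llm_calls(1000, 3) == 13  # short meeting
assert total_llm_calls(5000, 6) == 32  # long meeting
```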
## S3 Operations Summary
|
||||
|
||||
### Source Bucket: `DAILYCO_STORAGE_AWS_BUCKET_NAME`
|
||||
Daily.co uploads raw-tracks recordings here.
|
||||
|
||||
| Operation | Key Pattern | When |
|
||||
|-----------|-------------|------|
|
||||
| **READ** (presign) | `{domain}/{room_name}/{ts}/{participant_id}-cam-audio-{ts}.webm` | Track acquisition |
|
||||
| **DELETE** | Same as above | Consent denied cleanup |
|
||||
|
||||
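The READ row above is a standard boto3 presign; a sketch (the key is illustrative, the expiry follows the ~2h links in the sequence diagram):

```python
import boto3

def presign_track(bucket: str, track_key: str) -> str:
    s3 = boto3.client("s3")
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": track_key},
        ExpiresIn=2 * 3600,  # ~2h, matching the sequence diagram
    )
```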
### Transcript Storage Bucket: `TRANSCRIPT_STORAGE_AWS_BUCKET_NAME`
|
||||
Reflector's own storage.
|
||||
|
||||
| Operation | Key Pattern | When |
|
||||
|-----------|-------------|------|
|
||||
| **PUT** | `file_pipeline/{transcript_id}/tracks/padded_{idx}.webm` | After track padding |
|
||||
| **READ** (presign) | Same | For GPU transcription |
|
||||
| **DELETE** | Same | After transcription complete |
|
||||
| **PUT** | `{transcript_id}/audio.mp3` | After mixdown |
|
||||
| **DELETE** | Same | Consent denied cleanup |
|
||||
|
||||
## Database Operations
|
||||
|
||||
### Tables Written
|
||||
|
||||
| Table | Operation | When |
|
||||
|-------|-----------|------|
|
||||
| `recording` | INSERT | Initialization |
|
||||
| `transcript` | INSERT | Initialization |
|
||||
| `transcript` | UPDATE (participants) | After Daily API participant fetch |
|
||||
| `transcript` | UPDATE (status, events, duration, topics, title, summaries, etc.) | Throughout pipeline |
|
||||
|
||||
### Transcript Update Sequence
|
||||
|
||||
```
|
||||
1. INSERT: id, name, status='idle', source_kind='room', user_id, recording_id, room_id, meeting_id
|
||||
2. UPDATE: participants[] (speaker index → participant name mapping)
|
||||
3. UPDATE: status='processing', events+=[{event:'STATUS', data:{value:'processing'}}]
|
||||
4. UPDATE: duration=X, events+=[{event:'DURATION', data:{duration:X}}]
|
||||
5. UPDATE: audio_location='storage'
|
||||
6. UPDATE: events+=[{event:'WAVEFORM', data:{waveform:[...]}}]
|
||||
7. UPDATE: events+=[{event:'TRANSCRIPT', data:{text, translation}}]
|
||||
8. UPDATE: topics[]+=topic, events+=[{event:'TOPIC'}] -- repeated per chunk
|
||||
9. UPDATE: title=X, events+=[{event:'FINAL_TITLE'}]
|
||||
10. UPDATE: long_summary=X, events+=[{event:'FINAL_LONG_SUMMARY'}]
|
||||
11. UPDATE: short_summary=X, events+=[{event:'FINAL_SHORT_SUMMARY'}]
|
||||
12. UPDATE: status='ended', events+=[{event:'STATUS', data:{value:'ended'}}]
|
||||
13. UPDATE: zulip_message_id=X -- if Zulip enabled
|
||||
14. UPDATE: audio_deleted=true -- if consent denied
|
||||
```
|
||||
|
||||
## WebSocket Events
|
||||
|
||||
All broadcast to room `ts:{transcript_id}`:
|
||||
|
||||
| Event | Payload | Trigger |
|
||||
|-------|---------|---------|
|
||||
| STATUS | `{value: "processing"\|"ended"\|"error"}` | Status transitions |
|
||||
| DURATION | `{duration: float}` | After audio processing |
|
||||
| WAVEFORM | `{waveform: float[]}` | After waveform generation |
|
||||
| TRANSCRIPT | `{text: string, translation: string\|null}` | After transcription merge |
|
||||
| TOPIC | `{id, title, summary, timestamp, duration, transcript, words}` | Per topic detected |
|
||||
| FINAL_TITLE | `{title: string}` | After LLM title generation |
|
||||
| FINAL_LONG_SUMMARY | `{long_summary: string}` | After LLM summary |
|
||||
| FINAL_SHORT_SUMMARY | `{short_summary: string}` | After LLM recap |
|
||||
|
||||
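A hedged consumer sketch that dispatches these events; the `{event, data}` envelope mirrors the stored transcript events, and transport details are deliberately omitted:

```python
def handle_transcript_event(message: dict) -> None:
    event, data = message["event"], message["data"]
    if event == "STATUS":
        print("status:", data["value"])  # processing | ended | error
    elif event == "TOPIC":
        print("topic:", data["title"], "@", data["timestamp"])
    elif event == "FINAL_TITLE":
        print("title:", data["title"])
    elif event == "FINAL_LONG_SUMMARY":
        print(data["long_summary"])
```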
User-room broadcasts to `user:{user_id}`:
|
||||
- `TRANSCRIPT_STATUS`
|
||||
- `TRANSCRIPT_FINAL_TITLE`
|
||||
- `TRANSCRIPT_DURATION`
|
||||
238
server/docs/video-platforms/README.md
Normal file
@@ -0,0 +1,238 @@
|
||||
# Reflector Architecture: Whereby + Daily.co Recording Storage
|
||||
|
||||
## System Overview
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
subgraph "Actors"
|
||||
APP[Our App<br/>Reflector]
|
||||
WHEREBY[Whereby Service<br/>External]
|
||||
DAILY[Daily.co Service<br/>External]
|
||||
end
|
||||
|
||||
subgraph "AWS S3 Buckets"
|
||||
TRANSCRIPT_BUCKET[Transcript Bucket<br/>reflector-transcripts<br/>Output: Processed MP3s]
|
||||
WHEREBY_BUCKET[Whereby Bucket<br/>reflector-whereby-recordings<br/>Input: Raw MP4s]
|
||||
DAILY_BUCKET[Daily.co Bucket<br/>reflector-dailyco-recordings<br/>Input: Raw WebM tracks]
|
||||
end
|
||||
|
||||
subgraph "AWS Infrastructure"
|
||||
SQS[SQS Queue<br/>Whereby notifications]
|
||||
end
|
||||
|
||||
subgraph "Database"
|
||||
DB[(PostgreSQL<br/>Recordings, Transcripts, Meetings)]
|
||||
end
|
||||
|
||||
APP -->|Write processed| TRANSCRIPT_BUCKET
|
||||
APP -->|Read/Delete| WHEREBY_BUCKET
|
||||
APP -->|Read/Delete| DAILY_BUCKET
|
||||
APP -->|Poll| SQS
|
||||
APP -->|Store metadata| DB
|
||||
|
||||
WHEREBY -->|Write recordings| WHEREBY_BUCKET
|
||||
WHEREBY_BUCKET -->|S3 Event| SQS
|
||||
WHEREBY -->|Participant webhooks<br/>room.client.joined/left| APP
|
||||
|
||||
DAILY -->|Write recordings| DAILY_BUCKET
|
||||
DAILY -->|Recording webhook<br/>recording.ready-to-download| APP
|
||||
```
|
||||
|
||||
**Note on Webhook vs S3 Event for Recording Processing:**
|
||||
- **Whereby**: Uses S3 Events → SQS for recording availability (S3 as source of truth, no race conditions)
|
||||
- **Daily.co**: Uses webhooks for recording availability (more immediate, built-in reliability)
|
||||
- **Both**: Use webhooks for participant tracking (real-time updates)
|
||||
|
||||
## Credentials & Permissions
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "Master Credentials"
|
||||
MASTER[TRANSCRIPT_STORAGE_AWS_*<br/>Access Key ID + Secret]
|
||||
end
|
||||
|
||||
subgraph "Whereby Upload Credentials"
|
||||
WHEREBY_CREDS[AWS_WHEREBY_ACCESS_KEY_*<br/>Access Key ID + Secret]
|
||||
end
|
||||
|
||||
subgraph "Daily.co Upload Role"
|
||||
DAILY_ROLE[DAILY_STORAGE_AWS_ROLE_ARN<br/>IAM Role ARN]
|
||||
end
|
||||
|
||||
subgraph "Our App Uses"
|
||||
MASTER -->|Read/Write/Delete| TRANSCRIPT_BUCKET[Transcript Bucket]
|
||||
MASTER -->|Read/Delete| WHEREBY_BUCKET[Whereby Bucket]
|
||||
MASTER -->|Read/Delete| DAILY_BUCKET[Daily.co Bucket]
|
||||
MASTER -->|Poll/Delete| SQS[SQS Queue]
|
||||
end
|
||||
|
||||
subgraph "We Give To Services"
|
||||
WHEREBY_CREDS -->|Passed in API call| WHEREBY_SERVICE[Whereby Service]
|
||||
WHEREBY_SERVICE -->|Write Only| WHEREBY_BUCKET
|
||||
|
||||
DAILY_ROLE -->|Passed in API call| DAILY_SERVICE[Daily.co Service]
|
||||
DAILY_SERVICE -->|Assume Role| DAILY_ROLE
|
||||
DAILY_SERVICE -->|Write Only| DAILY_BUCKET
|
||||
end
|
||||
```
|
||||
|
||||
# Video Platform Recording Integration
|
||||
|
||||
This document explains how Reflector receives and identifies multitrack audio recordings from different video platforms.
|
||||
|
||||
## Platform Comparison
|
||||
|
||||
| Platform | Delivery Method | Track Identification |
|
||||
|----------|----------------|---------------------|
|
||||
| **Daily.co** | Webhook | Explicit track list in payload |
|
||||
| **Whereby** | SQS (S3 notifications) | Single file per notification |
|
||||
|
||||
---
|
||||
|
||||
## Daily.co
|
||||
|
||||
**Note:** Primary discovery via polling (`poll_daily_recordings`), webhooks as backup.
|
||||
|
||||
Daily.co sends **webhooks** to notify Reflector when recordings are ready; as noted above, polling is the primary discovery path and webhooks act as a backup.
|
||||
|
||||
### How It Works
|
||||
|
||||
1. **Daily.co sends webhook** when recording is ready
|
||||
- Event type: `recording.ready-to-download`
|
||||
- Endpoint: `/v1/daily/webhook` (`reflector/views/daily.py:46-102`)
|
||||
|
||||
2. **Webhook payload explicitly includes track list**:
|
||||
```json
|
||||
{
|
||||
"recording_id": "7443ee0a-dab1-40eb-b316-33d6c0d5ff88",
|
||||
"room_name": "daily-20251020193458",
|
||||
"tracks": [
|
||||
{
|
||||
"type": "audio",
|
||||
"s3Key": "monadical/daily-20251020193458/1760988935484-52f7f48b-fbab-431f-9a50-87b9abfc8255-cam-audio-1760988935922",
|
||||
"size": 831843
|
||||
},
|
||||
{
|
||||
"type": "audio",
|
||||
"s3Key": "monadical/daily-20251020193458/1760988935484-a37c35e3-6f8e-4274-a482-e9d0f102a732-cam-audio-1760988943823",
|
||||
"size": 408438
|
||||
},
|
||||
{
|
||||
"type": "video",
|
||||
"s3Key": "monadical/daily-20251020193458/...-video.webm",
|
||||
"size": 30000000
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
3. **System extracts audio tracks** (`daily.py:211`):
|
||||
```python
|
||||
track_keys = [t.s3Key for t in tracks if t.type == "audio"]
|
||||
```
|
||||
|
||||
4. **Triggers multitrack processing** (`daily.py:213-218`):
|
||||
```python
|
||||
process_multitrack_recording.delay(
|
||||
bucket_name=bucket_name, # reflector-dailyco-local
|
||||
room_name=room_name, # daily-20251020193458
|
||||
recording_id=recording_id, # 7443ee0a-dab1-40eb-b316-33d6c0d5ff88
|
||||
track_keys=track_keys # Only audio s3Keys
|
||||
)
|
||||
```
|
||||
|
||||
### Key Advantage: No Ambiguity
|
||||
|
||||
Even though multiple meetings may share the same S3 bucket/folder (`monadical/`), **there's no ambiguity** because:
|
||||
- Each webhook payload contains the exact `s3Key` list for that specific `recording_id`
|
||||
- No need to scan folders or guess which files belong together
|
||||
- Each track's s3Key includes the room timestamp subfolder (e.g., `daily-20251020193458/`)
|
||||
|
||||
The room name includes a timestamp (`daily-20251020193458`) to keep recordings organized, but **the webhook's explicit track list is what prevents mixing files from different meetings**.
|
||||
|
||||
### Track Timeline Extraction
|
||||
|
||||
Daily.co provides timing information in two places:
|
||||
|
||||
**1. PyAV WebM Metadata (current approach)**:
|
||||
```python
|
||||
# Read from WebM container stream metadata
|
||||
stream.start_time = 8.130s # Meeting-relative timing
|
||||
```
|
||||
|
||||
**2. Filename Timestamps (alternative approach, commit 3bae9076)**:
|
||||
```
|
||||
Filename format: {recording_start_ts}-{uuid}-cam-audio-{track_start_ts}.webm
|
||||
Example: 1760988935484-52f7f48b-fbab-431f-9a50-87b9abfc8255-cam-audio-1760988935922.webm
|
||||
|
||||
Parse timestamps:
|
||||
- recording_start_ts: 1760988935484 (Unix ms)
|
||||
- track_start_ts: 1760988935922 (Unix ms)
|
||||
- offset: (1760988935922 - 1760988935484) / 1000 = 0.438s
|
||||
```
|
||||
|
||||
**Time Difference (PyAV vs Filename)**:
|
||||
```
|
||||
Track 0:
|
||||
Filename offset: 438ms
|
||||
PyAV metadata: 229ms
|
||||
Difference: ~200ms
|
||||
|
||||
Track 1:
|
||||
Filename offset: 8339ms
|
||||
PyAV metadata: 8130ms
|
||||
Difference: ~200ms
|
||||
```
|
||||
|
||||
**Consistent ~200ms delta** suggests network/encoding delay between file upload initiation (filename) and actual audio stream start (metadata).
|
||||
|
||||
**Note:** The ~200ms difference observed in this test recording is not crucial for timing accuracy. Either method (filename timestamps or PyAV metadata) works well for multi-track alignment. Filename timestamps have the advantage of being officially documented by Daily.co.
|
||||
|
||||
**Current implementation uses PyAV metadata** because:
|
||||
- More accurate (represents when audio actually started)
|
||||
- Padding BEFORE transcription produces correct Whisper timestamps automatically
|
||||
- No manual offset adjustment needed during transcript merge
|
||||
|
||||
### Why Re-encoding During Padding
|
||||
|
||||
Padding happens to require re-encoding, and that re-encoding turns out to matter for Daily.co + Whisper:
|
||||
|
||||
**Problem:** Daily.co skips frames in recordings when the microphone is muted or paused
|
||||
- WebM containers have gaps where audio frames should be
|
||||
- Whisper doesn't understand these gaps and produces incorrect timestamps
|
||||
- Example: 5s of audio with 2s muted → file has frames only for 3s, Whisper thinks duration is 3s
|
||||
|
||||
**Solution:** Re-encoding via PyAV filter graph (`adelay` + `aresample`)
|
||||
- Restores missing frames as silence
|
||||
- Produces continuous audio stream without gaps
|
||||
- Whisper now sees correct duration and produces accurate timestamps
|
||||
|
||||
**Why combined with padding:**
|
||||
- Already re-encoding for padding (adding initial silence)
|
||||
- More performant to do both operations in single PyAV pipeline
|
||||
- Padded values needed for mixdown anyway (creating final MP3)
|
||||
|
||||
Implementation: `main_multitrack_pipeline.py:_apply_audio_padding_streaming()`
|
||||
|
||||
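A condensed sketch of such a PyAV filter-graph pass; codec choice, stream handling, and error handling are simplified relative to the real `_apply_audio_padding_streaming()`:

```python
import av

def pad_and_reencode(src_path: str, dst_path: str, delay_ms: int) -> None:
    inp = av.open(src_path)
    out = av.open(dst_path, mode="w")
    in_stream = inp.streams.audio[0]
    out_stream = out.add_stream("libopus", rate=48000)  # codec is illustrative

    graph = av.filter.Graph()
    src = graph.add_abuffer(template=in_stream)
    delay = graph.add("adelay", f"delays={delay_ms}:all=1")  # pad leading silence
    resample = graph.add("aresample", "async=1")  # fill internal gaps with silence
    sink = graph.add("abuffersink")
    src.link_to(delay)
    delay.link_to(resample)
    resample.link_to(sink)
    graph.configure()

    def drain() -> None:
        while True:
            try:
                out_frame = graph.pull()
            except (BlockingIOError, EOFError):
                return
            for packet in out_stream.encode(out_frame):
                out.mux(packet)

    for frame in inp.decode(in_stream):
        graph.push(frame)
        drain()
    graph.push(None)  # signal EOF to the filter graph
    drain()
    for packet in out_stream.encode(None):  # flush the encoder
        out.mux(packet)
    out.close()
    inp.close()
```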
---
|
||||
|
||||
## Whereby (SQS-based)
|
||||
|
||||
Whereby uses **AWS SQS** (via S3 notifications) to notify Reflector when files are uploaded.
|
||||
|
||||
### How It Works
|
||||
|
||||
1. **Whereby uploads recording** to S3
|
||||
2. **S3 sends notification** to SQS queue (one notification per file)
|
||||
3. **Reflector polls SQS queue** (`worker/process.py:process_messages()`); see the sketch after this list
|
||||
4. **System processes single file** (`worker/process.py:process_recording()`)
|
||||
|
||||
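A minimal sketch of that polling loop with boto3; the queue URL and the `handle_s3_event` callback are illustrative, not the actual Reflector code:

```python
import boto3

sqs = boto3.client("sqs")
QUEUE_URL = "https://sqs.us-east-1.amazonaws.com/123456789012/whereby-recordings"  # illustrative

def poll_once(handle_s3_event) -> None:
    resp = sqs.receive_message(
        QueueUrl=QUEUE_URL,
        MaxNumberOfMessages=10,
        WaitTimeSeconds=20,  # long polling
    )
    for msg in resp.get("Messages", []):
        # each S3 event notification describes exactly one uploaded file
        handle_s3_event(msg["Body"])
        sqs.delete_message(QueueUrl=QUEUE_URL, ReceiptHandle=msg["ReceiptHandle"])
```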
### Key Difference from Daily.co
|
||||
|
||||
**Whereby (SQS):** The system receives an S3 notification ("file X was created"); it only knows about one file at a time and would need to scan the folder to find related files
|
||||
|
||||
**Daily.co (Webhook):** Daily explicitly tells the system which files belong together in the webhook payload
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
#
|
||||
# This file serves as an example of a possible configuration
|
||||
# All the settings are described here: reflector/settings.py
|
||||
#
|
||||
|
||||
## =======================================================
|
||||
## User authentication
|
||||
## =======================================================
|
||||
|
||||
## Using jwt/authentik
|
||||
AUTH_BACKEND=jwt
|
||||
AUTH_JWT_AUDIENCE=
|
||||
|
||||
## =======================================================
|
||||
## Transcription backend
|
||||
##
|
||||
## Check reflector/processors/audio_transcript_* for the
|
||||
## full list of available transcription backend
|
||||
## =======================================================
|
||||
|
||||
## Using local whisper
|
||||
#TRANSCRIPT_BACKEND=whisper
|
||||
|
||||
## Using serverless modal.com (require reflector-gpu-modal deployed)
|
||||
#TRANSCRIPT_BACKEND=modal
|
||||
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
|
||||
#TRANSCRIPT_MODAL_API_KEY=xxxxx
|
||||
|
||||
TRANSCRIPT_BACKEND=modal
|
||||
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
|
||||
TRANSCRIPT_MODAL_API_KEY=
|
||||
|
||||
## =======================================================
|
||||
## Translation backend
|
||||
##
|
||||
## Only available in modal atm
|
||||
## =======================================================
|
||||
TRANSLATION_BACKEND=modal
|
||||
TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
|
||||
#TRANSLATION_MODAL_API_KEY=xxxxx
|
||||
|
||||
## =======================================================
|
||||
## LLM backend
|
||||
##
|
||||
## Responsible for titles and short summary
|
||||
## Check reflector/llm/* for the full list of available
|
||||
## llm backend implementation
|
||||
## =======================================================
|
||||
|
||||
## Context size for summary generation (tokens)
|
||||
# LLM_MODEL=microsoft/phi-4
|
||||
LLM_CONTEXT_WINDOW=16000
|
||||
LLM_URL=
|
||||
LLM_API_KEY=sk-
|
||||
|
||||
## =======================================================
|
||||
## Diarization
|
||||
##
|
||||
## Only available on modal
|
||||
## To allow diarization, you need to expose the files to be downloaded by the pipeline
|
||||
## =======================================================
|
||||
DIARIZATION_ENABLED=false
|
||||
DIARIZATION_BACKEND=modal
|
||||
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
|
||||
#DIARIZATION_MODAL_API_KEY=xxxxx
|
||||
|
||||
|
||||
## =======================================================
|
||||
## Sentry
|
||||
## =======================================================
|
||||
|
||||
## Sentry DSN configuration
|
||||
#SENTRY_DSN=
|
||||
26
server/migrations/versions/05f8688d6895_add_action_items.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""add_action_items
|
||||
|
||||
Revision ID: 05f8688d6895
|
||||
Revises: bbafedfa510c
|
||||
Create Date: 2025-12-12 11:57:50.209658
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "05f8688d6895"
|
||||
down_revision: Union[str, None] = "bbafedfa510c"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column("transcript", sa.Column("action_items", sa.JSON(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("transcript", "action_items")
|
||||
@@ -0,0 +1,28 @@
|
||||
"""add workflow_run_id to transcript
|
||||
|
||||
Revision ID: 0f943fede0e0
|
||||
Revises: 20251217000000
|
||||
Create Date: 2025-12-16 01:54:13.855106
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0f943fede0e0"
|
||||
down_revision: Union[str, None] = "20251217000000"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
with op.batch_alter_table("transcript", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("workflow_run_id", sa.String(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("transcript", schema=None) as batch_op:
|
||||
batch_op.drop_column("workflow_run_id")
|
||||
@@ -0,0 +1,40 @@
|
||||
"""add cloud recording support
|
||||
|
||||
Revision ID: 1b1e6a6fc465
|
||||
Revises: bd3a729bb379
|
||||
Create Date: 2026-01-09 17:17:33.535620
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "1b1e6a6fc465"
|
||||
down_revision: Union[str, None] = "bd3a729bb379"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("meeting", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column("daily_composed_video_s3_key", sa.String(), nullable=True)
|
||||
)
|
||||
batch_op.add_column(
|
||||
sa.Column("daily_composed_video_duration", sa.Integer(), nullable=True)
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("meeting", schema=None) as batch_op:
|
||||
batch_op.drop_column("daily_composed_video_duration")
|
||||
batch_op.drop_column("daily_composed_video_s3_key")
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,50 @@
|
||||
"""add_platform_support
|
||||
|
||||
Revision ID: 1e49625677e4
|
||||
Revises: 9e3f7b2a4c8e
|
||||
Create Date: 2025-10-08 13:17:29.943612
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "1e49625677e4"
|
||||
down_revision: Union[str, None] = "9e3f7b2a4c8e"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Add platform field with default 'whereby' for backward compatibility."""
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"platform",
|
||||
sa.String(),
|
||||
nullable=True,
|
||||
server_default=None,
|
||||
)
|
||||
)
|
||||
|
||||
with op.batch_alter_table("meeting", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"platform",
|
||||
sa.String(),
|
||||
nullable=False,
|
||||
server_default="whereby",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Remove platform field."""
|
||||
with op.batch_alter_table("meeting", schema=None) as batch_op:
|
||||
batch_op.drop_column("platform")
|
||||
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.drop_column("platform")
|
||||
@@ -0,0 +1,35 @@
|
||||
"""add skip_consent to room
|
||||
|
||||
Revision ID: 20251217000000
|
||||
Revises: 05f8688d6895
|
||||
Create Date: 2025-12-17 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "20251217000000"
|
||||
down_revision: Union[str, None] = "05f8688d6895"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"skip_consent",
|
||||
sa.Boolean(),
|
||||
nullable=False,
|
||||
server_default=sa.text("false"),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.drop_column("skip_consent")
|
||||
@@ -0,0 +1,79 @@
|
||||
"""add daily participant session table with immutable left_at
|
||||
|
||||
Revision ID: 2b92a1b03caa
|
||||
Revises: f8294b31f022
|
||||
Create Date: 2025-11-13 20:29:30.486577
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "2b92a1b03caa"
|
||||
down_revision: Union[str, None] = "f8294b31f022"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create table
|
||||
op.create_table(
|
||||
"daily_participant_session",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("meeting_id", sa.String(), nullable=False),
|
||||
sa.Column("room_id", sa.String(), nullable=False),
|
||||
sa.Column("session_id", sa.String(), nullable=False),
|
||||
sa.Column("user_id", sa.String(), nullable=True),
|
||||
sa.Column("user_name", sa.String(), nullable=False),
|
||||
sa.Column("joined_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("left_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(["meeting_id"], ["meeting.id"], ondelete="CASCADE"),
|
||||
sa.ForeignKeyConstraint(["room_id"], ["room.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
with op.batch_alter_table("daily_participant_session", schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
"idx_daily_session_meeting_left", ["meeting_id", "left_at"], unique=False
|
||||
)
|
||||
batch_op.create_index("idx_daily_session_room", ["room_id"], unique=False)
|
||||
|
||||
# Create trigger function to prevent left_at from being updated once set
|
||||
op.execute("""
|
||||
CREATE OR REPLACE FUNCTION prevent_left_at_update()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF OLD.left_at IS NOT NULL THEN
|
||||
RAISE EXCEPTION 'left_at is immutable once set';
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
""")
|
||||
|
||||
# Create trigger
|
||||
op.execute("""
|
||||
CREATE TRIGGER prevent_left_at_update_trigger
|
||||
BEFORE UPDATE ON daily_participant_session
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION prevent_left_at_update();
|
||||
""")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop trigger
|
||||
op.execute(
|
||||
"DROP TRIGGER IF EXISTS prevent_left_at_update_trigger ON daily_participant_session;"
|
||||
)
|
||||
|
||||
# Drop trigger function
|
||||
op.execute("DROP FUNCTION IF EXISTS prevent_left_at_update();")
|
||||
|
||||
# Drop indexes and table
|
||||
with op.batch_alter_table("daily_participant_session", schema=None) as batch_op:
|
||||
batch_op.drop_index("idx_daily_session_room")
|
||||
batch_op.drop_index("idx_daily_session_meeting_left")
|
||||
|
||||
op.drop_table("daily_participant_session")
|
||||
@@ -0,0 +1,30 @@
|
||||
"""Make room platform non-nullable with dynamic default
|
||||
|
||||
Revision ID: 5d6b9df9b045
|
||||
Revises: 2b92a1b03caa
|
||||
Create Date: 2025-11-21 13:22:25.756584
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "5d6b9df9b045"
|
||||
down_revision: Union[str, None] = "2b92a1b03caa"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.execute("UPDATE room SET platform = 'whereby' WHERE platform IS NULL")
|
||||
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.alter_column("platform", existing_type=sa.String(), nullable=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.alter_column("platform", existing_type=sa.String(), nullable=True)
|
||||
@@ -0,0 +1,44 @@
|
||||
"""replace_use_hatchet_with_use_celery
|
||||
|
||||
Revision ID: 80beb1ea3269
|
||||
Revises: bd3a729bb379
|
||||
Create Date: 2026-01-20 16:26:25.555869
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "80beb1ea3269"
|
||||
down_revision: Union[str, None] = "bd3a729bb379"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"use_celery",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("false"),
|
||||
nullable=False,
|
||||
)
|
||||
)
|
||||
batch_op.drop_column("use_hatchet")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"use_hatchet",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("false"),
|
||||
nullable=False,
|
||||
)
|
||||
)
|
||||
batch_op.drop_column("use_celery")
|
||||
38
server/migrations/versions/bbafedfa510c_add_user_table.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""add user table
|
||||
|
||||
Revision ID: bbafedfa510c
|
||||
Revises: 5d6b9df9b045
|
||||
Create Date: 2025-11-19 21:06:30.543262
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "bbafedfa510c"
|
||||
down_revision: Union[str, None] = "5d6b9df9b045"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"user",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("email", sa.String(), nullable=False),
|
||||
sa.Column("authentik_uid", sa.String(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
|
||||
with op.batch_alter_table("user", schema=None) as batch_op:
|
||||
batch_op.create_index("idx_user_authentik_uid", ["authentik_uid"], unique=True)
|
||||
batch_op.create_index("idx_user_email", ["email"], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("user")
|
||||
@@ -0,0 +1,35 @@
|
||||
"""add use_hatchet to room
|
||||
|
||||
Revision ID: bd3a729bb379
|
||||
Revises: 0f943fede0e0
|
||||
Create Date: 2025-12-16 16:34:03.594231
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "bd3a729bb379"
|
||||
down_revision: Union[str, None] = "0f943fede0e0"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"use_hatchet",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("false"),
|
||||
nullable=False,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("room", schema=None) as batch_op:
|
||||
batch_op.drop_column("use_hatchet")
|
||||
@@ -0,0 +1,23 @@
|
||||
"""merge cloud recording and celery heads
|
||||
|
||||
Revision ID: e69f08ead8ea
|
||||
Revises: 1b1e6a6fc465, 80beb1ea3269
|
||||
Create Date: 2026-01-21 21:39:10.326841
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "e69f08ead8ea"
|
||||
down_revision: Union[str, None] = ("1b1e6a6fc465", "80beb1ea3269")
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
pass
|
||||
28
server/migrations/versions/f8294b31f022_add_track_keys.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""add_track_keys
|
||||
|
||||
Revision ID: f8294b31f022
|
||||
Revises: 1e49625677e4
|
||||
Create Date: 2025-10-27 18:52:17.589167
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "f8294b31f022"
|
||||
down_revision: Union[str, None] = "1e49625677e4"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
with op.batch_alter_table("recording", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("track_keys", sa.JSON(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("recording", schema=None) as batch_op:
|
||||
batch_op.drop_column("track_keys")
|
||||
@@ -8,7 +8,7 @@ readme = "README.md"
|
||||
dependencies = [
|
||||
"aiohttp>=3.9.0",
|
||||
"aiohttp-cors>=0.7.0",
|
||||
"av>=10.0.0",
|
||||
"av>=15.0.0",
|
||||
"requests>=2.31.0",
|
||||
"aiortc>=1.5.0",
|
||||
"sortedcontainers>=2.4.0",
|
||||
@@ -39,6 +39,7 @@ dependencies = [
|
||||
"pytest-env>=1.1.5",
|
||||
"webvtt-py>=0.5.0",
|
||||
"icalendar>=6.0.0",
|
||||
"hatchet-sdk>=0.47.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
@@ -126,6 +127,7 @@ markers = [
|
||||
select = [
|
||||
"I", # isort - import sorting
|
||||
"F401", # unused imports
|
||||
"E402", # module level import not at top of file
|
||||
"PLC0415", # import-outside-top-level - detect inline imports
|
||||
]
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ from reflector.events import subscribers_shutdown, subscribers_startup
|
||||
from reflector.logger import logger
|
||||
from reflector.metrics import metrics_init
|
||||
from reflector.settings import settings
|
||||
from reflector.views.daily import router as daily_router
|
||||
from reflector.views.meetings import router as meetings_router
|
||||
from reflector.views.rooms import router as rooms_router
|
||||
from reflector.views.rtc_offer import router as rtc_offer_router
|
||||
@@ -96,6 +97,7 @@ app.include_router(user_api_keys_router, prefix="/v1")
|
||||
app.include_router(user_ws_router, prefix="/v1")
|
||||
app.include_router(zulip_router, prefix="/v1")
|
||||
app.include_router(whereby_router, prefix="/v1")
|
||||
app.include_router(daily_router, prefix="/v1/daily")
|
||||
add_pagination(app)
|
||||
|
||||
# prepare celery
|
||||
|
||||
@@ -1,13 +1,19 @@
|
||||
import asyncio
|
||||
import functools
|
||||
from uuid import uuid4
|
||||
|
||||
from celery import current_task
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.llm import llm_session_id
|
||||
|
||||
|
||||
def asynctask(f):
|
||||
@functools.wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
async def run_with_db():
|
||||
task_id = current_task.request.id if current_task else None
|
||||
llm_session_id.set(task_id or f"random-{uuid4().hex}")
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
try:
|
||||
|
||||
@@ -6,8 +6,10 @@ from jose import JWTError, jwt
 from pydantic import BaseModel

 from reflector.db.user_api_keys import user_api_keys_controller
+from reflector.db.users import user_controller
 from reflector.logger import logger
 from reflector.settings import settings
+from reflector.utils import generate_uuid4

 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
 api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
@@ -74,9 +76,21 @@ async def _authenticate_user(
     if jwt_token:
         try:
             payload = jwtauth.verify_token(jwt_token)
-            sub = payload["sub"]
+            authentik_uid = payload["sub"]
             email = payload["email"]
-            user_infos.append(UserInfo(sub=sub, email=email))
+
+            user = await user_controller.get_by_authentik_uid(authentik_uid)
+            if not user:
+                logger.info(
+                    f"Creating new user on first login: {authentik_uid} ({email})"
+                )
+                user = await user_controller.create_or_update(
+                    id=generate_uuid4(),
+                    authentik_uid=authentik_uid,
+                    email=email,
+                )
+
+            user_infos.append(UserInfo(sub=user.id, email=email))
         except JWTError as e:
             logger.error(f"JWT error: {e}")
             raise HTTPException(status_code=401, detail="Invalid authentication")

server/reflector/dailyco_api/README.md (new file, 6 lines)
@@ -0,0 +1,6 @@
Anything related to Daily.co API interaction:

- webhook event shapes
- REST API client

No existing REST API client was found in the wild; the official library is for driving a video call as a bot.

server/reflector/dailyco_api/__init__.py (new file, 111 lines)
@@ -0,0 +1,111 @@
"""
Daily.co API Module
"""

# Client
from .client import DailyApiClient, DailyApiError, RecordingType

# Request models
from .requests import (
    CreateMeetingTokenRequest,
    CreateRoomRequest,
    CreateWebhookRequest,
    MeetingTokenProperties,
    RecordingsBucketConfig,
    RoomProperties,
    UpdateWebhookRequest,
)

# Response models
from .responses import (
    FinishedRecordingResponse,
    MeetingParticipant,
    MeetingParticipantsResponse,
    MeetingResponse,
    MeetingTokenResponse,
    RecordingResponse,
    RecordingS3Info,
    RoomPresenceParticipant,
    RoomPresenceResponse,
    RoomResponse,
    WebhookResponse,
)

# Webhook utilities
from .webhook_utils import (
    extract_room_name,
    parse_participant_joined,
    parse_participant_left,
    parse_recording_error,
    parse_recording_ready,
    parse_recording_started,
    parse_webhook_payload,
    verify_webhook_signature,
)

# Webhook models
from .webhooks import (
    DailyTrack,
    DailyWebhookEvent,
    DailyWebhookEventUnion,
    ParticipantJoinedEvent,
    ParticipantJoinedPayload,
    ParticipantLeftEvent,
    ParticipantLeftPayload,
    RecordingErrorEvent,
    RecordingErrorPayload,
    RecordingReadyEvent,
    RecordingReadyToDownloadPayload,
    RecordingStartedEvent,
    RecordingStartedPayload,
)

__all__ = [
    # Client
    "DailyApiClient",
    "DailyApiError",
    "RecordingType",
    # Requests
    "CreateRoomRequest",
    "RoomProperties",
    "RecordingsBucketConfig",
    "CreateMeetingTokenRequest",
    "MeetingTokenProperties",
    "CreateWebhookRequest",
    "UpdateWebhookRequest",
    # Responses
    "RoomResponse",
    "RoomPresenceResponse",
    "RoomPresenceParticipant",
    "MeetingParticipantsResponse",
    "MeetingParticipant",
    "MeetingResponse",
    "RecordingResponse",
    "FinishedRecordingResponse",
    "RecordingS3Info",
    "MeetingTokenResponse",
    "WebhookResponse",
    # Webhooks
    "DailyWebhookEvent",
    "DailyWebhookEventUnion",
    "DailyTrack",
    "ParticipantJoinedEvent",
    "ParticipantJoinedPayload",
    "ParticipantLeftEvent",
    "ParticipantLeftPayload",
    "RecordingStartedEvent",
    "RecordingStartedPayload",
    "RecordingReadyEvent",
    "RecordingReadyToDownloadPayload",
    "RecordingErrorEvent",
    "RecordingErrorPayload",
    # Webhook utilities
    "verify_webhook_signature",
    "extract_room_name",
    "parse_webhook_payload",
    "parse_participant_joined",
    "parse_participant_left",
    "parse_recording_started",
    "parse_recording_ready",
    "parse_recording_error",
]

server/reflector/dailyco_api/client.py (new file, 608 lines)
@@ -0,0 +1,608 @@
"""
Daily.co API Client

Complete async client for the Daily.co REST API with Pydantic models.

Reference: https://docs.daily.co/reference/rest-api
"""

from http import HTTPStatus
from typing import Any, Literal
from uuid import UUID

import httpx
import structlog

from reflector.utils.string import NonEmptyString

from .requests import (
    CreateMeetingTokenRequest,
    CreateRoomRequest,
    CreateWebhookRequest,
    UpdateWebhookRequest,
)
from .responses import (
    MeetingParticipantsResponse,
    MeetingResponse,
    MeetingTokenResponse,
    RecordingResponse,
    RoomPresenceResponse,
    RoomResponse,
    WebhookResponse,
)

logger = structlog.get_logger(__name__)

RecordingType = Literal["cloud", "raw-tracks"]


class DailyApiError(Exception):
    """Daily.co API error with full request/response context."""

    def __init__(self, operation: str, response: httpx.Response):
        self.operation = operation
        self.response = response
        self.status_code = response.status_code
        self.response_body = response.text
        self.url = str(response.url)
        self.request_body = (
            response.request.content.decode() if response.request.content else None
        )

        super().__init__(
            f"Daily.co API error: {operation} failed with status {self.status_code}: {response.text}"
        )


class DailyApiClient:
    """
    Complete async client for the Daily.co REST API.

    Usage:
        # Direct usage
        client = DailyApiClient(api_key="your_api_key")
        room = await client.create_room(CreateRoomRequest(name="my-room"))
        await client.close()  # Clean up when done

        # Context manager (recommended)
        async with DailyApiClient(api_key="your_api_key") as client:
            room = await client.create_room(CreateRoomRequest(name="my-room"))
    """

    BASE_URL = "https://api.daily.co/v1"
    DEFAULT_TIMEOUT = 10.0

    def __init__(
        self,
        api_key: NonEmptyString,
        webhook_secret: NonEmptyString | None = None,
        timeout: float = DEFAULT_TIMEOUT,
        base_url: NonEmptyString | None = None,
    ):
        """
        Initialize the Daily.co API client.

        Args:
            api_key: Daily.co API key (Bearer token)
            webhook_secret: Base64-encoded HMAC secret for webhook verification.
                Must match the 'hmac' value provided when creating webhooks.
                Generate with: base64.b64encode(os.urandom(32)).decode()
            timeout: Default request timeout in seconds
            base_url: Override base URL (for testing)
        """
        self.api_key = api_key
        self.webhook_secret = webhook_secret
        self.timeout = timeout
        self.base_url = base_url or self.BASE_URL

        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }

        self._client: httpx.AsyncClient | None = None

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()

    async def _get_client(self) -> httpx.AsyncClient:
        if self._client is None:
            self._client = httpx.AsyncClient(timeout=self.timeout)
        return self._client

    async def close(self):
        if self._client is not None:
            await self._client.aclose()
            self._client = None

    async def _handle_response(
        self, response: httpx.Response, operation: str
    ) -> dict[str, Any]:
        """
        Handle an API response with error logging.

        Args:
            response: HTTP response
            operation: Operation name for logging (e.g., "create_room")

        Returns:
            Parsed JSON response

        Raises:
            DailyApiError: If the request failed, with full context
        """
        if response.status_code >= 400:
            logger.error(
                f"Daily.co API error: {operation}",
                status_code=response.status_code,
                response_body=response.text,
                request_body=response.request.content.decode()
                if response.request.content
                else None,
                url=str(response.url),
            )
            raise DailyApiError(operation, response)

        return response.json()

    # ==========================================================================
    # ROOMS
    # ==========================================================================

    async def create_room(self, request: CreateRoomRequest) -> RoomResponse:
        """
        Create a new Daily.co room.

        Reference: https://docs.daily.co/reference/rest-api/rooms/create-room

        Args:
            request: Room creation request with name, privacy, and properties

        Returns:
            Created room data including URL and ID

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.post(
            f"{self.base_url}/rooms",
            headers=self.headers,
            json=request.model_dump(exclude_none=True),
        )

        data = await self._handle_response(response, "create_room")
        return RoomResponse(**data)

    async def get_room(self, room_name: NonEmptyString) -> RoomResponse:
        """
        Get room configuration.

        Args:
            room_name: Daily.co room name

        Returns:
            Room configuration data

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/rooms/{room_name}",
            headers=self.headers,
        )

        data = await self._handle_response(response, "get_room")
        return RoomResponse(**data)

    async def get_room_presence(
        self, room_name: NonEmptyString
    ) -> RoomPresenceResponse:
        """
        Get current participants in a room (real-time presence).

        Reference: https://docs.daily.co/reference/rest-api/rooms/get-room-presence

        Args:
            room_name: Daily.co room name

        Returns:
            List of currently present participants with join time and duration

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/rooms/{room_name}/presence",
            headers=self.headers,
        )

        data = await self._handle_response(response, "get_room_presence")
        return RoomPresenceResponse(**data)

    async def delete_room(self, room_name: NonEmptyString) -> None:
        """
        Delete a room (idempotent - succeeds even if the room doesn't exist).

        Reference: https://docs.daily.co/reference/rest-api/rooms/delete-room

        Args:
            room_name: Daily.co room name

        Raises:
            DailyApiError: If the API request fails (except 404)
        """
        client = await self._get_client()
        response = await client.delete(
            f"{self.base_url}/rooms/{room_name}",
            headers=self.headers,
        )

        # Idempotent delete - 404 means already deleted
        if response.status_code == HTTPStatus.NOT_FOUND:
            logger.debug("Room not found (already deleted)", room_name=room_name)
            return

        await self._handle_response(response, "delete_room")

    # ==========================================================================
    # MEETINGS
    # ==========================================================================

    async def get_meeting(self, meeting_id: NonEmptyString) -> MeetingResponse:
        """
        Get full meeting information including participants.

        Reference: https://docs.daily.co/reference/rest-api/meetings/get-meeting-information

        Args:
            meeting_id: Daily.co meeting/session ID

        Returns:
            Meeting metadata including room, duration, participants, and status

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/meetings/{meeting_id}",
            headers=self.headers,
        )

        data = await self._handle_response(response, "get_meeting")
        return MeetingResponse(**data)

    async def get_meeting_participants(
        self,
        meeting_id: NonEmptyString,
        limit: int | None = None,
        joined_after: NonEmptyString | None = None,
        joined_before: NonEmptyString | None = None,
    ) -> MeetingParticipantsResponse:
        """
        Get historical participant data from a completed meeting (paginated).

        Reference: https://docs.daily.co/reference/rest-api/meetings/get-meeting-participants

        Args:
            meeting_id: Daily.co meeting/session ID
            limit: Maximum number of participant records to return
            joined_after: Return participants who joined after this participant_id
            joined_before: Return participants who joined before this participant_id

        Returns:
            List of participants with join times and duration

        Raises:
            DailyApiError: If the API request fails (404 when no more participants)

        Note:
            For pagination, use joined_after with the last participant_id from the previous response.
            Returns 404 when no more participants remain.
        """
        params = {}
        if limit is not None:
            params["limit"] = limit
        if joined_after is not None:
            params["joined_after"] = joined_after
        if joined_before is not None:
            params["joined_before"] = joined_before

        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/meetings/{meeting_id}/participants",
            headers=self.headers,
            params=params,
        )

        data = await self._handle_response(response, "get_meeting_participants")
        return MeetingParticipantsResponse(**data)

    # ==========================================================================
    # RECORDINGS
    # ==========================================================================

    async def get_recording(self, recording_id: NonEmptyString) -> RecordingResponse:
        """
        Get recording metadata and status.

        Reference: https://docs.daily.co/reference/rest-api/recordings/get-recording-information
        """
        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/recordings/{recording_id}",
            headers=self.headers,
        )

        data = await self._handle_response(response, "get_recording")
        return RecordingResponse(**data)

    async def list_recordings(
        self,
        room_name: NonEmptyString | None = None,
        starting_after: str | None = None,
        ending_before: str | None = None,
        limit: int = 100,
    ) -> list[RecordingResponse]:
        """
        List recordings with optional filters.

        Reference: https://docs.daily.co/reference/rest-api/recordings

        Args:
            room_name: Filter by room name
            starting_after: Pagination cursor - recording ID to start after
            ending_before: Pagination cursor - recording ID to end before
            limit: Max results per page (default 100, max 100)

        Note: starting_after/ending_before are pagination cursors (recording IDs),
        NOT time filters. The API returns recordings in reverse chronological order.
        """
        client = await self._get_client()

        params = {"limit": limit}
        if room_name:
            params["room_name"] = room_name
        if starting_after:
            params["starting_after"] = starting_after
        if ending_before:
            params["ending_before"] = ending_before

        response = await client.get(
            f"{self.base_url}/recordings",
            headers=self.headers,
            params=params,
        )

        data = await self._handle_response(response, "list_recordings")

        if not isinstance(data, dict) or "data" not in data:
            logger.error(
                "Daily.co API returned unexpected format for list_recordings",
                data_type=type(data).__name__,
                data_keys=list(data.keys()) if isinstance(data, dict) else None,
                data_sample=str(data)[:500],
                room_name=room_name,
                operation="list_recordings",
            )
            raise httpx.HTTPStatusError(
                message=f"Unexpected response format from list_recordings: {type(data).__name__}",
                request=response.request,
                response=response,
            )

        return [RecordingResponse(**r) for r in data["data"]]

    async def start_recording(
        self,
        room_name: NonEmptyString,
        recording_type: RecordingType,
        instance_id: UUID,
    ) -> dict[str, Any]:
        """Start recording via the REST API.

        Reference: https://docs.daily.co/reference/rest-api/rooms/recordings/start

        Args:
            room_name: Daily.co room name
            recording_type: Recording type
            instance_id: UUID for this recording session

        Returns:
            Recording start confirmation from the Daily.co API

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.post(
            f"{self.base_url}/rooms/{room_name}/recordings/start",
            headers=self.headers,
            json={
                "type": recording_type,
                "instanceId": str(instance_id),
            },
        )
        return await self._handle_response(response, "start_recording")

    # ==========================================================================
    # MEETING TOKENS
    # ==========================================================================

    async def create_meeting_token(
        self, request: CreateMeetingTokenRequest
    ) -> MeetingTokenResponse:
        """
        Create a meeting token for participant authentication.

        Reference: https://docs.daily.co/reference/rest-api/meeting-tokens/create-meeting-token

        Args:
            request: Token properties including room name, user_id, permissions

        Returns:
            JWT meeting token

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.post(
            f"{self.base_url}/meeting-tokens",
            headers=self.headers,
            json=request.model_dump(exclude_none=True),
        )

        data = await self._handle_response(response, "create_meeting_token")
        return MeetingTokenResponse(**data)

    # ==========================================================================
    # WEBHOOKS
    # ==========================================================================

    async def list_webhooks(self) -> list[WebhookResponse]:
        """
        List all configured webhooks for this account.

        Reference: https://docs.daily.co/reference/rest-api/webhooks

        Returns:
            List of webhook configurations

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.get(
            f"{self.base_url}/webhooks",
            headers=self.headers,
        )

        data = await self._handle_response(response, "list_webhooks")

        # Daily.co returns the array directly (not paginated)
        if isinstance(data, list):
            return [WebhookResponse(**wh) for wh in data]

        # Future-proof: handle a potential pagination envelope
        if isinstance(data, dict) and "data" in data:
            return [WebhookResponse(**wh) for wh in data["data"]]

        logger.warning("Unexpected webhook list response format", data=data)
        return []

    async def create_webhook(self, request: CreateWebhookRequest) -> WebhookResponse:
        """
        Create a new webhook subscription.

        Reference: https://docs.daily.co/reference/rest-api/webhooks

        Args:
            request: Webhook configuration with URL, event types, and HMAC secret

        Returns:
            Created webhook with UUID and state

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.post(
            f"{self.base_url}/webhooks",
            headers=self.headers,
            json=request.model_dump(exclude_none=True),
        )

        data = await self._handle_response(response, "create_webhook")
        return WebhookResponse(**data)

    async def update_webhook(
        self, webhook_uuid: NonEmptyString, request: UpdateWebhookRequest
    ) -> WebhookResponse:
        """
        Update a webhook configuration.

        Note: Daily.co may not support PATCH for all fields.
        The common pattern is delete + recreate.

        Reference: https://docs.daily.co/reference/rest-api/webhooks

        Args:
            webhook_uuid: Webhook UUID to update
            request: Updated webhook configuration

        Returns:
            Updated webhook configuration

        Raises:
            DailyApiError: If the API request fails
        """
        client = await self._get_client()
        response = await client.patch(
            f"{self.base_url}/webhooks/{webhook_uuid}",
            headers=self.headers,
            json=request.model_dump(exclude_none=True),
        )

        data = await self._handle_response(response, "update_webhook")
        return WebhookResponse(**data)

    async def delete_webhook(self, webhook_uuid: NonEmptyString) -> None:
        """
        Delete a webhook.

        Reference: https://docs.daily.co/reference/rest-api/webhooks

        Args:
            webhook_uuid: Webhook UUID to delete

        Raises:
            DailyApiError: If the webhook is not found or deletion fails
        """
        client = await self._get_client()
        response = await client.delete(
            f"{self.base_url}/webhooks/{webhook_uuid}",
            headers=self.headers,
        )

        await self._handle_response(response, "delete_webhook")

    # ==========================================================================
    # HELPER METHODS
    # ==========================================================================

    async def find_webhook_by_url(self, url: NonEmptyString) -> WebhookResponse | None:
        """
        Find a webhook by its URL.

        Args:
            url: Webhook endpoint URL to search for

        Returns:
            Webhook if found, None otherwise
        """
        webhooks = await self.list_webhooks()
        for webhook in webhooks:
            if webhook.url == url:
                return webhook
        return None

    async def find_webhooks_by_pattern(
        self, pattern: NonEmptyString
    ) -> list[WebhookResponse]:
        """
        Find webhooks matching a URL pattern (e.g., 'ngrok').

        Args:
            pattern: String to match in webhook URLs

        Returns:
            List of matching webhooks
        """
        webhooks = await self.list_webhooks()
        return [wh for wh in webhooks if pattern in wh.url]

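A hedged usage sketch of the client defined above, following the class docstring's context-manager recommendation; the API key and room name are placeholders, and the request models are the ones from requests.py below.

    import asyncio

    from reflector.dailyco_api import CreateRoomRequest, DailyApiClient, RoomProperties


    async def main():
        # Context-manager usage closes the underlying httpx client on exit.
        async with DailyApiClient(api_key="YOUR_DAILY_API_KEY") as client:
            room = await client.create_room(
                CreateRoomRequest(
                    name="demo-room",  # placeholder name
                    privacy="private",
                    properties=RoomProperties(enable_recording="raw-tracks"),
                )
            )
            print(room.url)


    asyncio.run(main())
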
server/reflector/dailyco_api/instance_id.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""
Daily.co recording instanceId generation utilities.

Deterministic instance ID generation for cloud and raw-tracks recordings.
MUST match frontend logic.
"""

from uuid import UUID, uuid5

from reflector.utils.string import NonEmptyString

# Namespace UUID for UUIDv5 generation of raw-tracks instanceIds
# DO NOT CHANGE: breaks instanceId determinism across deployments and frontend/backend matching
RAW_TRACKS_NAMESPACE = UUID("a1b2c3d4-e5f6-7890-abcd-ef1234567890")


def generate_cloud_instance_id(meeting_id: NonEmptyString) -> UUID:
    """
    Generate the instanceId for a cloud recording.

    Cloud recordings use the meeting ID directly as instanceId.
    This ensures each meeting has one unique cloud recording.
    """
    return UUID(meeting_id)


def generate_raw_tracks_instance_id(meeting_id: NonEmptyString) -> UUID:
    """
    Generate the instanceId for a raw-tracks recording.

    Raw-tracks recordings use UUIDv5(meeting_id, namespace) to ensure a
    different instanceId from cloud while remaining deterministic.

    Daily.co requires cloud and raw-tracks to have different instanceIds
    for concurrent recording.
    """
    return uuid5(RAW_TRACKS_NAMESPACE, meeting_id)

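A small sketch of the determinism guarantee described above; the meeting UUID is a placeholder.

    from reflector.dailyco_api.instance_id import (
        generate_cloud_instance_id,
        generate_raw_tracks_instance_id,
    )

    meeting_id = "3f2a6c4e-9d1b-4c5a-8e7f-0a1b2c3d4e5f"  # placeholder meeting UUID

    # Same input always yields the same IDs (deterministic) ...
    assert generate_cloud_instance_id(meeting_id) == generate_cloud_instance_id(meeting_id)
    assert generate_raw_tracks_instance_id(meeting_id) == generate_raw_tracks_instance_id(meeting_id)

    # ... and the two recording types never collide for one meeting,
    # which Daily.co requires for concurrent cloud + raw-tracks recording.
    assert generate_cloud_instance_id(meeting_id) != generate_raw_tracks_instance_id(meeting_id)
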
server/reflector/dailyco_api/requests.py (new file, 159 lines)
@@ -0,0 +1,159 @@
"""
Daily.co API Request Models

Reference: https://docs.daily.co/reference/rest-api
"""

from typing import List, Literal

from pydantic import BaseModel, Field

from reflector.utils.string import NonEmptyString


class RecordingsBucketConfig(BaseModel):
    """
    S3 bucket configuration for raw-tracks recordings.

    Reference: https://docs.daily.co/reference/rest-api/rooms/create-room
    """

    bucket_name: NonEmptyString = Field(description="S3 bucket name")
    bucket_region: NonEmptyString = Field(description="AWS region (e.g., 'us-east-1')")
    assume_role_arn: NonEmptyString = Field(
        description="AWS IAM role ARN that Daily.co will assume to write recordings"
    )
    allow_api_access: bool = Field(
        default=True,
        description="Whether to allow API access to recording metadata",
    )


class RoomProperties(BaseModel):
    """
    Room configuration properties.
    """

    enable_recording: Literal["cloud", "local", "raw-tracks"] | None = Field(
        default=None,
        description="Recording mode: 'cloud' for mixed, 'local' for local recording, 'raw-tracks' for multitrack, None to disable",
    )
    enable_chat: bool = Field(default=True, description="Enable in-meeting chat")
    enable_screenshare: bool = Field(default=True, description="Enable screen sharing")
    enable_knocking: bool = Field(
        default=False,
        description="Enable knocking for private rooms (allows participants to request access)",
    )
    start_video_off: bool = Field(
        default=False, description="Start with video off for all participants"
    )
    start_audio_off: bool = Field(
        default=False, description="Start with audio muted for all participants"
    )
    exp: int | None = Field(
        None, description="Room expiration timestamp (Unix epoch seconds)"
    )
    recordings_bucket: RecordingsBucketConfig | None = Field(
        None, description="S3 bucket configuration for raw-tracks recordings"
    )


class CreateRoomRequest(BaseModel):
    """
    Request to create a new Daily.co room.

    Reference: https://docs.daily.co/reference/rest-api/rooms/create-room
    """

    name: NonEmptyString = Field(description="Room name (must be unique within domain)")
    privacy: Literal["public", "private"] = Field(
        default="public", description="Room privacy setting"
    )
    properties: RoomProperties = Field(
        default_factory=RoomProperties, description="Room configuration properties"
    )


class MeetingTokenProperties(BaseModel):
    """
    Properties for meeting token creation.

    Reference: https://docs.daily.co/reference/rest-api/meeting-tokens/create-meeting-token
    """

    room_name: NonEmptyString = Field(description="Room name this token is valid for")
    user_id: NonEmptyString | None = Field(
        None, description="User identifier to associate with token"
    )
    is_owner: bool = Field(
        default=False, description="Grant owner privileges to token holder"
    )
    enable_recording_ui: bool = Field(
        default=True, description="Show recording controls in UI"
    )
    eject_at_token_exp: bool = Field(
        default=False, description="Eject participant when token expires"
    )
    nbf: int | None = Field(
        None, description="Not-before timestamp (Unix epoch seconds)"
    )
    exp: int | None = Field(
        None, description="Expiration timestamp (Unix epoch seconds)"
    )


class CreateMeetingTokenRequest(BaseModel):
    """
    Request to create a meeting token for participant authentication.

    Reference: https://docs.daily.co/reference/rest-api/meeting-tokens/create-meeting-token
    """

    properties: MeetingTokenProperties = Field(description="Token properties")


class CreateWebhookRequest(BaseModel):
    """
    Request to create a webhook subscription.

    Reference: https://docs.daily.co/reference/rest-api/webhooks
    """

    url: NonEmptyString = Field(description="Webhook endpoint URL (must be HTTPS)")
    eventTypes: List[
        Literal[
            "participant.joined",
            "participant.left",
            "recording.started",
            "recording.ready-to-download",
            "recording.error",
        ]
    ] = Field(
        description="Array of event types to subscribe to (only events we handle)"
    )
    hmac: NonEmptyString = Field(
        description="Base64-encoded HMAC secret for webhook signature verification"
    )
    basicAuth: NonEmptyString | None = Field(
        None, description="Optional basic auth credentials for webhook endpoint"
    )


class UpdateWebhookRequest(BaseModel):
    """
    Request to update an existing webhook.

    Note: the Daily.co API may not support PATCH for webhooks.
    The common pattern is to delete and recreate.

    Reference: https://docs.daily.co/reference/rest-api/webhooks
    """

    url: NonEmptyString | None = Field(None, description="New webhook endpoint URL")
    eventTypes: List[NonEmptyString] | None = Field(
        None, description="New array of event types"
    )
    hmac: NonEmptyString | None = Field(None, description="New HMAC secret")
    basicAuth: NonEmptyString | None = Field(
        None, description="New basic auth credentials"
    )

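A sketch of how these request models serialize into the JSON body the client sends; the bucket values and room name are placeholders.

    from reflector.dailyco_api.requests import (
        CreateRoomRequest,
        RecordingsBucketConfig,
        RoomProperties,
    )

    request = CreateRoomRequest(
        name="standup-room",  # placeholder
        privacy="private",
        properties=RoomProperties(
            enable_recording="raw-tracks",
            recordings_bucket=RecordingsBucketConfig(
                bucket_name="my-recordings-bucket",  # placeholder
                bucket_region="us-east-1",
                assume_role_arn="arn:aws:iam::123456789012:role/daily-writer",  # placeholder
            ),
        ),
    )

    # exclude_none=True mirrors what DailyApiClient.create_room sends,
    # dropping optional fields like `exp` that were never set.
    print(request.model_dump(exclude_none=True))
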
server/reflector/dailyco_api/responses.py (new file, 224 lines)
@@ -0,0 +1,224 @@
"""
Daily.co API Response Models
"""

from typing import Any, Dict, List, Literal

from pydantic import BaseModel, Field

from reflector.dailyco_api.webhooks import DailyTrack
from reflector.utils.string import NonEmptyString

# Not documented by Daily; filled in according to observed behavior
RecordingStatus = Literal["in-progress", "finished"]


class RoomResponse(BaseModel):
    """
    Response from room creation or retrieval.

    Reference: https://docs.daily.co/reference/rest-api/rooms/create-room
    """

    id: NonEmptyString = Field(description="Unique room identifier (UUID)")
    name: NonEmptyString = Field(description="Room name used in URLs")
    api_created: bool = Field(description="Whether room was created via API")
    privacy: Literal["public", "private"] = Field(description="Room privacy setting")
    url: NonEmptyString = Field(description="Full room URL")
    created_at: NonEmptyString = Field(description="ISO 8601 creation timestamp")
    config: Dict[NonEmptyString, Any] = Field(
        default_factory=dict, description="Room configuration properties"
    )


class RoomPresenceParticipant(BaseModel):
    """
    Participant presence information in a room.

    Reference: https://docs.daily.co/reference/rest-api/rooms/get-room-presence
    """

    room: NonEmptyString = Field(description="Room name")
    id: NonEmptyString = Field(description="Participant session ID")
    userId: NonEmptyString | None = Field(None, description="User ID if provided")
    userName: NonEmptyString | None = Field(None, description="User display name")
    joinTime: NonEmptyString = Field(description="ISO 8601 join timestamp")
    duration: int = Field(description="Duration in room (seconds)")


class RoomPresenceResponse(BaseModel):
    """
    Response from the room presence endpoint.

    Reference: https://docs.daily.co/reference/rest-api/rooms/get-room-presence
    """

    total_count: int = Field(
        description="Total number of participants currently in room"
    )
    data: List[RoomPresenceParticipant] = Field(
        default_factory=list, description="Array of participant presence data"
    )


class MeetingParticipant(BaseModel):
    """
    Historical participant data from a meeting.

    Reference: https://docs.daily.co/reference/rest-api/meetings/get-meeting-participants
    """

    user_id: NonEmptyString | None = Field(None, description="User identifier")
    participant_id: NonEmptyString = Field(description="Participant session identifier")
    user_name: NonEmptyString | None = Field(None, description="User display name")
    join_time: int = Field(description="Join timestamp (Unix epoch seconds)")
    duration: int = Field(description="Duration in meeting (seconds)")


class MeetingParticipantsResponse(BaseModel):
    """
    Response from the meeting participants endpoint.

    Reference: https://docs.daily.co/reference/rest-api/meetings/get-meeting-participants
    """

    data: List[MeetingParticipant] = Field(
        default_factory=list, description="Array of participant data"
    )


class MeetingResponse(BaseModel):
    """
    Response from the meeting information endpoint.

    Reference: https://docs.daily.co/reference/rest-api/meetings/get-meeting-information
    """

    id: NonEmptyString = Field(description="Meeting session identifier (UUID)")
    room: NonEmptyString = Field(description="Room name where meeting occurred")
    start_time: int = Field(
        description="Meeting start Unix timestamp (~15s granularity)"
    )
    duration: int = Field(description="Total meeting duration in seconds")
    ongoing: bool = Field(description="Whether meeting is currently active")
    max_participants: int = Field(description="Peak concurrent participant count")
    participants: List[MeetingParticipant] = Field(
        default_factory=list, description="Array of participant session data"
    )


class RecordingS3Info(BaseModel):
    """
    S3 bucket information for a recording.

    Reference: https://docs.daily.co/reference/rest-api/recordings
    """

    bucket_name: NonEmptyString
    bucket_region: NonEmptyString
    key: NonEmptyString | None = None
    endpoint: NonEmptyString | None = None


class RecordingResponse(BaseModel):
    """
    Response from the recording retrieval endpoint (network layer).

    Duration may be None for recordings still being processed by Daily.
    Use FinishedRecordingResponse for recordings ready for processing.

    Reference: https://docs.daily.co/reference/rest-api/recordings
    """

    id: NonEmptyString = Field(description="Recording identifier")
    room_name: NonEmptyString = Field(description="Room where recording occurred")
    start_ts: int = Field(description="Recording start timestamp (Unix epoch seconds)")
    type: Literal["cloud", "raw-tracks"] | None = Field(
        None, description="Recording type (may be missing from API)"
    )
    status: RecordingStatus = Field(
        description="Recording status ('in-progress' or 'finished')"
    )
    max_participants: int | None = Field(
        None, description="Maximum participants during recording (may be missing)"
    )
    duration: int | None = Field(
        None, description="Recording duration in seconds (None if still processing)"
    )
    share_token: NonEmptyString | None = Field(
        None, description="Token for sharing recording"
    )
    s3: RecordingS3Info | None = Field(None, description="S3 bucket information")
    s3key: NonEmptyString | None = Field(
        None, description="S3 key for cloud recordings (top-level field)"
    )
    tracks: list[DailyTrack] = Field(
        default_factory=list,
        description="Track list for raw-tracks recordings (always array, never null)",
    )
    # this is not a mistake but a deliberate Daily.co naming decision
    mtgSessionId: NonEmptyString | None = Field(
        None, description="Meeting session identifier (may be missing)"
    )

    def to_finished(self) -> "FinishedRecordingResponse | None":
        """Convert to FinishedRecordingResponse if duration is available and status is finished."""
        if self.duration is None or self.status != "finished":
            return None
        return FinishedRecordingResponse(**self.model_dump())


class FinishedRecordingResponse(RecordingResponse):
    """
    Recording with confirmed duration - ready for processing.

    This model guarantees duration is present and status is finished.
    """

    status: Literal["finished"] = Field(
        description="Recording status (always 'finished')"
    )
    duration: int = Field(description="Recording duration in seconds")


class MeetingTokenResponse(BaseModel):
    """
    Response from meeting token creation.

    Reference: https://docs.daily.co/reference/rest-api/meeting-tokens/create-meeting-token
    """

    token: NonEmptyString = Field(
        description="JWT meeting token for participant authentication"
    )


class WebhookResponse(BaseModel):
    """
    Response from webhook creation or retrieval.

    Reference: https://docs.daily.co/reference/rest-api/webhooks
    """

    uuid: NonEmptyString = Field(description="Unique webhook identifier")
    url: NonEmptyString = Field(description="Webhook endpoint URL")
    hmac: NonEmptyString | None = Field(
        None, description="Base64-encoded HMAC secret for signature verification"
    )
    basicAuth: NonEmptyString | None = Field(
        None, description="Basic auth credentials if configured"
    )
    eventTypes: List[NonEmptyString] = Field(
        default_factory=list,
        description="Array of event types (e.g., ['recording.started', 'participant.joined'])",
    )
    state: Literal["ACTIVE", "FAILED"] = Field(
        description="Webhook state - FAILED after 3+ consecutive failures"
    )
    failedCount: int = Field(default=0, description="Number of consecutive failures")
    lastMomentPushed: NonEmptyString | None = Field(
        None, description="ISO 8601 timestamp of last successful push"
    )
    domainId: NonEmptyString = Field(description="Daily.co domain/account identifier")
    createdAt: NonEmptyString = Field(description="ISO 8601 creation timestamp")
    updatedAt: NonEmptyString = Field(description="ISO 8601 last update timestamp")

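A short sketch of the to_finished() gating pattern described above: processing only proceeds once Daily reports a finished status and a duration. All field values here are made up.

    from reflector.dailyco_api.responses import RecordingResponse

    rec = RecordingResponse(
        id="rec-123",          # placeholder values throughout
        room_name="demo-room",
        start_ts=1_700_000_000,
        status="in-progress",
    )

    finished = rec.to_finished()
    if finished is None:
        # Still processing: poll again later rather than downloading now.
        print("recording not ready yet")
    else:
        print(f"ready, {finished.duration}s")
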
server/reflector/dailyco_api/webhook_utils.py (new file, 228 lines)
@@ -0,0 +1,228 @@
"""
Daily.co Webhook Utilities

Utilities for verifying and parsing Daily.co webhook events.

Reference: https://docs.daily.co/reference/rest-api/webhooks
"""

import base64
import hmac
from hashlib import sha256

import structlog

from .webhooks import (
    DailyWebhookEvent,
    ParticipantJoinedPayload,
    ParticipantLeftPayload,
    RecordingErrorPayload,
    RecordingReadyToDownloadPayload,
    RecordingStartedPayload,
)

logger = structlog.get_logger(__name__)


def verify_webhook_signature(
    body: bytes,
    signature: str,
    timestamp: str,
    webhook_secret: str,
) -> bool:
    """
    Verify a Daily.co webhook signature using HMAC-SHA256.

    Daily.co signature verification:
    1. Base64-decode the webhook secret
    2. Create signed content: timestamp + '.' + body
    3. Compute HMAC-SHA256(secret, signed_content)
    4. Base64-encode the result
    5. Compare with the provided signature using constant-time comparison

    Reference: https://docs.daily.co/reference/rest-api/webhooks

    Args:
        body: Raw request body bytes
        signature: X-Webhook-Signature header value
        timestamp: X-Webhook-Timestamp header value
        webhook_secret: Base64-encoded HMAC secret

    Returns:
        True if the signature is valid, False otherwise

    Example:
        >>> body = b'{"version":"1.0.0","type":"participant.joined",...}'
        >>> signature = "abc123..."
        >>> timestamp = "1234567890"
        >>> secret = "your-base64-secret"
        >>> is_valid = verify_webhook_signature(body, signature, timestamp, secret)
    """
    if not signature or not timestamp or not webhook_secret:
        logger.warning(
            "Missing required data for webhook verification",
            has_signature=bool(signature),
            has_timestamp=bool(timestamp),
            has_secret=bool(webhook_secret),
        )
        return False

    try:
        secret_bytes = base64.b64decode(webhook_secret)
        signed_content = timestamp.encode() + b"." + body
        expected = hmac.new(secret_bytes, signed_content, sha256).digest()
        expected_b64 = base64.b64encode(expected).decode()

        # Constant-time comparison to prevent timing attacks
        return hmac.compare_digest(expected_b64, signature)

    except (base64.binascii.Error, ValueError, TypeError, UnicodeDecodeError) as e:
        logger.error(
            "Webhook signature verification failed",
            error=str(e),
            error_type=type(e).__name__,
        )
        return False


def extract_room_name(event: DailyWebhookEvent) -> str | None:
    """
    Extract the room name from a Daily.co webhook event payload.

    Args:
        event: Parsed webhook event

    Returns:
        Room name if present and a string, None otherwise

    Example:
        >>> event = DailyWebhookEvent(**webhook_payload)
        >>> room_name = extract_room_name(event)
    """
    room = event.payload.get("room_name")
    # Ensure we return a string, not any falsy value that might be in the payload
    return room if isinstance(room, str) else None


def parse_participant_joined(event: DailyWebhookEvent) -> ParticipantJoinedPayload:
    """
    Parse a participant.joined webhook event payload.

    Args:
        event: Webhook event with type "participant.joined"

    Returns:
        Parsed participant joined payload

    Raises:
        pydantic.ValidationError: If the payload doesn't match the expected schema
    """
    return ParticipantJoinedPayload(**event.payload)


def parse_participant_left(event: DailyWebhookEvent) -> ParticipantLeftPayload:
    """
    Parse a participant.left webhook event payload.

    Args:
        event: Webhook event with type "participant.left"

    Returns:
        Parsed participant left payload

    Raises:
        pydantic.ValidationError: If the payload doesn't match the expected schema
    """
    return ParticipantLeftPayload(**event.payload)


def parse_recording_started(event: DailyWebhookEvent) -> RecordingStartedPayload:
    """
    Parse a recording.started webhook event payload.

    Args:
        event: Webhook event with type "recording.started"

    Returns:
        Parsed recording started payload

    Raises:
        pydantic.ValidationError: If the payload doesn't match the expected schema
    """
    return RecordingStartedPayload(**event.payload)


def parse_recording_ready(
    event: DailyWebhookEvent,
) -> RecordingReadyToDownloadPayload:
    """
    Parse a recording.ready-to-download webhook event payload.

    This event is sent when raw-tracks recordings are complete and uploaded to S3.
    The payload includes a 'tracks' array with individual audio/video files.

    Args:
        event: Webhook event with type "recording.ready-to-download"

    Returns:
        Parsed recording ready payload with tracks array

    Raises:
        pydantic.ValidationError: If the payload doesn't match the expected schema

    Example:
        >>> event = DailyWebhookEvent(**webhook_payload)
        >>> if event.type == "recording.ready-to-download":
        ...     payload = parse_recording_ready(event)
        ...     audio_tracks = [t for t in payload.tracks if t.type == "audio"]
    """
    return RecordingReadyToDownloadPayload(**event.payload)


def parse_recording_error(event: DailyWebhookEvent) -> RecordingErrorPayload:
    """
    Parse a recording.error webhook event payload.

    Args:
        event: Webhook event with type "recording.error"

    Returns:
        Parsed recording error payload

    Raises:
        pydantic.ValidationError: If the payload doesn't match the expected schema
    """
    return RecordingErrorPayload(**event.payload)


WEBHOOK_PARSERS = {
    "participant.joined": parse_participant_joined,
    "participant.left": parse_participant_left,
    "recording.started": parse_recording_started,
    "recording.ready-to-download": parse_recording_ready,
    "recording.error": parse_recording_error,
}


def parse_webhook_payload(event: DailyWebhookEvent):
    """
    Parse a webhook event payload based on its event type.

    Args:
        event: Webhook event

    Returns:
        Typed payload model based on event type, or the raw dict if unknown

    Example:
        >>> event = DailyWebhookEvent(**webhook_payload)
        >>> payload = parse_webhook_payload(event)
        >>> if isinstance(payload, ParticipantJoinedPayload):
        ...     print(f"User {payload.user_name} joined")
    """
    parser = WEBHOOK_PARSERS.get(event.type)
    if parser:
        return parser(event)
    else:
        logger.warning("Unknown webhook event type", event_type=event.type)
        return event.payload

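A sketch of how an endpoint might wire these helpers together. The header names come from the docstrings above; the route path and secret are assumptions for illustration, not the repository's actual endpoint.

    from fastapi import APIRouter, HTTPException, Request

    from reflector.dailyco_api.webhook_utils import (
        parse_webhook_payload,
        verify_webhook_signature,
    )
    from reflector.dailyco_api.webhooks import DailyWebhookEvent

    router = APIRouter()
    WEBHOOK_SECRET = "base64-secret-here"  # placeholder; normally from settings


    @router.post("/daily/webhook")  # hypothetical path
    async def daily_webhook(request: Request):
        body = await request.body()
        if not verify_webhook_signature(
            body,
            request.headers.get("X-Webhook-Signature", ""),
            request.headers.get("X-Webhook-Timestamp", ""),
            WEBHOOK_SECRET,
        ):
            raise HTTPException(status_code=401, detail="Bad signature")

        event = DailyWebhookEvent(**(await request.json()))
        payload = parse_webhook_payload(event)  # typed model, or raw dict if unknown
        return {"ok": True, "type": event.type, "parsed": type(payload).__name__}
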
server/reflector/dailyco_api/webhooks.py (new file, 271 lines)
@@ -0,0 +1,271 @@
"""
Daily.co Webhook Event Models

Reference: https://docs.daily.co/reference/rest-api/webhooks
"""

from typing import Annotated, Any, Dict, Literal, Union

from pydantic import BaseModel, Field, field_validator

from reflector.utils.string import NonEmptyString


def normalize_timestamp_to_int(v):
    """
    Normalize float timestamps to int by truncating the decimal part.

    Daily.co sometimes sends timestamps as floats (e.g., 1708972279.96).
    Pydantic expects int for fields typed as `int`.
    """
    if v is None:
        return v
    if isinstance(v, float):
        return int(v)
    return v


WebhookEventType = Literal[
    "participant.joined",
    "participant.left",
    "recording.started",
    "recording.ready-to-download",
    "recording.error",
]


class DailyTrack(BaseModel):
    """
    Individual audio or video track from a multitrack recording.

    Reference: https://docs.daily.co/reference/rest-api/recordings
    """

    type: Literal["audio", "video"]
    s3Key: NonEmptyString = Field(description="S3 object key for the track file")
    size: int = Field(description="File size in bytes")


class DailyWebhookEvent(BaseModel):
    """
    Base structure for all Daily.co webhook events.
    All events share the five common fields documented below.

    Reference: https://docs.daily.co/reference/rest-api/webhooks
    """

    version: NonEmptyString = Field(
        description="Represents the version of the event. This uses semantic versioning to inform a consumer if the payload has introduced any breaking changes"
    )
    type: WebhookEventType = Field(
        description="Represents the type of the event described in the payload"
    )
    id: NonEmptyString = Field(
        description="An identifier representing this specific event"
    )
    payload: Dict[NonEmptyString, Any] = Field(
        description="An object representing the event, whose fields are described in the corresponding payload class"
    )
    event_ts: int = Field(
        description="When the webhook itself was sent. This timestamp is different from the time of the event the webhook describes. For example, a recording.started event will contain a start_ts timestamp of when the actual recording started, and a slightly later event_ts timestamp indicating when the webhook event was sent"
    )

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


class ParticipantJoinedPayload(BaseModel):
    """
    Payload for the participant.joined webhook event.

    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/participant-joined
    """

    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    session_id: NonEmptyString = Field(description="Daily.co session identifier")
    user_id: NonEmptyString = Field(description="User identifier (may be encoded)")
    user_name: NonEmptyString | None = Field(None, description="User display name")
    joined_at: int = Field(description="Join timestamp in Unix epoch seconds")

    _normalize_joined_at = field_validator("joined_at", mode="before")(
        normalize_timestamp_to_int
    )


class ParticipantLeftPayload(BaseModel):
    """
    Payload for the participant.left webhook event.

    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/participant-left
    """

    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    session_id: NonEmptyString = Field(description="Daily.co session identifier")
    user_id: NonEmptyString = Field(description="User identifier (may be encoded)")
    user_name: NonEmptyString | None = Field(None, description="User display name")
    joined_at: int = Field(description="Join timestamp in Unix epoch seconds")
    duration: int | None = Field(
        None, description="Duration of participation in seconds"
    )

    _normalize_joined_at = field_validator("joined_at", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingStartedPayload(BaseModel):
    """
    Payload for the recording.started webhook event.

    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-started
    """

    room_name: NonEmptyString | None = Field(None, description="Daily.co room name")
    recording_id: NonEmptyString = Field(description="Recording identifier")
    start_ts: int | None = Field(None, description="Recording start timestamp")

    _normalize_start_ts = field_validator("start_ts", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingReadyToDownloadPayload(BaseModel):
    """
    Payload for the recording.ready-to-download webhook event.
    This is sent when raw-tracks recordings are complete and uploaded to S3.

    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-ready-to-download
    """

    type: Literal["cloud", "raw-tracks"] = Field(
        description="The type of recording that was generated"
    )
    recording_id: NonEmptyString = Field(
        description="An ID identifying the recording that was generated"
    )
    room_name: NonEmptyString = Field(
        description="The name of the room where the recording was made"
    )
    start_ts: int = Field(
        description="The Unix epoch time in seconds representing when the recording started"
    )
    status: Literal["finished"] = Field(
        description="The status of the given recording (always 'finished' in the ready-to-download webhook; see RecordingStatus in responses.py for the full API statuses)"
    )
    max_participants: int = Field(
        description="The number of participants on the call that were recorded"
    )
    duration: int = Field(description="The duration in seconds of the call")
    s3_key: NonEmptyString = Field(
        description="The location of the recording in the provided S3 bucket"
    )
    share_token: NonEmptyString | None = Field(
        None, description="Undocumented field observed in practice"
    )
    tracks: list[DailyTrack] | None = Field(
        None,
        description="If the recording is a raw-tracks recording, a tracks field will be provided. If role permissions have been removed, the tracks field may be null",
    )

    _normalize_start_ts = field_validator("start_ts", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingErrorPayload(BaseModel):
    """
    Payload for the recording.error webhook event.

    Reference: https://docs.daily.co/reference/rest-api/webhooks/events/recording-error
    """

    action: Literal["clourd-recording-err", "cloud-recording-error"] = Field(
        description="A string describing the event that was emitted (both variants are documented)"
    )
    error_msg: NonEmptyString = Field(description="The error message returned")
    instance_id: NonEmptyString = Field(
        description="The recording instance ID that was passed into the start recording command"
    )
    room_name: NonEmptyString = Field(
        description="The name of the room where the recording was made"
    )
    timestamp: int = Field(
        description="The Unix epoch time in seconds representing when the error was emitted"
    )

    _normalize_timestamp = field_validator("timestamp", mode="before")(
        normalize_timestamp_to_int
    )


class ParticipantJoinedEvent(BaseModel):
    version: NonEmptyString
    type: Literal["participant.joined"]
    id: NonEmptyString
    payload: ParticipantJoinedPayload
    event_ts: int

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


class ParticipantLeftEvent(BaseModel):
    version: NonEmptyString
    type: Literal["participant.left"]
    id: NonEmptyString
    payload: ParticipantLeftPayload
    event_ts: int

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingStartedEvent(BaseModel):
    version: NonEmptyString
    type: Literal["recording.started"]
    id: NonEmptyString
    payload: RecordingStartedPayload
    event_ts: int

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingReadyEvent(BaseModel):
    version: NonEmptyString
    type: Literal["recording.ready-to-download"]
    id: NonEmptyString
    payload: RecordingReadyToDownloadPayload
    event_ts: int

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


class RecordingErrorEvent(BaseModel):
    version: NonEmptyString
    type: Literal["recording.error"]
    id: NonEmptyString
    payload: RecordingErrorPayload
    event_ts: int

    _normalize_event_ts = field_validator("event_ts", mode="before")(
        normalize_timestamp_to_int
    )


DailyWebhookEventUnion = Annotated[
    Union[
        ParticipantJoinedEvent,
        ParticipantLeftEvent,
        RecordingStartedEvent,
        RecordingReadyEvent,
        RecordingErrorEvent,
    ],
    Field(discriminator="type"),
]

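A sketch of parsing an incoming event through the discriminated union above, so each payload comes back fully typed; the sample payload is minimal and invented.

    from pydantic import TypeAdapter

    from reflector.dailyco_api.webhooks import DailyWebhookEventUnion, RecordingStartedEvent

    adapter = TypeAdapter(DailyWebhookEventUnion)

    raw = {
        "version": "1.0.0",
        "type": "recording.started",
        "id": "evt-1",  # placeholder event id
        "payload": {"room_name": "demo-room", "recording_id": "rec-1"},
        "event_ts": 1_700_000_000.25,  # float: normalized to int by the validator
    }

    # The "type" discriminator selects the concrete event class.
    event = adapter.validate_python(raw)
    assert isinstance(event, RecordingStartedEvent)
    assert event.event_ts == 1_700_000_000  # truncated by normalize_timestamp_to_int
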
@@ -25,11 +25,13 @@ def get_database() -> databases.Database:

     # import models
     import reflector.db.calendar_events  # noqa
+    import reflector.db.daily_participant_sessions  # noqa
     import reflector.db.meetings  # noqa
     import reflector.db.recordings  # noqa
     import reflector.db.rooms  # noqa
     import reflector.db.transcripts  # noqa
     import reflector.db.user_api_keys  # noqa
+    import reflector.db.users  # noqa

     kwargs = {}
     if "postgres" not in settings.DATABASE_URL:

Some files were not shown because too many files have changed in this diff.