Mirror of https://github.com/Monadical-SAS/reflector.git, synced 2025-12-20 20:29:06 +00:00.

Compare commits: `mathieu/sq...mathieu/re` (99 commits)
Commits in this range (SHA1 only; the author, message, and date columns are empty in this mirror):

e3d796bc8c, 96c5a1d1ea, 0939d2aef9, 964cd78bb6, 5f458aa4a7, 5f7dfadabd, 0bc971ba96,
c62e3c0753, 16284e1ac3, 443982617d, 23023b3cdb, ba8568752e, fd5298c1ee, 90c3ecc9c3,
d7f140b7d1, a47a5f5781, 0eba147018, 1d584f4b53, 406a7529ee, 18a27f7b45, 32a049c134,
91650ec65f, b340f3c74e, 8db31a493d, 2321519722, 061eff3024, d890061056, 2b3f28993f,
5779478d3c, e55e520043, 8e7819d73c, b819d0abc1, 426a5dd70d, f6a4830add, 8a1699ab5b,
a4cd433daa, 28d2168209, 3ef51ad1c8, 61f0e29d4c, ec17ed7b58, f9c8223e50, 00549f153a,
3ad78be762, d3a5cd12d2, af921ce927, bd5df1ce2e, c8024484b3, 28f87c09dc, dabf7251db,
b51b7aa917, a8983b4e7e, fe47c46489, a2bb6a27d6, 7f0b728991, 692895c859, d63040e2fd,
8d696aa775, f6ca07505f, 3aef926203, 0b2c82227d, 689c8075cc, 201671368a, 86d5e26224,
9bec39808f, 86ac23868b, caba506cde, c442a62787, 8e438ca285, 0ea7ffac89, 11731c9d38,
4287f8b8ae, 3e47c2c057, 616092a9bb, 18ed713369, 2801ab3643, b20cad76e6, 28a7258e45,
a9a4f32324, 857e035562, 34a3f5618c, 1473fd82dc, 372202b0e1, d20aac66c4, dc4b737daa,
0baff7abf7, 962c40e2b6, 3c4b9f2103, c6c035aacf, c086b91445, 9a258abc02, af86c47f1d,
5f6910e513, 9a71af145e, eef6dc3903, 1dee255fed, 5d98754305, 969bd84fcc, 36608849ec,
5bf64b5a41
`.github/workflows/deploy.yml` (vendored, 90 lines deleted)

@@ -1,90 +0,0 @@
```yaml
name: Deploy to Amazon ECS

on: [workflow_dispatch]

env:
  # 950402358378.dkr.ecr.us-east-1.amazonaws.com/reflector
  AWS_REGION: us-east-1
  ECR_REPOSITORY: reflector

jobs:
  build:
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runner: linux-amd64
            arch: amd64
          - platform: linux/arm64
            runner: linux-arm64
            arch: arm64

    runs-on: ${{ matrix.runner }}

    permissions:
      contents: read

    outputs:
      registry: ${{ steps.login-ecr.outputs.registry }}

    steps:
      - uses: actions/checkout@v4

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}

      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push ${{ matrix.arch }}
        uses: docker/build-push-action@v5
        with:
          context: server
          platforms: ${{ matrix.platform }}
          push: true
          tags: ${{ steps.login-ecr.outputs.registry }}/${{ env.ECR_REPOSITORY }}:latest-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
          github-token: ${{ secrets.GHA_CACHE_TOKEN }}
          provenance: false

  create-manifest:
    runs-on: ubuntu-latest
    needs: [build]

    permissions:
      deployments: write
      contents: read

    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}

      - name: Login to Amazon ECR
        uses: aws-actions/amazon-ecr-login@v2

      - name: Create and push multi-arch manifest
        run: |
          # Get the registry URL (since we can't easily access job outputs in matrix)
          ECR_REGISTRY=$(aws ecr describe-registry --query 'registryId' --output text).dkr.ecr.${{ env.AWS_REGION }}.amazonaws.com

          docker manifest create \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest-amd64 \
            $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest-arm64

          docker manifest push $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest

          echo "✅ Multi-arch manifest pushed: $ECR_REGISTRY/${{ env.ECR_REPOSITORY }}:latest"
```
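A quick way to sanity-check the multi-arch image this workflow used to publish is `docker manifest inspect`; a minimal sketch, with the registry host taken from the comment in the `env` block above:

```bash
# Should list both amd64 and arm64 entries if the manifest job succeeded
docker manifest inspect 950402358378.dkr.ecr.us-east-1.amazonaws.com/reflector:latest
```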
`.github/workflows/dockerhub-backend.yml` (vendored, new file, 53 lines)

@@ -0,0 +1,53 @@
```yaml
name: Build and Push Backend Docker Image (Docker Hub)

on:
  push:
    tags:
      - "v*"
  workflow_dispatch:

env:
  REGISTRY: docker.io
  IMAGE_NAME: monadicalsas/reflector-backend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: monadicalsas
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./server
          file: ./server/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64
```
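Given these `metadata-action` rules, a tag push should produce a matching versioned image tag on Docker Hub. An illustrative check, assuming the `v0.23.2` tag from the changelog below went through this workflow:

```bash
# type=ref,event=tag maps the git tag to an image tag of the same name
docker pull docker.io/monadicalsas/reflector-backend:v0.23.2
```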
`.github/workflows/dockerhub-frontend.yml` (vendored, new file, 70 lines)

@@ -0,0 +1,70 @@
```yaml
name: Build and Push Frontend Docker Image

on:
  push:
    tags:
      - "v*"
  workflow_dispatch:

env:
  REGISTRY: docker.io
  IMAGE_NAME: monadicalsas/reflector-frontend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: monadicalsas
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./www
          file: ./www/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64

  deploy:
    needs: build-and-push
    runs-on: ubuntu-latest
    if: success()
    strategy:
      matrix:
        environment: [reflector-monadical, reflector-media]
    environment: ${{ matrix.environment }}
    steps:
      - name: Trigger Coolify deployment
        run: |
          curl -X POST "${{ secrets.COOLIFY_WEBHOOK_URL }}" \
            -H "Content-Type: application/json" \
            -H "Authorization: Bearer ${{ secrets.COOLIFY_WEBHOOK_TOKEN }}" \
            -f || (echo "Failed to trigger Coolify deployment for ${{ matrix.environment }}" && exit 1)
```
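The deploy step can be reproduced by hand when debugging; a minimal sketch, assuming the webhook URL and token from the repository secrets are exported in your shell:

```bash
# Same call the workflow makes; -f makes curl fail on HTTP errors
curl -fsS -X POST "$COOLIFY_WEBHOOK_URL" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $COOLIFY_WEBHOOK_TOKEN"
```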
`.gitignore` (vendored, 3 lines changed)

```diff
@@ -1,6 +1,7 @@
 .DS_Store
 server/.env
+.env
 Caddyfile
 server/exportdanswer
 .vercel
 .env*.local
@@ -18,3 +19,5 @@ CLAUDE.local.md
 www/.env.development
 www/.env.production
 .playwright-mcp
+docs/pnpm-lock.yaml
+.secrets
```

A second hunk follows whose file header was lost in extraction; the entries use the `commit:path:rule:line` fingerprint format:

```diff
@@ -1 +1,5 @@
 b9d891d3424f371642cb032ecfd0e2564470a72c:server/tests/test_transcripts_recording_deletion.py:generic-api-key:15
+docs/docs/installation/auth-setup.md:curl-auth-header:250
+docs/docs/installation/daily-setup.md:curl-auth-header:277
+gpu/self_hosted/DEV_SETUP.md:curl-auth-header:74
+gpu/self_hosted/DEV_SETUP.md:curl-auth-header:83
```
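If that second hunk belongs to a gitleaks ignore file, as the fingerprint format suggests, a scan like the following should come back clean (a sketch; the diff does not confirm the file name):

```bash
# Assumes gitleaks v8+; scans the working tree and honors .gitleaksignore if present
gitleaks detect --source . --verbose
```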
`.secrets.example` (new file, 24 lines)

@@ -0,0 +1,24 @@
```bash
# Example secrets file for GitHub Actions workflows
# Copy this to .secrets and fill in your values
# These secrets should be configured in GitHub repository settings:
# Settings > Secrets and variables > Actions

# DockerHub Configuration (required for frontend and backend deployment)
# Create a Docker Hub access token at https://hub.docker.com/settings/security
# Username: monadicalsas
DOCKERHUB_TOKEN=your-dockerhub-access-token

# GitHub Token (required for frontend and backend deployment)
# Used by docker/metadata-action for extracting image metadata
# Can use the default GITHUB_TOKEN or create a personal access token
GITHUB_TOKEN=your-github-token-or-use-default-GITHUB_TOKEN

# Coolify Deployment Webhook (required for frontend deployment)
# Used to trigger automatic deployment after image push
# Configure these secrets in GitHub Environments:
# Each environment should have:
# - COOLIFY_WEBHOOK_URL: The webhook URL for that specific deployment
# - COOLIFY_WEBHOOK_TOKEN: The webhook token (can be the same for both if using same token)

# Optional: GitHub Actions Cache Token (for local testing with act)
GHA_CACHE_TOKEN=your-github-token-or-empty
```
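The `GHA_CACHE_TOKEN` comment mentions local testing with act; one possible invocation, assuming act is installed (it reads `.secrets` from the working directory by default):

```bash
# Run the backend workflow locally against the copied secrets file
act workflow_dispatch -W .github/workflows/dockerhub-backend.yml --secret-file .secrets
```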
`CHANGELOG.md` (149 lines changed)

@@ -1,5 +1,154 @@
```markdown
# Changelog

## [0.23.2](https://github.com/Monadical-SAS/reflector/compare/v0.23.1...v0.23.2) (2025-12-11)

### Bug Fixes

* build on push tags ([#785](https://github.com/Monadical-SAS/reflector/issues/785)) ([d7f140b](https://github.com/Monadical-SAS/reflector/commit/d7f140b7d1f4660d5da7a0da1357f68869e0b5cd))

## [0.23.1](https://github.com/Monadical-SAS/reflector/compare/v0.23.0...v0.23.1) (2025-12-11)

### Bug Fixes

* populate room_name in transcript GET endpoint ([#783](https://github.com/Monadical-SAS/reflector/issues/783)) ([0eba147](https://github.com/Monadical-SAS/reflector/commit/0eba1470181c7b9e0a79964a1ef28c09bcbdd9d7))

## [0.23.0](https://github.com/Monadical-SAS/reflector/compare/v0.22.4...v0.23.0) (2025-12-10)

### Features

* dockerhub ci ([#772](https://github.com/Monadical-SAS/reflector/issues/772)) ([00549f1](https://github.com/Monadical-SAS/reflector/commit/00549f153ade922cf4cb6c5358a7d11a39c426d2))
* llm retries ([#739](https://github.com/Monadical-SAS/reflector/issues/739)) ([61f0e29](https://github.com/Monadical-SAS/reflector/commit/61f0e29d4c51eab54ee67af92141fbb171e8ccaa))

### Bug Fixes

* celery inspect bug sidestep in restart script ([#766](https://github.com/Monadical-SAS/reflector/issues/766)) ([ec17ed7](https://github.com/Monadical-SAS/reflector/commit/ec17ed7b587cf6ee143646baaee67a7c017044d4))
* deploy frontend to coolify ([#779](https://github.com/Monadical-SAS/reflector/issues/779)) ([91650ec](https://github.com/Monadical-SAS/reflector/commit/91650ec65f65713faa7ee0dcfb75af427b7c4ba0))
* hide rooms settings instead of disabling ([#763](https://github.com/Monadical-SAS/reflector/issues/763)) ([3ad78be](https://github.com/Monadical-SAS/reflector/commit/3ad78be7628c0d029296b301a0e87236c76b7598))
* return participant emails from transcript endpoint ([#769](https://github.com/Monadical-SAS/reflector/issues/769)) ([d3a5cd1](https://github.com/Monadical-SAS/reflector/commit/d3a5cd12d2d0d9c32af2d5bd9322e030ef69b85d))

## [0.22.4](https://github.com/Monadical-SAS/reflector/compare/v0.22.3...v0.22.4) (2025-12-02)

### Bug Fixes

* Multitrack mixdown optimisation 2 ([#764](https://github.com/Monadical-SAS/reflector/issues/764)) ([bd5df1c](https://github.com/Monadical-SAS/reflector/commit/bd5df1ce2ebf35d7f3413b295e56937a9a28ef7b))

## [0.22.3](https://github.com/Monadical-SAS/reflector/compare/v0.22.2...v0.22.3) (2025-12-02)

### Bug Fixes

* align daily room settings ([#759](https://github.com/Monadical-SAS/reflector/issues/759)) ([28f87c0](https://github.com/Monadical-SAS/reflector/commit/28f87c09dc459846873d0dde65b03e3d7b2b9399))

## [0.22.2](https://github.com/Monadical-SAS/reflector/compare/v0.22.1...v0.22.2) (2025-12-02)

### Bug Fixes

* daily auto refresh fix ([#755](https://github.com/Monadical-SAS/reflector/issues/755)) ([fe47c46](https://github.com/Monadical-SAS/reflector/commit/fe47c46489c5aa0cc538109f7559cc9accb35c01))
* Skip mixdown for multitrack ([#760](https://github.com/Monadical-SAS/reflector/issues/760)) ([b51b7aa](https://github.com/Monadical-SAS/reflector/commit/b51b7aa9176c1a53ba57ad99f5e976c804a1e80c))

## [0.22.1](https://github.com/Monadical-SAS/reflector/compare/v0.22.0...v0.22.1) (2025-11-27)

### Bug Fixes

* participants update from daily ([#749](https://github.com/Monadical-SAS/reflector/issues/749)) ([7f0b728](https://github.com/Monadical-SAS/reflector/commit/7f0b728991c1b9f9aae702c96297eae63b561ef5))

## [0.22.0](https://github.com/Monadical-SAS/reflector/compare/v0.21.0...v0.22.0) (2025-11-26)

### Features

* Multitrack segmentation ([#747](https://github.com/Monadical-SAS/reflector/issues/747)) ([d63040e](https://github.com/Monadical-SAS/reflector/commit/d63040e2fdc07e7b272e85a39eb2411cd6a14798))

## [0.21.0](https://github.com/Monadical-SAS/reflector/compare/v0.20.0...v0.21.0) (2025-11-26)

### Features

* add transcript format parameter to GET endpoint ([#709](https://github.com/Monadical-SAS/reflector/issues/709)) ([f6ca075](https://github.com/Monadical-SAS/reflector/commit/f6ca07505f34483b02270a2ef3bd809e9d2e1045))

## [0.20.0](https://github.com/Monadical-SAS/reflector/compare/v0.19.0...v0.20.0) (2025-11-25)

### Features

* link transcript participants ([#737](https://github.com/Monadical-SAS/reflector/issues/737)) ([9bec398](https://github.com/Monadical-SAS/reflector/commit/9bec39808fc6322612d8b87e922a6f7901fc01c1))
* transcript restart script ([#742](https://github.com/Monadical-SAS/reflector/issues/742)) ([86d5e26](https://github.com/Monadical-SAS/reflector/commit/86d5e26224bb55a0f1cc785aeda52065bb92ee6f))

## [0.19.0](https://github.com/Monadical-SAS/reflector/compare/v0.18.0...v0.19.0) (2025-11-25)

### Features

* dailyco api module ([#725](https://github.com/Monadical-SAS/reflector/issues/725)) ([4287f8b](https://github.com/Monadical-SAS/reflector/commit/4287f8b8aeee60e51db7539f4dcbda5f6e696bd8))
* dailyco poll ([#730](https://github.com/Monadical-SAS/reflector/issues/730)) ([8e438ca](https://github.com/Monadical-SAS/reflector/commit/8e438ca285152bd48fdc42767e706fb448d3525c))
* multitrack cli ([#735](https://github.com/Monadical-SAS/reflector/issues/735)) ([11731c9](https://github.com/Monadical-SAS/reflector/commit/11731c9d38439b04e93b1c3afbd7090bad11a11f))

### Bug Fixes

* default platform fix ([#736](https://github.com/Monadical-SAS/reflector/issues/736)) ([c442a62](https://github.com/Monadical-SAS/reflector/commit/c442a627873ca667656eeaefb63e54ab10b8d19e))
* parakeet vad not getting the end timestamp ([#728](https://github.com/Monadical-SAS/reflector/issues/728)) ([18ed713](https://github.com/Monadical-SAS/reflector/commit/18ed7133693653ef4ddac6c659a8c14b320d1657))
* start raw tracks recording ([#729](https://github.com/Monadical-SAS/reflector/issues/729)) ([3e47c2c](https://github.com/Monadical-SAS/reflector/commit/3e47c2c0573504858e0d2e1798b6ed31f16b4a5d))

## [0.18.0](https://github.com/Monadical-SAS/reflector/compare/v0.17.0...v0.18.0) (2025-11-14)

### Features

* daily QOL: participants dictionary ([#721](https://github.com/Monadical-SAS/reflector/issues/721)) ([b20cad7](https://github.com/Monadical-SAS/reflector/commit/b20cad76e69fb6a76405af299a005f1ddcf60eae))

### Bug Fixes

* add proccessing page to file upload and reprocessing ([#650](https://github.com/Monadical-SAS/reflector/issues/650)) ([28a7258](https://github.com/Monadical-SAS/reflector/commit/28a7258e45317b78e60e6397be2bc503647eaace))
* copy transcript ([#674](https://github.com/Monadical-SAS/reflector/issues/674)) ([a9a4f32](https://github.com/Monadical-SAS/reflector/commit/a9a4f32324f66c838e081eee42bb9502f38c1db1))

## [0.17.0](https://github.com/Monadical-SAS/reflector/compare/v0.16.0...v0.17.0) (2025-11-13)

### Features

* add API key management UI ([#716](https://github.com/Monadical-SAS/reflector/issues/716)) ([372202b](https://github.com/Monadical-SAS/reflector/commit/372202b0e1a86823900b0aa77be1bfbc2893d8a1))
* daily.co support as alternative to whereby ([#691](https://github.com/Monadical-SAS/reflector/issues/691)) ([1473fd8](https://github.com/Monadical-SAS/reflector/commit/1473fd82dc472c394cbaa2987212ad662a74bcac))

## [0.16.0](https://github.com/Monadical-SAS/reflector/compare/v0.15.0...v0.16.0) (2025-10-24)

### Features

* search date filter ([#710](https://github.com/Monadical-SAS/reflector/issues/710)) ([962c40e](https://github.com/Monadical-SAS/reflector/commit/962c40e2b6428ac42fd10aea926782d7a6f3f902))

## [0.15.0](https://github.com/Monadical-SAS/reflector/compare/v0.14.0...v0.15.0) (2025-10-20)

### Features

* api tokens ([#705](https://github.com/Monadical-SAS/reflector/issues/705)) ([9a258ab](https://github.com/Monadical-SAS/reflector/commit/9a258abc0209b0ac3799532a507ea6a9125d703a))

## [0.14.0](https://github.com/Monadical-SAS/reflector/compare/v0.13.1...v0.14.0) (2025-10-08)

### Features

* Add calendar event data to transcript webhook payload ([#689](https://github.com/Monadical-SAS/reflector/issues/689)) ([5f6910e](https://github.com/Monadical-SAS/reflector/commit/5f6910e5131b7f28f86c9ecdcc57fed8412ee3cd))
* container build for www / github ([#672](https://github.com/Monadical-SAS/reflector/issues/672)) ([969bd84](https://github.com/Monadical-SAS/reflector/commit/969bd84fcc14851d1a101412a0ba115f1b7cde82))
* docker-compose for production frontend ([#664](https://github.com/Monadical-SAS/reflector/issues/664)) ([5bf64b5](https://github.com/Monadical-SAS/reflector/commit/5bf64b5a41f64535e22849b4bb11734d4dbb4aae))

### Bug Fixes

* restore feature boolean logic ([#671](https://github.com/Monadical-SAS/reflector/issues/671)) ([3660884](https://github.com/Monadical-SAS/reflector/commit/36608849ec64e953e3be456172502762e3c33df9))
* security review ([#656](https://github.com/Monadical-SAS/reflector/issues/656)) ([5d98754](https://github.com/Monadical-SAS/reflector/commit/5d98754305c6c540dd194dda268544f6d88bfaf8))
* update transcript list on reprocess ([#676](https://github.com/Monadical-SAS/reflector/issues/676)) ([9a71af1](https://github.com/Monadical-SAS/reflector/commit/9a71af145ee9b833078c78d0c684590ab12e9f0e))
* upgrade nemo toolkit ([#678](https://github.com/Monadical-SAS/reflector/issues/678)) ([eef6dc3](https://github.com/Monadical-SAS/reflector/commit/eef6dc39037329b65804297786d852dddb0557f9))

## [0.13.1](https://github.com/Monadical-SAS/reflector/compare/v0.13.0...v0.13.1) (2025-09-22)
```
Hunk from a documentation file whose header was lost in extraction:

```diff
@@ -151,7 +151,7 @@ All endpoints prefixed `/v1/`:
 
 **Frontend** (`www/.env`):
 - `NEXTAUTH_URL`, `NEXTAUTH_SECRET` - Authentication configuration
-- `NEXT_PUBLIC_REFLECTOR_API_URL` - Backend API endpoint
+- `REFLECTOR_API_URL` - Backend API endpoint
 - `REFLECTOR_DOMAIN_CONFIG` - Feature flags and domain settings
 
 ## Testing Strategy
```
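Putting the renamed variable in context, a `www/.env` could look like this (illustrative values only; the `REFLECTOR_DOMAIN_CONFIG` format is not shown in this diff):

```bash
# www/.env (example values, not real credentials)
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=generate-a-long-random-string
REFLECTOR_API_URL=https://api.example.com
```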
`Caddyfile.example` (new file, 22 lines)

@@ -0,0 +1,22 @@
```
# Reflector Caddyfile
# Replace example.com with your actual domains
# CORS is handled by the backend - Caddy just proxies
#
# For environment variable substitution, set:
#   FRONTEND_DOMAIN=app.example.com
#   API_DOMAIN=api.example.com
#   AUTHENTIK_DOMAIN=authentik.example.com (optional, for authentication)
# Or edit this file directly with your domains.

{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}

# Uncomment if using Authentik for authentication (see auth-setup.md)
# {$AUTHENTIK_DOMAIN:authentik.example.com} {
#     reverse_proxy authentik-server-1:9000
# }
```
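To exercise the example as shipped, the environment-substitution route looks like this (Caddy resolves the `{$VAR:default}` placeholders at startup):

```bash
FRONTEND_DOMAIN=app.example.com \
API_DOMAIN=api.example.com \
caddy run --config Caddyfile
```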
`README.md` (31 lines changed)

````diff
@@ -168,6 +168,19 @@ You can manually process an audio file by calling the process tool:
 uv run python -m reflector.tools.process path/to/audio.wav
 ```
 
+## Reprocessing any transcription
+
+```bash
+uv run -m reflector.tools.process_transcript 81ec38d1-9dd7-43d2-b3f8-51f4d34a07cd --sync
+```
+
+## Build-time env variables
+
+Next.js projects usually rely on NEXT_PUBLIC_-prefixed build-time variables. We don't have those, because we need to serve a customizable prebuilt docker container.
+
+Instead, all the variables are runtime. Variables needed by the frontend are served to the frontend app at initial render.
+
+It also means there's no static prebuild and no static js/html files to serve.
+
 ## Feature Flags
 
@@ -177,24 +190,24 @@ Reflector uses environment variable-based feature flags to control application f
 
 | Feature Flag | Environment Variable |
 |-------------|---------------------|
-| `requireLogin` | `NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN` |
-| `privacy` | `NEXT_PUBLIC_FEATURE_PRIVACY` |
-| `browse` | `NEXT_PUBLIC_FEATURE_BROWSE` |
-| `sendToZulip` | `NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP` |
-| `rooms` | `NEXT_PUBLIC_FEATURE_ROOMS` |
+| `requireLogin` | `FEATURE_REQUIRE_LOGIN` |
+| `privacy` | `FEATURE_PRIVACY` |
+| `browse` | `FEATURE_BROWSE` |
+| `sendToZulip` | `FEATURE_SEND_TO_ZULIP` |
+| `rooms` | `FEATURE_ROOMS` |
 
 ### Setting Feature Flags
 
-Feature flags are controlled via environment variables using the pattern `NEXT_PUBLIC_FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.
+Feature flags are controlled via environment variables using the pattern `FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.
 
 **Examples:**
 ```bash
 # Enable user authentication requirement
-NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN=true
+FEATURE_REQUIRE_LOGIN=true
 
 # Disable browse functionality
-NEXT_PUBLIC_FEATURE_BROWSE=false
+FEATURE_BROWSE=false
 
 # Enable Zulip integration
-NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP=true
+FEATURE_SEND_TO_ZULIP=true
 ```
````
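Because the flags are read at runtime rather than baked in at build time, the same prebuilt image can be reconfigured per container; a minimal sketch using the frontend image referenced elsewhere in this change (port mapping assumed from the compose files):

```bash
# Flip feature flags at container start; no rebuild needed
docker run --rm -p 3000:3000 \
  -e FEATURE_REQUIRE_LOGIN=true \
  -e FEATURE_BROWSE=false \
  monadicalsas/reflector-frontend:latest
```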
`docker-compose.prod.yml` (new file, 113 lines)

@@ -0,0 +1,113 @@
```yaml
# Production Docker Compose configuration
# Usage: docker compose -f docker-compose.prod.yml up -d
#
# Prerequisites:
# 1. Copy .env.example to .env and configure for both server/ and www/
# 2. Copy Caddyfile.example to Caddyfile and edit with your domains
# 3. Deploy Modal GPU functions (see gpu/modal_deployments/deploy-all.sh)

services:
  web:
    image: monadicalsas/reflector-frontend:latest
    restart: unless-stopped
    env_file:
      - ./www/.env
    pull_policy: always
    environment:
      - KV_URL=redis://redis:6379
    depends_on:
      - redis

  server:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: server
    depends_on:
      - postgres
      - redis
    volumes:
      - server_data:/app/data
      - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

  worker:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: worker
    depends_on:
      - postgres
      - redis
    volumes:
      - server_data:/app/data
      - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

  beat:
    image: monadicalsas/reflector-backend:latest
    restart: unless-stopped
    env_file:
      - ./server/.env
    environment:
      ENTRYPOINT: beat
    depends_on:
      - postgres
      - redis

  redis:
    image: redis:7.2-alpine
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 3s
      retries: 3
    volumes:
      - redis_data:/data

  postgres:
    image: postgres:17-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: reflector
      POSTGRES_PASSWORD: reflector
      POSTGRES_DB: reflector
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U reflector"]
      interval: 30s
      timeout: 3s
      retries: 3

  caddy:
    image: caddy:2-alpine
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy_data:/data
      - caddy_config:/config
    depends_on:
      - web
      - server

  docs:
    build: ./docs
    restart: unless-stopped

volumes:
  redis_data:
  postgres_data:
  server_data:
  caddy_data:
  caddy_config:

networks:
  default:
    attachable: true
```
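A routine update cycle for this stack might look like the following; `pull_policy: always` on `web` already refreshes that image on `up`, and the explicit `pull` covers the rest:

```bash
docker compose -f docker-compose.prod.yml pull
docker compose -f docker-compose.prod.yml up -d
docker compose -f docker-compose.prod.yml ps   # check restart/health status
```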
Hunks from a second compose file (header lost in extraction; the services shown are the development web/postgres/redis stack):

```diff
@@ -39,7 +39,7 @@ services:
     ports:
       - 6379:6379
   web:
-    image: node:18
+    image: node:22-alpine
     ports:
       - "3000:3000"
    command: sh -c "corepack enable && pnpm install && pnpm dev"
@@ -50,6 +50,8 @@ services:
       - /app/node_modules
     env_file:
       - ./www/.env.local
+    environment:
+      - NODE_ENV=development
 
   postgres:
     image: postgres:17
```
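With these hunks applied, the dev stack can be brought up service by service; a sketch, assuming the service names shown above:

```bash
docker compose up -d redis postgres
docker compose up web   # corepack enable + pnpm install + pnpm dev run at start
```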
`docs/.gitignore` (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
```
# Dependencies
/node_modules

# Production
/build

# Generated files
.docusaurus
.cache-loader

# Misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
```
`docs/Dockerfile` (new file, 39 lines)

@@ -0,0 +1,39 @@
```dockerfile
FROM node:18-alpine AS builder
WORKDIR /app

# Install curl for fetching OpenAPI spec
RUN apk add --no-cache curl

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci

# Copy source
COPY . .

# Fetch OpenAPI spec from production API
ARG OPENAPI_URL=https://api-reflector.monadical.com/openapi.json
RUN mkdir -p ./static && curl -sf "${OPENAPI_URL}" -o ./static/openapi.json || echo '{}' > ./static/openapi.json

# Fix docusaurus config: change onBrokenLinks to 'warn' for Docker build
RUN sed -i "s/onBrokenLinks: 'throw'/onBrokenLinks: 'warn'/g" docusaurus.config.ts

# Build static site (skip prebuild hook by calling docusaurus directly)
RUN npx docusaurus build

# Production image
FROM nginx:alpine

# Copy built static files
COPY --from=builder /app/build /usr/share/nginx/html

# Healthcheck for container orchestration
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
  CMD wget --no-verbose --tries=1 --spider http://localhost/ || exit 1

# Expose port
EXPOSE 80

CMD ["nginx", "-g", "daemon off;"]
```
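Building and serving the docs image locally; `OPENAPI_URL` can be overridden at build time, and the host port below is arbitrary:

```bash
docker build -t reflector-docs ./docs \
  --build-arg OPENAPI_URL=https://api-reflector.monadical.com/openapi.json
docker run --rm -p 8080:80 reflector-docs
```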
`docs/README.md` (new file, 41 lines)

@@ -0,0 +1,41 @@
````markdown
# Website

This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.

### Installation

```
$ yarn
```

### Local Development

```
$ yarn start
```

This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.

### Build

```
$ yarn build
```

This command generates static content into the `build` directory and can be served using any static contents hosting service.

### Deployment

Using SSH:

```
$ USE_SSH=true yarn deploy
```

Not using SSH:

```
$ GIT_USER=<Your GitHub username> yarn deploy
```

If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
````
`docs/TODO.md` (new file, 170 lines)

@@ -0,0 +1,170 @@
```markdown
# Documentation TODO List

This file tracks information needed from the user to complete the documentation.

## Required Information

### Processing Times & Costs

Please provide actual numbers for:

- [ ] **Modal.com GPU Costs**
  - Cost per hour of audio for Whisper transcription
  - Cost per hour of audio for Pyannote diarization
  - Cost per hour of audio for Seamless-M4T translation
  - Typical GPU instance used (T4, A10, etc.)

- [ ] **RunPod LLM Costs**
  - Cost per 1000 tokens for summarization
  - Model used (phi-4-unsloth-bnb-4bit)
  - RTX 4000 Ada instance cost per hour

- [ ] **AWS S3 Storage**
  - Cost per GB per month
  - Data transfer costs
  - Typical storage requirements per hour of audio

- [ ] **Whereby API**
  - Monthly cost structure
  - API call limits
  - Room participant limits

- [ ] **Actual Processing Times**
  - Whisper tiny model: X minutes per hour of audio
  - Whisper base model: X minutes per hour of audio
  - Whisper large-v3 model: X minutes per hour of audio
  - Diarization: X minutes per hour of audio
  - Translation: X minutes per hour of audio

### Screenshots Needed

Location: `/docs/static/screenshots/`

Please provide screenshots of:

- [ ] **Dashboard Overview** - Main dashboard showing recent transcripts
- [ ] **Live Transcription** - Active transcription in progress
- [ ] **Meeting Room Interface** - Whereby room with participants
- [ ] **Transcript with Diarization** - Showing speaker labels
- [ ] **Settings Page** - Configuration options
- [ ] **API Documentation** - OpenAPI/Swagger interface
- [ ] **File Upload Interface** - Drag and drop upload
- [ ] **Translation View** - Showing original and translated text
- [ ] **Summary View** - Generated summary and topics

### Setup Screenshots

Please provide step-by-step screenshots for:

- [ ] **Modal.com Setup**
  - Creating account
  - Getting API keys
  - Deploying functions

- [ ] **Whereby Configuration**
  - Creating developer account
  - Getting API credentials
  - Setting up rooms

- [ ] **AWS S3 Setup**
  - Creating bucket
  - Setting permissions
  - Getting access keys

- [ ] **Authentik Integration**
  - Adding application
  - Configuring OAuth
  - Setting up users

### Technical Details

Please provide specific values for:

- [ ] **WebRTC Configuration**
  - Exact UDP port range used (e.g., 10000-20000)
  - STUN server configuration (if any)
  - ICE candidate gathering timeout
  - https://docs.daily.co/guides/privacy-and-security/corporate-firewalls-nats-allowed-ip-list

- [ ] **Worker Configuration**
  - Default Celery worker count
  - Worker memory limits
  - Queue priorities

- [ ] **Redis Requirements**
  - Typical memory usage
  - Persistence configuration
  - Eviction policies

- [ ] **PostgreSQL**
  - Expected database growth (MB per hour of audio)
  - Recommended connection pool size
  - Backup strategy

- [ ] **Performance Metrics**
  - Average transcription accuracy (WER)
  - Average diarization accuracy (DER)
  - Translation quality scores
  - Typical latency for live streaming

### Configuration Examples

Please provide real-world examples for:

- [ ] **Production .env file** (sanitized)
- [ ] **Caddy configuration** for production
- [ ] **Docker compose** for production deployment
- [ ] **Nginx configuration** (if alternative to Caddy)

### API Examples

Please provide:

- [ ] **Sample API requests** for common operations
- [ ] **WebSocket message examples**
- [ ] **Webhook payload examples**
- [ ] **Error response examples**

## How to Add Information

1. **For text information**: Edit the relevant markdown files in `/docs/docs/`
2. **For screenshots**: Add to `/docs/static/screenshots/` and reference in docs
3. **For code examples**: Add to documentation with proper syntax highlighting

## Priority Items

High priority (blocks documentation completeness):
1. Modal.com costs and setup steps
2. Basic screenshots (dashboard, transcription)
3. Docker deployment configuration

Medium priority (enhances documentation):
1. Performance metrics
2. Advanced configuration examples
3. Troubleshooting scenarios

Low priority (nice to have):
1. Video tutorials
2. Architecture diagrams
3. Benchmark comparisons

## Documentation Structure

Once information is provided, update these files:
- `/docs/docs/installation/modal-setup.md` - Add Modal.com setup screenshots
- `/docs/docs/installation/whereby-setup.md` - Add Whereby configuration steps
- `/docs/docs/reference/configuration.md` - Add environment variable details
- `/docs/docs/pipelines/file-pipeline.md` - Add actual processing times
- `/docs/docs/pipelines/live-pipeline.md` - Add latency measurements

## Notes

- Replace placeholder values with actual data
- Ensure all sensitive information is sanitized
- Test all configuration examples before documenting
- Verify all costs are up-to-date

---

Last updated: 2025-08-20
Contact: [Your Email]
```
777
docs/create-docs.sh
Executable file
777
docs/create-docs.sh
Executable file
@@ -0,0 +1,777 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Create directory structure
|
||||
mkdir -p docs/concepts
|
||||
mkdir -p docs/installation
|
||||
mkdir -p docs/pipelines
|
||||
mkdir -p docs/reference/architecture
|
||||
mkdir -p docs/reference/processors
|
||||
mkdir -p docs/reference/api
|
||||
|
||||
# Create all documentation files with content
|
||||
echo "Creating documentation files..."
|
||||
|
||||
# Concepts - Modes
|
||||
cat > docs/concepts/modes.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 2
|
||||
title: Operating Modes
|
||||
---
|
||||
|
||||
# Operating Modes
|
||||
|
||||
Reflector operates in two distinct modes to accommodate different use cases and security requirements.
|
||||
|
||||
## Public Mode
|
||||
|
||||
Public mode provides immediate access to core transcription features without requiring authentication.
|
||||
|
||||
### Features Available
|
||||
- **File Upload**: Process audio files up to 2GB
|
||||
- **Live Transcription**: Stream audio from microphone
|
||||
- **Basic Processing**: Transcription and diarization
|
||||
- **Temporary Storage**: Results available for 24 hours
|
||||
|
||||
### Limitations
|
||||
- No persistent storage
|
||||
- No meeting rooms
|
||||
- Limited to single-user sessions
|
||||
- No team collaboration features
|
||||
|
||||
### Use Cases
|
||||
- Quick transcription needs
|
||||
- Testing and evaluation
|
||||
- Individual users
|
||||
- Public demonstrations
|
||||
|
||||
## Private Mode
|
||||
|
||||
Private mode unlocks the full potential of Reflector with authentication and persistent storage.
|
||||
|
||||
### Additional Features
|
||||
- **Virtual Meeting Rooms**: Whereby integration
|
||||
- **Team Collaboration**: Share transcripts with team
|
||||
- **Persistent Storage**: Long-term transcript archive
|
||||
- **Advanced Analytics**: Meeting insights and trends
|
||||
- **Custom Integration**: Webhooks and API access
|
||||
- **User Management**: Role-based access control
|
||||
|
||||
### Authentication Options
|
||||
|
||||
#### Authentik Integration
|
||||
Enterprise-grade SSO with support for:
|
||||
- SAML 2.0
|
||||
- OAuth 2.0 / OIDC
|
||||
- LDAP / Active Directory
|
||||
- Multi-factor authentication
|
||||
|
||||
#### JWT Authentication
|
||||
Stateless token-based auth for:
|
||||
- API access
|
||||
- Service-to-service communication
|
||||
- Mobile applications
|
||||
|
||||
### Room Management
|
||||
|
||||
Virtual rooms provide dedicated spaces for meetings:
|
||||
- **Persistent URLs**: Same link for recurring meetings
|
||||
- **Access Control**: Invite-only or open rooms
|
||||
- **Recording Consent**: Automatic consent management
|
||||
- **Custom Settings**: Per-room configuration
|
||||
|
||||
## Mode Selection
|
||||
|
||||
The mode is determined by your deployment configuration:
|
||||
|
||||
```yaml
|
||||
# Public Mode (no authentication)
|
||||
REFLECTOR_AUTH_BACKEND=none
|
||||
|
||||
# Private Mode (with authentication)
|
||||
REFLECTOR_AUTH_BACKEND=jwt
|
||||
# or
|
||||
REFLECTOR_AUTH_BACKEND=authentik
|
||||
```
|
||||
|
||||
## Feature Comparison
|
||||
|
||||
| Feature | Public Mode | Private Mode |
|
||||
|---------|------------|--------------|
|
||||
| File Upload | ✅ | ✅ |
|
||||
| Live Transcription | ✅ | ✅ |
|
||||
| Speaker Diarization | ✅ | ✅ |
|
||||
| Translation | ✅ | ✅ |
|
||||
| Summarization | ✅ | ✅ |
|
||||
| Meeting Rooms | ❌ | ✅ |
|
||||
| Persistent Storage | ❌ | ✅ |
|
||||
| Team Collaboration | ❌ | ✅ |
|
||||
| API Access | Limited | Full |
|
||||
| User Management | ❌ | ✅ |
|
||||
| Custom Branding | ❌ | ✅ |
|
||||
| Analytics | ❌ | ✅ |
|
||||
| Webhooks | ❌ | ✅ |
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### Public Mode Security
|
||||
- Rate limiting to prevent abuse
|
||||
- File size restrictions
|
||||
- Automatic cleanup of old data
|
||||
- No PII storage
|
||||
|
||||
### Private Mode Security
|
||||
- Encrypted data storage
|
||||
- Audit logging
|
||||
- Session management
|
||||
- Access control lists
|
||||
- Data retention policies
|
||||
|
||||
## Choosing the Right Mode
|
||||
|
||||
### Choose Public Mode if:
|
||||
- You need quick, one-time transcriptions
|
||||
- You're evaluating Reflector
|
||||
- You don't need persistent storage
|
||||
- You're processing non-sensitive content
|
||||
|
||||
### Choose Private Mode if:
|
||||
- You need team collaboration
|
||||
- You require persistent storage
|
||||
- You're processing sensitive content
|
||||
- You need meeting room functionality
|
||||
- You want advanced analytics
|
||||
EOF
|
||||
|
||||
# Concepts - Independence
|
||||
cat > docs/concepts/independence.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 3
|
||||
title: Data Independence
|
||||
---
|
||||
|
||||
# Data Independence & Privacy
|
||||
|
||||
Reflector is designed with privacy and data independence as core principles, giving you complete control over your data and processing.
|
||||
|
||||
## Privacy by Design
|
||||
|
||||
### No Third-Party Data Sharing
|
||||
|
||||
Your audio and transcripts are never shared with third parties:
|
||||
- **Local Processing**: All ML models can run on your infrastructure
|
||||
- **No Training on User Data**: Your content is never used to improve models
|
||||
- **Isolated Processing**: Each transcript is processed in isolation
|
||||
- **No Analytics Tracking**: No usage analytics sent to external services
|
||||
|
||||
### Data Ownership
|
||||
|
||||
You maintain complete ownership of all data:
|
||||
- **Export Anytime**: Download all your transcripts and audio
|
||||
- **Delete on Demand**: Permanent deletion with no recovery
|
||||
- **API Access**: Full programmatic access to your data
|
||||
- **No Vendor Lock-in**: Standard formats for easy migration
|
||||
|
||||
## Processing Transparency
|
||||
|
||||
### What Happens to Your Audio
|
||||
|
||||
1. **Upload/Stream**: Audio received by your server
|
||||
2. **Temporary Storage**: Stored only for processing duration
|
||||
3. **Processing**: ML models process audio locally or on Modal
|
||||
4. **Results Storage**: Transcripts stored in your database
|
||||
5. **Cleanup**: Original audio deleted (unless configured otherwise)
|
||||
|
||||
### Local vs Cloud Processing
|
||||
|
||||
#### Local Processing
|
||||
When configured for local processing:
|
||||
- All models run on your hardware
|
||||
- No data leaves your infrastructure
|
||||
- Complete air-gap capability
|
||||
- Higher hardware requirements
|
||||
|
||||
#### Modal.com Processing
|
||||
When using Modal for GPU acceleration:
|
||||
- Audio chunks sent to Modal for processing
|
||||
- Processed immediately and deleted
|
||||
- No long-term storage on Modal
|
||||
- Modal's security: SOC 2 Type II compliant
|
||||
|
||||
### Data Retention
|
||||
|
||||
Default retention policies:
|
||||
- **Public Mode**: 24 hours then automatic deletion
|
||||
- **Private Mode**: Configurable (default: indefinite)
|
||||
- **Audio Files**: Deleted after processing (configurable)
|
||||
- **Transcripts**: Retained based on policy
|
||||
|
||||
## Compliance Features
|
||||
|
||||
### GDPR Compliance
|
||||
|
||||
- **Right to Access**: Export all user data
|
||||
- **Right to Deletion**: Permanent data removal
|
||||
- **Data Portability**: Standard export formats
|
||||
- **Privacy by Default**: Minimal data collection
|
||||
|
||||
### HIPAA Considerations
|
||||
|
||||
For healthcare deployments:
|
||||
- **Self-hosted Option**: Complete infrastructure control
|
||||
- **Encryption**: At rest and in transit
|
||||
- **Audit Logging**: Complete access trail
|
||||
- **Access Controls**: Role-based permissions
|
||||
|
||||
### Industry Standards
|
||||
|
||||
- **TLS 1.3**: Modern encryption for data in transit
|
||||
- **AES-256**: Encryption for data at rest
|
||||
- **JWT Tokens**: Secure, stateless authentication
|
||||
- **OWASP Guidelines**: Security best practices
|
||||
|
||||
## Self-Hosted Deployment
|
||||
|
||||
### Complete Independence
|
||||
|
||||
Self-hosting provides maximum control:
|
||||
- **Your Infrastructure**: Run on your servers
|
||||
- **Your Network**: No external connections required
|
||||
- **Your Policies**: Implement custom retention
|
||||
- **Your Compliance**: Meet specific requirements
|
||||
|
||||
### Air-Gap Capability
|
||||
|
||||
Reflector can run completely offline:
|
||||
1. Download all models during setup
|
||||
2. Configure for local processing only
|
||||
3. Disable all external integrations
|
||||
4. Run in isolated network environment
|
||||
|
||||
## Data Flow Control
|
||||
|
||||
### Configurable Processing
|
||||
|
||||
Control where each step happens:
|
||||
|
||||
```yaml
|
||||
# All local processing
|
||||
TRANSCRIPT_BACKEND=local
|
||||
DIARIZATION_BACKEND=local
|
||||
TRANSLATION_BACKEND=local
|
||||
|
||||
# Hybrid approach
|
||||
TRANSCRIPT_BACKEND=modal # Fast GPU processing
|
||||
DIARIZATION_BACKEND=local # Sensitive speaker data
|
||||
TRANSLATION_BACKEND=modal # Non-sensitive translation
|
||||
```
|
||||
|
||||
### Storage Options
|
||||
|
||||
Choose where data is stored:
|
||||
- **Local Filesystem**: Complete control
|
||||
- **PostgreSQL**: Self-hosted database
|
||||
- **S3-Compatible**: MinIO or AWS with encryption
|
||||
- **Hybrid**: Different storage for different data types
|
||||
|
||||
## Security Architecture
|
||||
|
||||
### Defense in Depth
|
||||
|
||||
Multiple layers of security:
|
||||
1. **Network Security**: Firewalls and VPNs
|
||||
2. **Application Security**: Input validation and sanitization
|
||||
3. **Data Security**: Encryption and access controls
|
||||
4. **Operational Security**: Logging and monitoring
|
||||
|
||||
### Zero Trust Principles
|
||||
|
||||
- **Verify Everything**: All requests authenticated
|
||||
- **Least Privilege**: Minimal permissions granted
|
||||
- **Assume Breach**: Design for compromise containment
|
||||
- **Encrypt Everything**: No plaintext transmission
|
||||
|
||||
## Audit and Compliance
|
||||
|
||||
### Audit Logging
|
||||
|
||||
Comprehensive logging of:
|
||||
- **Access Events**: Who accessed what and when
|
||||
- **Processing Events**: What was processed and how
|
||||
- **Configuration Changes**: System modifications
|
||||
- **Security Events**: Failed authentication attempts
|
||||
|
||||
### Compliance Reporting
|
||||
|
||||
Generate reports for:
|
||||
- **Data Processing**: What data was processed
|
||||
- **Data Access**: Who accessed the data
|
||||
- **Data Retention**: What was retained or deleted
|
||||
- **Security Events**: Security-related incidents
|
||||
|
||||
## Best Practices
|
||||
|
||||
### For Maximum Privacy
|
||||
|
||||
1. **Self-host** all components
|
||||
2. **Use local processing** for all models
|
||||
3. **Implement short retention** periods
|
||||
4. **Encrypt all storage** at rest
|
||||
5. **Use VPN** for all connections
|
||||
6. **Regular audits** of access logs
|
||||
|
||||
### For Balanced Approach
|
||||
|
||||
1. **Self-host core services** (database, API)
|
||||
2. **Use Modal for processing** (faster, cost-effective)
|
||||
3. **Implement encryption** everywhere
|
||||
4. **Regular backups** with encryption
|
||||
5. **Monitor access** patterns
|
||||
EOF
|
||||
|
||||
# Concepts - Pipeline
|
||||
cat > docs/concepts/pipeline.md << 'EOF'
|
||||
---
|
||||
sidebar_position: 4
|
||||
title: Processing Pipeline
|
||||
---
|
||||
|
||||
# Processing Pipeline
|
||||
|
||||
Reflector uses a sophisticated pipeline architecture to process audio efficiently and accurately.
|
||||
|
||||
## Pipeline Overview
|
||||
|
||||
The processing pipeline consists of modular components that can be combined and configured based on your needs:
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
A[Audio Input] --> B[Pre-processing]
|
||||
B --> C[Chunking]
|
||||
C --> D[Transcription]
|
||||
D --> E[Diarization]
|
||||
E --> F[Alignment]
|
||||
F --> G[Post-processing]
|
||||
G --> H[Output]
|
||||
```
|
||||
|
||||
## Pipeline Components
|
||||
|
||||
### Audio Input
|
||||
|
||||
Accepts various input sources:
|
||||
- **File Upload**: MP3, WAV, M4A, WebM, MP4
|
||||
- **WebRTC Stream**: Live browser audio
|
||||
- **Recording Integration**: Whereby recordings
|
||||
- **API Upload**: Direct API submission
|
||||
|
||||
### Pre-processing
|
||||
|
||||
Prepares audio for optimal processing:
|
||||
- **Format Conversion**: Convert to 16kHz mono WAV
|
||||
- **Normalization**: Adjust volume to -23 LUFS
|
||||
- **Noise Reduction**: Optional background noise removal
|
||||
- **Validation**: Check duration and quality
|
||||
|
||||
### Chunking
|
||||
|
||||
Splits audio for parallel processing:
|
||||
- **Fixed Size**: 30-second chunks by default
|
||||
- **Overlap**: 1-second overlap for continuity
|
||||
- **Smart Boundaries**: Attempt to split at silence
|
||||
- **Metadata**: Track chunk positions
|
||||
|
||||
### Transcription
|
||||
|
||||
Converts speech to text:
|
||||
- **Model Selection**: Whisper or Parakeet
|
||||
- **Language Detection**: Automatic or specified
|
||||
- **Timestamp Generation**: Word-level timing
|
||||
- **Confidence Scores**: Quality indicators
|
||||
|
||||
### Diarization
|
||||
|
||||
Identifies different speakers:
|
||||
- **Voice Activity Detection**: Find speech segments
|
||||
- **Speaker Embedding**: Extract voice characteristics
|
||||
- **Clustering**: Group similar voices
|
||||
- **Label Assignment**: Assign speaker IDs
|
||||
|
||||
### Alignment
|
||||
|
||||
Merges all processing results:
|
||||
- **Chunk Assembly**: Combine transcription chunks
|
||||
- **Speaker Mapping**: Align speakers with text
|
||||
- **Overlap Resolution**: Handle chunk boundaries
|
||||
- **Timeline Creation**: Build unified timeline
|
||||
|
||||
### Post-processing
|
||||
|
||||
Enhances the final output:
|
||||
- **Formatting**: Apply punctuation and capitalization
|
||||
- **Translation**: Convert to target languages
|
||||
- **Summarization**: Generate concise summaries
|
||||
- **Topic Extraction**: Identify key themes
|
||||
- **Action Items**: Extract tasks and decisions
|
||||
|
||||
## Processing Modes
|
||||
|
||||
### Batch Processing
|
||||
|
||||
For uploaded files:
|
||||
- Optimized for throughput
|
||||
- Parallel chunk processing
|
||||
- Higher accuracy models
|
||||
- Complete file analysis
|
||||
|
||||
### Stream Processing
|
||||
|
||||
For live audio:
|
||||
- Optimized for latency
|
||||
- Sequential processing
|
||||
- Real-time feedback
|
||||
- Progressive results
|
||||
|
||||
### Hybrid Processing
|
||||
|
||||
For meetings:
|
||||
- Stream during meeting
|
||||
- Batch after completion
|
||||
- Best of both modes
|
||||
- Maximum accuracy
|
||||
|
||||
## Pipeline Configuration
|
||||
|
||||
### Model Selection
|
||||
|
||||
Choose models based on requirements:
|
||||
|
||||
```python
|
||||
# High accuracy (slower)
|
||||
config = {
|
||||
"transcription_model": "whisper-large-v3",
|
||||
"diarization_model": "pyannote-3.1",
|
||||
"translation_model": "seamless-m4t-large"
|
||||
}
|
||||
|
||||
# Balanced (default)
|
||||
config = {
|
||||
"transcription_model": "whisper-base",
|
||||
"diarization_model": "pyannote-3.1",
|
||||
"translation_model": "seamless-m4t-medium"
|
||||
}
|
||||
|
||||
# Fast processing
|
||||
config = {
|
||||
"transcription_model": "whisper-tiny",
|
||||
"diarization_model": "pyannote-3.1-fast",
|
||||
"translation_model": "seamless-m4t-small"
|
||||
}
|
||||
```
|
||||
|
||||
### Processing Options
|
||||
|
||||
Customize pipeline behavior:
|
||||
|
||||
```yaml
|
||||
# Parallel processing
|
||||
max_parallel_chunks: 10
|
||||
chunk_size_seconds: 30
|
||||
chunk_overlap_seconds: 1
|
||||
|
||||
# Quality settings
|
||||
enable_noise_reduction: true
|
||||
enable_normalization: true
|
||||
min_speech_confidence: 0.5
|
||||
|
||||
# Post-processing
|
||||
enable_translation: true
|
||||
target_languages: ["es", "fr", "de"]
|
||||
enable_summarization: true
|
||||
summary_length: "medium"
|
||||
```
|
||||
|
||||
## Performance Characteristics
|
||||
|
||||
### Processing Times
|
||||
|
||||
For 1 hour of audio:
|
||||
|
||||
| Pipeline Config | Processing Time | Accuracy |
|
||||
|----------------|-----------------|----------|
|
||||
| Fast | 2-3 minutes | 85-90% |
|
||||
| Balanced | 5-8 minutes | 92-95% |
|
||||
| High Accuracy | 15-20 minutes | 95-98% |
|
||||
|
||||
### Resource Usage
|
||||
|
||||
| Component | CPU Usage | Memory | GPU |
|
||||
|-----------|-----------|---------|-----|
|
||||
| Transcription | Medium | 2-4 GB | Required |
|
||||
| Diarization | High | 4-8 GB | Required |
|
||||
| Translation | Low | 2-3 GB | Optional |
|
||||
| Post-processing | Low | 1-2 GB | Not needed |
|
||||
|
||||
## Pipeline Orchestration
|
||||
|
||||
### Celery Task Chain
|
||||
|
||||
The pipeline is orchestrated using Celery:
|
||||
|
||||
```python
|
||||
chain = (
|
||||
chunk_audio.s(audio_id) |
|
||||
group(transcribe_chunk.s(chunk) for chunk in chunks) |
|
||||
merge_transcriptions.s() |
|
||||
diarize_audio.s() |
|
||||
align_speakers.s() |
|
||||
post_process.s()
|
||||
)
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Robust error recovery:
|
||||
- **Automatic Retry**: Failed tasks retry up to 3 times
|
||||
- **Partial Recovery**: Continue with successful chunks
|
||||
- **Fallback Models**: Use alternative models on failure
|
||||
- **Error Reporting**: Detailed error messages
|
||||
|
||||
### Progress Tracking
|
||||
|
||||
Real-time progress updates:
|
||||
- **Chunk Progress**: Track individual chunk processing
|
||||
- **Overall Progress**: Percentage completion
|
||||
- **ETA Calculation**: Estimated completion time
|
||||
- **WebSocket Updates**: Live progress to clients
|
||||
|
||||
## Optimization Strategies
|
||||
|
||||
### GPU Utilization
|
||||
|
||||
Maximize GPU efficiency:
|
||||
- **Batch Processing**: Process multiple chunks together
|
||||
- **Model Caching**: Keep models loaded in memory
|
||||
- **Dynamic Batching**: Adjust batch size based on GPU memory
|
||||
- **Multi-GPU Support**: Distribute across available GPUs
|
||||
|
||||
### Memory Management
|
||||
|
||||
Efficient memory usage:
|
||||
- **Streaming Processing**: Process large files in chunks
|
||||
- **Garbage Collection**: Clean up after each chunk
|
||||
- **Memory Limits**: Prevent out-of-memory errors
|
||||
- **Disk Caching**: Use disk for large intermediate results
|
||||
|
||||
### Network Optimization
|
||||
|
||||
Minimize network overhead:
|
||||
- **Compression**: Compress audio before transfer
|
||||
- **CDN Integration**: Use CDN for static assets
|
||||
- **Connection Pooling**: Reuse network connections
|
||||
- **Parallel Uploads**: Multiple concurrent uploads
|
||||
|
||||
## Quality Assurance
|
||||
|
||||
### Accuracy Metrics
|
||||
|
||||
Monitor processing quality:
|
||||
- **Word Error Rate (WER)**: Transcription accuracy
|
||||
- **Diarization Error Rate (DER)**: Speaker identification accuracy
|
||||
- **Translation BLEU Score**: Translation quality
|
||||
- **Summary Coherence**: Summary quality metrics
|
||||

### Validation Steps

Ensure output quality:
- **Confidence Thresholds**: Filter low-confidence segments
- **Consistency Checks**: Verify timeline consistency
- **Language Validation**: Ensure correct language detection
- **Format Validation**: Check output format compliance

## Advanced Features

### Custom Models

Use your own models:
- **Fine-tuned Whisper**: Domain-specific models
- **Custom Diarization**: Trained on your speakers
- **Specialized Post-processing**: Industry-specific formatting

### Pipeline Extensions

Add custom processing steps:
- **Sentiment Analysis**: Analyze emotional tone
- **Entity Extraction**: Identify people, places, organizations
- **Custom Metrics**: Calculate domain-specific metrics
- **Integration Hooks**: Call external services
EOF

# Create installation documentation
cat > docs/installation/overview.md << 'EOF'
---
sidebar_position: 1
title: Installation Overview
---

# Installation Overview

Reflector is designed for self-hosted deployment, giving you complete control over your infrastructure and data.

## Deployment Options

### Docker Deployment (Recommended)

The easiest way to deploy Reflector:
- Pre-configured containers
- Automated dependency management
- Consistent environment
- Easy updates

### Manual Installation

For custom deployments:
- Greater control over configuration
- Integration with existing infrastructure
- Custom optimization options
- Development environments

## Requirements

### System Requirements

**Minimum Requirements:**
- CPU: 4 cores
- RAM: 8 GB
- Storage: 50 GB
- OS: Ubuntu 20.04+ or similar Linux

**Recommended Requirements:**
- CPU: 8+ cores
- RAM: 16 GB
- Storage: 100 GB SSD
- GPU: NVIDIA GPU with 8GB+ VRAM (for local processing)

### Network Requirements

- Public IP address (for WebRTC)
- Ports: 80, 443, 8000, 3000
- Domain name (for SSL)
- SSL certificate (Let's Encrypt supported)

## Required Services

### Core Services

These services are required for basic operation:

1. **PostgreSQL** - Primary database
2. **Redis** - Message broker and cache
3. **Docker** - Container runtime

### GPU Processing

Choose one:
- **Modal.com** - Serverless GPU (recommended)
- **Local GPU** - Self-hosted GPU processing

### Optional Services

Enhance functionality with:
- **AWS S3** - Long-term storage
- **Whereby** - Video conferencing rooms
- **Authentik** - Enterprise authentication
- **Zulip** - Chat integration

## Quick Start

### Using Docker Compose

1. Clone the repository:
```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector
```

2. Navigate to the docker directory:
```bash
cd docker
```

3. Copy and configure environment:
```bash
cp .env.example .env
# Edit .env with your settings
```

4. Start services:
```bash
docker compose up -d
```

5. Access Reflector:
   - Frontend: https://your-domain.com
   - API: https://your-domain.com/api

## Configuration Overview

### Essential Configuration

```env
# Database
DATABASE_URL=postgresql://user:pass@localhost/reflector

# Redis
REDIS_URL=redis://localhost:6379

# Modal.com (for GPU processing)
TRANSCRIPT_MODAL_API_KEY=your-key
DIARIZATION_MODAL_API_KEY=your-key

# Domain
DOMAIN=your-domain.com
```

### Security Configuration

```env
# Authentication
REFLECTOR_AUTH_BACKEND=jwt
NEXTAUTH_SECRET=generate-strong-secret

# SSL (handled by Caddy)
# Automatic with Let's Encrypt
```

## Service Architecture

```mermaid
graph TD
    A[Caddy Reverse Proxy] --> B[Frontend - Next.js]
    A --> C[Backend - FastAPI]
    C --> D[PostgreSQL]
    C --> E[Redis]
    C --> F[Celery Workers]
    F --> G[Modal.com GPU]
```

## Next Steps

1. **Review Requirements**: [System Requirements](./requirements)
2. **Docker Setup**: [Docker Deployment Guide](./docker-setup)
3. **Configure Services**:
   - [Modal.com Setup](./modal-setup)
   - [Whereby Setup](./whereby-setup)
   - [AWS S3 Setup](./aws-setup)
4. **Optional Services**:
   - [Authentik Setup](./authentik-setup)
   - [Zulip Setup](./zulip-setup)

## Getting Help

- [Troubleshooting Guide](../reference/troubleshooting)
- [GitHub Issues](https://github.com/monadical-sas/reflector/issues)
- [Community Discord](#)
EOF

chmod +x create-docs.sh
echo "Documentation creation script ready. Run ./create-docs.sh to generate all docs."
125
docs/docs/concepts/modes.md
Normal file
@@ -0,0 +1,125 @@
---
sidebar_position: 2
title: Operating Modes
---

# Operating Modes

Reflector operates in two distinct modes to accommodate different use cases and security requirements.

## Public Mode

Public mode provides immediate access to core transcription features without requiring authentication.

### Features Available
- **File Upload**: Process audio files up to 2GB
- **Live Transcription**: Stream audio from microphone
- **Basic Processing**: Transcription and diarization
- **Temporary Storage**: Results available for 24 hours

### Limitations
- No persistent storage
- No meeting rooms
- Limited to single-user sessions
- No team collaboration features

### Use Cases
- Quick transcription needs
- Testing and evaluation
- Individual users
- Public demonstrations

## Private Mode

Private mode unlocks the full potential of Reflector with authentication and persistent storage.

### Additional Features
- **Virtual Meeting Rooms**: Whereby and Daily.co integration
- **Team Collaboration**: Share transcripts with team
- **Persistent Storage**: Long-term transcript archive
- **Advanced Analytics**: Meeting insights and trends
- **Custom Integration**: Webhooks and API access
- **User Management**: Role-based access control

### Authentication Options

#### Authentik Integration
Enterprise-grade SSO with support for:
- SAML 2.0
- OAuth 2.0 / OIDC
- LDAP / Active Directory
- Multi-factor authentication

#### JWT Authentication
Stateless token-based auth for:
- API access
- Service-to-service communication
- Mobile applications

### Room Management

Virtual rooms provide dedicated spaces for meetings:
- **Persistent URLs**: Same link for recurring meetings
- **Access Control**: Invite-only or open rooms
- **Recording Consent**: Automatic consent management
- **Custom Settings**: Per-room configuration

## Mode Selection

The mode is determined by your deployment configuration:

```yaml
# Public Mode (no authentication)
REFLECTOR_AUTH_BACKEND=none

# Private Mode (with authentication)
REFLECTOR_AUTH_BACKEND=jwt
# or
REFLECTOR_AUTH_BACKEND=authentik
```

## Feature Comparison

| Feature | Public Mode | Private Mode |
|---------|------------|--------------|
| File Upload | ✅ | ✅ |
| Live Transcription | ✅ | ✅ |
| Speaker Diarization | ✅ | ✅ |
| Translation | ✅ | ✅ |
| Summarization | ✅ | ✅ |
| Meeting Rooms | ❌ | ✅ |
| Persistent Storage | ❌ | ✅ |
| Team Collaboration | ❌ | ✅ |
| API Access | Limited | Full |
| User Management | ❌ | ✅ |
| Custom Branding | ❌ | ✅ |
| Analytics | ❌ | ✅ |
| Webhooks | ❌ | ✅ |

## Security Considerations

### Public Mode Security
- File size restrictions
- Automatic cleanup of old data

### Private Mode Security
- Encrypted data storage
- Audit logging
- Session management
- Access control lists
- Data retention policies

## Choosing the Right Mode

### Choose Public Mode if:
- You need quick, one-time transcriptions
- You're evaluating Reflector
- You don't need persistent storage
- You're processing non-sensitive content

### Choose Private Mode if:
- You need team collaboration
- You require persistent storage
- You're processing sensitive content
- You need meeting room functionality
- You want advanced analytics
194
docs/docs/concepts/overview.md
Normal file
@@ -0,0 +1,194 @@
---
sidebar_position: 1
title: Architecture Overview
---

# Architecture Overview

Reflector is built as a modern, scalable, microservices-based application designed to handle audio processing workloads efficiently while maintaining data privacy and control.

## System Components

### Frontend Application

The user interface is built with **Next.js 14** using the App Router pattern, providing:

- Server-side rendering for optimal performance
- Real-time WebSocket connections for live transcription
- WebRTC support for audio streaming and live meetings
- Responsive design with Chakra UI components

### Backend API Server

The core API is powered by **FastAPI**, a modern Python framework that provides:

- High-performance async request handling
- Automatic OpenAPI documentation generation
- Type safety with Pydantic models
- WebSocket support for real-time updates

### Processing Pipeline

Audio processing is handled through a modular pipeline architecture:

```
Audio Input → Chunking → Transcription → Diarization → Post-Processing → Storage
```

Each step can run independently and in parallel, allowing for:
- Scalable processing of large files
- Real-time streaming capabilities
- Fault tolerance and retry mechanisms

### Worker Architecture

Background tasks are managed by **Celery** workers with **Redis** as the message broker:

- Distributed task processing
- Priority queues for time-sensitive operations
- Automatic retry on failure
- Progress tracking and notifications

### GPU Acceleration

ML models run on GPU-accelerated infrastructure:

- **Modal.com** for serverless GPU processing
- Support for local GPU deployment (coming soon)
- Automatic scaling based on demand
- Cost-effective pay-per-use model

## Data Flow

### File Processing Flow

1. **Upload**: User uploads audio file through web interface
2. **Storage**: File stored temporarily or in S3
3. **Queue**: Processing job added to Celery queue
4. **Chunking**: Audio split into 30-second segments
5. **Parallel Processing**: Chunks processed simultaneously
6. **Assembly**: Results merged and aligned
7. **Post-Processing**: Summary, topics, translation
8. **Delivery**: Results stored and user notified

### Live Streaming Flow

1. **WebRTC Connection**: Browser establishes peer connection
2. **Audio Capture**: Microphone audio streamed to server
3. **Buffering**: Audio buffered for processing
4. **VAD**: Voice activity detection segments speech
5. **Real-time Processing**: Segments transcribed immediately
6. **WebSocket Updates**: Results streamed back to client
7. **Continuous Assembly**: Full transcript built progressively

## Deployment Architecture

### Container-Based Deployment

All components are containerized for consistent deployment:

```yaml
services:
  frontend:  # Next.js application
  backend:   # FastAPI server
  worker:    # Celery workers
  redis:     # Message broker
  postgres:  # Database
  caddy:     # Reverse proxy
```

### Networking

- **Host Network Mode**: Required for WebRTC/ICE compatibility
- **Caddy Reverse Proxy**: Handles SSL termination and routing
- **WebSocket Upgrade**: Supports real-time connections

## Scalability Considerations

### Horizontal Scaling

- **Stateless Backend**: Multiple API server instances
- **Worker Pools**: Add workers based on queue depth
- **Database Pooling**: Connection management for concurrent access

### Vertical Scaling

- **GPU Workers**: Scale up for faster model inference
- **Memory Optimization**: Efficient audio buffering
- **CPU Optimization**: Multi-threaded processing where applicable

## Security Architecture

### Authentication & Authorization

- **JWT Tokens**: Stateless authentication
- **Authentik Integration**: Enterprise SSO support
- **Role-Based Access**: Granular permissions

### Data Protection

- **Encryption at Rest**: Database and S3 encryption
- **Encryption in Transit**: TLS for all connections
- **Temporary Storage**: Automatic cleanup of processed files

### Privacy by Design

- **Local Processing**: Option to process entirely on-premises
- **No Training on User Data**: Models are pre-trained
- **Data Isolation**: Multi-tenant data separation

## Integration Points

### External Services

- **Modal.com**: GPU processing
- **AWS S3**: Long-term storage
- **Whereby**: Video conferencing rooms
- **Zulip**: Chat integration (optional)

### APIs and Webhooks

- **RESTful API**: Standard CRUD operations
- **WebSocket API**: Real-time updates
- **Webhook Notifications**: Processing completion events (a toy receiver is sketched below)
- **OpenAPI Specification**: Machine-readable API definition
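
For example, a consumer of the webhook notifications could be as small as the following FastAPI app; the payload fields shown are assumptions, check the OpenAPI spec for the actual schema:

```python
from fastapi import FastAPI, Request

app = FastAPI()

@app.post("/reflector-webhook")
async def on_processing_complete(request: Request):
    event = await request.json()
    # Hypothetical fields; the real payload is defined by the API spec.
    print(event.get("transcript_id"), event.get("status"))
    return {"ok": True}
```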

## Performance Optimization

### Caching Strategy

- **Redis Cache**: Frequently accessed data
- **CDN**: Static asset delivery
- **Browser Cache**: Client-side optimization

### Database Optimization

- **Indexed Queries**: Fast search and retrieval
- **Connection Pooling**: Efficient resource usage
- **Query Optimization**: N+1 query prevention

### Processing Optimization

- **Batch Processing**: Efficient GPU utilization
- **Parallel Execution**: Multi-core CPU usage
- **Stream Processing**: Reduced memory footprint

## Monitoring and Observability

### Metrics Collection

- **Application Metrics**: Request rates, response times
- **System Metrics**: CPU, memory, disk usage
- **Business Metrics**: Transcription accuracy, processing times

### Logging

- **Structured Logging**: JSON format for analysis
- **Log Aggregation**: Centralized log management
- **Error Tracking**: Sentry integration

### Health Checks

- **Liveness Probes**: Component availability (see the sketch below)
- **Readiness Probes**: Service readiness
- **Dependency Checks**: External service status
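
As an illustration, a liveness/readiness pair in FastAPI might look like this; the endpoint paths and the dependency check are assumptions, not Reflector's actual routes:

```python
from fastapi import FastAPI, Response

app = FastAPI()

@app.get("/healthz")  # liveness: the process is up
async def healthz():
    return {"status": "ok"}

@app.get("/readyz")  # readiness: dependencies reachable
async def readyz(response: Response):
    if not await redis_is_reachable():  # placeholder dependency check
        response.status_code = 503
        return {"status": "degraded"}
    return {"status": "ready"}
```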
272
docs/docs/concepts/pipeline.md
Normal file
@@ -0,0 +1,272 @@
---
sidebar_position: 4
title: Processing Pipeline
---

# Processing Pipeline

Reflector uses a modular pipeline architecture to process audio efficiently and accurately.

## Pipeline Overview

The processing pipeline consists of modular components that can be combined and configured based on your needs:

```mermaid
graph LR
    A[Audio Input] --> B[Pre-processing]
    B --> C[Chunking]
    C --> D[Transcription]
    D --> E[Diarization]
    E --> F[Alignment]
    F --> G[Post-processing]
    G --> H[Output]
```

## Pipeline Components

### Audio Input

Accepts various input sources:
- **File Upload**: MP3, WAV, M4A, WebM, MP4
- **WebRTC Stream**: Live browser audio
- **Recording Integration**: Whereby recordings
- **API Upload**: Direct API submission

### Pre-processing

Prepares audio for optimal processing:
- **Format Conversion**: Convert to 16kHz mono WAV
- **Noise Reduction**: Optional background noise removal
- **Validation**: Check duration and quality

### Chunking

Splits audio for parallel processing (a toy chunker is sketched after this list):
- **Fixed Size**: 30-second chunks by default
- **Overlap**: 1-second overlap for continuity
- **Silence Detection**: Attempt to split at silence
- **Metadata**: Track chunk positions
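
A toy version of fixed-size chunking with a 1-second overlap, working on 16 kHz mono samples as produced by pre-processing (a sketch, not Reflector's actual chunker):

```python
def chunk_audio(samples, rate=16_000, chunk_s=30, overlap_s=1):
    """Yield (start_time_seconds, chunk) pairs with overlapping boundaries."""
    step = (chunk_s - overlap_s) * rate
    size = chunk_s * rate
    for start in range(0, max(len(samples) - overlap_s * rate, 1), step):
        yield start / rate, samples[start:start + size]
```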

### Transcription

Converts speech to text:
- **Model Selection**: Whisper or Parakeet
- **Language Detection**: Automatic or specified
- **Timestamp Generation**: Word-level timing
- **Confidence Scores**: Quality indicators

### Diarization

Identifies different speakers:
- **Voice Activity Detection**: Find speech segments
- **Speaker Embedding**: Extract voice characteristics
- **Clustering**: Group similar voices
- **Label Assignment**: Assign speaker IDs

### Alignment

Merges all processing results:
- **Chunk Assembly**: Combine transcription chunks
- **Speaker Mapping**: Align speakers with text
- **Overlap Resolution**: Handle chunk boundaries
- **Timeline Creation**: Build unified timeline

### Post-processing

Enhances the final output:
- **Formatting**: Apply punctuation and capitalization
- **Translation**: Convert to target languages
- **Summarization**: Generate concise summaries
- **Topic Extraction**: Identify key themes
- **Action Items**: Extract tasks and decisions

## Processing Modes

### Batch Processing

For uploaded files:
- Optimized for throughput
- Parallel chunk processing
- Higher accuracy models
- Complete file analysis

### Stream Processing

For live audio:
- Optimized for latency
- Sequential processing
- Real-time feedback
- Progressive results

### Hybrid Processing

For meetings:
- Stream during meeting
- Batch after completion
- Best of both modes
- Maximum accuracy

## Pipeline Configuration

### Model Selection

Choose models based on requirements:

```python
# High accuracy (slower)
config = {
    "transcription_model": "whisper-large-v3",
    "diarization_model": "pyannote-3.1",
    "translation_model": "seamless-m4t-large"
}

# Balanced (default)
config = {
    "transcription_model": "whisper-base",
    "diarization_model": "pyannote-3.1",
    "translation_model": "seamless-m4t-medium"
}

# Fast processing
config = {
    "transcription_model": "whisper-tiny",
    "diarization_model": "pyannote-3.1-fast",
    "translation_model": "seamless-m4t-small"
}
```

### Processing Options

Customize pipeline behavior:

```yaml
# Parallel processing
max_parallel_chunks: 10
chunk_size_seconds: 30
chunk_overlap_seconds: 1

# Quality settings
enable_noise_reduction: true
min_speech_confidence: 0.5

# Post-processing
enable_translation: true
target_languages: ["es", "fr", "de"]
enable_summarization: true
summary_length: "medium"
```

## Performance Characteristics

### Processing Times

For 1 hour of audio:

| Pipeline Config | Processing Time | Accuracy |
|----------------|-----------------|----------|
| Fast | 2-3 minutes | 85-90% |
| Balanced | 5-8 minutes | 92-95% |
| High Accuracy | 15-20 minutes | 95-98% |

### Resource Usage

| Component | CPU Usage | Memory | GPU |
|-----------|-----------|---------|-----|
| Transcription | Medium | 2-4 GB | Required |
| Diarization | High | 4-8 GB | Required |
| Translation | Low | 2-3 GB | Optional |
| Post-processing | Low | 1-2 GB | Not needed |

## Pipeline Orchestration

### Celery Task Chain

The pipeline is orchestrated using Celery:

```python
chain = (
    chunk_audio.s(audio_id) |
    group(transcribe_chunk.s(chunk) for chunk in chunks) |
    merge_transcriptions.s() |
    diarize_audio.s() |
    align_speakers.s() |
    post_process.s()
)
```

### Error Handling

Error recovery:
- **Automatic Retry**: Failed tasks retry up to 3 times
- **Partial Recovery**: Continue with successful chunks
- **Fallback Models**: Use alternative models on failure
- **Error Reporting**: Detailed error messages

### Progress Tracking

Real-time progress updates:
- **Chunk Progress**: Track individual chunk processing
- **Overall Progress**: Percentage completion
- **ETA Calculation**: Estimated completion time
- **WebSocket Updates**: Live progress to clients

## Optimization Strategies

### GPU Utilization

Maximize GPU efficiency:
- **Batch Processing**: Process multiple chunks together
- **Model Caching**: Keep models loaded in memory
- **Dynamic Batching**: Adjust batch size based on GPU memory
- **Multi-GPU Support**: Distribute across available GPUs

### Memory Management

Efficient memory usage:
- **Streaming Processing**: Process large files in chunks
- **Garbage Collection**: Clean up after each chunk
- **Memory Limits**: Prevent out-of-memory errors
- **Disk Caching**: Use disk for large intermediate results

### Network Optimization

Minimize network overhead:
- **Compression**: Compress audio before transfer
- **CDN Integration**: Use CDN for static assets
- **Connection Pooling**: Reuse network connections
- **Parallel Uploads**: Multiple concurrent uploads

## Quality Assurance

### Accuracy Metrics

Monitor processing quality:
- **Word Error Rate (WER)**: Transcription accuracy
- **Diarization Error Rate (DER)**: Speaker identification accuracy
- **Translation BLEU Score**: Translation quality
- **Summary Coherence**: Summary quality metrics

### Validation Steps

Ensure output quality:
- **Confidence Thresholds**: Filter low-confidence segments
- **Consistency Checks**: Verify timeline consistency
- **Language Validation**: Ensure correct language detection
- **Format Validation**: Check output format compliance

## Advanced Features

### Custom Models

Use your own models:
- **Fine-tuned Whisper**: Domain-specific models
- **Custom Diarization**: Trained on your speakers
- **Specialized Post-processing**: Industry-specific formatting

### Pipeline Extensions

Add custom processing steps (a hook sketch follows this list):
- **Sentiment Analysis**: Analyze emotional tone
- **Entity Extraction**: Identify people, places, organizations
- **Custom Metrics**: Calculate domain-specific metrics
- **Integration Hooks**: Call external services
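
As an illustration of an integration hook, a custom step could be appended after post-processing along these lines; the registration API and model call are hypothetical:

```python
# Hypothetical extension point: a callable that receives the finished
# transcript and returns extra metadata to store alongside it.
def sentiment_step(transcript: dict) -> dict:
    text = " ".join(segment["text"] for segment in transcript["segments"])
    return {"sentiment": my_sentiment_model(text)}  # placeholder model call

# pipeline.register_post_processor(sentiment_step)  # assumed registration API
```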
285
docs/docs/installation/auth-setup.md
Normal file
@@ -0,0 +1,285 @@
---
sidebar_position: 5
title: Authentication Setup
---

# Authentication Setup

This page covers authentication setup in detail. For the complete deployment guide, see [Deployment Guide](./overview).

Reflector uses [Authentik](https://goauthentik.io/) for OAuth/OIDC authentication. This guide walks you through setting up Authentik and connecting it to Reflector.

For simplicity, this guide sets up Authentik on the same server as Reflector. You can use your own Authentik instance instead.

## Overview

Reflector's authentication flow:
1. User clicks "Sign In" on frontend
2. Frontend redirects to Authentik login page
3. User authenticates with Authentik
4. Authentik redirects back with OAuth tokens
5. Frontend stores tokens, backends verify the JWT signature (a verification sketch follows this list)
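
Step 5 amounts to standard RS256 verification against Authentik's public key. A minimal sketch using PyJWT, with the key path and audience matching the values configured later in this guide:

```python
import jwt  # PyJWT

token = "<jwt-from-the-frontend>"  # placeholder
public_key = open("reflector/auth/jwt/keys/authentik_public.pem").read()

claims = jwt.decode(
    token,
    public_key,
    algorithms=["RS256"],
    audience="<your-client-id>",  # must match AUTH_JWT_AUDIENCE
)
print(claims["sub"], claims.get("email"))
```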

## Option 1: Self-Hosted Authentik (Same Server)

This setup runs Authentik on the same server as Reflector, with Caddy proxying to both.

### Deploy Authentik

```bash
# Create directory for Authentik
mkdir -p ~/authentik && cd ~/authentik

# Download docker-compose file
curl -O https://goauthentik.io/docker-compose.yml

# Generate secrets and bootstrap credentials
# (EOF is unquoted so the $(openssl ...) substitutions expand here)
cat > .env << EOF
PG_PASS=$(openssl rand -base64 36 | tr -d '\n')
AUTHENTIK_SECRET_KEY=$(openssl rand -base64 60 | tr -d '\n')
# Privacy-focused choice for self-hosted deployments
AUTHENTIK_ERROR_REPORTING__ENABLED=false
AUTHENTIK_BOOTSTRAP_PASSWORD=YourSecurePassword123
AUTHENTIK_BOOTSTRAP_EMAIL=admin@example.com
EOF

# Start Authentik
sudo docker compose up -d
```

Authentik takes ~2 minutes to run migrations and apply blueprints on first start.

### Connect Authentik to Reflector's Network

If Authentik runs in a separate Docker Compose project, connect it to Reflector's network so Caddy can proxy to it:

```bash
# Wait for Authentik to be healthy, then
# connect the Authentik server to Reflector's network
sudo docker network connect reflector_default authentik-server-1
```

**Important:** This step must be repeated if you restart Authentik with `docker compose down`. Add it to your deployment scripts, or use `docker compose up -d` (which preserves containers) instead of down/up.

### Add Authentik to Caddy

Uncomment the Authentik section in your `Caddyfile` and set your domain:

```bash
nano Caddyfile
```

Uncomment and edit:
```
{$AUTHENTIK_DOMAIN:authentik.example.com} {
    reverse_proxy authentik-server-1:9000
}
```

Reload Caddy:
```bash
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```

### Create OAuth2 Provider in Authentik

**Option A: Automated Setup (Recommended)**

**Location: Reflector server**

Run the setup script from the Reflector repository:

```bash
ssh user@your-server-ip
cd ~/reflector
./scripts/setup-authentik-oauth.sh https://authentik.example.com YourSecurePassword123 https://app.example.com
```

**Important:** The script must be run from the `~/reflector` directory on your server, as it creates files using relative paths.

The script will output the configuration values to add to your `.env` files. Skip to "Update docker-compose.prod.yml".

**Option B: Manual Setup**

1. **Log in to Authentik Admin** at `https://authentik.example.com/`
   - Username: `akadmin`
   - Password: The `AUTHENTIK_BOOTSTRAP_PASSWORD` you set in `.env`

2. **Create OAuth2 Provider:**
   - Go to **Applications > Providers > Create**
   - Select **OAuth2/OpenID Provider**
   - Configure:
     - **Name**: `Reflector`
     - **Authorization flow**: `default-provider-authorization-implicit-consent`
     - **Client type**: `Confidential`
     - **Client ID**: Note this value (auto-generated)
     - **Client Secret**: Note this value (auto-generated)
     - **Redirect URIs**: Add an entry with:
       ```
       https://app.example.com/api/auth/callback/authentik
       ```
   - Scroll down to **Advanced protocol settings**
   - In **Scopes**, add these three mappings:
     - `authentik default OAuth Mapping: OpenID 'email'`
     - `authentik default OAuth Mapping: OpenID 'openid'`
     - `authentik default OAuth Mapping: OpenID 'profile'`
   - Click **Finish**

3. **Create Application:**
   - Go to **Applications > Applications > Create**
   - Configure:
     - **Name**: `Reflector`
     - **Slug**: `reflector` (auto-filled)
     - **Provider**: Select the `Reflector` provider you just created
   - Click **Create**

### Get Public Key for JWT Verification

**Location: Reflector server**

Extract the public key from Authentik's JWKS endpoint:

```bash
mkdir -p ~/reflector/server/reflector/auth/jwt/keys
curl -s https://authentik.example.com/application/o/reflector/jwks/ | \
  jq -r '.keys[0].x5c[0]' | base64 -d | openssl x509 -pubkey -noout \
  > ~/reflector/server/reflector/auth/jwt/keys/authentik_public.pem
```

### Update docker-compose.prod.yml

**Location: Reflector server**

**Note:** This step is already done in the current `docker-compose.prod.yml`. Verify the volume mounts exist:

```yaml
server:
  image: monadicalsas/reflector-backend:latest
  # ... other config ...
  volumes:
    - server_data:/app/data
    - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro

worker:
  image: monadicalsas/reflector-backend:latest
  # ... other config ...
  volumes:
    - server_data:/app/data
    - ./server/reflector/auth/jwt/keys:/app/reflector/auth/jwt/keys:ro
```

### Configure Reflector Backend

**Location: Reflector server**

Update `server/.env`:
```env
# Authentication
AUTH_BACKEND=jwt
AUTH_JWT_PUBLIC_KEY=authentik_public.pem
AUTH_JWT_AUDIENCE=<your-client-id>
CORS_ALLOW_CREDENTIALS=true
```

Replace `<your-client-id>` with the Client ID from previous steps.

### Configure Reflector Frontend

**Location: Reflector server**

Update `www/.env`:
```env
# Authentication
FEATURE_REQUIRE_LOGIN=true

# Authentik OAuth
AUTHENTIK_ISSUER=https://authentik.example.com/application/o/reflector
AUTHENTIK_REFRESH_TOKEN_URL=https://authentik.example.com/application/o/token/
AUTHENTIK_CLIENT_ID=<your-client-id>
AUTHENTIK_CLIENT_SECRET=<your-client-secret>

# NextAuth
NEXTAUTH_SECRET=<generate-with-openssl-rand-hex-32>
```

### Restart Services

**Location: Reflector server**

```bash
cd ~/reflector
sudo docker compose -f docker-compose.prod.yml up -d --force-recreate server worker web
```

### Verify Authentication

1. Visit `https://app.example.com`
2. Click "Log in" or navigate to `/api/auth/signin`
3. Click "Sign in with Authentik"
4. Log in with your Authentik credentials
5. You should be redirected back and see "Log out" in the header

## Option 2: Disable Authentication

For testing or internal deployments where authentication isn't needed:

**Backend `server/.env`:**
```env
AUTH_BACKEND=none
```

**Frontend `www/.env`:**
```env
FEATURE_REQUIRE_LOGIN=false
```

**Note:** The pre-built Docker images have `FEATURE_REQUIRE_LOGIN=true` baked in. To disable auth, you'll need to rebuild the frontend image with the env var set at build time, or set up Authentik.

## Troubleshooting

### "Invalid redirect URI" error
- Verify the redirect URI in Authentik matches exactly:
  ```
  https://app.example.com/api/auth/callback/authentik
  ```
- Check for trailing slashes - they must match exactly

### "Invalid audience" JWT error
- Ensure `AUTH_JWT_AUDIENCE` in `server/.env` matches the Client ID from Authentik
- The audience value is the OAuth Client ID, not the issuer URL

### "JWT verification failed" error
- Verify the public key file is mounted in the container
- Check `AUTH_JWT_PUBLIC_KEY` points to the correct filename
- Ensure the key was extracted from the correct provider's JWKS endpoint

### Caddy returns 503 for Authentik
- Verify the Authentik container is connected to Reflector's network:
  ```bash
  sudo docker network connect reflector_default authentik-server-1
  ```
- Check Authentik is healthy: `cd ~/authentik && sudo docker compose ps`

### Users can't access protected pages
- Verify `FEATURE_REQUIRE_LOGIN=true` in the frontend
- Check `AUTH_BACKEND=jwt` in the backend
- Verify CORS settings allow credentials

### Token refresh errors
- Ensure Redis is running (the frontend uses Redis for token caching)
- Verify `KV_URL` is set correctly in the frontend env
- Check `AUTHENTIK_REFRESH_TOKEN_URL` is correct

## API Key Authentication

For programmatic access (scripts, integrations), users can generate API keys:

1. Log in to Reflector
2. Go to Settings > API Keys
3. Click "Generate New Key"
4. Use the key in requests:
   ```bash
   curl -H "X-API-Key: your-api-key" https://api.example.com/v1/transcripts
   ```

API keys are stored hashed and can be revoked at any time (a hashing sketch follows).
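
"Stored hashed" means the server keeps only a digest and compares digests at request time, along these lines (a sketch, not Reflector's actual scheme):

```python
import hashlib
import hmac
import secrets

def new_api_key() -> tuple[str, str]:
    key = secrets.token_urlsafe(32)  # shown to the user once
    digest = hashlib.sha256(key.encode()).hexdigest()  # stored in the DB
    return key, digest

def verify(presented: str, stored_digest: str) -> bool:
    candidate = hashlib.sha256(presented.encode()).hexdigest()
    return hmac.compare_digest(candidate, stored_digest)
```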
165
docs/docs/installation/daily-setup.md
Normal file
@@ -0,0 +1,165 @@
---
sidebar_position: 6
title: Daily.co Setup
---

# Daily.co Setup

This page covers Daily.co video platform setup for live meeting rooms. For the complete deployment guide, see [Deployment Guide](./overview).

Daily.co enables live video meetings with automatic recording and transcription.

## What You'll Set Up

```
User joins meeting → Daily.co video room → Recording to S3 → [Webhook] → Reflector transcribes
```

## Prerequisites

- [ ] **Daily.co account** - Free tier at https://dashboard.daily.co
- [ ] **AWS account** - For S3 storage
- [ ] **Reflector deployed** - Complete steps from [Deployment Guide](./overview)

---

## Create Daily.co Account

1. Visit https://dashboard.daily.co and sign up
2. Verify your email
3. Note your subdomain (e.g., `yourname.daily.co` → subdomain is `yourname`)

---

## Get Daily.co API Key

1. In the Daily.co dashboard, go to **Developers**
2. Click **API Keys**
3. Click **Create API Key**
4. Copy the key (it's a long string)

Save this for later.

---

## Create AWS S3 Bucket

Daily.co needs somewhere to store recordings before Reflector processes them.

```bash
# Choose a unique bucket name ("-yourname" is not required; any unique name works)
BUCKET_NAME="reflector-dailyco-yourname"
AWS_REGION="us-east-1"

# Create bucket
aws s3 mb s3://$BUCKET_NAME --region $AWS_REGION

# Enable versioning (required)
aws s3api put-bucket-versioning \
  --bucket $BUCKET_NAME \
  --versioning-configuration Status=Enabled
```

---

## Create IAM Role for Daily.co

Daily.co needs permission to write recordings to your S3 bucket. In short, you'll create an IAM role and grant it access to the bucket.

Follow the guide at https://docs.daily.co/guides/products/live-streaming-recording/storing-recordings-in-a-custom-s3-bucket

Save the role ARN - you'll need it soon. It looks like: `arn:aws:iam::123456789012:role/DailyCo`

No additional setup is required in the Daily.co dashboard: the app code takes care of letting Daily.co know where to save the recordings.

---

## Configure Reflector

**Location: Reflector server**

Add to `server/.env`:

```env
# Daily.co Configuration
DEFAULT_VIDEO_PLATFORM=daily
DAILY_API_KEY=<your-api-key-from-daily-setup>
DAILY_SUBDOMAIN=<your-subdomain-from-daily-setup>

# S3 Storage for Daily.co recordings
DAILYCO_STORAGE_AWS_BUCKET_NAME=<your-bucket-from-daily-setup>
DAILYCO_STORAGE_AWS_REGION=us-east-1
DAILYCO_STORAGE_AWS_ROLE_ARN=<your-role-arn-from-daily-setup>

# Transcript storage (should already be configured from main setup)
# TRANSCRIPT_STORAGE_BACKEND=aws
# TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=<your-key>
# TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=<your-secret>
# TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=<your-bucket-name>
# TRANSCRIPT_STORAGE_AWS_REGION=<your-bucket-region>
```

---

## Restart Services

After changing `.env` files, reload with `up -d`:

```bash
sudo docker compose -f docker-compose.prod.yml up -d server worker
```

**Note**: `docker compose up -d` detects env changes and recreates containers automatically.

---

## Test Live Room

1. Visit your Reflector frontend: `https://app.example.com`
2. Go to **Rooms**
3. Click **Create Room**
4. Select **Daily** as the platform
5. Allow camera/microphone access
6. You should see the Daily.co video interface
7. Speak for 10-20 seconds
8. Leave the meeting
9. The recording should appear in **Transcripts** within 5 minutes (if webhooks aren't set up yet, see [Webhook Configuration](#webhook-configuration-optional) below)

---

## Troubleshooting

### Recording doesn't appear in S3

1. Check Daily.co dashboard → **Logs** for errors
2. Verify the IAM role trust policy has the correct Daily.co account ID and your Daily.co subdomain
3. Verify that the bucket has versioning enabled

### Recording in S3 but not transcribed

1. Check the webhook is configured (Reflector should auto-create it)
2. Check worker logs:
   ```bash
   docker compose -f docker-compose.prod.yml logs worker --tail 50
   ```
3. Verify the `DAILYCO_STORAGE_AWS_*` vars in `server/.env`

### "Access Denied" when Daily.co tries to write to S3

1. Double-check the IAM role ARN in your Daily.co settings
2. Verify the bucket name matches exactly
3. Check the IAM policy has `s3:PutObject` permission

---

## Webhook Configuration [optional]

The `manage_daily_webhook.py` script guides you through creating a webhook for Daily recordings.

The webhook isn't required: polling is the default and runs automatically.

This guide won't go deep into webhook setup.
192
docs/docs/installation/docker-setup.md
Normal file
@@ -0,0 +1,192 @@
---
sidebar_position: 3
title: Docker Reference
---

# Docker Reference

This page documents the Docker Compose configuration for Reflector. For the complete deployment guide, see [Deployment Guide](./overview).

## Services

The `docker-compose.prod.yml` includes these services:

| Service | Image | Purpose |
|---------|-------|---------|
| `web` | `monadicalsas/reflector-frontend` | Next.js frontend |
| `server` | `monadicalsas/reflector-backend` | FastAPI backend |
| `worker` | `monadicalsas/reflector-backend` | Celery worker for background tasks |
| `beat` | `monadicalsas/reflector-backend` | Celery beat scheduler |
| `redis` | `redis:7.2-alpine` | Message broker and cache |
| `postgres` | `postgres:17-alpine` | Primary database |
| `caddy` | `caddy:2-alpine` | Reverse proxy with auto-SSL |

## Environment Files

Reflector uses two separate environment files:

### Backend (`server/.env`)

Used by: `server`, `worker`, `beat`

Key variables:
```env
# Database connection
DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

# Redis
REDIS_HOST=redis
CELERY_BROKER_URL=redis://redis:6379/1
CELERY_RESULT_BACKEND=redis://redis:6379/1

# API domain and CORS
BASE_URL=https://api.example.com
CORS_ORIGIN=https://app.example.com

# Modal GPU processing
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://...
TRANSCRIPT_MODAL_API_KEY=...
```

### Frontend (`www/.env`)

Used by: `web`

Key variables:
```env
# Domain configuration
SITE_URL=https://app.example.com
API_URL=https://api.example.com
WEBSOCKET_URL=wss://api.example.com
SERVER_API_URL=http://server:1250

# Authentication
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=...
```

Note: `API_URL` is used client-side (browser), `SERVER_API_URL` is used server-side (SSR).

## Volumes

| Volume | Purpose |
|--------|---------|
| `redis_data` | Redis persistence |
| `postgres_data` | PostgreSQL data |
| `server_data` | Uploaded files, local storage |
| `caddy_data` | SSL certificates |
| `caddy_config` | Caddy configuration |

## Network

All services share the default network. The network is marked `attachable: true` to allow external containers (like Authentik) to join.

## Common Commands

### Start all services
```bash
docker compose -f docker-compose.prod.yml up -d
```

### View logs
```bash
# All services
docker compose -f docker-compose.prod.yml logs -f

# Specific service
docker compose -f docker-compose.prod.yml logs server --tail 50
```

### Restart a service
```bash
# Quick restart (doesn't reload .env changes)
docker compose -f docker-compose.prod.yml restart server

# Reload .env and restart
docker compose -f docker-compose.prod.yml up -d server
```

### Run database migrations
```bash
docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head
```

### Access database
```bash
docker compose -f docker-compose.prod.yml exec postgres psql -U reflector
```

### Pull latest images
```bash
docker compose -f docker-compose.prod.yml pull
docker compose -f docker-compose.prod.yml up -d
```

### Stop all services
```bash
docker compose -f docker-compose.prod.yml down
```

### Full reset (WARNING: deletes data)
```bash
docker compose -f docker-compose.prod.yml down -v
```

## Customization

### Using a different database

To use an external PostgreSQL:

1. Remove the `postgres` service from the compose file
2. Update `DATABASE_URL` in `server/.env`:
   ```env
   DATABASE_URL=postgresql+asyncpg://user:pass@external-host:5432/reflector
   ```

### Using external Redis

1. Remove the `redis` service from the compose file
2. Update Redis settings in `server/.env`:
   ```env
   REDIS_HOST=external-redis-host
   CELERY_BROKER_URL=redis://external-redis-host:6379/1
   ```

### Adding Authentik

To add Authentik for authentication, see [Authentication Setup](./auth-setup). Quick steps:

1. Deploy Authentik separately
2. Connect it to Reflector's network:
   ```bash
   docker network connect reflector_default authentik-server-1
   ```
3. Add to the Caddyfile:
   ```
   authentik.example.com {
       reverse_proxy authentik-server-1:9000
   }
   ```

## Caddyfile Reference

The Caddyfile supports environment variable substitution:

```
{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}
```

Set the `FRONTEND_DOMAIN` and `API_DOMAIN` environment variables, or edit the file directly.

### Reload Caddy after changes
```bash
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```
139
docs/docs/installation/docs-deployment.md
Normal file
@@ -0,0 +1,139 @@
---
sidebar_position: 10
title: Docs Website Deployment
---

# Docs Website Deployment

This guide covers deploying the Reflector documentation website. **This is optional and intended for internal/experimental use only.**

## Overview

The documentation is built using Docusaurus and deployed as a static nginx-served site.

## Prerequisites

- Reflector already deployed (Steps 1-7 from [Deployment Guide](./overview))
- DNS A record for the docs subdomain (e.g., `docs.example.com`)

## Deployment Steps

### Step 1: Pre-fetch OpenAPI Spec

The docs site includes an API reference generated from your running backend. Fetch it before building:

```bash
cd ~/reflector
docker compose -f docker-compose.prod.yml exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json
```

This creates `docs/static/openapi.json` (should be ~70KB), which will be copied during the Docker build.

**Why not fetch during build?** Docker build containers are network-isolated and can't access the running backend services.

### Step 2: Verify Dockerfile

The Dockerfile is already in `docs/Dockerfile`:

```dockerfile
FROM node:18-alpine AS builder
WORKDIR /app

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci

# Copy source (includes static/openapi.json if pre-fetched)
COPY . .

# Fix docusaurus config: change onBrokenLinks to 'warn' for Docker build
RUN sed -i "s/onBrokenLinks: 'throw'/onBrokenLinks: 'warn'/g" docusaurus.config.ts

# Build static site
RUN npx docusaurus build

FROM nginx:alpine
COPY --from=builder /app/build /usr/share/nginx/html
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
```

### Step 3: Add Docs Service to docker-compose.prod.yml

Add this service to `docker-compose.prod.yml`:

```yaml
docs:
  build: ./docs
  restart: unless-stopped
  networks:
    - default
```

### Step 4: Add Caddy Route

Add to `Caddyfile`:

```
{$DOCS_DOMAIN:docs.example.com} {
    reverse_proxy docs:80
}
```

### Step 5: Build and Deploy

```bash
cd ~/reflector
docker compose -f docker-compose.prod.yml up -d --build docs
docker compose -f docker-compose.prod.yml exec caddy caddy reload --config /etc/caddy/Caddyfile
```

### Step 6: Verify

```bash
# Check container status
docker compose -f docker-compose.prod.yml ps docs
# Should show "Up"

# Test URL
curl -I https://docs.example.com
# Should return HTTP/2 200
```

Visit `https://docs.example.com` in your browser.

## Updating Documentation

When docs are updated:

```bash
cd ~/reflector
git pull

# Refresh OpenAPI spec from backend
docker compose -f docker-compose.prod.yml exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json

# Rebuild docs
docker compose -f docker-compose.prod.yml up -d --build docs
```

## Troubleshooting

### Missing openapi.json during build
- Make sure you ran the pre-fetch step first (Step 1)
- Verify `docs/static/openapi.json` exists and is ~70KB
- Re-run: `docker compose exec server curl -s http://localhost:1250/openapi.json > docs/static/openapi.json`

### Build fails with "Docusaurus found broken links"
- This happens if `onBrokenLinks: 'throw'` is set in `docusaurus.config.ts`
- The fix is already in the Dockerfile: `sed` changes it to `'warn'` during the build

### 404 on all pages
- The Docusaurus `baseUrl` might be wrong - it should be `/` for a custom domain
- Check `docs/docusaurus.config.ts`: `baseUrl: '/'`

### Docs not updating after rebuild
- Force a rebuild: `docker compose -f docker-compose.prod.yml build --no-cache docs`
- Then: `docker compose -f docker-compose.prod.yml up -d docs`
171
docs/docs/installation/modal-setup.md
Normal file
@@ -0,0 +1,171 @@
---
sidebar_position: 4
title: Modal.com Setup
---

# Modal.com Setup

This page covers Modal.com GPU setup in detail. For the complete deployment guide, see [Deployment Guide](./overview).

Reflector uses [Modal.com](https://modal.com) for GPU-accelerated audio processing. This guide walks you through deploying the required GPU functions.

## What is Modal.com?

Modal is a serverless GPU platform. You deploy Python code that runs on their GPUs, and pay only for actual compute time. Reflector uses Modal for:

- **Transcription**: Whisper model for speech-to-text
- **Diarization**: Pyannote model for speaker identification

## Prerequisites

1. **Modal.com account** - Sign up at https://modal.com (free tier available)
2. **HuggingFace account** - Required for Pyannote diarization models:
   - Create an account at https://huggingface.co
   - Accept **both** Pyannote licenses:
     - https://huggingface.co/pyannote/speaker-diarization-3.1
     - https://huggingface.co/pyannote/segmentation-3.0
   - Generate an access token at https://huggingface.co/settings/tokens

## Deployment

**Location: YOUR LOCAL COMPUTER (laptop/desktop)**

The Modal CLI requires browser authentication, so this must run on a machine with a browser - not on a headless server.

### Install Modal CLI

```bash
pip install modal
```

### Authenticate with Modal

```bash
modal setup
```

This opens your browser for authentication. Complete the login flow.

### Clone Repository and Deploy

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/modal_deployments
./deploy-all.sh --hf-token YOUR_HUGGINGFACE_TOKEN
```

Or run interactively (the script will prompt for the token):
```bash
./deploy-all.sh
```

### What the Script Does

1. **Prompts for HuggingFace token** - Needed to download the Pyannote diarization model
2. **Generates API key** - Creates a secure random key for authenticating requests to GPU functions
3. **Creates Modal secrets**:
   - `hf_token` - Your HuggingFace token
   - `reflector-gpu` - The generated API key
4. **Deploys GPU functions** - Transcriber (Whisper) and Diarizer (Pyannote)
5. **Outputs configuration** - Prints URLs and API key to console

### Example Output

```
==========================================
Reflector GPU Functions Deployment
==========================================

Generating API key for GPU services...
Creating Modal secrets...
-> Creating secret: hf_token
-> Creating secret: reflector-gpu

Deploying transcriber (Whisper)...
-> https://yourname--reflector-transcriber-web.modal.run

Deploying diarizer (Pyannote)...
-> https://yourname--reflector-diarizer-web.modal.run

==========================================
Deployment complete!
==========================================

Copy these values to your server's server/.env file:

# --- Modal GPU Configuration ---
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://yourname--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=abc123...

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://yourname--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=abc123...
# --- End Modal Configuration ---
```

Copy the output and paste it into your `server/.env` file on your server.

## Costs

Modal charges based on GPU compute time:
- Functions scale to zero when not in use (no cost when idle)
- You only pay for actual processing time
- Free tier includes $30/month of credits

Typical costs for audio processing:
- Transcription: ~$0.01-0.05 per minute of audio
- Diarization: ~$0.02-0.10 per minute of audio

## Troubleshooting

### "Modal CLI not installed"
```bash
pip install modal
```

### "Not authenticated with Modal"
```bash
modal setup
# Complete browser authentication
```

### "Failed to create secret hf_token"
- Verify your HuggingFace token is valid
- Ensure you've accepted both Pyannote licenses
- The token needs `read` permission

### Deployment fails
Check the Modal dashboard for detailed error logs:
- Visit https://modal.com/apps
- Click on the failed function
- View build and runtime logs

### Re-running deployment
The script is safe to re-run. It will:
- Update existing secrets if they exist
- Redeploy functions with the latest code
- Output new configuration (the API key stays the same if the secret exists)

## Manual Deployment (Advanced)

If you prefer to deploy functions individually:

```bash
cd gpu/modal_deployments

# Create secrets manually
modal secret create hf_token HF_TOKEN=your-hf-token
modal secret create reflector-gpu REFLECTOR_GPU_APIKEY=$(openssl rand -hex 32)

# Deploy each function
modal deploy reflector_transcriber.py
modal deploy reflector_diarizer.py
```

## Monitoring

View your deployed functions and their usage:
- **Modal Dashboard**: https://modal.com/apps
- **Function logs**: Click on any function to view logs
- **Usage**: View compute time and costs in the dashboard
401
docs/docs/installation/overview.md
Normal file
@@ -0,0 +1,401 @@
---
sidebar_position: 1
title: Deployment Guide
---

# Deployment Guide

This guide walks you through deploying Reflector from scratch. Follow these steps in order.

## What You'll Set Up

```mermaid
flowchart LR
    User --> Caddy["Caddy (auto-SSL)"]
    Caddy --> Frontend["Frontend (Next.js)"]
    Caddy --> Backend["Backend (FastAPI)"]
    Backend --> PostgreSQL
    Backend --> Redis
    Backend --> Workers["Celery Workers"]
    Workers --> PostgreSQL
    Workers --> Redis
    Workers --> GPU["GPU Processing<br/>(Modal.com OR Self-hosted)"]
```

## Prerequisites

Before starting, you need:

- **Production server** - 4+ cores, 8GB+ RAM, public IP
- **Two domain names** - e.g., `app.example.com` (frontend) and `api.example.com` (backend)
- **GPU processing** - Choose one:
  - Modal.com account, OR
  - GPU server with NVIDIA GPU (8GB+ VRAM)
- **HuggingFace account** - Free at https://huggingface.co
  - Accept both Pyannote licenses (required for speaker diarization):
    - https://huggingface.co/pyannote/speaker-diarization-3.1
    - https://huggingface.co/pyannote/segmentation-3.0
- **LLM API** - For summaries and topic detection. Choose one:
  - OpenAI API key at https://platform.openai.com/account/api-keys, OR
  - Any OpenAI-compatible endpoint (vLLM, LiteLLM, Ollama, etc.)
- **AWS S3 bucket** - For storing audio files and transcripts (see [S3 Setup](#create-s3-bucket-for-transcript-storage) below)

### Optional (for live meeting rooms)

- **Daily.co account** - Free tier at https://dashboard.daily.co
- **AWS S3 bucket + IAM Role** - For Daily.co recording storage (separate from transcript storage)

---

## Configure DNS

```
Type: A    Name: app    Value: <your-server-ip>
Type: A    Name: api    Value: <your-server-ip>
```
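
Verify propagation before continuing:

```bash
dig +short app.example.com
dig +short api.example.com
# Both should print your server's public IP
```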

---

## Deploy GPU Processing

Reflector requires GPU processing for transcription and speaker diarization. Choose one option:

| | **Modal.com (Cloud)** | **Self-Hosted GPU** |
|---|---|---|
| **Best for** | No GPU hardware, zero maintenance | Own GPU server, full control |
| **Pricing** | Pay-per-use | Fixed infrastructure cost |

### Option A: Modal.com (Serverless Cloud GPU)

#### Accept HuggingFace Licenses

Visit both pages and click "Accept":
- https://huggingface.co/pyannote/speaker-diarization-3.1
- https://huggingface.co/pyannote/segmentation-3.0

Generate a token at https://huggingface.co/settings/tokens

#### Deploy to Modal

An install script automates this setup. It uses the Modal API to set up all the necessary moving parts.

Alternatively, everything the script does can be performed manually in the Modal UI.

```bash
pip install modal
modal setup  # opens browser for authentication

git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/modal_deployments
./deploy-all.sh --hf-token YOUR_HUGGINGFACE_TOKEN
```

**Save the output** - copy the configuration block, you'll need it soon.

See [Modal Setup](./modal-setup) for troubleshooting and details.

### Option B: Self-Hosted GPU

**Location: YOUR GPU SERVER**

Requires: NVIDIA GPU with 8GB+ VRAM, Ubuntu 22.04+, 40-50GB disk (Docker) or 25-30GB (Systemd).

See [Self-Hosted GPU Setup](./self-hosted-gpu-setup) for complete instructions. Quick summary:

1. Install NVIDIA drivers and Docker (or uv for systemd)
2. Clone repository: `git clone https://github.com/monadical-sas/reflector.git`
3. Configure `.env` with HuggingFace token
4. Start service (Docker compose or systemd)
5. Set up Caddy reverse proxy for HTTPS

**Save your API key and HTTPS URL** - you'll need them soon.

---

## Prepare Server

**Location: YOUR REFLECTOR SERVER**

### Install Docker

```bash
ssh user@your-server-ip

curl -fsSL https://get.docker.com | sh
sudo usermod -aG docker $USER

# Log out and back in for group changes
exit
ssh user@your-server-ip

docker --version  # verify
```

### Firewall

Ensure ports 80 (HTTP) and 443 (HTTPS) are open for inbound traffic. The method varies by cloud provider and OS configuration.

### Clone Repository

The Docker images contain all application code. You clone the repository for configuration files and the compose definition:

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector
```

---

## Create S3 Bucket for Transcript Storage

Reflector requires AWS S3 to store audio files during processing.

### Create Bucket

```bash
# Choose a unique bucket name
BUCKET_NAME="reflector-transcripts-yourname"
AWS_REGION="us-east-1"

# Create bucket
aws s3 mb s3://$BUCKET_NAME --region $AWS_REGION
```

### Create IAM User

Create an IAM user with S3 access for Reflector:

1. Go to AWS IAM Console → Users → Create User
2. Name: `reflector-transcripts`
3. Attach policy: `AmazonS3FullAccess` (or create a custom policy scoped to just your bucket - see the sketch below)
4. Create access key (Access key ID + Secret access key)

Save these credentials - you'll need them in the next step.
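
If you'd rather not grant `AmazonS3FullAccess`, a minimal bucket-scoped policy looks roughly like this (the bucket name assumes the one created above; adjust the actions if your setup needs more than object read/write/list):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["s3:PutObject", "s3:GetObject", "s3:DeleteObject"],
      "Resource": "arn:aws:s3:::reflector-transcripts-yourname/*"
    },
    {
      "Effect": "Allow",
      "Action": ["s3:ListBucket"],
      "Resource": "arn:aws:s3:::reflector-transcripts-yourname"
    }
  ]
}
```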

---

## Configure Environment

Reflector has two env files:
- `server/.env` - Backend configuration
- `www/.env` - Frontend configuration

### Backend Configuration

```bash
cp server/.env.example server/.env
nano server/.env
```

**Required settings:**
```env
# Database (defaults work with docker-compose.prod.yml)
DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

# Redis
REDIS_HOST=redis
CELERY_BROKER_URL=redis://redis:6379/1
CELERY_RESULT_BACKEND=redis://redis:6379/1

# Your domains
BASE_URL=https://api.example.com
CORS_ORIGIN=https://app.example.com
CORS_ALLOW_CREDENTIALS=true

# Secret key - generate with: openssl rand -hex 32
SECRET_KEY=<your-generated-secret>

# GPU Processing - choose ONE option:

# Option A: Modal.com (paste from deploy-all.sh output)
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://yourname--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=<from-deploy-all.sh-output>
DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://yourname--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=<from-deploy-all.sh-output>

# Option B: Self-hosted GPU (use your GPU server URL and API key)
# TRANSCRIPT_BACKEND=modal
# TRANSCRIPT_URL=https://gpu.example.com
# TRANSCRIPT_MODAL_API_KEY=<your-generated-api-key>
# DIARIZATION_BACKEND=modal
# DIARIZATION_URL=https://gpu.example.com
# DIARIZATION_MODAL_API_KEY=<your-generated-api-key>

# Storage - where to store audio files and transcripts (requires AWS S3)
TRANSCRIPT_STORAGE_BACKEND=aws
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=your-aws-access-key
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret-key
TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=reflector-transcripts-yourname
TRANSCRIPT_STORAGE_AWS_REGION=us-east-1

# LLM - for generating titles, summaries, and topics
LLM_API_KEY=sk-your-openai-api-key
LLM_MODEL=gpt-4o-mini
# LLM_URL=https://api.openai.com/v1  # Optional: custom endpoint (vLLM, LiteLLM, Ollama, etc.)

# Auth - disable for initial setup (see the authentication step below)
AUTH_BACKEND=none
```

### Frontend Configuration

```bash
cp www/.env.example www/.env
nano www/.env
```

**Required settings:**
```env
# Your domains
SITE_URL=https://app.example.com
API_URL=https://api.example.com
WEBSOCKET_URL=wss://api.example.com
SERVER_API_URL=http://server:1250

# NextAuth
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=<generate-with-openssl-rand-hex-32>

# Disable login requirement for initial setup
FEATURE_REQUIRE_LOGIN=false
```

---

## Configure Caddy

```bash
cp Caddyfile.example Caddyfile
nano Caddyfile
```

Replace `example.com` with your domains. The `{$VAR:default}` syntax uses Caddy's env var substitution - you can either edit the file directly or set `FRONTEND_DOMAIN` and `API_DOMAIN` environment variables.

```
{$FRONTEND_DOMAIN:app.example.com} {
    reverse_proxy web:3000
}

{$API_DOMAIN:api.example.com} {
    reverse_proxy server:1250
}
```
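
For example, instead of editing the file, you could export the domains before starting the stack (this assumes your compose setup passes these variables through to the Caddy container - verify in `docker-compose.prod.yml`):

```bash
export FRONTEND_DOMAIN=app.example.com
export API_DOMAIN=api.example.com
docker compose -f docker-compose.prod.yml up -d caddy
```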

---

## Start Services

```bash
docker compose -f docker-compose.prod.yml up -d
```

Wait for PostgreSQL to be ready, then run migrations:

```bash
# Wait for postgres to be healthy (may take 30-60 seconds on first run)
docker compose -f docker-compose.prod.yml exec postgres pg_isready -U reflector

# Run database migrations
docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head
```
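
If you're scripting the deployment, you can poll until Postgres reports healthy before running the migration (same commands as above, wrapped in a loop):

```bash
until docker compose -f docker-compose.prod.yml exec postgres pg_isready -U reflector; do
  echo "waiting for postgres..."
  sleep 2
done
docker compose -f docker-compose.prod.yml exec server uv run alembic upgrade head
```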

---

## Verify Deployment

### Check services
```bash
docker compose -f docker-compose.prod.yml ps
# All should show "Up"
```

### Test API
```bash
curl https://api.example.com/health
# Should return: {"status":"healthy"}
```

### Test Frontend
- Visit https://app.example.com
- You should see the Reflector interface
- Try uploading an audio file to test transcription

---

## Enable Authentication (Required for Live Rooms)

By default, Reflector is open (no login required). **Authentication is required if you want to use Live Meeting Rooms.**

See [Authentication Setup](./auth-setup) for full Authentik OAuth configuration.

Quick summary:
1. Deploy Authentik on your server
2. Create OAuth provider in Authentik
3. Extract public key for JWT verification
4. Update `server/.env`: `AUTH_BACKEND=jwt` + `AUTH_JWT_AUDIENCE`
5. Update `www/.env`: `FEATURE_REQUIRE_LOGIN=true` + Authentik credentials
6. Mount JWT keys volume and restart services

---

## Enable Live Meeting Rooms

**Requires: the Authentication step above**

Live rooms require Daily.co and AWS S3. See [Daily.co Setup](./daily-setup) for complete S3/IAM configuration instructions.

Note that Reflector also supports Whereby as a call provider - this doc doesn't cover its setup yet.

Quick config - add to `server/.env`:

```env
DEFAULT_VIDEO_PLATFORM=daily
DAILY_API_KEY=<from-daily.co-dashboard>
DAILY_SUBDOMAIN=<your-daily-subdomain>

# S3 for recording storage
DAILYCO_STORAGE_AWS_BUCKET_NAME=<your-bucket>
DAILYCO_STORAGE_AWS_REGION=us-east-1
DAILYCO_STORAGE_AWS_ROLE_ARN=<arn:aws:iam::ACCOUNT:role/DailyCo>
```

Reload env and restart:
```bash
docker compose -f docker-compose.prod.yml up -d server worker
```

---

## Troubleshooting

### Check logs for errors
```bash
docker compose -f docker-compose.prod.yml logs server --tail 20
docker compose -f docker-compose.prod.yml logs worker --tail 20
```

### Services won't start
```bash
docker compose -f docker-compose.prod.yml logs
```

### CORS errors in browser
- Verify `CORS_ORIGIN` in `server/.env` matches your frontend domain exactly (including `https://`)
- Reload env: `docker compose -f docker-compose.prod.yml up -d server`

### SSL certificate errors
- Caddy auto-provisions Let's Encrypt certificates
- Ensure ports 80 and 443 are open
- Check: `docker compose -f docker-compose.prod.yml logs caddy`

### Transcription not working
- Check Modal dashboard: https://modal.com/apps
- Verify URLs in `server/.env` match deployed functions
- Check worker logs: `docker compose -f docker-compose.prod.yml logs worker`

### "Login required" but auth not configured
- Set `FEATURE_REQUIRE_LOGIN=false` in `www/.env`
- Rebuild frontend: `docker compose -f docker-compose.prod.yml up -d --force-recreate web`
60
docs/docs/installation/requirements.md
Normal file
@@ -0,0 +1,60 @@
---
sidebar_position: 2
title: System Requirements
---

# System Requirements

This page lists hardware and software requirements. For the complete deployment guide, see [Deployment Guide](./overview).

## Server Requirements

### Minimum Requirements

- **CPU**: 4 cores
- **RAM**: 8 GB
- **Storage**: 50 GB SSD
- **OS**: Ubuntu 22.04+ or compatible Linux
- **Network**: Public IP address

### Recommended Requirements

- **CPU**: 8+ cores
- **RAM**: 16 GB
- **Storage**: 100 GB SSD
- **Network**: 1 Gbps connection

## Software Requirements

- Docker Engine 20.10+
- Docker Compose 2.0+

## External Services

### Required

- **Two domain names** - One for frontend (e.g., `app.example.com`), one for API (e.g., `api.example.com`)
- **Modal.com account or self-hosted NVIDIA GPU server** - For GPU-accelerated transcription and diarization (Modal has a free tier)
- **HuggingFace account** - For Pyannote diarization model access
- **OpenAI API key or any OpenAI-compatible endpoint** - For generating summaries and topic detection (https://platform.openai.com/account/api-keys)
- **AWS S3 bucket** - For Reflector to store "compiled" mp3 files and temporary files used during diarization

### Required for Live Meeting Rooms

- **Daily.co account** - For video conferencing (free tier available at https://dashboard.daily.co)
- **AWS S3 bucket + IAM Role** - For Daily.co to store recordings (can reuse the bucket above)

### Optional

- **Authentik** - For SSO/OIDC authentication
- **Sentry** - For error tracking

## Development Requirements

For local development only (not required for production deployment):

- Node.js 22+ (for frontend development)
- Python 3.12+ (for backend development)
- pnpm (for frontend package management)
- uv (for Python package management)
465
docs/docs/installation/self-hosted-gpu-setup.md
Normal file
@@ -0,0 +1,465 @@
---
sidebar_position: 5
title: Self-Hosted GPU Setup
---

# Self-Hosted GPU Setup

This guide covers deploying Reflector's GPU processing on your own server instead of Modal.com. For the complete deployment guide, see [Deployment Guide](./overview).

## When to Use Self-Hosted GPU

**Choose self-hosted GPU if you:**
- Have GPU hardware available (NVIDIA required)
- Want full control over processing
- Prefer fixed infrastructure costs over pay-per-use
- Have privacy or data locality requirements
- Need to process audio without external API calls

**Choose Modal.com instead if you:**
- Don't have GPU hardware
- Want zero infrastructure management
- Prefer pay-per-use pricing
- Need instant scaling for variable workloads

See [Modal.com Setup](./modal-setup) for cloud GPU deployment.

## What Gets Deployed

The self-hosted GPU service provides the same API endpoints as Modal:
- `POST /v1/audio/transcriptions` - Whisper transcription
- `POST /diarize` - Pyannote speaker diarization

Your main Reflector server connects to this service exactly like it connects to Modal - only the URL changes.

## Prerequisites

### Hardware
- **GPU**: NVIDIA GPU with 8GB+ VRAM (tested on Tesla T4 with 15GB)
- **CPU**: 4+ cores recommended
- **RAM**: 8GB minimum, 16GB recommended
- **Disk**:
  - Docker method: 40-50GB minimum
  - Systemd method: 25-30GB minimum

### Software
- Public IP address
- Domain name with DNS A record pointing to server

### Accounts
- **HuggingFace account** with accepted Pyannote licenses:
  - https://huggingface.co/pyannote/speaker-diarization-3.1
  - https://huggingface.co/pyannote/segmentation-3.0
- **HuggingFace access token** from https://huggingface.co/settings/tokens

## Choose Deployment Method

Two methods are covered below: **Docker** (recommended; isolates dependencies but needs more disk) and **Systemd** (runs directly on the host with uv). Both expose the same service on port 8000.

---

## Docker Deployment

### Step 1: Install NVIDIA Driver

```bash
sudo apt update
sudo apt install -y nvidia-driver-535

# Load kernel modules
sudo modprobe nvidia

# Verify installation
nvidia-smi
```

Expected output: GPU details with driver version and CUDA version.

### Step 2: Install Docker

```bash
curl -fsSL https://get.docker.com | sudo sh
sudo usermod -aG docker $USER

# Log out and back in for group changes
exit
# SSH back in
```

### Step 3: Install NVIDIA Container Toolkit

```bash
# Add NVIDIA repository
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | \
  sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg

curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
  sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
  sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list

# Install toolkit
sudo apt-get update
sudo apt-get install -y nvidia-container-toolkit

# Configure Docker runtime
sudo nvidia-ctk runtime configure --runtime=docker
sudo systemctl restart docker
```

### Step 4: Clone Repository and Configure

```bash
git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/self_hosted

# Create environment file
cat > .env << EOF
REFLECTOR_GPU_APIKEY=$(openssl rand -hex 16)
HF_TOKEN=your_huggingface_token_here
EOF

# Note the generated API key - you'll need it for main server config
cat .env
```

### Step 5: Create Docker Compose File

```bash
cat > compose.yml << 'EOF'
services:
  reflector_gpu:
    build:
      context: .
    ports:
      - "8000:8000"
    env_file:
      - .env
    volumes:
      - ./cache:/root/.cache
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
EOF
```

### Step 6: Build and Start

```bash
# Build image (takes ~5 minutes, downloads ~10GB)
sudo docker compose build

# Start service
sudo docker compose up -d

# Wait for startup and verify
sleep 30
sudo docker compose logs
```

Look for: `INFO: Application startup complete. Uvicorn running on http://0.0.0.0:8000`

### Step 7: Verify GPU Access

```bash
# Check GPU is accessible from container
sudo docker exec $(sudo docker ps -q) nvidia-smi
```

Should show GPU with ~3GB VRAM used (models loaded).

---

## Systemd Deployment

### Step 1: Install NVIDIA Driver

```bash
sudo apt update
sudo apt install -y nvidia-driver-535

# Load kernel modules
sudo modprobe nvidia

# Verify installation
nvidia-smi
```

### Step 2: Install Dependencies

```bash
# Install ffmpeg
sudo apt install -y ffmpeg

# Install uv package manager
curl -LsSf https://astral.sh/uv/install.sh | sh
source ~/.local/bin/env

# Clone repository
git clone https://github.com/monadical-sas/reflector.git
cd reflector/gpu/self_hosted
```

### Step 3: Configure Environment

```bash
# Create environment file
cat > .env << EOF
REFLECTOR_GPU_APIKEY=$(openssl rand -hex 16)
HF_TOKEN=your_huggingface_token_here
EOF

# Note the generated API key
cat .env
```

### Step 4: Install Python Packages

```bash
# Install dependencies (~3GB download)
uv sync
```

### Step 5: Create Systemd Service

```bash
# Generate library paths for NVIDIA packages
export NVIDIA_LIBS=$(find ~/reflector/gpu/self_hosted/.venv/lib/python3.12/site-packages/nvidia -name lib -type d | tr '\n' ':')

# Load environment variables
source ~/reflector/gpu/self_hosted/.env

# Create service file
sudo tee /etc/systemd/system/reflector-gpu.service << EOFSVC
[Unit]
Description=Reflector GPU Service (Transcription & Diarization)
After=network.target

[Service]
Type=simple
User=$USER
WorkingDirectory=$HOME/reflector/gpu/self_hosted
Environment="PATH=$HOME/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
Environment="HF_TOKEN=${HF_TOKEN}"
Environment="REFLECTOR_GPU_APIKEY=${REFLECTOR_GPU_APIKEY}"
Environment="LD_LIBRARY_PATH=${NVIDIA_LIBS}"
ExecStart=$HOME/reflector/gpu/self_hosted/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
Restart=always
RestartSec=10

[Install]
WantedBy=multi-user.target
EOFSVC

# Enable and start
sudo systemctl daemon-reload
sudo systemctl enable reflector-gpu
sudo systemctl start reflector-gpu
```

### Step 6: Verify Service

```bash
# Check status
sudo systemctl status reflector-gpu

# View logs
sudo journalctl -u reflector-gpu -f
```

Look for: `INFO: Application startup complete.`

---

## Configure HTTPS with Caddy

Both deployment methods need HTTPS for production. Caddy handles SSL automatically.

### Install Caddy

```bash
sudo apt install -y debian-keyring debian-archive-keyring apt-transport-https curl

curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | \
  sudo gpg --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg

curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | \
  sudo tee /etc/apt/sources.list.d/caddy-stable.list

sudo apt update
sudo apt install -y caddy
```

### Configure Reverse Proxy

```bash
sudo tee /etc/caddy/Caddyfile << 'EOF'
gpu.example.com {
    reverse_proxy localhost:8000
}
EOF

# Reload Caddy (auto-provisions SSL certificate)
sudo systemctl reload caddy
```

Replace `gpu.example.com` with your domain.

### Verify HTTPS

```bash
curl -I https://gpu.example.com/docs
# Should return HTTP/2 200
```
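
As a deeper smoke test, you can call the transcription endpoint directly. This is a sketch that assumes the service accepts the API key as a bearer token and an OpenAI-style multipart upload - check `gpu/self_hosted/main.py` for the exact contract:

```bash
curl -X POST https://gpu.example.com/v1/audio/transcriptions \
  -H "Authorization: Bearer $REFLECTOR_GPU_APIKEY" \
  -F "file=@sample.wav"
# Expect a JSON body containing the transcribed text
```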

---

## Configure Main Reflector Server

On your main Reflector server, update `server/.env`:

```env
# GPU Processing - Self-hosted
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://gpu.example.com
TRANSCRIPT_MODAL_API_KEY=<your-generated-api-key>

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://gpu.example.com
DIARIZATION_MODAL_API_KEY=<your-generated-api-key>
```

**Note:** The backend type is `modal` because the self-hosted GPU service implements the same API contract as Modal.com. This allows you to switch between cloud and self-hosted GPU processing by only changing the URL and API key.

Restart services to apply:

```bash
docker compose -f docker-compose.prod.yml restart server worker
```

---

## Service Management

All commands in this section assume you're in `~/reflector/gpu/self_hosted/`.

### Docker

```bash
# View logs
sudo docker compose logs -f

# Restart service
sudo docker compose restart

# Stop service
sudo docker compose down

# Check status
sudo docker compose ps
```

### Systemd

```bash
# View logs
sudo journalctl -u reflector-gpu -f

# Restart service
sudo systemctl restart reflector-gpu

# Stop service
sudo systemctl stop reflector-gpu

# Check status
sudo systemctl status reflector-gpu
```

### Monitor GPU

```bash
# Check GPU usage
nvidia-smi

# Watch in real-time
watch -n 1 nvidia-smi
```

**Typical GPU memory usage:**
- Idle (models loaded): ~3GB VRAM
- During transcription: ~4-5GB VRAM

---

## Troubleshooting

### nvidia-smi fails after driver install

```bash
# Manually load kernel modules
sudo modprobe nvidia
nvidia-smi
```

### Service fails with "Could not download pyannote pipeline"

1. Verify HF_TOKEN is valid: `echo $HF_TOKEN`
2. Check model access at https://huggingface.co/pyannote/speaker-diarization-3.1
3. Regenerate service/compose with correct token
4. Restart service

### cuDNN library loading errors (Systemd only)

Symptom: `Unable to load libcudnn_cnn.so`

Regenerate the systemd service file - the `LD_LIBRARY_PATH` must include all NVIDIA package directories.

### Cannot connect to HTTPS endpoint

1. Verify DNS resolves: `dig +short gpu.example.com`
2. Check firewall: `sudo ufw status` (ports 80, 443 must be open)
3. Check Caddy: `sudo systemctl status caddy`
4. View Caddy logs: `sudo journalctl -u caddy -n 50`

### SSL certificate not provisioning

Requirements for Let's Encrypt:
- Ports 80 and 443 publicly accessible
- DNS resolves to server's public IP
- Valid domain (not localhost or private IP)

### Docker container won't start

```bash
# Check logs
sudo docker compose logs

# Common issues:
# - Port 8000 already in use
# - GPU not accessible (nvidia-ctk not configured)
# - Missing .env file
```

---

## Updating

### Docker

```bash
cd ~/reflector/gpu/self_hosted
git pull
sudo docker compose build
sudo docker compose up -d
```

### Systemd

```bash
cd ~/reflector/gpu/self_hosted
git pull
uv sync
sudo systemctl restart reflector-gpu
```
7
docs/docs/installation/whereby-setup.md
Normal file
@@ -0,0 +1,7 @@
---
title: whereby setup
---

# whereby setup

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.
7
docs/docs/installation/zulip-setup.md
Normal file
@@ -0,0 +1,7 @@
---
title: zulip setup
---

# zulip setup

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.
61
docs/docs/intro.md
Normal file
@@ -0,0 +1,61 @@
---
sidebar_position: 1
title: Introduction
---

# Welcome to Reflector

Reflector is a privacy-focused, self-hosted, AI-powered audio transcription and meeting analysis platform. It provides real-time transcription, speaker diarization, translation, and summarization for audio content and live meetings, with complete control over your data and infrastructure: GPU processing can run on Modal.com or on your own hardware.

## What is Reflector?

Reflector is a web application that utilizes AI to process audio content, providing:

- **Real-time Transcription**: Convert speech to text using [Whisper](https://github.com/openai/whisper) (multi-language) or [Parakeet](https://github.com/NVIDIA/NeMo) (English) models
- **Speaker Diarization**: Identify and label different speakers using [Pyannote](https://github.com/pyannote/pyannote-audio) 3.1
- **Live Translation**: Translate audio content in real-time to 100+ languages with [Facebook Seamless-M4T](https://github.com/facebookresearch/seamless_communication)
- **Topic Detection & Summarization**: Extract key topics and generate concise summaries using LLMs
- **Meeting Recording**: Create permanent records of meetings with searchable transcripts

## Features

| Feature | Public Mode | Private Mode |
|---------|------------|--------------|
| **Authentication** | None required | Required |
| **Audio Upload** | ✅ | ✅ |
| **Live Microphone Streaming** | ✅ | ✅ |
| **Transcription** | ✅ | ✅ |
| **Speaker Diarization** | ✅ | ✅ |
| **Translation** | ✅ | ✅ |
| **Topic Detection** | ✅ | ✅ |
| **Summarization** | ✅ | ✅ |
| **Virtual Meeting Rooms (Whereby)** | ❌ | ✅ |
| **Browse Transcripts Page** | ❌ | ✅ |
| **Search Functionality** | ❌ | ✅ |
| **Persistent Storage** | ❌ | ✅ |

## Architecture Overview

Reflector consists of three main components:

- **Frontend**: React application built with Next.js 14
- **Backend**: Python server using FastAPI
- **Processing**: Scalable GPU workers for ML inference (Modal.com or self-hosted)

## Getting Started

Ready to deploy Reflector? Head over to our [Installation Guide](./installation/overview) to set up your own instance.

For a quick overview of how Reflector processes audio, check out our [Pipeline Documentation](./pipelines/overview).

## Open Source

Reflector is open source software developed by [Monadical](https://monadical.com) and licensed under the **MIT License**. We welcome contributions from the community!

- [GitHub Repository](https://github.com/monadical-sas/reflector)
- [Issue Tracker](https://github.com/monadical-sas/reflector/issues)
- [Pull Requests](https://github.com/monadical-sas/reflector/pulls)

## Support

Need help? Reach out to the community through GitHub Discussions.
343
docs/docs/pipelines/file-pipeline.md
Normal file
@@ -0,0 +1,343 @@
---
sidebar_position: 2
title: File Processing Pipeline
---

# File Processing Pipeline

The file processing pipeline handles uploaded audio files, optimizing for accuracy and throughput.

## Pipeline Stages

### 1. Input Stage

**Accepted Formats:**
- MP3 (most common)
- WAV (uncompressed)
- M4A (Apple format)
- WebM (browser recordings)
- MP4 (video with audio track)

**File Validation:**
- Maximum size: 2GB (configurable)
- Minimum duration: 5 seconds
- Maximum duration: 6 hours
- Sample rate: Any (will be resampled)

### 2. Pre-processing

**Audio Normalization:**
```yaml
# Convert to standard format
- Sample rate: 16kHz (Whisper requirement)
- Channels: Mono
- Bit depth: 16-bit
- Format: WAV
```

**Noise Reduction (Optional):**
- Background noise removal
- Echo cancellation
- High-pass filter for rumble

### 3. Chunking Strategy

**Default Configuration:**
```yaml
chunk_size: 30 # seconds
overlap: 1 # seconds
max_parallel: 10
silence_detection: true
```

**Chunking with Silence Detection:**
- Detects silence periods
- Attempts to break at natural pauses (see the sketch below)
- Maintains context with overlap
- Preserves sentence boundaries
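
A minimal sketch of silence-aware chunk boundaries. It uses pydub here for illustration - an assumption, not necessarily the library the pipeline uses:

```python
from pydub import AudioSegment
from pydub.silence import detect_silence

def chunk_boundaries(path, chunk_s=30, overlap_s=1):
    """Pick chunk end points near silences instead of hard cuts."""
    audio = AudioSegment.from_file(path)
    # Silences quieter than 16 dB below the average loudness, >= 500ms long
    silences = detect_silence(audio, min_silence_len=500,
                              silence_thresh=audio.dBFS - 16)
    boundaries, start = [], 0
    while start < len(audio):
        target = start + chunk_s * 1000
        # Snap the cut to a silence starting within +/-2s of the target
        near = [s for s, e in silences if abs(s - target) < 2000]
        end = min(near, key=lambda s: abs(s - target)) if near else target
        boundaries.append((start, min(end, len(audio))))
        start = end - overlap_s * 1000  # keep 1s overlap for context
    return boundaries
```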

**Chunk Metadata:**
```json
{
  "chunk_id": "chunk_001",
  "start_time": 0.0,
  "end_time": 30.0,
  "duration": 30.0,
  "has_speech": true,
  "audio_hash": "sha256:..."
}
```

### 4. Transcription Processing

**Whisper Models:**

| Model | Size | Speed | Accuracy | Use Case |
|-------|------|-------|----------|----------|
| tiny | 39M | Very Fast | 85% | Quick drafts |
| base | 74M | Fast | 89% | Good balance |
| small | 244M | Medium | 91% | Better accuracy |
| medium | 769M | Slow | 93% | High quality |
| large-v3 | 1550M | Very Slow | 96% | Best quality |

**Processing Configuration:**
```python
transcription_config = {
    "model": "whisper-base",
    "language": "auto",  # or specify: "en", "es", etc.
    "task": "transcribe",  # or "translate"
    "temperature": 0,  # deterministic
    "compression_ratio_threshold": 2.4,
    "no_speech_threshold": 0.6,
    "condition_on_previous_text": True,
    "initial_prompt": None,  # optional context
}
```

**Parallel Processing:**
- Each chunk processed independently
- GPU batching for efficiency
- Automatic load balancing
- Failure isolation

### 5. Diarization (Speaker Identification)

**Pyannote 3.1 Pipeline:**

1. **Voice Activity Detection (VAD)**
   - Identifies speech segments
   - Filters out silence and noise
   - Precision: 95%+

2. **Speaker Embedding**
   - Extracts voice characteristics
   - 256-dimensional vectors
   - Speaker-invariant features

3. **Clustering**
   - Groups similar voice embeddings
   - Agglomerative clustering
   - Automatic speaker count detection

4. **Segmentation**
   - Assigns speaker labels to time segments
   - Handles overlapping speech
   - Minimum segment duration: 0.5s

**Configuration:**
```python
diarization_config = {
    "min_speakers": 1,
    "max_speakers": 10,
    "min_duration": 0.5,
    "clustering": "AgglomerativeClustering",
    "embedding_model": "speechbrain/spkrec-ecapa-voxceleb",
}
```

### 6. Alignment & Merging

**Chunk Assembly:**
```python
# Merge overlapping chunks into a single timeline
merged_transcript = []
previous = None
for chunk in chunks:
    # Remove text duplicated by the overlap region
    if previous and chunk.start < previous.end:
        chunk.text = resolve_overlap(previous, chunk)

    # Maintain timeline order
    merged_transcript.append(chunk)
    previous = chunk
```

**Speaker Alignment:**
- Map diarization timeline to transcript (see the sketch below)
- Resolve speaker changes mid-sentence
- Handle multiple speakers per segment
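
One common way to map the diarization timeline onto the transcript is maximum temporal overlap - a sketch of the idea, not Reflector's exact implementation:

```python
def assign_speakers(transcript_segments, diarization_segments):
    """Label each transcript segment with the speaker whose
    diarization segment overlaps it the most."""
    for seg in transcript_segments:
        best, best_overlap = None, 0.0
        for spk in diarization_segments:
            overlap = min(seg.end, spk.end) - max(seg.start, spk.start)
            if overlap > best_overlap:
                best, best_overlap = spk.speaker, overlap
        seg.speaker = best  # None when no diarization segment overlaps
    return transcript_segments
```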

**Quality Checks:**
- Timeline consistency
- No gaps in transcript
- Speaker label continuity
- Confidence score validation

### 7. Post-processing Chain

**Text Formatting:**
- Sentence capitalization
- Punctuation restoration
- Number formatting
- Acronym detection

**Translation (Optional):**
```python
translation_config = {
    "model": "facebook/seamless-m4t-medium",
    "source_lang": "auto",
    "target_langs": ["es", "fr", "de"],
    "preserve_formatting": True
}
```

**Topic Detection:**
- LLM-based analysis
- Extract 3-5 key topics
- Keyword extraction
- Entity recognition

**Summarization:**
```python
summary_config = {
    "model": "openai-compatible",
    "max_length": 500,
    "style": "bullets",  # or "paragraph"
    "include_action_items": True,
    "include_decisions": True
}
```

### 8. Storage & Delivery

**Database Storage:**
```sql
-- Main transcript record
INSERT INTO transcripts (
    id, title, duration, language,
    transcript_text, transcript_json,
    speakers, topics, summary,
    created_at, processing_time
) VALUES (...);

-- Processing metadata
INSERT INTO processing_metadata (
    transcript_id, model_versions,
    chunk_count, total_chunks,
    error_count, warnings
) VALUES (...);
```

**File Storage:**
- Original audio: S3 (optional)
- Processed chunks: Temporary (24h)
- Transcript exports: JSON, SRT, VTT, TXT

**Notification:**
```json
{
  "type": "webhook",
  "url": "https://your-app.com/webhook",
  "payload": {
    "transcript_id": "...",
    "status": "completed",
    "duration": 3600,
    "processing_time": 180
  }
}
```

## Processing Times

**Estimated times for 1 hour of audio:**

| Component | Fast Mode | Balanced | High Quality |
|-----------|-----------|----------|--------------|
| Pre-processing | 10s | 10s | 10s |
| Transcription | 60s | 180s | 600s |
| Diarization | 30s | 60s | 120s |
| Post-processing | 20s | 30s | 60s |
| **Total** | **2 min** | **5 min** | **13 min** |

## Error Handling

### Retry Strategy

```python
@celery.task(
    bind=True,
    max_retries=3,
    default_retry_delay=60,
    retry_backoff=True
)
def process_chunk(self, chunk_id):
    try:
        # Process chunk
        return transcribe(chunk_id)
    except Exception as exc:
        # Retry with backoff between attempts
        raise self.retry(exc=exc)
```

### Partial Recovery

- Continue with successful chunks
- Mark failed chunks in output
- Provide partial transcript
- Report processing issues

### Fallback Options

1. **Model Fallback** (sketched below):
   - If large model fails, try medium
   - If GPU fails, try CPU
   - If Modal fails, try local

2. **Quality Degradation:**
   - Reduce chunk size
   - Disable post-processing
   - Skip diarization if needed
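
A sketch of the model-fallback idea (the `transcribe` helper and chain contents are hypothetical, for illustration only):

```python
FALLBACK_CHAIN = ["large-v3", "medium", "base"]

def transcribe_with_fallback(chunk_id):
    """Try progressively smaller models until one succeeds."""
    last_exc = None
    for model in FALLBACK_CHAIN:
        try:
            return transcribe(chunk_id, model=model)
        except Exception as exc:  # e.g. OOM or backend failure
            last_exc = exc
    raise last_exc  # all models failed; surface the last error
```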

## Optimization Tips

### For Speed

1. Use smaller models (tiny/base)
2. Increase parallel chunks
3. Disable diarization
4. Skip post-processing
5. Use GPU acceleration

### For Accuracy

1. Use larger models (medium/large)
2. Enable all pre-processing
3. Reduce chunk size
4. Enable silence detection
5. Multiple pass processing

### For Cost

1. Use Modal spot instances
2. Batch multiple files
3. Cache common phrases
4. Optimize chunk size
5. Selective post-processing

## Monitoring

### Metrics to Track

```python
metrics = {
    "processing_time": histogram,
    "chunk_success_rate": gauge,
    "model_accuracy": histogram,
    "queue_depth": gauge,
    "gpu_utilization": gauge,
    "cost_per_hour": counter
}
```

### Quality Metrics

- Word Error Rate (WER)
- Diarization Error Rate (DER)
- Confidence scores
- Processing speed
- User feedback

### Alerts

- Processing time > 30 minutes
- Error rate > 5%
- Queue depth > 100
- GPU memory > 90%
- Cost spike detected
7
docs/docs/pipelines/live-pipeline.md
Normal file
@@ -0,0 +1,7 @@
---
title: Live pipeline
---

# Live pipeline

Documentation coming soon.
7
docs/docs/pipelines/overview.md
Normal file
@@ -0,0 +1,7 @@
---
title: overview
---

# overview

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.
441
docs/docs/reference/api.md
Normal file
@@ -0,0 +1,441 @@
---
title: API Reference
---

# API Reference

The Reflector API provides a comprehensive RESTful interface for audio transcription, meeting management, and real-time streaming capabilities.

## Base URL

```
http://localhost:8000/v1
```

All API endpoints are prefixed with `/v1/` for versioning.

## Authentication

Reflector supports multiple authentication modes:

- **No Authentication** (Public Mode): Basic transcription and upload functionality
- **JWT Authentication** (Private Mode): Full feature access including meeting rooms and persistent storage
- **OAuth/OIDC via Authentik**: Enterprise single sign-on integration

## Core Endpoints

### Transcripts

Manage audio transcriptions and their associated metadata.

#### List Transcripts
```http
GET /v1/transcripts/
```

Returns a paginated list of transcripts with filtering options.

#### Create Transcript
```http
POST /v1/transcripts/
```

Create a new transcript from uploaded audio or initialize for streaming.

#### Get Transcript
```http
GET /v1/transcripts/{transcript_id}
```

Retrieve detailed information about a specific transcript.

#### Update Transcript
```http
PATCH /v1/transcripts/{transcript_id}
```

Update transcript metadata, summary, or processing status.

#### Delete Transcript
```http
DELETE /v1/transcripts/{transcript_id}
```

Remove a transcript and its associated data.

### Audio Processing

#### Upload Audio
```http
POST /v1/transcripts_audio/{transcript_id}/upload
```

Upload an audio file for transcription processing.

**Supported formats:**
- WAV, MP3, M4A, FLAC, OGG
- Maximum file size: 500MB
- Sample rates: 8kHz - 48kHz
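
For example, a minimal upload with curl might look like this (the multipart field name `file` and the bearer-token header are assumptions - consult the OpenAPI spec below for the exact schema):

```bash
curl -X POST \
  -H "Authorization: Bearer $REFLECTOR_TOKEN" \
  -F "file=@meeting.mp3" \
  "http://localhost:8000/v1/transcripts_audio/$TRANSCRIPT_ID/upload"
```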

#### Download Audio
```http
GET /v1/transcripts_audio/{transcript_id}/download
```

Download the original or processed audio file.

#### Stream Audio
```http
GET /v1/transcripts_audio/{transcript_id}/stream
```

Stream audio content with range support for progressive playback.

### WebRTC Streaming

Real-time audio streaming via WebRTC for live transcription.

#### Initialize WebRTC Session
```http
POST /v1/transcripts_webrtc/{transcript_id}/offer
```

Create a WebRTC offer for establishing a peer connection.

#### Complete WebRTC Handshake
```http
POST /v1/transcripts_webrtc/{transcript_id}/answer
```

Submit the WebRTC answer to complete connection setup.

### WebSocket Streaming

Real-time updates and live transcription via WebSocket.

#### WebSocket Endpoint
```ws
ws://localhost:8000/v1/transcripts_websocket/{transcript_id}
```

Receive real-time transcription updates, speaker changes, and processing status.

**Message Types:**
- `transcription`: New transcribed text segments
- `diarization`: Speaker identification updates
- `status`: Processing status changes
- `error`: Error notifications

### Meetings

Manage virtual meeting rooms and recordings.

#### List Meetings
```http
GET /v1/meetings/
```

Get all meetings for the authenticated user.

#### Create Meeting
```http
POST /v1/meetings/
```

Initialize a new meeting room with Whereby integration.

#### Join Meeting
```http
POST /v1/meetings/{meeting_id}/join
```

Join an existing meeting and start recording.

#### End Meeting
```http
POST /v1/meetings/{meeting_id}/end
```

End the meeting and finalize the recording.

### Rooms

Virtual meeting room configuration and management.

#### List Rooms
```http
GET /v1/rooms/
```

Get available meeting rooms.

#### Create Room
```http
POST /v1/rooms/
```

Create a new persistent meeting room.

#### Update Room Settings
```http
PATCH /v1/rooms/{room_id}
```

Modify room configuration and permissions.

## Response Formats

### Success Response
```json
{
  "id": "uuid",
  "created_at": "2025-01-20T10:00:00Z",
  "updated_at": "2025-01-20T10:30:00Z",
  "data": {...}
}
```

### Error Response
```json
{
  "error": {
    "code": "ERROR_CODE",
    "message": "Human-readable error message",
    "details": {...}
  }
}
```

### Status Codes

- `200 OK`: Successful request
- `201 Created`: Resource created successfully
- `204 No Content`: Successful deletion
- `400 Bad Request`: Invalid request parameters
- `401 Unauthorized`: Authentication required
- `403 Forbidden`: Insufficient permissions
- `404 Not Found`: Resource not found
- `409 Conflict`: Resource conflict
- `422 Unprocessable Entity`: Validation error
- `429 Too Many Requests`: Rate limit exceeded
- `500 Internal Server Error`: Server error

## WebSocket Protocol

The WebSocket connection provides real-time updates during transcription processing. The server sends structured messages to communicate different events and data updates.

### Connection
```javascript
const ws = new WebSocket('ws://localhost:8000/v1/transcripts_websocket/{transcript_id}');
```

### Message Types and Formats

#### Transcription Update
Sent when new text is transcribed from the audio stream.
```json
{
  "type": "transcription",
  "data": {
    "text": "The transcribed text segment",
    "speaker": "Speaker 1",
    "timestamp": 1705745623.456,
    "confidence": 0.95,
    "segment_id": "seg_001",
    "is_final": true
  }
}
```

#### Diarization Update
Sent when speaker changes are detected or speaker labels are updated.
```json
{
  "type": "diarization",
  "data": {
    "speaker": "Speaker 2",
    "speaker_id": "spk_002",
    "start_time": 1705745620.123,
    "end_time": 1705745625.456,
    "confidence": 0.87
  }
}
```

#### Processing Status
Sent to indicate changes in the processing pipeline status.
```json
{
  "type": "status",
  "data": {
    "status": "processing",
    "stage": "transcription",
    "progress": 45.5,
    "message": "Processing audio chunk 12 of 26"
  }
}
```

Status values:
- `initializing`: Setting up processing pipeline
- `processing`: Active transcription/diarization
- `completed`: Processing finished successfully
- `failed`: Processing encountered an error
- `paused`: Processing temporarily suspended

#### Summary Update
Sent when AI-generated summaries or topics are available.
```json
{
  "type": "summary",
  "data": {
    "summary": "Brief summary of the conversation",
    "topics": ["topic1", "topic2", "topic3"],
    "action_items": ["action 1", "action 2"],
    "key_points": ["point 1", "point 2"]
  }
}
```

#### Error Messages
Sent when errors occur during processing.
```json
{
  "type": "error",
  "data": {
    "code": "AUDIO_FORMAT_ERROR",
    "message": "Unsupported audio format",
    "details": {
      "format": "unknown",
      "sample_rate": 0
    },
    "recoverable": false
  }
}
```

#### Heartbeat/Keepalive
Sent periodically to maintain the connection.
```json
{
  "type": "ping",
  "data": {
    "timestamp": 1705745630.000
  }
}
```

### Client-to-Server Messages

Clients can send control messages to the server:

#### Start/Resume Processing
```json
{
  "action": "start",
  "params": {}
}
```

#### Pause Processing
```json
{
  "action": "pause",
  "params": {}
}
```

#### Request Status
```json
{
  "action": "get_status",
  "params": {}
}
```

## OpenAPI Specification

The complete OpenAPI 3.0 specification is available at:

```
http://localhost:8000/v1/openapi.json
```

You can import this specification into tools like:
- Postman
- Insomnia
- Swagger UI
- OpenAPI Generator (for client SDK generation)

## SDK Support

While Reflector doesn't provide official SDKs, you can generate client libraries using the OpenAPI specification with tools like:

- **Python**: `openapi-python-client`
- **TypeScript**: `openapi-typescript-codegen`
- **Go**: `oapi-codegen`
- **Java**: `openapi-generator`

## Example Usage

### Python Example
```python
import requests

# Upload and transcribe audio
with open('meeting.mp3', 'rb') as f:
    response = requests.post(
        'http://localhost:8000/v1/transcripts/',
        files={'file': f}
    )
transcript_id = response.json()['id']

# Check transcription status
status = requests.get(
    f'http://localhost:8000/v1/transcripts/{transcript_id}'
).json()

print(f"Transcription status: {status['status']}")
```

### JavaScript WebSocket Example
```javascript
// Connect to WebSocket for real-time transcription updates
const ws = new WebSocket(`ws://localhost:8000/v1/transcripts_websocket/${transcriptId}`);

ws.onopen = () => {
  console.log('Connected to transcription WebSocket');
};

ws.onmessage = (event) => {
  const message = JSON.parse(event.data);

  switch(message.type) {
    case 'transcription':
      console.log(`[${message.data.speaker}]: ${message.data.text}`);
      break;
    case 'diarization':
      console.log(`Speaker change: ${message.data.speaker}`);
      break;
    case 'status':
      console.log(`Status: ${message.data.status}`);
      break;
    case 'error':
      console.error(`Error: ${message.data.message}`);
      break;
  }
};

ws.onerror = (error) => {
  console.error('WebSocket error:', error);
};

ws.onclose = () => {
  console.log('WebSocket connection closed');
};
```

## Need Help?

- Review [example implementations](https://github.com/monadical-sas/reflector/tree/main/examples)
- Open an issue on [GitHub](https://github.com/monadical-sas/reflector/issues)
7
docs/docs/reference/api/overview.md
Normal file
7
docs/docs/reference/api/overview.md
Normal file
@@ -0,0 +1,7 @@
---
title: overview
---

# overview

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/architecture/backend.md
Normal file
@@ -0,0 +1,7 @@

---
title: backend
---

# backend

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/architecture/database.md
Normal file
@@ -0,0 +1,7 @@

---
title: database
---

# database

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/architecture/frontend.md
Normal file
@@ -0,0 +1,7 @@

---
title: frontend
---

# frontend

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/architecture/overview.md
Normal file
@@ -0,0 +1,7 @@

---
title: overview
---

# overview

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/architecture/workers.md
Normal file
@@ -0,0 +1,7 @@

---
title: workers
---

# workers

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/configuration.md
Normal file
@@ -0,0 +1,7 @@

---
title: configuration
---

# configuration

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/processors/analysis.md
Normal file
@@ -0,0 +1,7 @@

---
title: analysis
---

# analysis

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/processors/diarization.md
Normal file
@@ -0,0 +1,7 @@

---
title: diarization
---

# diarization

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/processors/transcription.md
Normal file
@@ -0,0 +1,7 @@

---
title: transcription
---

# transcription

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

7
docs/docs/reference/processors/translation.md
Normal file
@@ -0,0 +1,7 @@

---
title: translation
---

# translation

Documentation coming soon. See [TODO.md](/docs/TODO) for required information.

139
docs/docs/roadmap.md
Normal file
@@ -0,0 +1,139 @@
---
sidebar_position: 100
title: Roadmap
---

# Product Roadmap

Our development roadmap for Reflector, focusing on expanding capabilities while maintaining privacy and performance.

## Planned Features

### 🌍 Multi-Language Support Enhancement

**Current State:**
- Whisper supports 99+ languages for transcription
- Parakeet supports English only, with high accuracy
- Translation available to 100+ languages

**Planned Improvements:**
- Default language selection per room/user
- Automatic language detection improvements
- Multi-language diarization support
- RTL (Right-to-Left) language UI support
- Language-specific post-processing rules

### 🏠 Self-Hosted Room Providers

**Jitsi Integration**

Moving beyond Whereby to support self-hosted video conferencing:

- No API keys required
- Complete control over video infrastructure
- Custom branding and configuration
- Lower operational costs
- Enhanced privacy with self-hosted video

**Implementation Plan:**
- WebRTC bridge for Jitsi Meet
- Room management API integration
- Recording synchronization
- Participant tracking

### 📅 Calendar Integration

**Planned Capabilities:**
- Google Calendar synchronization
- Microsoft Outlook integration
- Automatic meeting room creation
- Pre-meeting document preparation
- Post-meeting transcript delivery
- Recurring meeting support

**Features:**
- Auto-join scheduled meetings
- Calendar-based access control
- Meeting agenda import
- Action item export to calendar

### 🖥️ Self-Hosted GPU Service

**For organizations with dedicated GPU hardware (H100, A100, RTX 4090):**

**Docker GPU Worker Image:**
- Self-contained processing service
- CUDA 11/12 support
- Pre-loaded models:
  - Whisper (all sizes)
  - Pyannote diarization
  - Seamless-M4T translation
- Automatic model management

**Deployment Options:**
- Kubernetes GPU operators
- Docker Compose with nvidia-docker
- Bare metal installation
- Hybrid cloud/on-premise

**Benefits:**
- No Modal.com dependency
- Complete data isolation
- Predictable costs
- Maximum performance
- Custom model support

## Future Considerations

### Enhanced Analytics
- Meeting insights dashboard
- Speaker participation metrics
- Topic trends over time
- Team collaboration patterns

### Advanced AI Features
- Real-time sentiment analysis
- Emotion detection
- Meeting quality scores
- Automated coaching suggestions

### Integration Ecosystem
- Slack/Teams notifications
- CRM integration (Salesforce, HubSpot)
- Project management tools (Jira, Asana)
- Knowledge bases (Notion, Confluence)

### Performance Improvements
- WebAssembly for client-side processing
- Edge computing support
- 5G network optimization
- Blockchain for transcript verification

## Contributing

We welcome community contributions! Areas where you can help:

1. **Language Support**: Add support for your language
2. **Integrations**: Connect with your favorite tools
3. **Models**: Fine-tune models for specific domains
4. **Documentation**: Improve guides and examples

See our [Contributing Guide](https://github.com/monadical-sas/reflector/blob/main/CONTRIBUTING.md) for details.

## Timeline

We don't provide specific dates, as development depends on community contributions and priorities. Features are generally released when they're ready and properly tested.

## Feature Requests

Have an idea for Reflector? We'd love to hear it!

- [Open a GitHub Issue](https://github.com/monadical-sas/reflector/issues/new)
- [Join our Discord](#)
- [Email us](mailto:reflector@monadical.com)

## Stay Updated

- Watch our [GitHub repository](https://github.com/monadical-sas/reflector)
- Follow our [blog](#)
- Subscribe to our [newsletter](#)
163
docs/docusaurus.config.ts
Normal file
@@ -0,0 +1,163 @@
import {themes as prismThemes} from 'prism-react-renderer';
import type {Config} from '@docusaurus/types';
import type * as Preset from '@docusaurus/preset-classic';
import type * as OpenApiPlugin from 'docusaurus-plugin-openapi-docs';

const config: Config = {
  title: 'Reflector',
  tagline: 'AI-powered audio transcription and meeting analysis platform',
  favicon: 'img/favicon.ico',

  url: 'https://monadical-sas.github.io',
  baseUrl: '/',

  organizationName: 'monadical-sas',
  projectName: 'reflector',

  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'warn',

  markdown: {
    mermaid: true,
  },

  i18n: {
    defaultLocale: 'en',
    locales: ['en'],
  },

  presets: [
    [
      'classic',
      {
        docs: {
          sidebarPath: './sidebars.ts',
          editUrl: 'https://github.com/monadical-sas/reflector/tree/main/docs/',
        },
        blog: false,
        theme: {
          customCss: './src/css/custom.css',
        },
      } satisfies Preset.Options,
    ],
  ],

  plugins: [
    [
      'docusaurus-plugin-openapi-docs',
      {
        id: 'openapi',
        docsPluginId: 'classic',
        config: {
          reflectorapi: {
            specPath: 'static/openapi.json', // Use local file fetched by script
            outputDir: 'docs/reference/api-generated',
            sidebarOptions: {
              groupPathsBy: 'tag',
              categoryLinkSource: 'tag',
            },
            downloadUrl: '/openapi.json',
            hideSendButton: false,
            showExtensions: true,
          } satisfies OpenApiPlugin.Options,
        },
      },
    ],
  ],

  themes: ['docusaurus-theme-openapi-docs', '@docusaurus/theme-mermaid'],

  themeConfig: {
    image: 'img/reflector-social-card.jpg',
    colorMode: {
      defaultMode: 'light',
      disableSwitch: false,
      respectPrefersColorScheme: true,
    },
    navbar: {
      title: 'Reflector',
      logo: {
        alt: 'Reflector Logo',
        src: 'img/reflector-logo.svg',
      },
      items: [
        {
          type: 'docSidebar',
          sidebarId: 'tutorialSidebar',
          position: 'left',
          label: 'Documentation',
        },
        {
          to: '/docs/reference/api',
          label: 'API',
          position: 'left',
        },
        {
          href: 'https://github.com/monadical-sas/reflector',
          label: 'GitHub',
          position: 'right',
        },
      ],
    },
    footer: {
      style: 'dark',
      links: [
        {
          title: 'Documentation',
          items: [
            {
              label: 'Introduction',
              to: '/docs/intro',
            },
            {
              label: 'Installation',
              to: '/docs/installation/overview',
            },
            {
              label: 'API Reference',
              to: '/docs/reference/api',
            },
          ],
        },
        {
          title: 'Resources',
          items: [
            {
              label: 'Architecture',
              to: '/docs/reference/architecture/overview',
            },
            {
              label: 'Pipelines',
              to: '/docs/pipelines/overview',
            },
            {
              label: 'Roadmap',
              to: '/docs/roadmap',
            },
          ],
        },
        {
          title: 'More',
          items: [
            {
              label: 'GitHub',
              href: 'https://github.com/monadical-sas/reflector',
            },
            {
              label: 'Docker Hub',
              href: 'https://hub.docker.com/r/reflector/backend',
            },
          ],
        },
      ],
    },
    copyright: `Copyright © ${new Date().getFullYear()} <a href="https://monadical.com" target="_blank" rel="noopener noreferrer">Monadical</a>. Licensed under MIT. Built with Docusaurus.`,
    prism: {
      theme: prismThemes.github,
      darkTheme: prismThemes.dracula,
      additionalLanguages: ['python', 'bash', 'docker', 'yaml'],
    },
  } satisfies Preset.ThemeConfig,
};

export default config;
23526
docs/package-lock.json
generated
Normal file
File diff suppressed because it is too large
53
docs/package.json
Normal file
@@ -0,0 +1,53 @@
{
  "name": "docs",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "docusaurus": "docusaurus",
    "start": "docusaurus start",
    "build": "docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "docusaurus deploy",
    "clear": "docusaurus clear",
    "serve": "docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids",
    "typecheck": "tsc",
    "fetch-openapi": "./scripts/fetch-openapi.sh",
    "gen-api-docs": "npm run fetch-openapi && docusaurus gen-api-docs reflectorapi",
    "prebuild": "npm run fetch-openapi"
  },
  "dependencies": {
    "@docusaurus/core": "3.6.3",
    "@docusaurus/preset-classic": "3.6.3",
    "@docusaurus/theme-mermaid": "3.6.3",
    "@mdx-js/react": "^3.0.0",
    "clsx": "^2.0.0",
    "docusaurus-plugin-openapi-docs": "^4.5.1",
    "docusaurus-theme-openapi-docs": "^4.5.1",
    "prism-react-renderer": "^2.3.0",
    "react": "^18.0.0",
    "react-dom": "^18.0.0"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "3.6.3",
    "@docusaurus/tsconfig": "3.6.3",
    "@docusaurus/types": "3.6.3",
    "typescript": "~5.6.2"
  },
  "browserslist": {
    "production": [
      ">0.5%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 3 chrome version",
      "last 3 firefox version",
      "last 5 safari version"
    ]
  },
  "engines": {
    "node": ">=18.0"
  }
}
115
docs/scripts/fetch-openapi.sh
Executable file
@@ -0,0 +1,115 @@
#!/bin/bash

# Script to fetch OpenAPI specification from FastAPI backend
# Used during documentation build process

set -e

echo "📡 Fetching OpenAPI specification from FastAPI backend..."

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration
BACKEND_DIR="../server"
OPENAPI_OUTPUT="./static/openapi.json"
SERVER_PORT=1250 # Reflector uses port 1250 by default
MAX_WAIT=30

# Check if backend directory exists
if [ ! -d "$BACKEND_DIR" ]; then
    echo -e "${RED}Error: Backend directory not found at $BACKEND_DIR${NC}"
    exit 1
fi

# Function to check if server is running
check_server() {
    curl -s -o /dev/null -w "%{http_code}" "http://localhost:${SERVER_PORT}/openapi.json" 2>/dev/null
}

# Function to cleanup on exit
cleanup() {
    if [ ! -z "$SERVER_PID" ]; then
        echo -e "\n${YELLOW}Stopping FastAPI server (PID: $SERVER_PID)...${NC}"
        kill $SERVER_PID 2>/dev/null || true
        wait $SERVER_PID 2>/dev/null || true
    fi
}

# Set trap to cleanup on exit
trap cleanup EXIT INT TERM

# Change to backend directory
cd "$BACKEND_DIR"

# Check if uv is installed
if ! command -v uv &> /dev/null; then
    echo -e "${YELLOW}uv not found, checking for python...${NC}"
    if command -v python3 &> /dev/null; then
        PYTHON_CMD="python3"
    elif command -v python &> /dev/null; then
        PYTHON_CMD="python"
    else
        echo -e "${RED}Error: Neither uv nor python found${NC}"
        exit 1
    fi
    RUN_CMD="$PYTHON_CMD -m"
else
    RUN_CMD="uv run -m"
fi

# Start the FastAPI server in the background (let it use default port 1250)
echo -e "${YELLOW}Starting FastAPI server...${NC}"
$RUN_CMD reflector.app > /dev/null 2>&1 &
SERVER_PID=$!

# Wait for server to be ready
echo -n "Waiting for server to be ready"
WAITED=0
while [ $WAITED -lt $MAX_WAIT ]; do
    if [ "$(check_server)" = "200" ]; then
        echo -e " ${GREEN}✓${NC}"
        break
    fi
    echo -n "."
    sleep 1
    WAITED=$((WAITED + 1))
done

if [ $WAITED -ge $MAX_WAIT ]; then
    echo -e " ${RED}✗${NC}"
    echo -e "${RED}Error: Server failed to start within ${MAX_WAIT} seconds${NC}"
    exit 1
fi

# Change back to docs directory
cd - > /dev/null

# Create static directory if it doesn't exist
mkdir -p "$(dirname "$OPENAPI_OUTPUT")"

# Fetch the OpenAPI specification
echo -e "${YELLOW}Fetching OpenAPI specification...${NC}"
if curl -s "http://localhost:${SERVER_PORT}/openapi.json" -o "$OPENAPI_OUTPUT"; then
    echo -e "${GREEN}✓ OpenAPI specification saved to $OPENAPI_OUTPUT${NC}"

    # Validate JSON
    if command -v jq &> /dev/null; then
        if jq empty "$OPENAPI_OUTPUT" 2>/dev/null; then
            echo -e "${GREEN}✓ OpenAPI specification is valid JSON${NC}"
            # Pretty print the JSON
            jq . "$OPENAPI_OUTPUT" > "${OPENAPI_OUTPUT}.tmp" && mv "${OPENAPI_OUTPUT}.tmp" "$OPENAPI_OUTPUT"
        else
            echo -e "${RED}Error: Invalid JSON in OpenAPI specification${NC}"
            exit 1
        fi
    fi
else
    echo -e "${RED}Error: Failed to fetch OpenAPI specification${NC}"
    exit 1
fi

echo -e "${GREEN}✅ OpenAPI specification successfully fetched!${NC}"
95
docs/sidebars.ts
Normal file
@@ -0,0 +1,95 @@
import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';

const sidebars: SidebarsConfig = {
  tutorialSidebar: [
    'intro',
    {
      type: 'category',
      label: 'Concepts',
      collapsed: false,
      items: [
        'concepts/overview',
        'concepts/modes',
        'concepts/pipeline',
      ],
    },
    {
      type: 'category',
      label: 'Installation',
      collapsed: false,
      items: [
        'installation/overview',
        'installation/requirements',
        'installation/docker-setup',
        'installation/modal-setup',
        'installation/self-hosted-gpu-setup',
        'installation/auth-setup',
        'installation/daily-setup',
        {
          type: 'category',
          label: 'Other Integrations',
          collapsed: true,
          items: [
            'installation/whereby-setup',
            'installation/zulip-setup',
          ],
        },
      ],
    },
    {
      type: 'category',
      label: 'Pipelines',
      items: [
        'pipelines/overview',
        'pipelines/file-pipeline',
        'pipelines/live-pipeline',
      ],
    },
    {
      type: 'category',
      label: 'Reference',
      items: [
        {
          type: 'category',
          label: 'Architecture',
          items: [
            'reference/architecture/overview',
            'reference/architecture/backend',
            'reference/architecture/frontend',
            'reference/architecture/workers',
            'reference/architecture/database',
          ],
        },
        {
          type: 'category',
          label: 'Processors',
          items: [
            'reference/processors/transcription',
            'reference/processors/diarization',
            'reference/processors/translation',
            'reference/processors/analysis',
          ],
        },
        {
          type: 'category',
          label: 'API',
          items: [
            {
              type: 'doc',
              id: 'reference/api/overview',
            },
            {
              type: 'link',
              label: 'OpenAPI Reference',
              href: '/docs/reference/api',
            },
          ],
        },
        'reference/configuration',
      ],
    },
    'roadmap',
  ],
};

export default sidebars;
70
docs/src/components/HomepageFeatures/index.tsx
Normal file
@@ -0,0 +1,70 @@
import clsx from 'clsx';
import Heading from '@theme/Heading';
import styles from './styles.module.css';

type FeatureItem = {
  title: string;
  Svg: React.ComponentType<React.ComponentProps<'svg'>>;
  description: JSX.Element;
};

const FeatureList: FeatureItem[] = [
  {
    title: 'Easy to Use',
    Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
    description: (
      <>
        Docusaurus was designed from the ground up to be easily installed and
        used to get your website up and running quickly.
      </>
    ),
  },
  {
    title: 'Focus on What Matters',
    Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
    description: (
      <>
        Docusaurus lets you focus on your docs, and we'll do the chores. Go
        ahead and move your docs into the <code>docs</code> directory.
      </>
    ),
  },
  {
    title: 'Powered by React',
    Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
    description: (
      <>
        Extend or customize your website layout by reusing React. Docusaurus can
        be extended while reusing the same header and footer.
      </>
    ),
  },
];

function Feature({title, Svg, description}: FeatureItem) {
  return (
    <div className={clsx('col col--4')}>
      <div className="text--center">
        <Svg className={styles.featureSvg} role="img" />
      </div>
      <div className="text--center padding-horiz--md">
        <Heading as="h3">{title}</Heading>
        <p>{description}</p>
      </div>
    </div>
  );
}

export default function HomepageFeatures(): JSX.Element {
  return (
    <section className={styles.features}>
      <div className="container">
        <div className="row">
          {FeatureList.map((props, idx) => (
            <Feature key={idx} {...props} />
          ))}
        </div>
      </div>
    </section>
  );
}
11
docs/src/components/HomepageFeatures/styles.module.css
Normal file
@@ -0,0 +1,11 @@
.features {
  display: flex;
  align-items: center;
  padding: 2rem 0;
  width: 100%;
}

.featureSvg {
  height: 200px;
  width: 200px;
}
46
docs/src/css/custom.css
Normal file
@@ -0,0 +1,46 @@
/**
 * Reflector Documentation Theme
 * Based on frontend colors from www/app/styles/theme.ts
 */

@import url('https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;500;600;700&display=swap');

:root {
  --ifm-color-primary: #3158E2;
  --ifm-color-primary-dark: #2847C9;
  --ifm-color-primary-darker: #2442BF;
  --ifm-color-primary-darkest: #1D369C;
  --ifm-color-primary-light: #4A6FE5;
  --ifm-color-primary-lighter: #5F81E8;
  --ifm-color-primary-lightest: #8DA6F0;

  --ifm-background-color: #FFFFFF;
  --ifm-background-surface-color: #F4F4F4;
  --ifm-font-color-base: #1A202C;
  --ifm-font-color-secondary: #838383;

  --ifm-code-font-size: 95%;
  --docusaurus-highlighted-code-line-bg: rgba(49, 88, 226, 0.1);

  --ifm-font-family-base: 'Poppins', system-ui, -apple-system, sans-serif;
  --ifm-font-family-monospace: 'Fira Code', 'Monaco', 'Consolas', monospace;
  --ifm-navbar-background-color: #FFFFFF;
  --ifm-heading-font-weight: 600;
}

[data-theme='dark'] {
  --ifm-color-primary: #B1CBFF;
  --ifm-color-primary-dark: #91B3FF;
  --ifm-color-primary-darker: #81A7FF;
  --ifm-color-primary-darkest: #5189FF;
  --ifm-color-primary-light: #D1DFFF;
  --ifm-color-primary-lighter: #E1EBFF;
  --ifm-color-primary-lightest: #F0F5FF;

  --ifm-background-color: #0C0D0E;
  --ifm-background-surface-color: #1A202C;
  --ifm-font-color-base: #E2E8F0;
  --ifm-font-color-secondary: #A0AEC0;
  --docusaurus-highlighted-code-line-bg: rgba(177, 203, 255, 0.1);
  --ifm-navbar-background-color: #1A202C;
}
23
docs/src/pages/index.module.css
Normal file
@@ -0,0 +1,23 @@
/**
 * CSS files with the .module.css suffix will be treated as CSS modules
 * and scoped locally.
 */

.heroBanner {
  padding: 4rem 0;
  text-align: center;
  position: relative;
  overflow: hidden;
}

@media screen and (max-width: 996px) {
  .heroBanner {
    padding: 2rem;
  }
}

.buttons {
  display: flex;
  align-items: center;
  justify-content: center;
}
7
docs/src/pages/index.tsx
Normal file
@@ -0,0 +1,7 @@
import React from 'react';
import { Redirect } from '@docusaurus/router';
import useBaseUrl from '@docusaurus/useBaseUrl';

export default function Home(): JSX.Element {
  return <Redirect to={useBaseUrl('/docs/intro')} />;
}
7
docs/src/pages/markdown-page.md
Normal file
@@ -0,0 +1,7 @@
---
title: Markdown page example
---

# Markdown page example

You don't need React to write simple standalone pages.
0
docs/static/.nojekyll
vendored
Normal file
0
docs/static/img/docusaurus-social-card.jpg
vendored
Normal file
0
docs/static/img/docusaurus.png
vendored
Normal file
0
docs/static/img/favicon.ico
vendored
Normal file
17
docs/static/img/logo.svg
vendored
Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 27.9.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
  viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
  .st0{fill:#B6B6B6;}
  .st1{fill:#4A4A4A;}
</style>
<g>
  <polygon class="st0" points="227.5,51.5 86.5,150.1 100.8,383.9 244.3,249.8 "/>
  <polygon class="st1" points="305.4,421.4 423.9,286 244.3,249.8 100.8,383.9 "/>
</g>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-12843059d855efa50c3a12db8586ced7.jpg" transform="matrix(1 0 0 1 1857.8739 723.9433)">
</image>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-f72ce8039f760337a51b47d045b477b8.jpg" transform="matrix(1 0 0 1 1857.8739 -512.4843)">
</image>
</svg>
17
docs/static/img/reflector-logo.svg
vendored
Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 27.9.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
  viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
  .st0{fill:#B6B6B6;}
  .st1{fill:#4A4A4A;}
</style>
<g>
  <polygon class="st0" points="227.5,51.5 86.5,150.1 100.8,383.9 244.3,249.8 "/>
  <polygon class="st1" points="305.4,421.4 423.9,286 244.3,249.8 100.8,383.9 "/>
</g>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-12843059d855efa50c3a12db8586ced7.jpg" transform="matrix(1 0 0 1 1857.8739 723.9433)">
</image>
<image style="overflow:visible;" width="1504" height="1128" xlink:href="Ref/original-f72ce8039f760337a51b47d045b477b8.jpg" transform="matrix(1 0 0 1 1857.8739 -512.4843)">
</image>
</svg>
171
docs/static/img/undraw_docusaurus_mountain.svg
vendored
Normal file
@@ -0,0 +1,171 @@
[vendored SVG illustration, <title>Easy to Use</title> (31 KiB); 171 lines of generated path data omitted]
170
docs/static/img/undraw_docusaurus_react.svg
vendored
Normal file
@@ -0,0 +1,170 @@
[SVG source omitted: "Powered by React" illustration, 1041x554]
After Width: | Height: | Size: 35 KiB |
40
docs/static/img/undraw_docusaurus_tree.svg
vendored
Normal file
@@ -0,0 +1,40 @@
[SVG source omitted: "Focus on What Matters" illustration, 1129x663]
After Width: | Height: | Size: 12 KiB |
3456
docs/static/openapi.json
vendored
Normal file
File diff suppressed because it is too large
241
docs/transcript.md
Normal file
@@ -0,0 +1,241 @@
# Transcript Formats

The Reflector API provides multiple output formats for transcript data through the `transcript_format` query parameter on the GET `/v1/transcripts/{id}` endpoint.

## Overview

When retrieving a transcript, you can specify the desired format using the `transcript_format` query parameter. The API supports four formats optimized for different use cases:

- **text** - Plain text with speaker names (default)
- **text-timestamped** - Timestamped text with speaker names
- **webvtt-named** - WebVTT subtitle format with participant names
- **json** - Structured JSON segments with full metadata

All formats include participant information when available, resolving speaker IDs to actual names.

## Query Parameter Usage

```
GET /v1/transcripts/{id}?transcript_format={format}
```

### Parameters

- `transcript_format` (optional): The desired output format
  - Type: `"text" | "text-timestamped" | "webvtt-named" | "json"`
  - Default: `"text"`
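A minimal client call, sketched in Python with `requests`; the base URL and bearer token here are placeholders for your deployment, not values defined by this API:

```python
import requests

BASE_URL = "https://reflector.example.com"  # assumption: your deployment's API root
TOKEN = "..."  # assumption: auth may not be required for publicly shared transcripts

def get_transcript(transcript_id: str, transcript_format: str = "text") -> dict:
    """Fetch a transcript in the requested format via the documented endpoint."""
    resp = requests.get(
        f"{BASE_URL}/v1/transcripts/{transcript_id}",
        params={"transcript_format": transcript_format},
        headers={"Authorization": f"Bearer {TOKEN}"},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()

print(get_transcript("transcript_123", "text")["transcript"])
```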
## Format Descriptions

### Text Format (`text`)

**Use case:** Simple, human-readable transcript for display or export.

**Format:** Speaker names followed by their dialogue, one line per segment.

**Example:**
```
John Smith: Hello everyone
Jane Doe: Hi there
John Smith: How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=text
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "text",
  "transcript": "John Smith: Hello everyone\nJane Doe: Hi there\nJohn Smith: How are you today?",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```
### Text Timestamped Format (`text-timestamped`)

**Use case:** Transcript with timing information for navigation or reference.

**Format:** `[MM:SS]` timestamp prefix before each speaker and dialogue.

**Example:**
```
[00:00] John Smith: Hello everyone
[00:05] Jane Doe: Hi there
[00:12] John Smith: How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=text-timestamped
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "text-timestamped",
  "transcript": "[00:00] John Smith: Hello everyone\n[00:05] Jane Doe: Hi there\n[00:12] John Smith: How are you today?",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```
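For programmatic access to timing data, the `json` format below is the right tool; still, the timestamped text can be recovered with a small regex. This is an illustrative sketch that assumes the exact `[MM:SS] Name: text` layout shown above:

```python
import re

# assumption: lines follow the "[MM:SS] Speaker Name: text" layout documented above
LINE_RE = re.compile(r"\[(\d{2}):(\d{2})\] (.+?): (.*)")

def parse_timestamped(transcript: str) -> list[tuple[int, str, str]]:
    """Parse '[MM:SS] Speaker: text' lines into (seconds, speaker, text) tuples."""
    segments = []
    for line in transcript.splitlines():
        m = LINE_RE.match(line)
        if m:
            minutes, seconds, speaker, text = m.groups()
            segments.append((int(minutes) * 60 + int(seconds), speaker, text))
    return segments

print(parse_timestamped("[00:05] Jane Doe: Hi there"))
# [(5, 'Jane Doe', 'Hi there')]
```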
### WebVTT Named Format (`webvtt-named`)

**Use case:** Subtitle files for video players, accessibility tools, or video editing.

**Format:** Standard WebVTT subtitle format with voice tags using participant names.

**Example:**
```
WEBVTT

00:00:00.000 --> 00:00:05.000
<v John Smith>Hello everyone

00:00:05.000 --> 00:00:12.000
<v Jane Doe>Hi there

00:00:12.000 --> 00:00:18.000
<v John Smith>How are you today?
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=webvtt-named
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "webvtt-named",
  "transcript": "WEBVTT\n\n00:00:00.000 --> 00:00:05.000\n<v John Smith>Hello everyone\n\n...",
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```
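Because the `transcript` field is already a complete WebVTT document, it can be written straight to a `.vtt` subtitle file. A sketch in Python; the URL is a placeholder for your deployment:

```python
import requests
from pathlib import Path

# assumption: placeholder base URL; add auth headers if your deployment requires them
resp = requests.get(
    "https://reflector.example.com/v1/transcripts/transcript_123",
    params={"transcript_format": "webvtt-named"},
    timeout=30,
)
resp.raise_for_status()

# the documented response carries the full WebVTT text in "transcript"
Path("transcript_123.vtt").write_text(resp.json()["transcript"], encoding="utf-8")
```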
### JSON Format (`json`)

**Use case:** Programmatic access with full timing and speaker metadata.

**Format:** Array of segment objects with speaker information, text content, and precise timing.

**Example:**
```json
[
  {
    "speaker": 0,
    "speaker_name": "John Smith",
    "text": "Hello everyone",
    "start": 0.0,
    "end": 5.0
  },
  {
    "speaker": 1,
    "speaker_name": "Jane Doe",
    "text": "Hi there",
    "start": 5.0,
    "end": 12.0
  },
  {
    "speaker": 0,
    "speaker_name": "John Smith",
    "text": "How are you today?",
    "start": 12.0,
    "end": 18.0
  }
]
```

**Request:**
```bash
GET /v1/transcripts/{id}?transcript_format=json
```

**Response:**
```json
{
  "id": "transcript_123",
  "name": "Meeting Recording",
  "transcript_format": "json",
  "transcript": [
    {
      "speaker": 0,
      "speaker_name": "John Smith",
      "text": "Hello everyone",
      "start": 0.0,
      "end": 5.0
    },
    {
      "speaker": 1,
      "speaker_name": "Jane Doe",
      "text": "Hi there",
      "start": 5.0,
      "end": 12.0
    }
  ],
  "participants": [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"}
  ],
  ...
}
```
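With per-segment `start`/`end` times, per-speaker statistics are straightforward to derive. A sketch over the segment shape documented above, using the example data rather than a live request:

```python
from collections import defaultdict

# segments as returned in the documented `json` response above
segments = [
    {"speaker": 0, "speaker_name": "John Smith", "text": "Hello everyone", "start": 0.0, "end": 5.0},
    {"speaker": 1, "speaker_name": "Jane Doe", "text": "Hi there", "start": 5.0, "end": 12.0},
]

# total speaking time per participant
talk_time = defaultdict(float)
for seg in segments:
    talk_time[seg["speaker_name"]] += seg["end"] - seg["start"]

for name, seconds in sorted(talk_time.items(), key=lambda kv: -kv[1]):
    print(f"{name}: {seconds:.1f}s")  # Jane Doe: 7.0s, John Smith: 5.0s
```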
## Response Structure
|
||||
|
||||
All formats return the same base transcript metadata with an additional `transcript_format` field and format-specific `transcript` field:
|
||||
|
||||
### Common Fields
|
||||
|
||||
- `id`: Transcript identifier
|
||||
- `user_id`: Owner user ID (if authenticated)
|
||||
- `name`: Transcript name
|
||||
- `status`: Processing status
|
||||
- `locked`: Whether transcript is locked for editing
|
||||
- `duration`: Total duration in seconds
|
||||
- `title`: Auto-generated or custom title
|
||||
- `short_summary`: Brief summary
|
||||
- `long_summary`: Detailed summary
|
||||
- `created_at`: Creation timestamp
|
||||
- `share_mode`: Access control setting
|
||||
- `source_language`: Original audio language
|
||||
- `target_language`: Translation target language
|
||||
- `reviewed`: Whether transcript has been reviewed
|
||||
- `meeting_id`: Associated meeting ID (if applicable)
|
||||
- `source_kind`: Source type (live, file, room)
|
||||
- `room_id`: Associated room ID (if applicable)
|
||||
- `audio_deleted`: Whether audio has been deleted
|
||||
- `participants`: Array of participant objects with speaker mappings
|
||||
|
||||
### Format-Specific Fields
|
||||
|
||||
- `transcript_format`: The format identifier (discriminator field)
|
||||
- `transcript`: The formatted transcript content (string for text/webvtt formats, array for json format)
|
||||
|
||||
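Since `transcript_format` acts as a discriminator, a client can branch on it to decide how to handle the `transcript` payload. A minimal sketch (Python with `requests`; the `BASE_URL` value is an assumption, the endpoint and field names follow the examples above):

```python
import requests

BASE_URL = "https://api.example.com"  # assumption: your Reflector API base URL
TRANSCRIPT_ID = "transcript_123"

resp = requests.get(
    f"{BASE_URL}/v1/transcripts/{TRANSCRIPT_ID}",
    params={"transcript_format": "json"},
)
resp.raise_for_status()
data = resp.json()

if data["transcript_format"] == "json":
    # Array of segment objects with precise timing
    for seg in data["transcript"]:
        print(f"[{seg['start']:.1f}s] {seg['speaker_name']}: {seg['text']}")
else:
    # text-timestamped / webvtt-named return a preformatted string
    print(data["transcript"])
```
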
## Speaker Name Resolution

All formats resolve speaker IDs to participant names when available:

- If a participant exists for the speaker ID, their name is used
- If no participant exists, a default name like "Speaker 0" is generated
- Speaker IDs are integers (0, 1, 2, etc.) assigned during diarization

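The resolution rule above fits in a few lines. A minimal sketch (Python; `participants` follows the shape shown in the responses above, the helper name is hypothetical):

```python
def resolve_speaker_name(speaker: int, participants: list[dict]) -> str:
    """Map a diarization speaker ID to a participant name, with a fallback."""
    for p in participants:
        if p.get("speaker") == speaker:
            return p["name"]
    return f"Speaker {speaker}"

participants = [
    {"id": "p1", "speaker": 0, "name": "John Smith"},
    {"id": "p2", "speaker": 1, "name": "Jane Doe"},
]
assert resolve_speaker_name(0, participants) == "John Smith"
assert resolve_speaker_name(2, participants) == "Speaker 2"
```
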
8 docs/tsconfig.json Normal file
@@ -0,0 +1,8 @@
{
  // This file is not used in compilation. It is here just for a nice editor experience.
  "extends": "@docusaurus/tsconfig",
  "compilerOptions": {
    "baseUrl": "."
  },
  "exclude": [".docusaurus", "build"]
}
150 gpu/modal_deployments/deploy-all.sh Executable file
@@ -0,0 +1,150 @@
#!/bin/bash
set -e

# --- Usage ---
usage() {
    echo "Usage: $0 [OPTIONS]"
    echo ""
    echo "Options:"
    echo "  --hf-token TOKEN    HuggingFace token"
    echo "  --help              Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0                        # Interactive mode"
    echo "  $0 --hf-token hf_xxxxx    # Non-interactive mode"
    echo ""
    exit 0
}

# --- Parse Arguments ---
HF_TOKEN=""
while [[ $# -gt 0 ]]; do
    case $1 in
        --hf-token)
            HF_TOKEN="$2"
            shift 2
            ;;
        --help)
            usage
            ;;
        *)
            echo "Unknown option: $1"
            usage
            ;;
    esac
done

echo "=========================================="
echo "Reflector GPU Functions Deployment"
echo "=========================================="
echo ""

# --- Check Dependencies ---
if ! command -v modal &> /dev/null; then
    echo "Error: Modal CLI not installed."
    echo "  Install with: pip install modal"
    exit 1
fi

if ! command -v openssl &> /dev/null; then
    echo "Error: openssl not found."
    echo "  Mac: brew install openssl"
    echo "  Ubuntu: sudo apt-get install openssl"
    exit 1
fi

# Check Modal authentication
if ! modal profile current &> /dev/null; then
    echo "Error: Not authenticated with Modal."
    echo "  Run: modal setup"
    exit 1
fi

# --- HuggingFace Token Setup ---
if [ -z "$HF_TOKEN" ]; then
    echo "HuggingFace token required for Pyannote diarization model."
    echo "1. Create account at https://huggingface.co"
    echo "2. Accept license at https://huggingface.co/pyannote/speaker-diarization-3.1"
    echo "3. Generate token at https://huggingface.co/settings/tokens"
    echo ""
    read -p "Enter your HuggingFace token: " HF_TOKEN
fi

if [ -z "$HF_TOKEN" ]; then
    echo "Error: HuggingFace token is required for diarization"
    exit 1
fi

# Basic token format validation
if [[ ! "$HF_TOKEN" =~ ^hf_ ]]; then
    echo "Warning: HuggingFace tokens usually start with 'hf_'"
    if [ -t 0 ]; then
        read -p "Continue anyway? (y/n): " confirm
        if [ "$confirm" != "y" ]; then
            exit 1
        fi
    else
        echo "Non-interactive mode: proceeding anyway"
    fi
fi

# --- Auto-generate reflector<->GPU API Key ---
echo ""
echo "Generating API key for GPU services..."
API_KEY=$(openssl rand -hex 32)

# --- Create Modal Secrets ---
echo "Creating Modal secrets..."

# Create or update hf_token secret (delete first if exists)
if modal secret list 2>/dev/null | grep -q "hf_token"; then
    echo "  -> Recreating secret: hf_token"
    modal secret delete hf_token --yes 2>/dev/null || true
fi
echo "  -> Creating secret: hf_token"
modal secret create hf_token HF_TOKEN="$HF_TOKEN"

# Create or update reflector-gpu secret (delete first if exists)
if modal secret list 2>/dev/null | grep -q "reflector-gpu"; then
    echo "  -> Recreating secret: reflector-gpu"
    modal secret delete reflector-gpu --yes 2>/dev/null || true
fi
echo "  -> Creating secret: reflector-gpu"
modal secret create reflector-gpu REFLECTOR_GPU_APIKEY="$API_KEY"

# --- Deploy Functions ---
echo ""
echo "Deploying transcriber (Whisper)..."
TRANSCRIBER_URL=$(modal deploy reflector_transcriber.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
if [ -z "$TRANSCRIBER_URL" ]; then
    echo "Error: Failed to deploy transcriber. Check Modal dashboard for details."
    exit 1
fi
echo "  -> $TRANSCRIBER_URL"

echo ""
echo "Deploying diarizer (Pyannote)..."
DIARIZER_URL=$(modal deploy reflector_diarizer.py 2>&1 | grep -o 'https://[^ ]*web.modal.run' | head -1)
if [ -z "$DIARIZER_URL" ]; then
    echo "Error: Failed to deploy diarizer. Check Modal dashboard for details."
    exit 1
fi
echo "  -> $DIARIZER_URL"

# --- Output Configuration ---
echo ""
echo "=========================================="
echo "Deployment complete!"
echo "=========================================="
echo ""
echo "Copy these values to your server's server/.env file:"
echo ""
echo "# --- Modal GPU Configuration ---"
echo "TRANSCRIPT_BACKEND=modal"
echo "TRANSCRIPT_URL=$TRANSCRIBER_URL"
echo "TRANSCRIPT_MODAL_API_KEY=$API_KEY"
echo ""
echo "DIARIZATION_BACKEND=modal"
echo "DIARIZATION_URL=$DIARIZER_URL"
echo "DIARIZATION_MODAL_API_KEY=$API_KEY"
echo "# --- End Modal Configuration ---"
@@ -24,6 +24,12 @@ app = modal.App(name="reflector-diarizer")
 upload_volume = modal.Volume.from_name("diarizer-uploads", create_if_missing=True)


+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py (this file)
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -39,6 +45,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")

     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -105,7 +113,7 @@ def download_pyannote_audio():


 diarizer_image = (
-    modal.Image.debian_slim(python_version="3.10.8")
+    modal.Image.debian_slim(python_version="3.10")
     .pip_install(
         "pyannote.audio==3.1.0",
         "requests",
@@ -116,7 +124,7 @@ diarizer_image = (
         "transformers==4.34.0",
         "sentencepiece",
         "protobuf",
-        "numpy",
+        "numpy<2",
         "huggingface_hub",
         "hf-transfer",
     )
@@ -89,6 +89,7 @@ image = (
         "torch==2.5.1",
         "faster-whisper==1.1.1",
         "fastapi==0.115.12",
         "python-multipart",
         "requests",
         "librosa==0.10.1",
+        "numpy<2",
@@ -98,6 +99,12 @@ image = (
 )


+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (this file - 2 copies!)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -113,6 +120,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")

     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -315,6 +324,11 @@ class TranscriberWhisperFile:
         import numpy as np
         from silero_vad import VADIterator

+        # IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
+        # If you modify this function, you MUST update all copies:
+        # - gpu/modal_deployments/reflector_transcriber.py (this file)
+        # - gpu/modal_deployments/reflector_transcriber_parakeet.py
+        # - gpu/self_hosted/app/services/transcriber.py
         def vad_segments(
             audio_array,
             sample_rate: int = SAMPLERATE,
@@ -322,6 +336,7 @@ class TranscriberWhisperFile:
         ) -> Generator[TimeSegment, None, None]:
             """Generate speech segments as TimeSegment using Silero VAD."""
             iterator = VADIterator(self.vad_model, sampling_rate=sample_rate)
+            audio_duration = len(audio_array) / float(SAMPLERATE)
             start = None
             for i in range(0, len(audio_array), window_size):
                 chunk = audio_array[i : i + window_size]
@@ -341,6 +356,9 @@ class TranscriberWhisperFile:
                         start / float(SAMPLERATE), end / float(SAMPLERATE)
                     )
                     start = None
+            # Handle case where audio ends while speech is still active
+            if start is not None:
+                yield TimeSegment(start / float(SAMPLERATE), audio_duration)
             iterator.reset_states()

         upload_volume.reload()
@@ -406,6 +424,12 @@ class TranscriberWhisperFile:
         return {"text": " ".join(all_text), "words": all_words}


+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (this file - 2 copies!)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: dict) -> str:
     from urllib.parse import urlparse

@@ -423,6 +447,8 @@ def detect_audio_format(url: str, headers: dict) -> str:
         return "wav"
     if "audio/mp4" in content_type:
         return "mp4"
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return "webm"

     raise HTTPException(
         status_code=400,
@@ -77,19 +77,25 @@ image = (
     .pip_install(
         "hf_transfer==0.1.9",
         "huggingface_hub[hf-xet]==0.31.2",
-        "nemo_toolkit[asr]==2.3.0",
+        "nemo_toolkit[asr]==2.5.0",
         "cuda-python==12.8.0",
         "fastapi==0.115.12",
         "numpy<2",
-        "librosa==0.10.1",
+        "librosa==0.11.0",
         "requests",
-        "silero-vad==5.1.0",
+        "silero-vad==6.2.0",
         "torch",
     )
     .entrypoint([])  # silence chatty logs by container on start
 )


+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py
+# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py (this file)
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtension:
     parsed_url = urlparse(url)
     url_path = parsed_url.path
@@ -105,6 +111,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> AudioFileExtens
         return AudioFileExtension("wav")
     if "audio/mp4" in content_type:
         return AudioFileExtension("mp4")
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return AudioFileExtension("webm")

     raise ValueError(
         f"Unsupported audio format for URL: {url}. "
@@ -301,11 +309,17 @@ class TranscriberParakeetFile:
         audio_array, sample_rate = librosa.load(file_path, sr=SAMPLERATE, mono=True)
         return audio_array

+    # IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
+    # If you modify this function, you MUST update all copies:
+    # - gpu/modal_deployments/reflector_transcriber.py
+    # - gpu/modal_deployments/reflector_transcriber_parakeet.py (this file)
+    # - gpu/self_hosted/app/services/transcriber.py
     def vad_segment_generator(
         audio_array,
     ) -> Generator[TimeSegment, None, None]:
         """Generate speech segments using VAD with start/end sample indices"""
         vad_iterator = VADIterator(self.vad_model, sampling_rate=SAMPLERATE)
+        audio_duration = len(audio_array) / float(SAMPLERATE)
         window_size = VAD_CONFIG["window_size"]
         start = None

@@ -332,6 +346,10 @@ class TranscriberParakeetFile:
             yield TimeSegment(start_time, end_time)
             start = None

+        if start is not None:
+            start_time = start / float(SAMPLERATE)
+            yield TimeSegment(start_time, audio_duration)
+
         vad_iterator.reset_states()

     def batch_speech_segments(
@@ -103,7 +103,7 @@ def configure_seamless_m4t():


 transcriber_image = (
-    Image.debian_slim(python_version="3.10.8")
+    Image.debian_slim(python_version="3.10")
     .apt_install("git")
     .apt_install("wget")
     .apt_install("libsndfile-dev")
@@ -119,6 +119,7 @@ transcriber_image = (
         "fairseq2",
         "pyyaml",
         "hf-transfer~=0.1",
+        "pydantic",
     )
     .run_function(install_seamless_communication)
     .run_function(download_seamlessm4t_model)
137 gpu/self_hosted/DEV_SETUP.md Normal file
@@ -0,0 +1,137 @@
# Local Development GPU Setup

Run transcription and diarization locally for development/testing.

> **For production deployment**, see the [Self-Hosted GPU Setup Guide](../../docs/docs/installation/self-hosted-gpu-setup.md).

## Prerequisites

1. **Python 3.12+** and the **uv** package manager
2. **FFmpeg** installed and on PATH
3. **HuggingFace account** with access to pyannote models

### Accept Pyannote Licenses (Required)

Before first run, accept licenses for these gated models (logged into HuggingFace):
- https://hf.co/pyannote/speaker-diarization-3.1
- https://hf.co/pyannote/segmentation-3.0

## Quick Start

### 1. Install dependencies

```bash
cd gpu/self_hosted
uv sync
```

### 2. Start the GPU service

```bash
cd gpu/self_hosted
HF_TOKEN=<your-huggingface-token> \
REFLECTOR_GPU_APIKEY=dev-key-12345 \
.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

Note: The `.env` file is NOT auto-loaded. Pass env vars explicitly or use:
```bash
export HF_TOKEN=<your-token>
export REFLECTOR_GPU_APIKEY=dev-key-12345
.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

### 3. Configure Reflector to use local GPU

Edit `server/.env`:

```bash
# Transcription - local GPU service
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=http://host.docker.internal:8000
TRANSCRIPT_MODAL_API_KEY=dev-key-12345

# Diarization - local GPU service
DIARIZATION_BACKEND=modal
DIARIZATION_URL=http://host.docker.internal:8000
DIARIZATION_MODAL_API_KEY=dev-key-12345
```

Note: Use `host.docker.internal` because the Reflector server runs in Docker.

### 4. Restart the Reflector server

```bash
cd server
docker compose restart server worker
```

## Testing

### Test transcription

```bash
curl -s -X POST http://localhost:8000/v1/audio/transcriptions \
  -H "Authorization: Bearer dev-key-12345" \
  -F "file=@/path/to/audio.wav" \
  -F "language=en"
```

### Test diarization

```bash
curl -s -X POST "http://localhost:8000/diarize?audio_file_url=<audio-url>" \
  -H "Authorization: Bearer dev-key-12345"
```

## Platform Notes

### macOS (ARM)

The Docker build fails - CUDA packages are x86_64 only. Use local Python instead:
```bash
uv sync
HF_TOKEN=xxx REFLECTOR_GPU_APIKEY=xxx .venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
```

### Linux with NVIDIA GPU

Docker works with CUDA acceleration:
```bash
docker compose up -d
```

### CPU-only

Works on any platform, just slower. PyTorch auto-detects and falls back to CPU.

## Switching Back to Modal.com

Edit `server/.env`:

```bash
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
TRANSCRIPT_MODAL_API_KEY=<modal-api-key>

DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
DIARIZATION_MODAL_API_KEY=<modal-api-key>
```

## Troubleshooting

### "Could not download pyannote pipeline"
- Accept the model licenses at HuggingFace (see Prerequisites)
- Verify HF_TOKEN is set and valid

### Service won't start
- Check that port 8000 is free: `lsof -i :8000`
- Kill orphan processes if needed

### Transcription returns empty text
- Ensure the audio contains speech (not just tones/silence)
- Check the audio format is supported (wav, mp3, etc.)

### Deprecation warnings from torchaudio/pyannote
- Safe to ignore - they don't affect functionality
@@ -56,9 +56,13 @@ Docker

 - Not yet provided in this directory. A Dockerfile will be added later. For now, use Local run above

-Conformance tests
+# Setup

-# From this directory
+[SETUP.md](SETUP.md)
+
+# Conformance tests
+
+## From this directory

 TRANSCRIPT_URL=http://localhost:8000 \
 TRANSCRIPT_API_KEY=dev-key \
@@ -129,6 +129,11 @@ class WhisperService:
         audio = np.frombuffer(proc.stdout, dtype=np.float32)
         return audio

+        # IMPORTANT: This VAD segment logic is duplicated in multiple files for deployment isolation.
+        # If you modify this function, you MUST update all copies:
+        # - gpu/modal_deployments/reflector_transcriber.py
+        # - gpu/modal_deployments/reflector_transcriber_parakeet.py
+        # - gpu/self_hosted/app/services/transcriber.py
         def vad_segments(
             audio_array,
             sample_rate: int = SAMPLE_RATE,
@@ -153,6 +158,10 @@ class WhisperService:
                 end = speech["end"]
                 yield (start / float(SAMPLE_RATE), end / float(SAMPLE_RATE))
                 start = None
+            # Handle case where audio ends while speech is still active
+            if start is not None:
+                audio_duration = len(audio_array) / float(sample_rate)
+                yield (start / float(SAMPLE_RATE), audio_duration)
             iterator.reset_states()

         audio_array = load_audio_via_ffmpeg(file_path, SAMPLE_RATE)
@@ -34,6 +34,12 @@ def ensure_dirs():
     UPLOADS_PATH.mkdir(parents=True, exist_ok=True)


+# IMPORTANT: This function is duplicated in multiple files for deployment isolation.
+# If you modify the audio format detection logic, you MUST update all copies:
+# - gpu/self_hosted/app/utils.py (this file)
+# - gpu/modal_deployments/reflector_transcriber.py (2 copies)
+# - gpu/modal_deployments/reflector_transcriber_parakeet.py
+# - gpu/modal_deployments/reflector_diarizer.py
 def detect_audio_format(url: str, headers: Mapping[str, str]) -> str:
     url_path = urlparse(url).path
     for ext in SUPPORTED_FILE_EXTENSIONS:
@@ -47,6 +53,8 @@ def detect_audio_format(url: str, headers: Mapping[str, str]) -> str:
         return "wav"
     if "audio/mp4" in content_type:
         return "mp4"
+    if "audio/webm" in content_type or "video/webm" in content_type:
+        return "webm"

     raise HTTPException(
         status_code=400,
258 scripts/setup-authentik-oauth.sh Executable file
@@ -0,0 +1,258 @@
#!/bin/bash
set -e

# Setup Authentik OAuth provider for Reflector
#
# IMPORTANT: Run this script from your Reflector repository directory (cd ~/reflector)
# The script creates files using relative paths: server/reflector/auth/jwt/keys/
#
# Usage: ./setup-authentik-oauth.sh <authentik-url> <admin-password> <frontend-url>
# Example: ./setup-authentik-oauth.sh https://authentik.example.com MyPassword123 https://app.example.com

AUTHENTIK_URL="${1:-}"
ADMIN_PASSWORD="${2:-}"
FRONTEND_URL="${3:-}"

if [ -z "$AUTHENTIK_URL" ] || [ -z "$ADMIN_PASSWORD" ] || [ -z "$FRONTEND_URL" ]; then
    echo "Usage: $0 <authentik-url> <admin-password> <frontend-url>"
    echo "Example: $0 https://authentik.example.com MyPassword123 https://app.example.com"
    exit 1
fi

# Remove trailing slash from URLs
AUTHENTIK_URL="${AUTHENTIK_URL%/}"
FRONTEND_URL="${FRONTEND_URL%/}"

echo "==========================================="
echo "Authentik OAuth Setup for Reflector"
echo "==========================================="
echo ""
echo "Authentik URL: $AUTHENTIK_URL"
echo "Frontend URL: $FRONTEND_URL"
echo ""

# Step 1: Create API token via Django shell
echo "Creating API token..."
cd ~/authentik || { echo "Error: ~/authentik directory not found"; exit 1; }

API_TOKEN=$(sudo docker compose exec -T server python -m manage shell 2>&1 << 'PYTHON' | grep "^TOKEN:" | cut -d: -f2
from authentik.core.models import User, Token, TokenIntents

user = User.objects.get(username='akadmin')
token, created = Token.objects.update_or_create(
    identifier='reflector-setup',
    defaults={
        'user': user,
        'intent': TokenIntents.INTENT_API,
        'description': 'Reflector setup token',
        'expiring': False
    }
)
print(f"TOKEN:{token.key}")
PYTHON
)

cd - > /dev/null

if [ -z "$API_TOKEN" ] || [ "$API_TOKEN" = "null" ]; then
    echo "Error: Failed to create API token"
    echo "Make sure Authentik is fully started and the akadmin user exists"
    exit 1
fi
echo "  -> Got API token"

# Step 2: Get authorization flow UUID
echo "Getting authorization flow..."
FLOW_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/flows/instances/?slug=default-provider-authorization-implicit-consent" \
    -H "Authorization: Bearer $API_TOKEN")

FLOW_UUID=$(echo "$FLOW_RESPONSE" | jq -r '.results[0].pk')
if [ -z "$FLOW_UUID" ] || [ "$FLOW_UUID" = "null" ]; then
    echo "Error: Could not find authorization flow"
    echo "Response: $FLOW_RESPONSE"
    exit 1
fi
echo "  -> Flow UUID: $FLOW_UUID"

# Step 3: Get invalidation flow UUID
echo "Getting invalidation flow..."
INVALIDATION_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/flows/instances/?slug=default-provider-invalidation-flow" \
    -H "Authorization: Bearer $API_TOKEN")

INVALIDATION_UUID=$(echo "$INVALIDATION_RESPONSE" | jq -r '.results[0].pk')
if [ -z "$INVALIDATION_UUID" ] || [ "$INVALIDATION_UUID" = "null" ]; then
    echo "Warning: Could not find invalidation flow, using authorization flow"
    INVALIDATION_UUID="$FLOW_UUID"
fi
echo "  -> Invalidation UUID: $INVALIDATION_UUID"

# Step 4: Get scope mappings (email, openid, profile)
echo "Getting scope mappings..."
SCOPE_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/propertymappings/all/" \
    -H "Authorization: Bearer $API_TOKEN")

EMAIL_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''email'\''") | .pk')
OPENID_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''openid'\''") | .pk')
PROFILE_SCOPE=$(echo "$SCOPE_RESPONSE" | jq -r '.results[] | select(.name == "authentik default OAuth Mapping: OpenID '\''profile'\''") | .pk')
echo "  -> email: $EMAIL_SCOPE"
echo "  -> openid: $OPENID_SCOPE"
echo "  -> profile: $PROFILE_SCOPE"

# Step 5: Get signing key
echo "Getting signing key..."
CERT_RESPONSE=$(curl -s "$AUTHENTIK_URL/api/v3/crypto/certificatekeypairs/" \
    -H "Authorization: Bearer $API_TOKEN")
SIGNING_KEY=$(echo "$CERT_RESPONSE" | jq -r '.results[0].pk')
echo "  -> Signing key: $SIGNING_KEY"

# Step 6: Generate client credentials
CLIENT_ID="reflector"
CLIENT_SECRET=$(openssl rand -hex 32)

# Step 7: Create OAuth2 provider
echo "Creating OAuth2 provider..."
PROVIDER_RESPONSE=$(curl -s -X POST "$AUTHENTIK_URL/api/v3/providers/oauth2/" \
    -H "Authorization: Bearer $API_TOKEN" \
    -H "Content-Type: application/json" \
    -d "{
        \"name\": \"Reflector\",
        \"authorization_flow\": \"$FLOW_UUID\",
        \"invalidation_flow\": \"$INVALIDATION_UUID\",
        \"client_type\": \"confidential\",
        \"client_id\": \"$CLIENT_ID\",
        \"client_secret\": \"$CLIENT_SECRET\",
        \"redirect_uris\": [{
            \"matching_mode\": \"strict\",
            \"url\": \"$FRONTEND_URL/api/auth/callback/authentik\"
        }],
        \"property_mappings\": [\"$EMAIL_SCOPE\", \"$OPENID_SCOPE\", \"$PROFILE_SCOPE\"],
        \"signing_key\": \"$SIGNING_KEY\",
        \"access_token_validity\": \"hours=1\",
        \"refresh_token_validity\": \"days=30\"
    }")

PROVIDER_ID=$(echo "$PROVIDER_RESPONSE" | jq -r '.pk')
if [ -z "$PROVIDER_ID" ] || [ "$PROVIDER_ID" = "null" ]; then
    # Check if provider already exists
    if echo "$PROVIDER_RESPONSE" | grep -q "already exists"; then
        echo "  -> Provider already exists, updating..."
        EXISTING=$(curl -s "$AUTHENTIK_URL/api/v3/providers/oauth2/?name=Reflector" \
            -H "Authorization: Bearer $API_TOKEN")
        PROVIDER_ID=$(echo "$EXISTING" | jq -r '.results[0].pk')
        CLIENT_ID=$(echo "$EXISTING" | jq -r '.results[0].client_id')
        # Update secret and scopes
        curl -s -X PATCH "$AUTHENTIK_URL/api/v3/providers/oauth2/$PROVIDER_ID/" \
            -H "Authorization: Bearer $API_TOKEN" \
            -H "Content-Type: application/json" \
            -d "{
                \"client_secret\": \"$CLIENT_SECRET\",
                \"property_mappings\": [\"$EMAIL_SCOPE\", \"$OPENID_SCOPE\", \"$PROFILE_SCOPE\"],
                \"signing_key\": \"$SIGNING_KEY\"
            }" > /dev/null
    else
        echo "Error: Failed to create provider"
        echo "Response: $PROVIDER_RESPONSE"
        exit 1
    fi
fi
echo "  -> Provider ID: $PROVIDER_ID"

# Step 8: Create application
echo "Creating application..."
APP_RESPONSE=$(curl -s -X POST "$AUTHENTIK_URL/api/v3/core/applications/" \
    -H "Authorization: Bearer $API_TOKEN" \
    -H "Content-Type: application/json" \
    -d "{
        \"name\": \"Reflector\",
        \"slug\": \"reflector\",
        \"provider\": $PROVIDER_ID
    }")

if echo "$APP_RESPONSE" | grep -q "already exists"; then
    echo "  -> Application already exists"
else
    APP_SLUG=$(echo "$APP_RESPONSE" | jq -r '.slug')
    if [ -z "$APP_SLUG" ] || [ "$APP_SLUG" = "null" ]; then
        echo "Error: Failed to create application"
        echo "Response: $APP_RESPONSE"
        exit 1
    fi
    echo "  -> Application created: $APP_SLUG"
fi

# Step 9: Extract public key for JWT verification
echo "Extracting public key for JWT verification..."
mkdir -p server/reflector/auth/jwt/keys
curl -s "$AUTHENTIK_URL/application/o/reflector/jwks/" | \
    jq -r '.keys[0].x5c[0]' | \
    base64 -d | \
    openssl x509 -pubkey -noout > server/reflector/auth/jwt/keys/authentik_public.pem

if [ ! -s server/reflector/auth/jwt/keys/authentik_public.pem ]; then
    echo "Error: Failed to extract public key"
    exit 1
fi
echo "  -> Saved to server/reflector/auth/jwt/keys/authentik_public.pem"

# Step 10: Update environment files automatically
echo "Updating environment files..."

# Update server/.env
cat >> server/.env << EOF

# --- Authentik OAuth (added by setup script) ---
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=$CLIENT_ID
AUTH_JWT_PUBLIC_KEY=authentik_public.pem
# --- End JWT Configuration ---
EOF
echo "  -> Updated server/.env"

# Update www/.env
cat >> www/.env << EOF

# --- Authentik OAuth (added by setup script) ---
FEATURE_REQUIRE_LOGIN=true
AUTHENTIK_ISSUER=$AUTHENTIK_URL/application/o/reflector
AUTHENTIK_REFRESH_TOKEN_URL=$AUTHENTIK_URL/application/o/token/
AUTHENTIK_CLIENT_ID=$CLIENT_ID
AUTHENTIK_CLIENT_SECRET=$CLIENT_SECRET
# --- End Authentik Configuration ---
EOF
echo "  -> Updated www/.env"

# Step 11: Restart Reflector services
echo "Restarting Reflector services..."
docker compose -f docker-compose.prod.yml up -d server worker web

echo ""
echo "==========================================="
echo "Setup complete!"
echo "==========================================="
echo ""
echo "Authentik admin: $AUTHENTIK_URL"
echo "  Username: akadmin"
echo "  Password: (provided as argument)"
echo ""
echo "Frontend: $FRONTEND_URL"
echo "  Authentication is now required"
echo ""
echo "Note: Public key saved to server/reflector/auth/jwt/keys/authentik_public.pem"
echo "      and mounted via docker-compose volume."
echo ""
echo "==========================================="
echo "Configuration values (for reference):"
echo "==========================================="
echo ""
echo "# server/.env"
echo "AUTH_BACKEND=jwt"
echo "AUTH_JWT_AUDIENCE=$CLIENT_ID"
echo "AUTH_JWT_PUBLIC_KEY=authentik_public.pem"
echo ""
echo "# www/.env"
echo "FEATURE_REQUIRE_LOGIN=true"
echo "AUTHENTIK_ISSUER=$AUTHENTIK_URL/application/o/reflector"
echo "AUTHENTIK_REFRESH_TOKEN_URL=$AUTHENTIK_URL/application/o/token/"
echo "AUTHENTIK_CLIENT_ID=$CLIENT_ID"
echo "AUTHENTIK_CLIENT_SECRET=$CLIENT_SECRET"
echo ""
139 server/.env.example Normal file
@@ -0,0 +1,139 @@
#
# This file serves as an example of possible configuration
# All the settings are described here: reflector/settings.py
#

## =======================================================
## Core Configuration (Required for Production)
## =======================================================

## Database (for docker-compose.prod.yml, use the postgres hostname)
#DATABASE_URL=postgresql+asyncpg://reflector:reflector@postgres:5432/reflector

## Redis (for docker-compose.prod.yml, use the redis hostname)
#REDIS_HOST=redis
#REDIS_PORT=6379
#CELERY_BROKER_URL=redis://redis:6379/1
#CELERY_RESULT_BACKEND=redis://redis:6379/1

## Base URL - your API domain with https
#BASE_URL=https://api.example.com

## CORS - required when frontend and API are on different domains
#CORS_ORIGIN=https://app.example.com
#CORS_ALLOW_CREDENTIALS=true

## Secret key - generate with: openssl rand -hex 32
#SECRET_KEY=changeme-generate-a-secure-random-string

## =======================================================
## User authentication
## =======================================================

## Using jwt/authentik
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=

## =======================================================
## Transcription backend
##
## Check reflector/processors/audio_transcript_* for the
## full list of available transcription backends
## =======================================================

## Using local whisper
#TRANSCRIPT_BACKEND=whisper

## Using serverless modal.com (requires reflector-gpu-modal deployed)
#TRANSCRIPT_BACKEND=modal
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
#TRANSCRIPT_MODAL_API_KEY=xxxxx

TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
TRANSCRIPT_MODAL_API_KEY=

## =======================================================
## Translation backend
##
## Only available in modal atm
## =======================================================
TRANSLATION_BACKEND=modal
TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
#TRANSLATION_MODAL_API_KEY=xxxxx

## =======================================================
## LLM backend (Required)
##
## Responsible for generating titles, summaries, and topic detection
## Requires an OpenAI API key
## =======================================================

## OpenAI API key - get from https://platform.openai.com/account/api-keys
LLM_API_KEY=sk-your-openai-api-key
LLM_MODEL=gpt-4o-mini

## Optional: Custom endpoint (defaults to OpenAI)
# LLM_URL=https://api.openai.com/v1

## Context size for summary generation (tokens)
LLM_CONTEXT_WINDOW=16000

## =======================================================
## Diarization
##
## Only available on modal
## To allow diarization, you need to expose the files to be downloaded by the pipeline
## =======================================================
DIARIZATION_ENABLED=false
DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
#DIARIZATION_MODAL_API_KEY=xxxxx


## =======================================================
## Transcript Storage
##
## Where to store audio files and transcripts
## AWS S3 is required for production
## =======================================================
TRANSCRIPT_STORAGE_BACKEND=aws
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=your-aws-access-key
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret-key
TRANSCRIPT_STORAGE_AWS_BUCKET_NAME=reflector-media
TRANSCRIPT_STORAGE_AWS_REGION=us-east-1


## =======================================================
## Sentry
## =======================================================

## Sentry DSN configuration
#SENTRY_DSN=

## =======================================================
## Video Platform Configuration
## =======================================================

## Whereby
#WHEREBY_API_KEY=your-whereby-api-key
#WHEREBY_WEBHOOK_SECRET=your-whereby-webhook-secret
#WHEREBY_STORAGE_AWS_ACCESS_KEY_ID=your-aws-key
#WHEREBY_STORAGE_AWS_SECRET_ACCESS_KEY=your-aws-secret
#AWS_PROCESS_RECORDING_QUEUE_URL=https://sqs.us-west-2.amazonaws.com/...

## Daily.co
#DAILY_API_KEY=your-daily-api-key
#DAILY_WEBHOOK_SECRET=your-daily-webhook-secret
#DAILY_SUBDOMAIN=your-subdomain
#DAILY_WEBHOOK_UUID=  # Auto-populated by recreate_daily_webhook.py script
#DAILYCO_STORAGE_AWS_ROLE_ARN=...  # IAM role ARN for Daily.co S3 access
#DAILYCO_STORAGE_AWS_BUCKET_NAME=reflector-dailyco
#DAILYCO_STORAGE_AWS_REGION=us-west-2

## Whereby (optional separate bucket)
#WHEREBY_STORAGE_AWS_BUCKET_NAME=reflector-whereby
#WHEREBY_STORAGE_AWS_REGION=us-east-1

## Platform Configuration
#DEFAULT_VIDEO_PLATFORM=whereby  # Default platform for new rooms
@@ -1,3 +1,29 @@
+## API Key Management
+
+### Finding Your User ID
+
+```bash
+# Get your OAuth sub (user ID) - requires authentication
+curl -H "Authorization: Bearer <your_jwt>" http://localhost:1250/v1/me
+# Returns: {"sub": "your-oauth-sub-here", "email": "...", ...}
+```
+
+### Creating API Keys
+
+```bash
+curl -X POST http://localhost:1250/v1/user/api-keys \
+  -H "Authorization: Bearer <your_jwt>" \
+  -H "Content-Type: application/json" \
+  -d '{"name": "My API Key"}'
+```
+
+### Using API Keys
+
+```bash
+# Use the X-API-Key header instead of Authorization
+curl -H "X-API-Key: <your_api_key>" http://localhost:1250/v1/transcripts
+```
+
 ## AWS S3/SQS usage clarification

 Whereby.com uploads recordings directly to our S3 bucket when meetings end.
@@ -1,118 +0,0 @@
# AsyncIO Event Loop Analysis for test_attendee_parsing_bug.py

## Problem Summary
The test passes but encounters an error during teardown where asyncpg tries to use a different/closed event loop, resulting in:
- `RuntimeError: Task got Future attached to a different loop`
- `RuntimeError: Event loop is closed`

## Root Cause Analysis

### 1. Multiple Event Loop Creation Points

The test environment creates event loops at different scopes:

1. **Session-scoped loop** (conftest.py:27-34):
   - Created once per test session
   - Used by session-scoped fixtures
   - Closed after all tests complete

2. **Function-scoped loop** (pytest-asyncio default):
   - Created for each async test function
   - This is the loop that runs the actual test
   - Closed immediately after the test completes

3. **AsyncPG internal loop**:
   - AsyncPG connections store a reference to the loop they were created with
   - Used for connection lifecycle management

### 2. Event Loop Lifecycle Mismatch

The issue occurs because:

1. **The session fixture creates the database connection** on the session-scoped loop
2. **The test runs** on a function-scoped loop (different from the session loop)
3. **During teardown**, the session fixture tries to rollback/close using the original session loop
4. **The AsyncPG connection** still references the function-scoped loop, which is now closed
5. **Conflict**: SQLAlchemy tries to use the session loop, but the asyncpg Future is attached to the closed function loop

### 3. Configuration Issues

Current pytest configuration:
- `asyncio_mode = "auto"` in pyproject.toml
- `asyncio_default_fixture_loop_scope=session` (shown in test output)
- `asyncio_default_test_loop_scope=function` (shown in test output)

This mismatch between the fixture loop scope (session) and the test loop scope (function) causes the problem.

## Solutions

### Option 1: Align Loop Scopes (Recommended)
Change the pytest-asyncio configuration to use consistent loop scopes:

```python
# pyproject.toml
[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"  # Change from session to function
```

### Option 2: Use a Function-Scoped Database Fixture
Change the `session` fixture scope from session to function:

```python
@pytest_asyncio.fixture  # Remove scope="session"
async def session(setup_database):
    # ... existing code ...
```

### Option 3: Explicit Loop Management
Ensure all async operations use the same loop:

```python
@pytest_asyncio.fixture
async def session(setup_database, event_loop):
    # Force using the current event loop
    engine = create_async_engine(
        settings.DATABASE_URL,
        echo=False,
        poolclass=NullPool,
        connect_args={"loop": event_loop}  # Pass explicit loop
    )
    # ... rest of fixture ...
```

### Option 4: Upgrade pytest-asyncio
The current version (1.1.0) has known issues with loop management. Consider upgrading to the latest version, which has better loop scope handling.

## Immediate Workaround

For the test to run cleanly without the teardown error, you can:

1. Add explicit cleanup in the test:
```python
@pytest.mark.asyncio
async def test_attendee_parsing_bug(session):
    # ... existing test code ...

    # Explicit cleanup before fixture teardown
    await session.commit()  # or await session.close()
```

2. Or suppress the teardown error (not recommended for production):
```python
@pytest.fixture
async def session(setup_database):
    # ... existing setup ...
    try:
        yield session
        await session.rollback()
    except RuntimeError as e:
        if "Event loop is closed" not in str(e):
            raise
    finally:
        await session.close()
```

## Recommendation

The cleanest solution is to align the loop scopes by setting both the fixture and test loop scopes to "function". This ensures each test gets its own clean event loop and avoids cross-contamination between tests.
421 server/docs/daily_pipeline.md Normal file
@@ -0,0 +1,421 @@
# Daily.co pipeline

This document details every external call, storage operation, and database write that occurs when a new Daily.co recording is discovered.
It also covers common logic shared with other pipelines, so not everything here is Daily-specific.

**This doc was generated on 12.12.2025; things may have changed since.**

## Trigger

Two entry points, both converging on the same handler:

1. **Webhook**: Daily.co sends `POST /v1/daily/webhook` with `recording.ready-to-download`
2. **Polling**: `GET /recordings` (paginated, max 100/call) → filter new → convert to the same payload format

Both produce `RecordingReadyPayload` and call `handleRecordingReady(payload)`.

```
┌─────────────────┐       ┌──────────────────────────┐
│  Daily Webhook  │──────▶│  RecordingReadyPayload   │
│  (push)         │       │  {room_name, recording_id│
└─────────────────┘       │   tracks[], ...}         │
                          └────────────┬─────────────┘
┌─────────────────┐                    │
│ GET /recordings │                    ▼
│  (poll)         │──▶ convert ──▶ handleRecordingReady()
└─────────────────┘                    │
                                       ▼
                          ┌────────────────────────┐
                          │ process_multitrack_    │
                          │ recording pipeline     │
                          └────────────────────────┘
```

**Polling API**: `GET https://api.daily.co/v1/recordings`
- Pagination: `limit` (max 100), `starting_after`, `ending_before`
- Rate limit: ~2 req/sec
- Response: `{total_count, data: Recording[]}`

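Given the ~2 req/sec rate limit and 100-item pages, a poller has to paginate and de-duplicate. A minimal sketch (Python with `requests`; the `DAILY_API_KEY` env var, the `seen_ids` store, and the pagination direction are assumptions - only the endpoint and parameter names come from the API description above):

```python
import os
import time

import requests

DAILY_API_KEY = os.environ["DAILY_API_KEY"]  # assumption: key provided via env


def fetch_new_recordings(seen_ids: set[str]) -> list[dict]:
    """Page through GET /recordings and collect recordings not seen before."""
    new, cursor = [], None
    while True:
        params = {"limit": 100}
        if cursor:
            # Assumption: starting_after pages toward older recordings
            params["starting_after"] = cursor
        resp = requests.get(
            "https://api.daily.co/v1/recordings",
            headers={"Authorization": f"Bearer {DAILY_API_KEY}"},
            params=params,
        )
        resp.raise_for_status()
        page = resp.json()["data"]
        if not page:
            return new
        new.extend(r for r in page if r["id"] not in seen_ids)
        cursor = page[-1]["id"]
        time.sleep(0.5)  # stay under the ~2 req/sec rate limit
```
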
```mermaid
flowchart TB
    subgraph Trigger["1. Recording Discovery - Daily.co Webhook"]
        DAILY_WEBHOOK["Daily.co sends POST /v1/daily/webhook<br/>type: recording.ready-to-download"]
        VERIFY["Verify X-Webhook-Signature (HMAC)"]
        PARSE["Parse DailyWebhookEvent<br/>Extract tracks[], room_name, recording_id"]
        FILTER["Filter audio tracks only<br/>track_keys = [t.s3Key for t in tracks if t.type == 'audio']"]
        DISPATCH["process_multitrack_recording.delay()"]

        DAILY_WEBHOOK --> VERIFY --> PARSE --> FILTER --> DISPATCH
    end

    subgraph Init["2. Recording Initialization"]
        FETCH_MEETING[DB READ: meetings_controller.get_by_room_name]
        FETCH_ROOM[DB READ: rooms_controller.get_by_name]
        DAILY_API_REC[Daily API: GET /recordings/recording_id]
        DAILY_API_PART[Daily API: GET /meetings/mtgSessionId/participants]
        CREATE_RECORDING[DB WRITE: recordings_controller.create]
        CREATE_TRANSCRIPT[DB WRITE: transcripts_controller.add]
        MAP_PARTICIPANTS[DB WRITE: transcript.participants upsert]
    end

    subgraph Pipeline["3. Processing Pipeline"]
        direction TB
        PAD[Track Padding & Mixdown]
        TRANSCRIBE[GPU: Transcription per track]
        TOPICS[LLM: Topic Detection]
        TITLE[LLM: Title Generation]
        SUMMARY[LLM: Summary Generation]
    end

    subgraph Storage["4. S3 Operations"]
        S3_PRESIGN[S3: generate_presigned_url for tracks]
        S3_UPLOAD_PADDED[S3 UPLOAD: padded tracks temp]
        S3_UPLOAD_MP3[S3 UPLOAD: audio.mp3]
        S3_DELETE_TEMP[S3 DELETE: cleanup temp files]
    end

    subgraph PostProcess["5. Post-Processing"]
        CONSENT[Consent check & cleanup]
        ZULIP[Zulip: send/update message]
        WEBHOOK_OUT[Webhook: POST to room.webhook_url]
    end

    Trigger --> Init --> Pipeline
    Pipeline --> Storage
    Pipeline --> PostProcess
```
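
The `verify_webhook_signature()` step above is a standard HMAC check over the signed headers. A minimal sketch of how such a check typically works (Python; the header names match the sequence diagram below, but the secret env var and the exact signed-payload layout are assumptions - check Daily.co's docs for the precise scheme):

```python
import hashlib
import hmac
import os

WEBHOOK_SECRET = os.environ["DAILY_WEBHOOK_SECRET"]  # assumption: secret via env


def verify_webhook_signature(timestamp: str, raw_body: bytes, signature: str) -> bool:
    """Recompute the HMAC over timestamp + body and compare in constant time."""
    # Assumption: the provider signs "{timestamp}.{body}" with SHA-256
    signed_payload = f"{timestamp}.".encode() + raw_body
    expected = hmac.new(
        WEBHOOK_SECRET.encode(), signed_payload, hashlib.sha256
    ).hexdigest()
    return hmac.compare_digest(expected, signature)
```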
|
||||
|
||||
## Detailed Sequence: Daily.co Multitrack Recording
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant DailyCo as Daily.co
|
||||
participant API as FastAPI /v1/daily/webhook
|
||||
participant Worker as Celery Worker
|
||||
participant DB as PostgreSQL
|
||||
participant DailyAPI as Daily.co REST API
|
||||
participant S3 as AWS S3
|
||||
participant GPU as Modal.com GPU
|
||||
participant LLM as LLM Service
|
||||
participant WS as WebSocket
|
||||
participant Zulip as Zulip
|
||||
participant ExtWH as External Webhook
|
||||
|
||||
Note over DailyCo,API: Phase 0: Webhook Receipt
|
||||
DailyCo->>API: POST /v1/daily/webhook
|
||||
Note right of DailyCo: X-Webhook-Signature, X-Webhook-Timestamp
|
||||
API->>API: verify_webhook_signature()
|
||||
API->>API: Extract audio track s3Keys from payload.tracks[]
|
||||
API->>Worker: process_multitrack_recording.delay()
|
||||
API-->>DailyCo: 200 OK
|
||||
|
||||
Note over Worker,DailyAPI: Phase 1: Recording Initialization
|
||||
Worker->>DB: SELECT meeting WHERE room_name=?
|
||||
Worker->>DB: SELECT room WHERE name=?
|
||||
Worker->>DailyAPI: GET /recordings/{recording_id}
|
||||
DailyAPI-->>Worker: {mtgSessionId, ...}
|
||||
Worker->>DailyAPI: GET /meetings/{mtgSessionId}/participants
|
||||
DailyAPI-->>Worker: [{participant_id, user_name}, ...]
|
||||
Worker->>DB: INSERT INTO recording
|
||||
Worker->>DB: INSERT INTO transcript (status='idle')
|
||||
loop For each track_key (parse participant_id from filename)
|
||||
Worker->>DB: UPSERT transcript.participants[speaker=idx, name=X]
|
||||
end
|
||||
|
||||
Note over Worker,S3: Phase 2: Track Padding
|
||||
Worker->>DB: UPDATE transcript SET status='processing'
|
||||
Worker->>WS: broadcast STATUS='processing'
|
||||
loop For each track in track_keys (N tracks)
|
||||
Worker->>S3: generate_presigned_url(track_key, DAILYCO_BUCKET)
|
||||
S3-->>Worker: presigned_url (2hr)
|
||||
Note over Worker: PyAV: read WebM, extract start_time
|
||||
Note over Worker: PyAV: adelay filter (pad silence)
|
||||
Worker->>S3: PUT file_pipeline/{id}/tracks/padded_{idx}.webm
|
||||
Worker->>S3: generate_presigned_url(padded_{idx}.webm)
|
||||
end
|
||||
|
||||
Note over Worker,S3: Phase 3: Audio Mixdown
|
||||
Note over Worker: PyAV: amix filter → stereo MP3
|
||||
Worker->>DB: UPDATE transcript SET duration=X
|
||||
Worker->>WS: broadcast DURATION
|
||||
Worker->>S3: PUT {transcript_id}/audio.mp3
|
||||
Worker->>DB: UPDATE transcript SET audio_location='storage'
|
||||
|
||||
Note over Worker: Phase 4: Waveform
|
||||
Note over Worker: Generate peaks from MP3
|
||||
Worker->>DB: UPDATE events+=WAVEFORM
|
||||
Worker->>WS: broadcast WAVEFORM
|
||||
|
||||
Note over Worker,GPU: Phase 5: Transcription (N GPU calls)
|
||||
loop For each padded track URL (N tracks)
|
||||
Worker->>GPU: POST /v1/audio/transcriptions-from-url
|
||||
Note right of GPU: {audio_file_url, language, batch:true}
|
||||
GPU-->>Worker: {words: [{word, start, end}, ...]}
|
||||
Note over Worker: Assign speaker=track_idx to words
|
||||
end
|
||||
Note over Worker: Merge all words, sort by start time
|
||||
Worker->>DB: UPDATE events+=TRANSCRIPT
|
||||
Worker->>WS: broadcast TRANSCRIPT
|
||||
|
||||
Note over Worker,S3: Cleanup temp files
|
||||
loop For each padded file
|
||||
Worker->>S3: DELETE padded_{idx}.webm
|
||||
end
|
||||
|
||||
Note over Worker,LLM: Phase 6: Topic Detection (C LLM calls)
|
||||
Note over Worker: C = ceil(total_words / 300)
|
||||
loop For each 300-word chunk (C chunks)
|
||||
Worker->>LLM: TOPIC_PROMPT + words[i:i+300]
|
||||
Note right of LLM: "Extract main topic title + 2-sentence summary"
|
||||
LLM-->>Worker: TitleSummary{title, summary}
|
||||
Worker->>DB: UPSERT topics[]
|
||||
Worker->>DB: UPDATE events+=TOPIC
|
||||
Worker->>WS: broadcast TOPIC
|
||||
end
|
||||
|
||||
Note over Worker,LLM: Phase 7a: Title Generation (1 LLM call)
|
||||
Note over Worker: Input: all TitleSummary[].title joined
|
||||
Worker->>LLM: TITLE_PROMPT
|
||||
Note right of LLM: "Generate concise title from topic titles"
|
||||
LLM-->>Worker: "Meeting Title"
|
||||
Worker->>DB: UPDATE transcript SET title=X
|
||||
Worker->>DB: UPDATE events+=FINAL_TITLE
|
||||
Worker->>WS: broadcast FINAL_TITLE
|
||||
|
||||
Note over Worker,LLM: Phase 7b: Summary Generation (2+2M LLM calls)
|
||||
Note over Worker: Reconstruct full transcript from TitleSummary[].transcript
|
||||
opt If participants unknown
|
||||
Worker->>LLM: PARTICIPANTS_PROMPT
|
||||
LLM-->>Worker: ParticipantsResponse
|
||||
end
|
||||
Worker->>LLM: SUBJECTS_PROMPT (call #1)
|
||||
Note right of LLM: "Main high-level topics? Max 6"
|
||||
LLM-->>Worker: SubjectsResponse{subjects: ["A", "B", ...]}
|
||||
|
||||
loop For each subject (M subjects, max 6)
|
||||
Worker->>LLM: DETAILED_SUBJECT_PROMPT (call #2..#1+M)
|
||||
Note right of LLM: "Info about 'A': decisions, actions, deadlines"
|
||||
LLM-->>Worker: detailed_response (discarded after next call)
|
||||
Worker->>LLM: PARAGRAPH_SUMMARY_PROMPT (call #2+M..#1+2M)
|
||||
Note right of LLM: "Summarize in 1 paragraph"
|
||||
LLM-->>Worker: paragraph → summaries[]
|
||||
end
|
||||
|
||||
Worker->>LLM: RECAP_PROMPT (call #2+2M)
|
||||
Note right of LLM: "High-level quick recap, 1 paragraph"
|
||||
LLM-->>Worker: recap
|
||||
Note over Worker: long_summary = "# Quick recap\n{recap}\n# Summary\n**A**\n{para1}..."
|
||||
Note over Worker: short_summary = recap only
|
||||
Worker->>DB: UPDATE long_summary, short_summary
|
||||
Worker->>DB: UPDATE events+=FINAL_LONG_SUMMARY
|
||||
Worker->>WS: broadcast FINAL_LONG_SUMMARY
|
||||
Worker->>DB: UPDATE events+=FINAL_SHORT_SUMMARY
|
||||
Worker->>WS: broadcast FINAL_SHORT_SUMMARY
|
||||
|
||||
Note over Worker,DB: Phase 8: Finalize
|
||||
Worker->>DB: UPDATE transcript SET status='ended'
|
||||
Worker->>DB: UPDATE events+=STATUS
|
||||
Worker->>WS: broadcast STATUS='ended'
|
||||
|
||||
Note over Worker,ExtWH: Phase 9: Post-Processing Chain
|
||||
Worker->>DB: SELECT meeting_consent WHERE meeting_id=?
|
||||
alt Any consent denied
|
||||
Worker->>S3: DELETE tracks from DAILYCO_BUCKET
|
||||
Worker->>S3: DELETE audio.mp3 from TRANSCRIPT_BUCKET
|
||||
Worker->>DB: UPDATE transcript SET audio_deleted=true
|
||||
end
|
||||
|
||||
opt Room has zulip_auto_post=true
|
||||
alt Existing zulip_message_id
|
||||
Worker->>Zulip: PATCH /api/v1/messages/{id}
|
||||
else New
|
||||
Worker->>Zulip: POST /api/v1/messages
|
||||
Zulip-->>Worker: {id}
|
||||
Worker->>DB: UPDATE transcript SET zulip_message_id=X
|
||||
end
|
||||
end
|
||||
|
||||
opt Room has webhook_url
|
||||
Worker->>ExtWH: POST {webhook_url}
|
||||
Note right of ExtWH: X-Webhook-Signature: HMAC-SHA256
|
||||
Note right of ExtWH: Body: {transcript_id, room_id, ...}
|
||||
end
|
||||
```
|
||||

## Title & Summary Generation Data Flow

```mermaid
flowchart TB
    subgraph Input["Input: TitleSummary[] from Topic Detection"]
        TS1["TitleSummary 1<br/>title: 'Q1 Budget'<br/>transcript: words[0:300]"]
        TS2["TitleSummary 2<br/>title: 'Product Launch'<br/>transcript: words[300:600]"]
        TS3["TitleSummary N..."]
    end

    subgraph TitleGen["Title Generation"]
        T1["Extract .title from each TitleSummary"]
        T2["Concatenate: '- Q1 Budget\n- Product Launch\n...'"]
        T3["LLM: TITLE_PROMPT\n'Generate concise title from topic titles'"]
        T4["Output: FinalTitle"]

        T1 --> T2 --> T3 --> T4
    end

    subgraph SummaryGen["Summary Generation"]
        direction TB

        subgraph Reconstruct["1. Reconstruct Full Transcript"]
            S1["For each TitleSummary.transcript.as_segments()"]
            S2["Map speaker ID → name"]
            S3["Build: 'Alice: hello\nBob: hi\n...'"]
            S1 --> S2 --> S3
        end

        subgraph Subjects["2. Extract Subjects - LLM call #1"]
            S4["LLM: SUBJECTS_PROMPT\n'Main high-level topics? Max 6'"]
            S5["subjects[] = ['Budget Review', ...]"]
            S4 --> S5
        end

        subgraph DetailedSum["3. Per-Subject Summary - LLM calls #2 to #(1+2M)"]
            S6["For each subject:"]
            S7["LLM: DETAILED_SUBJECT_PROMPT\n'Info about subject: decisions, actions...'"]
            S8["detailed_response - NOT STORED"]
            S9["LLM: PARAGRAPH_SUMMARY_PROMPT\n'Summarize in 1 paragraph'"]
            S10["paragraph → summaries[]"]

            S6 --> S7 --> S8 --> S9 --> S10
        end

        subgraph Recap["4. Generate Recap - LLM call #(2+2M)"]
            S11["Concatenate paragraph summaries"]
            S12["LLM: RECAP_PROMPT\n'High-level recap, 1 paragraph'"]
            S13["recap"]
            S11 --> S12 --> S13
        end

        subgraph Output["5. Output"]
            S14["long_summary = markdown:\n# Quick recap\n[recap]\n# Summary\n**Subject 1**\n[para1]..."]
            S15["short_summary = recap only"]
            S14 --> S15
        end

        Reconstruct --> Subjects --> DetailedSum --> Recap --> Output
    end

    Input --> TitleGen
    Input --> SummaryGen
```
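
The output step is plain string assembly. A minimal sketch of step 5, assuming `subjects` and `summaries` are the parallel lists produced in steps 2-3 (names and separators are illustrative, not the pipeline's actual identifiers):

```python
def build_summaries(recap: str, subjects: list[str], summaries: list[str]) -> tuple[str, str]:
    """Assemble the markdown long summary and the recap-only short summary."""
    sections = "\n".join(
        f"**{subject}**\n{paragraph}" for subject, paragraph in zip(subjects, summaries)
    )
    long_summary = f"# Quick recap\n{recap}\n# Summary\n{sections}"
    short_summary = recap  # the short summary is just the recap paragraph
    return long_summary, short_summary
```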

### topics[] vs subjects[]

|                    | topics[]                 | subjects[]                    |
|--------------------|--------------------------|-------------------------------|
| **Source**         | 300-word chunk splitting | LLM extraction from full text |
| **Count**          | Variable (words / 300)   | Max 6                         |
| **Purpose**        | Timeline segmentation    | Summary structure             |
| **Has timestamp?** | Yes                      | No                            |

## External API Calls Summary

### 1. Daily.co REST API (called during initialization)

| Endpoint | Method | When | Purpose |
|----------|--------|------|---------|
| `GET /recordings/{recording_id}` | GET | After webhook | Get mtgSessionId for participant lookup |
| `GET /meetings/{mtgSessionId}/participants` | GET | After above | Map participant_id → user_name |
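
A hedged sketch of the two lookups, assuming Daily.co's standard REST base URL (`https://api.daily.co/v1`) and bearer authentication; the endpoint paths come from the table above, while the response field access is an assumption:

```python
import httpx

DAILY_API = "https://api.daily.co/v1"  # standard Daily.co REST base (assumption)

async def fetch_participants(api_key: str, recording_id: str) -> list[dict]:
    headers = {"Authorization": f"Bearer {api_key}"}
    async with httpx.AsyncClient(base_url=DAILY_API, headers=headers) as client:
        # 1. recording -> meeting session id
        recording = (await client.get(f"/recordings/{recording_id}")).json()
        mtg_session_id = recording["mtgSessionId"]
        # 2. session id -> participants (participant_id -> user_name)
        participants = (await client.get(f"/meetings/{mtg_session_id}/participants")).json()
        return participants["data"]  # response envelope shape is an assumption
```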

### 2. GPU Service (Modal.com or Self-Hosted)

| Endpoint | Method | Count | Request |
|----------|--------|-------|---------|
| `{TRANSCRIPT_URL}/v1/audio/transcriptions-from-url` | POST | **N** (N = num tracks) | `{audio_file_url, language, batch: true}` |

**Note**: Diarization is NOT called for multitrack - speaker identification comes from separate tracks.
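
One request is issued per padded track. A hedged sketch of the call, using the endpoint and payload shape from the table (the auth header and response shape are assumptions):

```python
import httpx

async def transcribe_track(transcript_url: str, api_key: str, audio_file_url: str, language: str) -> dict:
    """POST one presigned track URL to the GPU service; called once per track (N total)."""
    async with httpx.AsyncClient(timeout=600) as client:
        resp = await client.post(
            f"{transcript_url}/v1/audio/transcriptions-from-url",
            headers={"Authorization": f"Bearer {api_key}"},  # auth scheme is an assumption
            json={"audio_file_url": audio_file_url, "language": language, "batch": True},
        )
        resp.raise_for_status()
        return resp.json()  # word-level timestamps are consumed downstream
```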

### 3. LLM Service (OpenAI-compatible via LlamaIndex)

| Phase | Operation | Input | LLM Calls | Output |
|-------|-----------|-------|-----------|--------|
| Topic Detection | TOPIC_PROMPT per 300-word chunk | words[i:i+300] | **C** = ceil(words/300) | TitleSummary{title, summary, timestamp} |
| Title Generation | TITLE_PROMPT | All topic titles joined | **1** | FinalTitle |
| Participant ID | PARTICIPANTS_PROMPT | Full transcript | **0-1** (skipped if known) | ParticipantsResponse |
| Subject Extraction | SUBJECTS_PROMPT | Full transcript | **1** | SubjectsResponse{subjects[]} |
| Subject Detail | DETAILED_SUBJECT_PROMPT | Full transcript + subject name | **M** (M = subjects, max 6) | detailed text (discarded) |
| Subject Paragraph | PARAGRAPH_SUMMARY_PROMPT | Detailed text | **M** | paragraph text → summaries[] |
| Recap | RECAP_PROMPT | All paragraph summaries | **1** | recap text |

**Total LLM calls**: C + 2M + 3 (+ 1 if participants unknown)

- Short meeting (1000 words, 3 subjects): ~4 + 6 + 3 = **13 calls**
- Long meeting (5000 words, 6 subjects): ~17 + 12 + 3 = **32 calls**
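
The same arithmetic as a tiny helper, useful for budgeting LLM spend per meeting (a sketch; names are illustrative):

```python
import math

def estimate_llm_calls(word_count: int, subject_count: int, participants_known: bool = True) -> int:
    """C + 2M + 3 (+1 if participants unknown), with C = ceil(words/300) and M capped at 6."""
    c = math.ceil(word_count / 300)  # topic detection chunks
    m = min(subject_count, 6)        # subjects are capped at 6
    return c + 2 * m + 3 + (0 if participants_known else 1)

assert estimate_llm_calls(1000, 3) == 13  # short meeting example
assert estimate_llm_calls(5000, 6) == 32  # long meeting example
```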

## S3 Operations Summary

### Source Bucket: `DAILYCO_STORAGE_AWS_BUCKET_NAME`

Daily.co uploads raw-tracks recordings here.

| Operation | Key Pattern | When |
|-----------|-------------|------|
| **READ** (presign) | `{domain}/{room_name}/{ts}/{participant_id}-cam-audio-{ts}.webm` | Track acquisition |
| **DELETE** | Same as above | Consent denied cleanup |

### Transcript Storage Bucket: `TRANSCRIPT_STORAGE_AWS_BUCKET_NAME`

Reflector's own storage.

| Operation | Key Pattern | When |
|-----------|-------------|------|
| **PUT** | `file_pipeline/{transcript_id}/tracks/padded_{idx}.webm` | After track padding |
| **READ** (presign) | Same | For GPU transcription |
| **DELETE** | Same | After transcription complete |
| **PUT** | `{transcript_id}/audio.mp3` | After mixdown |
| **DELETE** | Same | Consent denied cleanup |
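
The presigned READs are what let the GPU service fetch tracks without holding bucket credentials. A minimal boto3 sketch (the expiry value is illustrative):

```python
import boto3

s3 = boto3.client("s3")

def presign_track(bucket: str, key: str, expires_in: int = 3600) -> str:
    """Return a time-limited GET URL for a padded track, handed to the GPU service."""
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": key},
        ExpiresIn=expires_in,
    )

# e.g. presign_track(TRANSCRIPT_BUCKET, f"file_pipeline/{transcript_id}/tracks/padded_0.webm")
```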

## Database Operations

### Tables Written

| Table | Operation | When |
|-------|-----------|------|
| `recording` | INSERT | Initialization |
| `transcript` | INSERT | Initialization |
| `transcript` | UPDATE (participants) | After Daily API participant fetch |
| `transcript` | UPDATE (status, events, duration, topics, title, summaries, etc.) | Throughout pipeline |

### Transcript Update Sequence

```
1. INSERT: id, name, status='idle', source_kind='room', user_id, recording_id, room_id, meeting_id
2. UPDATE: participants[] (speaker index → participant name mapping)
3. UPDATE: status='processing', events+=[{event:'STATUS', data:{value:'processing'}}]
4. UPDATE: duration=X, events+=[{event:'DURATION', data:{duration:X}}]
5. UPDATE: audio_location='storage'
6. UPDATE: events+=[{event:'WAVEFORM', data:{waveform:[...]}}]
7. UPDATE: events+=[{event:'TRANSCRIPT', data:{text, translation}}]
8. UPDATE: topics[]+=topic, events+=[{event:'TOPIC'}] -- repeated per chunk
9. UPDATE: title=X, events+=[{event:'FINAL_TITLE'}]
10. UPDATE: long_summary=X, events+=[{event:'FINAL_LONG_SUMMARY'}]
11. UPDATE: short_summary=X, events+=[{event:'FINAL_SHORT_SUMMARY'}]
12. UPDATE: status='ended', events+=[{event:'STATUS', data:{value:'ended'}}]
13. UPDATE: zulip_message_id=X -- if Zulip enabled
14. UPDATE: audio_deleted=true -- if consent denied
```
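
Each `events+=` step appends an envelope with the same `{event, data}` shape the WebSocket layer broadcasts. A sketch of the append pattern (the helper and the timestamp field are illustrative, not the actual controller API):

```python
from datetime import datetime, timezone

def make_event(event: str, data: dict) -> dict:
    """Envelope appended to transcript.events and mirrored over WebSocket."""
    return {
        "event": event,
        "data": data,
        "timestamp": datetime.now(timezone.utc).isoformat(),  # timestamp field is an assumption
    }

# step 3 of the sequence above:
# transcript.events.append(make_event("STATUS", {"value": "processing"}))
```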
|
||||
|
||||
## WebSocket Events
|
||||
|
||||
All broadcast to room `ts:{transcript_id}`:
|
||||
|
||||
| Event | Payload | Trigger |
|
||||
|-------|---------|---------|
|
||||
| STATUS | `{value: "processing"\|"ended"\|"error"}` | Status transitions |
|
||||
| DURATION | `{duration: float}` | After audio processing |
|
||||
| WAVEFORM | `{waveform: float[]}` | After waveform generation |
|
||||
| TRANSCRIPT | `{text: string, translation: string\|null}` | After transcription merge |
|
||||
| TOPIC | `{id, title, summary, timestamp, duration, transcript, words}` | Per topic detected |
|
||||
| FINAL_TITLE | `{title: string}` | After LLM title generation |
|
||||
| FINAL_LONG_SUMMARY | `{long_summary: string}` | After LLM summary |
|
||||
| FINAL_SHORT_SUMMARY | `{short_summary: string}` | After LLM recap |
|
||||
|
||||
User-room broadcasts to `user:{user_id}`:
|
||||
- `TRANSCRIPT_STATUS`
|
||||
- `TRANSCRIPT_FINAL_TITLE`
|
||||
- `TRANSCRIPT_DURATION`
|
||||
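
On the consuming side, a client typically keys off the event name. A hedged sketch of a dispatcher over these payloads (transport setup and message framing are assumptions; only the event names and payload fields come from the table):

```python
import json

def handle_transcript_event(raw_message: str) -> None:
    """Dispatch one broadcast received on the ts:{transcript_id} room."""
    envelope = json.loads(raw_message)
    event, data = envelope["event"], envelope["data"]
    if event == "STATUS":
        print("status ->", data["value"])  # processing | ended | error
    elif event == "TOPIC":
        print("topic:", data["title"], "@", data["timestamp"])
    elif event == "FINAL_LONG_SUMMARY":
        print(data["long_summary"])
```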
236
server/docs/video-platforms/README.md
Normal file
@@ -0,0 +1,236 @@

# Reflector Architecture: Whereby + Daily.co Recording Storage

## System Overview

```mermaid
graph TB
    subgraph "Actors"
        APP[Our App<br/>Reflector]
        WHEREBY[Whereby Service<br/>External]
        DAILY[Daily.co Service<br/>External]
    end

    subgraph "AWS S3 Buckets"
        TRANSCRIPT_BUCKET[Transcript Bucket<br/>reflector-transcripts<br/>Output: Processed MP3s]
        WHEREBY_BUCKET[Whereby Bucket<br/>reflector-whereby-recordings<br/>Input: Raw MP4s]
        DAILY_BUCKET[Daily.co Bucket<br/>reflector-dailyco-recordings<br/>Input: Raw WebM tracks]
    end

    subgraph "AWS Infrastructure"
        SQS[SQS Queue<br/>Whereby notifications]
    end

    subgraph "Database"
        DB[(PostgreSQL<br/>Recordings, Transcripts, Meetings)]
    end

    APP -->|Write processed| TRANSCRIPT_BUCKET
    APP -->|Read/Delete| WHEREBY_BUCKET
    APP -->|Read/Delete| DAILY_BUCKET
    APP -->|Poll| SQS
    APP -->|Store metadata| DB

    WHEREBY -->|Write recordings| WHEREBY_BUCKET
    WHEREBY_BUCKET -->|S3 Event| SQS
    WHEREBY -->|Participant webhooks<br/>room.client.joined/left| APP

    DAILY -->|Write recordings| DAILY_BUCKET
    DAILY -->|Recording webhook<br/>recording.ready-to-download| APP
```

**Note on Webhook vs S3 Event for Recording Processing:**
- **Whereby**: Uses S3 Events → SQS for recording availability (S3 as source of truth, no race conditions)
- **Daily.co**: Uses webhooks for recording availability (more immediate, built-in reliability)
- **Both**: Use webhooks for participant tracking (real-time updates)

## Credentials & Permissions

```mermaid
graph LR
    subgraph "Master Credentials"
        MASTER[TRANSCRIPT_STORAGE_AWS_*<br/>Access Key ID + Secret]
    end

    subgraph "Whereby Upload Credentials"
        WHEREBY_CREDS[AWS_WHEREBY_ACCESS_KEY_*<br/>Access Key ID + Secret]
    end

    subgraph "Daily.co Upload Role"
        DAILY_ROLE[DAILY_STORAGE_AWS_ROLE_ARN<br/>IAM Role ARN]
    end

    subgraph "Our App Uses"
        MASTER -->|Read/Write/Delete| TRANSCRIPT_BUCKET[Transcript Bucket]
        MASTER -->|Read/Delete| WHEREBY_BUCKET[Whereby Bucket]
        MASTER -->|Read/Delete| DAILY_BUCKET[Daily.co Bucket]
        MASTER -->|Poll/Delete| SQS[SQS Queue]
    end

    subgraph "We Give To Services"
        WHEREBY_CREDS -->|Passed in API call| WHEREBY_SERVICE[Whereby Service]
        WHEREBY_SERVICE -->|Write Only| WHEREBY_BUCKET

        DAILY_ROLE -->|Passed in API call| DAILY_SERVICE[Daily.co Service]
        DAILY_SERVICE -->|Assume Role| DAILY_ROLE
        DAILY_SERVICE -->|Write Only| DAILY_BUCKET
    end
```

# Video Platform Recording Integration

This document explains how Reflector receives and identifies multitrack audio recordings from different video platforms.

## Platform Comparison

| Platform | Delivery Method | Track Identification |
|----------|----------------|---------------------|
| **Daily.co** | Webhook | Explicit track list in payload |
| **Whereby** | SQS (S3 notifications) | Single file per notification |

---

## Daily.co

**Note:** Primary discovery via polling (`poll_daily_recordings`), webhooks as backup.

Daily.co uses **webhooks** to notify Reflector when recordings are ready.

### How It Works

1. **Daily.co sends webhook** when recording is ready
   - Event type: `recording.ready-to-download`
   - Endpoint: `/v1/daily/webhook` (`reflector/views/daily.py:46-102`)

2. **Webhook payload explicitly includes track list**:
   ```json
   {
     "recording_id": "7443ee0a-dab1-40eb-b316-33d6c0d5ff88",
     "room_name": "daily-20251020193458",
     "tracks": [
       {
         "type": "audio",
         "s3Key": "monadical/daily-20251020193458/1760988935484-52f7f48b-fbab-431f-9a50-87b9abfc8255-cam-audio-1760988935922",
         "size": 831843
       },
       {
         "type": "audio",
         "s3Key": "monadical/daily-20251020193458/1760988935484-a37c35e3-6f8e-4274-a482-e9d0f102a732-cam-audio-1760988943823",
         "size": 408438
       },
       {
         "type": "video",
         "s3Key": "monadical/daily-20251020193458/...-video.webm",
         "size": 30000000
       }
     ]
   }
   ```

3. **System extracts audio tracks** (`daily.py:211`):
   ```python
   track_keys = [t.s3Key for t in tracks if t.type == "audio"]
   ```

4. **Triggers multitrack processing** (`daily.py:213-218`):
   ```python
   process_multitrack_recording.delay(
       bucket_name=bucket_name,      # reflector-dailyco-local
       room_name=room_name,          # daily-20251020193458
       recording_id=recording_id,    # 7443ee0a-dab1-40eb-b316-33d6c0d5ff88
       track_keys=track_keys,        # only audio s3Keys
   )
   ```

### Key Advantage: No Ambiguity

Even though multiple meetings may share the same S3 bucket/folder (`monadical/`), **there's no ambiguity** because:
- Each webhook payload contains the exact `s3Key` list for that specific `recording_id`
- No need to scan folders or guess which files belong together
- Each track's s3Key includes the room timestamp subfolder (e.g., `daily-20251020193458/`)

The room name includes a timestamp (`daily-20251020193458`) to keep recordings organized, but **the webhook's explicit track list is what prevents mixing files from different meetings**.

### Track Timeline Extraction

Daily.co provides timing information in two places:

**1. PyAV WebM Metadata (current approach)**:
```python
# Read from WebM container stream metadata
stream.start_time = 8.130s  # Meeting-relative timing
```

**2. Filename Timestamps (alternative approach, commit 3bae9076)**:
```
Filename format: {recording_start_ts}-{uuid}-cam-audio-{track_start_ts}.webm
Example: 1760988935484-52f7f48b-fbab-431f-9a50-87b9abfc8255-cam-audio-1760988935922.webm

Parse timestamps:
- recording_start_ts: 1760988935484 (Unix ms)
- track_start_ts: 1760988935922 (Unix ms)
- offset: (1760988935922 - 1760988935484) / 1000 = 0.438s
```
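
That arithmetic is easy to mechanize. A sketch of the filename-based approach (the regex and helper name are illustrative):

```python
import re

TRACK_RE = re.compile(r"^(?P<rec_ts>\d+)-(?P<uuid>[0-9a-f-]+)-cam-audio-(?P<track_ts>\d+)\.webm$")

def filename_offset_seconds(filename: str) -> float:
    """Offset of a track within the recording, from the two Unix-ms timestamps."""
    m = TRACK_RE.match(filename)
    if not m:
        raise ValueError(f"unexpected track filename: {filename}")
    return (int(m.group("track_ts")) - int(m.group("rec_ts"))) / 1000.0

# 1760988935484-52f7f48b-...-cam-audio-1760988935922.webm -> 0.438
```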

**Time Difference (PyAV vs Filename)**:
```
Track 0:
  Filename offset: 438ms
  PyAV metadata:   229ms
  Difference:      209ms

Track 1:
  Filename offset: 8339ms
  PyAV metadata:   8130ms
  Difference:      209ms
```

**Consistent 209ms delta** suggests network/encoding delay between file upload initiation (filename) and actual audio stream start (metadata).

**Current implementation uses PyAV metadata** because:
- More accurate (represents when audio actually started)
- Padding BEFORE transcription produces correct Whisper timestamps automatically
- No manual offset adjustment needed during transcript merge

### Why Re-encoding During Padding

Padding coincidentally involves re-encoding, which is important for Daily.co + Whisper:

**Problem:** Daily.co skips frames in recordings when the microphone is muted or paused
- WebM containers have gaps where audio frames should be
- Whisper doesn't understand these gaps and produces incorrect timestamps
- Example: 5s of audio with 2s muted → file has frames only for 3s, Whisper thinks duration is 3s

**Solution:** Re-encoding via PyAV filter graph (`adelay` + `aresample`)
- Restores missing frames as silence
- Produces continuous audio stream without gaps
- Whisper now sees correct duration and produces accurate timestamps

**Why combined with padding:**
- Already re-encoding for padding (adding initial silence)
- More performant to do both operations in a single PyAV pipeline
- Padded values needed for mixdown anyway (creating final MP3)

Implementation: `main_multitrack_pipeline.py:_apply_audio_padding_streaming()`
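
A hedged sketch of that filter-graph pass, assuming PyAV's `Graph` API: the `adelay` value would come from the track's start-time offset, and `aresample=async=1` fills timestamp gaps with silence. Filter arguments, codec choice, and I/O handling are illustrative, not the pipeline's actual code:

```python
import av

def pad_and_reencode(src_path: str, dst_path: str, delay_ms: int) -> None:
    """Prepend delay_ms of silence and fill mid-stream gaps, producing a continuous track."""
    with av.open(src_path) as src, av.open(dst_path, "w") as dst:
        in_stream = src.streams.audio[0]
        out_stream = dst.add_stream("libopus", rate=48000)

        graph = av.filter.Graph()
        abuffer = graph.add_abuffer(template=in_stream)
        adelay = graph.add("adelay", f"delays={delay_ms}:all=1")  # initial silence padding
        aresample = graph.add("aresample", "async=1")             # fill gaps from skipped frames
        sink = graph.add("abuffersink")
        abuffer.link_to(adelay)
        adelay.link_to(aresample)
        aresample.link_to(sink)
        graph.configure()

        for frame in src.decode(in_stream):
            graph.push(frame)
            while True:
                try:
                    out_frame = graph.pull()
                except (av.error.BlockingIOError, av.error.EOFError):
                    break
                dst.mux(out_stream.encode(out_frame))
        dst.mux(out_stream.encode(None))  # flush the encoder
```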

---

## Whereby (SQS-based)

Whereby uses **AWS SQS** (via S3 notifications) to notify Reflector when files are uploaded.

### How It Works

1. **Whereby uploads recording** to S3
2. **S3 sends notification** to SQS queue (one notification per file)
3. **Reflector polls SQS queue** (`worker/process.py:process_messages()`) - see the sketch below
4. **System processes single file** (`worker/process.py:process_recording()`)
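
A minimal sketch of step 3's poll loop with boto3 (queue URL, wait time, and hand-off are illustrative; only the one-notification-per-file shape comes from this doc):

```python
import json
import boto3

sqs = boto3.client("sqs")

def poll_whereby_notifications(queue_url: str) -> None:
    """Drain pending S3 'object created' notifications, one file per message."""
    resp = sqs.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=10, WaitTimeSeconds=20)
    for message in resp.get("Messages", []):
        body = json.loads(message["Body"])
        for record in body.get("Records", []):
            bucket = record["s3"]["bucket"]["name"]
            key = record["s3"]["object"]["key"]
            print("new recording file:", bucket, key)  # hand off to process_recording()
        sqs.delete_message(QueueUrl=queue_url, ReceiptHandle=message["ReceiptHandle"])
```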

### Key Difference from Daily.co

**Whereby (SQS):** System receives S3 notification "file X was created" - only knows about one file at a time, and would need to scan the folder to find related files

**Daily.co (Webhook):** Daily explicitly tells the system which files belong together in the webhook payload

---

@@ -14,7 +14,7 @@ Webhooks are configured at the room level with two fields:

### `transcript.completed`

Triggered when a transcript has been fully processed, including transcription, diarization, summarization, and topic detection.
Triggered when a transcript has been fully processed, including transcription, diarization, summarization, topic detection and calendar event integration.

### `test`

@@ -128,6 +128,27 @@ This event includes a convenient URL for accessing the transcript:
  "room": {
    "id": "room-789",
    "name": "Product Team Room"
  },
  "calendar_event": {
    "id": "calendar-event-123",
    "ics_uid": "event-123",
    "title": "Q3 Product Planning Meeting",
    "start_time": "2025-08-27T12:00:00Z",
    "end_time": "2025-08-27T12:30:00Z",
    "description": "Team discussed Q3 product roadmap, prioritizing mobile app features and API improvements.",
    "location": "Conference Room 1",
    "attendees": [
      {
        "id": "participant-1",
        "name": "John Doe",
        "speaker": "Speaker 1"
      },
      {
        "id": "participant-2",
        "name": "Jane Smith",
        "speaker": "Speaker 2"
      }
    ]
  }
}
```
@@ -1,73 +0,0 @@
#
# This file serves as an example of possible configuration
# All the settings are described here: reflector/settings.py
#

## =======================================================
## User authentication
## =======================================================

## Using jwt/authentik
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=

## =======================================================
## Transcription backend
##
## Check reflector/processors/audio_transcript_* for the
## full list of available transcription backends
## =======================================================

## Using local whisper
#TRANSCRIPT_BACKEND=whisper

## Using serverless modal.com (requires reflector-gpu-modal deployed)
#TRANSCRIPT_BACKEND=modal
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
#TRANSCRIPT_MODAL_API_KEY=xxxxx

TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=

## =======================================================
## Translation backend
##
## Only available in modal atm
## =======================================================
TRANSLATION_BACKEND=modal
TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
#TRANSLATION_MODAL_API_KEY=xxxxx

## =======================================================
## LLM backend
##
## Responsible for titles and short summary
## Check reflector/llm/* for the full list of available
## llm backend implementations
## =======================================================

## Context size for summary generation (tokens)
# LLM_MODEL=microsoft/phi-4
LLM_CONTEXT_WINDOW=16000
LLM_URL=
LLM_API_KEY=sk-

## =======================================================
## Diarization
##
## Only available on modal
## To allow diarization, you need to expose the files to be downloaded by the pipeline
## =======================================================
DIARIZATION_ENABLED=false
DIARIZATION_BACKEND=modal
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
#DIARIZATION_MODAL_API_KEY=xxxxx


## =======================================================
## Sentry
## =======================================================

## Sentry DSN configuration
#SENTRY_DSN=
@@ -1,583 +0,0 @@
# Celery to TaskIQ Migration Guide

## Executive Summary

This document outlines the migration path from Celery to TaskIQ for the Reflector project. TaskIQ is a modern, async-first distributed task queue that provides similar functionality to Celery while being designed specifically for async Python applications.

## Current Celery Usage Analysis

### Key Patterns in Use
1. **Task Decorators**: `@shared_task`, `@asynctask`, `@with_session` decorators
2. **Task Invocation**: `.delay()`, `.si()` for signatures
3. **Workflow Patterns**: `chain()`, `group()`, `chord()` for complex pipelines
4. **Scheduled Tasks**: Celery Beat with crontab and periodic schedules
5. **Session Management**: Custom `@with_session` and `@with_session_and_transcript` decorators
6. **Retry Logic**: Auto-retry with exponential backoff
7. **Redis Backend**: Using Redis for broker and result backend

### Critical Files to Migrate
- `reflector/worker/app.py` - Celery app configuration and beat schedule
- `reflector/worker/session_decorator.py` - Session management decorators
- `reflector/pipelines/main_file_pipeline.py` - File processing pipeline
- `reflector/pipelines/main_live_pipeline.py` - Live streaming pipeline (10 tasks)
- `reflector/worker/process.py` - Background processing tasks
- `reflector/worker/ics_sync.py` - Calendar sync tasks
- `reflector/worker/cleanup.py` - Cleanup tasks
- `reflector/worker/webhook.py` - Webhook notifications

## TaskIQ Architecture Mapping

### 1. Installation

```bash
# Remove Celery dependencies
uv remove celery flower

# Install TaskIQ with Redis support
uv add taskiq taskiq-redis taskiq-pipelines
```

### 2. Broker Configuration

#### Current (Celery)
```python
# reflector/worker/app.py
from celery import Celery

app = Celery(
    "reflector",
    broker=settings.CELERY_BROKER_URL,
    backend=settings.CELERY_RESULT_BACKEND,
    include=[...],
)
```

#### New (TaskIQ)
```python
# reflector/worker/broker.py
from taskiq_redis import RedisAsyncResultBackend, RedisStreamBroker
from taskiq import PipelineMiddleware, SimpleRetryMiddleware

result_backend = RedisAsyncResultBackend(
    redis_url=settings.REDIS_URL,
    result_ex_time=86400,  # 24 hours
)

broker = RedisStreamBroker(
    url=settings.REDIS_URL,
    max_connection_pool_size=10,
).with_result_backend(result_backend).with_middlewares(
    PipelineMiddleware(),  # For chain/group/chord support
    SimpleRetryMiddleware(default_retry_count=3),
)

# For testing environment
if os.environ.get("ENVIRONMENT") == "pytest":
    from taskiq import InMemoryBroker
    broker = InMemoryBroker(await_inplace=True)
```

### 3. Task Definition Migration

#### Current (Celery)
```python
@shared_task
@asynctask
@with_session
async def task_pipeline_file_process(session: AsyncSession, transcript_id: str):
    pipeline = PipelineMainFile(transcript_id=transcript_id)
    await pipeline.process()
```

#### New (TaskIQ)
```python
from taskiq import TaskiqDepends
from reflector.worker.broker import broker
from reflector.worker.dependencies import get_db_session

@broker.task
async def task_pipeline_file_process(transcript_id: str):
    # Use get_session for proper test mocking
    async for session in get_session():
        pipeline = PipelineMainFile(transcript_id=transcript_id)
        await pipeline.process()
```

### 4. Session Management

#### Current Session Decorators (Keep Using These!)
```python
# reflector/worker/session_decorator.py
def with_session(func):
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        async with get_session_context() as session:
            return await func(session, *args, **kwargs)
    return wrapper
```

#### Session Management Strategy

**⚠️ CRITICAL**: The key insight is to maintain consistent session management patterns:

1. **For Worker Tasks**: Continue using the `@with_session` decorator pattern
2. **For FastAPI endpoints**: Use `get_session` dependency injection
3. **Never use `get_session_factory()` directly** in application code

```python
# APPROACH 1: Simple migration keeping decorator pattern
from reflector.worker.session_decorator import with_session

@taskiq_broker.task
@with_session
async def task_pipeline_file_process(session, *, transcript_id: str):
    # Session is provided by decorator, just like Celery version
    transcript = await transcripts_controller.get_by_id(session, transcript_id)
    pipeline = PipelineMainFile(transcript_id=transcript_id)
    await pipeline.process()

# APPROACH 2: For test compatibility without decorator
from reflector.db import get_session

@taskiq_broker.task
async def task_pipeline_file_process(transcript_id: str):
    # Use get_session which is mocked in tests
    async for session in get_session():
        transcript = await transcripts_controller.get_by_id(session, transcript_id)
        pipeline = PipelineMainFile(transcript_id=transcript_id)
        await pipeline.process()

# APPROACH 3: Future - TaskIQ dependency injection (after full migration)
from taskiq import TaskiqDepends

async def get_session_context():
    """Context manager version of get_session for consistency"""
    async for session in get_session():
        yield session

@taskiq_broker.task
async def task_pipeline_file_process(
    transcript_id: str,
    session: AsyncSession = TaskiqDepends(get_session_context)
):
    transcript = await transcripts_controller.get_by_id(session, transcript_id)
    pipeline = PipelineMainFile(transcript_id=transcript_id)
    await pipeline.process()
```

**Key Points:**
- The `@with_session` decorator works with TaskIQ tasks (remove `@asynctask`, keep `@with_session`)
- For testing: `get_session()` from `reflector.db` is properly mocked
- Never call `get_session_factory()` directly - always use the abstractions

### 5. Task Invocation

#### Current (Celery)
```python
# Simple async execution
task_pipeline_file_process.delay(transcript_id=transcript.id)

# With signature for chaining
task_cleanup_consent.si(transcript_id=transcript_id)
```

#### New (TaskIQ)
```python
# Simple async execution
await task_pipeline_file_process.kiq(transcript_id=transcript.id)

# With kicker for advanced configuration
await task_cleanup_consent.kicker().with_labels(
    priority="high"
).kiq(transcript_id=transcript_id)
```

### 6. Workflow Patterns (Chain, Group, Chord)

#### Current (Celery)
```python
from celery import chain, group, chord

# Chain example
post_chain = chain(
    task_cleanup_consent.si(transcript_id=transcript_id),
    task_pipeline_post_to_zulip.si(transcript_id=transcript_id),
    task_send_webhook_if_needed.si(transcript_id=transcript_id),
)

# Chord example (parallel + callback)
chain = chord(
    group(chain_mp3_and_diarize, chain_title_preview),
    chain_final_summaries,
) | task_pipeline_post_to_zulip.si(transcript_id=transcript_id)
```

#### New (TaskIQ with Pipelines)
```python
from taskiq_pipelines import Pipeline
from taskiq import gather

# Chain example using Pipeline
post_pipeline = (
    Pipeline(broker, task_cleanup_consent)
    .call_next(task_pipeline_post_to_zulip, transcript_id=transcript_id)
    .call_next(task_send_webhook_if_needed, transcript_id=transcript_id)
)
await post_pipeline.kiq(transcript_id=transcript_id)

# Parallel execution with gather
results = await gather([
    chain_mp3_and_diarize.kiq(transcript_id),
    chain_title_preview.kiq(transcript_id),
])

# Then execute callback
await chain_final_summaries.kiq(transcript_id, results)
await task_pipeline_post_to_zulip.kiq(transcript_id)
```

### 7. Scheduled Tasks (Celery Beat → TaskIQ Scheduler)

#### Current (Celery Beat)
```python
# reflector/worker/app.py
app.conf.beat_schedule = {
    "process_messages": {
        "task": "reflector.worker.process.process_messages",
        "schedule": float(settings.SQS_POLLING_TIMEOUT_SECONDS),
    },
    "reprocess_failed_recordings": {
        "task": "reflector.worker.process.reprocess_failed_recordings",
        "schedule": crontab(hour=5, minute=0),
    },
}
```

#### New (TaskIQ Scheduler)
```python
# reflector/worker/scheduler.py
from taskiq import TaskiqScheduler
from taskiq_redis import ListRedisScheduleSource

schedule_source = ListRedisScheduleSource(settings.REDIS_URL)

# Define scheduled tasks with decorators
@broker.task(
    schedule=[
        {
            "cron": f"*/{int(settings.SQS_POLLING_TIMEOUT_SECONDS)} * * * * *"
        }
    ]
)
async def process_messages():
    # Task implementation
    pass

@broker.task(
    schedule=[{"cron": "0 5 * * *"}]  # Daily at 5 AM
)
async def reprocess_failed_recordings():
    # Task implementation
    pass

# Initialize scheduler
scheduler = TaskiqScheduler(broker, sources=[schedule_source])

# Run scheduler (separate process)
# taskiq scheduler reflector.worker.scheduler:scheduler
```

### 8. Retry Configuration

#### Current (Celery)
```python
@shared_task(
    bind=True,
    max_retries=30,
    default_retry_delay=60,
    retry_backoff=True,
    retry_backoff_max=3600,
)
async def task_send_webhook_if_needed(self, ...):
    try:
        # Task logic
    except Exception as exc:
        raise self.retry(exc=exc)
```

#### New (TaskIQ)
```python
from taskiq.middlewares import SimpleRetryMiddleware

# Global middleware configuration (1:1 with Celery defaults)
broker = broker.with_middlewares(
    SimpleRetryMiddleware(default_retry_count=3),
)

# For specific tasks with custom retry logic:
@broker.task(retry_on_error=True, max_retries=30)
async def task_send_webhook_if_needed(...):
    # Task logic - exceptions auto-retry
    pass
```

## Testing Migration

### Current Pytest Setup (Celery)
```python
# tests/conftest.py
@pytest.fixture(scope="session")
def celery_config():
    return {
        "broker_url": "memory://",
        "result_backend": "cache+memory://",
    }

@pytest.mark.usefixtures("celery_session_app")
@pytest.mark.usefixtures("celery_session_worker")
async def test_task():
    pass
```

### New Pytest Setup (TaskIQ)
```python
# tests/conftest.py
import pytest
from taskiq import InMemoryBroker
from reflector.worker.broker import broker

@pytest.fixture(scope="function", autouse=True)
async def setup_taskiq_broker():
    """Replace broker with InMemoryBroker for testing"""
    original_broker = broker
    test_broker = InMemoryBroker(await_inplace=True)

    # Copy task registrations
    for task_name, task in original_broker._tasks.items():
        test_broker.register_task(task.original_function, task_name=task_name)

    yield test_broker
    await test_broker.shutdown()

@pytest.fixture
async def taskiq_with_db_session(db_session):
    """Setup TaskIQ with database session"""
    from reflector.worker.broker import broker
    broker.add_dependency_context({
        AsyncSession: db_session
    })
    yield
    broker.custom_dependency_context = {}

# Test example
@pytest.mark.anyio
async def test_task(taskiq_with_db_session):
    result = await task_pipeline_file_process("transcript-id")
    assert result is not None
```

## Migration Steps

### Phase 1: Setup (Week 1)
1. **Install TaskIQ packages**
   ```bash
   uv add taskiq taskiq-redis taskiq-pipelines
   ```

2. **Create new broker configuration**
   - Create `reflector/worker/broker.py` with TaskIQ broker setup
   - Create `reflector/worker/dependencies.py` for dependency injection

3. **Update settings**
   - Keep existing Redis configuration
   - Add TaskIQ-specific settings if needed

### Phase 2: Parallel Running (Week 2-3)
1. **Migrate simple tasks first**
   - Start with `cleanup.py` (1 task)
   - Move to `webhook.py` (1 task)
   - Test thoroughly in isolation

2. **Setup dual-mode operation**
   - Keep Celery tasks running
   - Add TaskIQ versions alongside
   - Use feature flags to switch between them

### Phase 3: Complex Tasks (Week 3-4)
1. **Migrate pipeline tasks**
   - Convert `main_file_pipeline.py`
   - Convert `main_live_pipeline.py` (most complex with 10 tasks)
   - Ensure chain/group/chord patterns work

2. **Migrate scheduled tasks**
   - Setup TaskIQ scheduler
   - Convert beat schedule to TaskIQ schedules
   - Test cron patterns

### Phase 4: Testing & Validation (Week 4-5)
1. **Update test suite**
   - Replace Celery fixtures with TaskIQ fixtures
   - Update all test files
   - Ensure coverage remains the same

2. **Performance testing**
   - Compare task execution times
   - Monitor Redis memory usage
   - Test under load

### Phase 5: Cutover (Week 5-6)
1. **Final migration**
   - Remove Celery dependencies
   - Update deployment scripts
   - Update documentation

2. **Monitoring**
   - Setup TaskIQ monitoring (if available)
   - Create health checks
   - Document operational procedures

## Key Differences to Note

### Advantages of TaskIQ
1. **Native async support** - No need for `@asynctask` wrapper
2. **Dependency injection** - Cleaner than decorators for session management
3. **Type hints** - Better IDE support and autocompletion
4. **Modern Python** - Designed for Python 3.7+
5. **Simpler testing** - InMemoryBroker makes testing easier

### Potential Challenges
1. **Less mature ecosystem** - Fewer third-party integrations
2. **Documentation** - Less comprehensive than Celery
3. **Monitoring tools** - No Flower equivalent (may need a custom solution)
4. **Community support** - Smaller community than Celery

## Command Line Changes

### Current (Celery)
```bash
# Start worker
celery -A reflector.worker.app worker --loglevel=info

# Start beat scheduler
celery -A reflector.worker.app beat
```

### New (TaskIQ)
```bash
# Start worker
taskiq worker reflector.worker.broker:broker

# Start scheduler
taskiq scheduler reflector.worker.scheduler:scheduler

# With custom settings
taskiq worker reflector.worker.broker:broker --workers 4 --log-level INFO
```

## Rollback Plan

If issues arise during migration:

1. **Keep Celery code in version control** - Tag the last Celery version
2. **Maintain dual broker setup** - Can switch back via environment variable
3. **Database compatibility** - No schema changes required
4. **Redis compatibility** - Both use Redis, easy to switch back

## Success Criteria

1. ✅ All tasks migrated and functioning
2. ✅ Test coverage maintained at current levels
3. ✅ Performance equal or better than Celery
4. ✅ Scheduled tasks running reliably
5. ✅ Error handling and retries working correctly
6. ✅ WebSocket notifications still functioning
7. ✅ Pipeline processing maintaining same behavior

## Monitoring & Operations

### Health Checks
```python
# reflector/worker/healthcheck.py
@broker.task
async def healthcheck_ping():
    """TaskIQ health check task"""
    return {"status": "healthy", "timestamp": datetime.now()}
```

### Metrics Collection
- Task execution times
- Success/failure rates
- Queue depths
- Worker utilization
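
These metrics map naturally onto a TaskIQ middleware. A hedged sketch using `TaskiqMiddleware` hooks (the hook names come from TaskIQ's middleware interface; the in-memory storage is illustrative and would be replaced by a real metrics backend):

```python
import time
from taskiq import TaskiqMiddleware

class MetricsMiddleware(TaskiqMiddleware):
    """Record per-task execution time and failure counts."""

    def __init__(self) -> None:
        super().__init__()
        self.durations: dict[str, list[float]] = {}
        self.failures: dict[str, int] = {}

    def pre_execute(self, message):
        # stash a start time on the message labels (label values are strings)
        message.labels["_started_at"] = str(time.monotonic())
        return message

    def post_execute(self, message, result):
        started = float(message.labels.get("_started_at", time.monotonic()))
        self.durations.setdefault(message.task_name, []).append(time.monotonic() - started)
        if result.is_err:
            self.failures[message.task_name] = self.failures.get(message.task_name, 0) + 1
```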

## Key Implementation Points - MUST READ

### Critical Changes Required

1. **Session Management in Tasks**
   - ✅ **VERIFIED**: Tasks MUST use `get_session()` from `reflector.db` for test compatibility
   - ❌ Do NOT use `get_session_factory()` directly in tasks - it bypasses test mocks
   - ✅ The test database session IS properly shared when using `get_session()`

2. **Task Invocation Changes**
   - Replace `.delay()` with `await .kiq()`
   - All task invocations become async/await
   - No need to commit sessions before task invocation (controllers handle this)

3. **Broker Configuration**
   - TaskIQ broker must be initialized in `worker/app.py`
   - Use `InMemoryBroker(await_inplace=True)` for testing
   - Use `RedisStreamBroker` for production

4. **Test Setup Requirements**
   - Set `os.environ["ENVIRONMENT"] = "pytest"` at the top of test files
   - Add the TaskIQ broker fixture to test functions
   - Keep Celery fixtures for now (dual-mode operation)

5. **Import Pattern Changes**
   ```python
   # Each file needs both imports during migration
   from reflector.pipelines.main_file_pipeline import (
       task_pipeline_file_process,          # Celery version
       task_pipeline_file_process_taskiq,   # TaskIQ version
   )
   ```

6. **Decorator Changes**
   - Remove `@asynctask` - TaskIQ is async-native
   - **Keep `@with_session`** - it works with TaskIQ tasks!
   - Remove `@shared_task` from the TaskIQ version
   - Keep `@shared_task` on the Celery version for backward compatibility

## Verified POC Results

✅ **Database transactions work correctly** across test and TaskIQ tasks
✅ **Tasks execute immediately** in tests with `InMemoryBroker(await_inplace=True)`
✅ **Session mocking works** when using `get_session()` properly
✅ **"OK" output confirmed** - TaskIQ task executes and accesses test data

## Conclusion

The migration from Celery to TaskIQ is feasible and offers several advantages for an async-first codebase like Reflector. The key challenges will be:

1. Migrating complex pipeline patterns (chain/chord)
2. Ensuring scheduled task reliability
3. **SOLVED**: Maintaining session management patterns - use `get_session()`
4. Updating the test suite

The phased approach allows for gradual migration with minimal risk. The ability to run both systems in parallel provides a safety net during the transition period.

## Appendix: Quick Reference

| Celery | TaskIQ |
|--------|--------|
| `@shared_task` | `@broker.task` |
| `.delay()` | `.kiq()` |
| `.apply_async()` | `.kicker().kiq()` |
| `chain()` | `Pipeline()` |
| `group()` | `gather()` |
| `chord()` | `gather() + callback` |
| `@task.retry()` | `retry_on_error=True` |
| Celery Beat | TaskIQ Scheduler |
| `celery worker` | `taskiq worker` |
| Flower | Custom monitoring needed |
@@ -3,7 +3,7 @@ from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from reflector.db.base import metadata
from reflector.db import metadata
from reflector.settings import settings

# this is the Alembic Config object, which provides
26
server/migrations/versions/05f8688d6895_add_action_items.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""add_action_items
|
||||
|
||||
Revision ID: 05f8688d6895
|
||||
Revises: bbafedfa510c
|
||||
Create Date: 2025-12-12 11:57:50.209658
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "05f8688d6895"
|
||||
down_revision: Union[str, None] = "bbafedfa510c"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column("transcript", sa.Column("action_items", sa.JSON(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("transcript", "action_items")
|
||||
@@ -23,16 +23,14 @@ def upgrade() -> None:
    op.drop_column("transcript", "search_vector_en")

    # Recreate the search vector column with long_summary included
    op.execute(
        """
    op.execute("""
        ALTER TABLE transcript ADD COLUMN search_vector_en tsvector
        GENERATED ALWAYS AS (
            setweight(to_tsvector('english', coalesce(title, '')), 'A') ||
            setweight(to_tsvector('english', coalesce(long_summary, '')), 'B') ||
            setweight(to_tsvector('english', coalesce(webvtt, '')), 'C')
        ) STORED
    """
    )
    """)

    # Recreate the GIN index for the search vector
    op.create_index(
@@ -49,15 +47,13 @@ def downgrade() -> None:
    op.drop_column("transcript", "search_vector_en")

    # Recreate the original search vector column without long_summary
    op.execute(
        """
    op.execute("""
        ALTER TABLE transcript ADD COLUMN search_vector_en tsvector
        GENERATED ALWAYS AS (
            setweight(to_tsvector('english', coalesce(title, '')), 'A') ||
            setweight(to_tsvector('english', coalesce(webvtt, '')), 'B')
        ) STORED
    """
    )
    """)

    # Recreate the GIN index for the search vector
    op.create_index(

@@ -21,15 +21,13 @@ def upgrade() -> None:
    if conn.dialect.name != "postgresql":
        return

    op.execute(
        """
    op.execute("""
        ALTER TABLE transcript ADD COLUMN search_vector_en tsvector
        GENERATED ALWAYS AS (
            setweight(to_tsvector('english', coalesce(title, '')), 'A') ||
            setweight(to_tsvector('english', coalesce(webvtt, '')), 'B')
        ) STORED
    """
    )
    """)

    op.create_index(
        "idx_transcript_search_vector_en",
@@ -0,0 +1,50 @@
"""add_platform_support

Revision ID: 1e49625677e4
Revises: 9e3f7b2a4c8e
Create Date: 2025-10-08 13:17:29.943612

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "1e49625677e4"
down_revision: Union[str, None] = "9e3f7b2a4c8e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add platform field with default 'whereby' for backward compatibility."""
    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "platform",
                sa.String(),
                nullable=True,
                server_default=None,
            )
        )

    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "platform",
                sa.String(),
                nullable=False,
                server_default="whereby",
            )
        )


def downgrade() -> None:
    """Remove platform field."""
    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.drop_column("platform")

    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.drop_column("platform")
@@ -19,14 +19,12 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # Set room_id to NULL for meetings that reference non-existent rooms
    op.execute(
        """
    op.execute("""
        UPDATE meeting
        SET room_id = NULL
        WHERE room_id IS NOT NULL
        AND room_id NOT IN (SELECT id FROM room WHERE id IS NOT NULL)
    """
    )
    """)


def downgrade() -> None:
@@ -0,0 +1,79 @@
"""add daily participant session table with immutable left_at

Revision ID: 2b92a1b03caa
Revises: f8294b31f022
Create Date: 2025-11-13 20:29:30.486577

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "2b92a1b03caa"
down_revision: Union[str, None] = "f8294b31f022"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create table
    op.create_table(
        "daily_participant_session",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("meeting_id", sa.String(), nullable=False),
        sa.Column("room_id", sa.String(), nullable=False),
        sa.Column("session_id", sa.String(), nullable=False),
        sa.Column("user_id", sa.String(), nullable=True),
        sa.Column("user_name", sa.String(), nullable=False),
        sa.Column("joined_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("left_at", sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(["meeting_id"], ["meeting.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["room_id"], ["room.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    with op.batch_alter_table("daily_participant_session", schema=None) as batch_op:
        batch_op.create_index(
            "idx_daily_session_meeting_left", ["meeting_id", "left_at"], unique=False
        )
        batch_op.create_index("idx_daily_session_room", ["room_id"], unique=False)

    # Create trigger function to prevent left_at from being updated once set
    op.execute("""
        CREATE OR REPLACE FUNCTION prevent_left_at_update()
        RETURNS TRIGGER AS $$
        BEGIN
            IF OLD.left_at IS NOT NULL THEN
                RAISE EXCEPTION 'left_at is immutable once set';
            END IF;
            RETURN NEW;
        END;
        $$ LANGUAGE plpgsql;
    """)

    # Create trigger
    op.execute("""
        CREATE TRIGGER prevent_left_at_update_trigger
        BEFORE UPDATE ON daily_participant_session
        FOR EACH ROW
        EXECUTE FUNCTION prevent_left_at_update();
    """)


def downgrade() -> None:
    # Drop trigger
    op.execute(
        "DROP TRIGGER IF EXISTS prevent_left_at_update_trigger ON daily_participant_session;"
    )

    # Drop trigger function
    op.execute("DROP FUNCTION IF EXISTS prevent_left_at_update();")

    # Drop indexes and table
    with op.batch_alter_table("daily_participant_session", schema=None) as batch_op:
        batch_op.drop_index("idx_daily_session_room")
        batch_op.drop_index("idx_daily_session_meeting_left")

    op.drop_table("daily_participant_session")
@@ -28,7 +28,7 @@ def upgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select(transcript.c.id, transcript.c.topics))
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))

    for row in results:
        transcript_id = row["id"]
@@ -58,7 +58,7 @@ def downgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select(transcript.c.id, transcript.c.topics))
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))

    for row in results:
        transcript_id = row["id"]

@@ -36,7 +36,9 @@ def upgrade() -> None:

    # select only the one with duration = 0
    results = bind.execute(
        select(transcript.c.id, transcript.c.duration).where(transcript.c.duration == 0)
        select([transcript.c.id, transcript.c.duration]).where(
            transcript.c.duration == 0
        )
    )

    data_dir = Path(settings.DATA_DIR)
@@ -0,0 +1,30 @@
"""Make room platform non-nullable with dynamic default

Revision ID: 5d6b9df9b045
Revises: 2b92a1b03caa
Create Date: 2025-11-21 13:22:25.756584

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "5d6b9df9b045"
down_revision: Union[str, None] = "2b92a1b03caa"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.execute("UPDATE room SET platform = 'whereby' WHERE platform IS NULL")

    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.alter_column("platform", existing_type=sa.String(), nullable=False)


def downgrade() -> None:
    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.alter_column("platform", existing_type=sa.String(), nullable=True)
@@ -28,7 +28,7 @@ def upgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select(transcript.c.id, transcript.c.topics))
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))

    for row in results:
        transcript_id = row["id"]
@@ -58,7 +58,7 @@ def downgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select(transcript.c.id, transcript.c.topics))
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))

    for row in results:
        transcript_id = row["id"]
38
server/migrations/versions/9e3f7b2a4c8e_add_user_api_keys.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""add user api keys
|
||||
|
||||
Revision ID: 9e3f7b2a4c8e
|
||||
Revises: dc035ff72fd5
|
||||
Create Date: 2025-10-17 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "9e3f7b2a4c8e"
|
||||
down_revision: Union[str, None] = "dc035ff72fd5"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"user_api_key",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("user_id", sa.String(), nullable=False),
|
||||
sa.Column("key_hash", sa.String(), nullable=False),
|
||||
sa.Column("name", sa.String(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
|
||||
with op.batch_alter_table("user_api_key", schema=None) as batch_op:
|
||||
batch_op.create_index("idx_user_api_key_hash", ["key_hash"], unique=True)
|
||||
batch_op.create_index("idx_user_api_key_user_id", ["user_id"], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("user_api_key")
|
||||
Some files were not shown because too many files have changed in this diff