diff --git a/docker-compose.selfhosted.yml b/docker-compose.selfhosted.yml index a301364c..e4308883 100644 --- a/docker-compose.selfhosted.yml +++ b/docker-compose.selfhosted.yml @@ -1,11 +1,12 @@ # Self-hosted production Docker Compose — single file for everything. # -# Usage: ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy -# or: docker compose -f docker-compose.selfhosted.yml --profile gpu [--profile ollama-gpu] [--profile garage] [--profile caddy] up -d +# Usage: ./scripts/setup-selfhosted.sh <--gpu|--cpu|--hosted> [--ollama-gpu|--ollama-cpu] [--garage] [--caddy] +# or: docker compose -f docker-compose.selfhosted.yml [--profile gpu] [--profile ollama-gpu] [--profile garage] [--profile caddy] up -d # -# Specialized models (pick ONE — required): -# --profile gpu NVIDIA GPU for transcription/diarization/translation -# --profile cpu CPU-only for transcription/diarization/translation +# ML processing modes (pick ONE — required): +# --gpu NVIDIA GPU container for transcription/diarization/translation (profile: gpu) +# --cpu In-process CPU processing on server/worker (no ML container needed) +# --hosted Remote GPU service URL (no ML container needed) # # Local LLM (optional — for summarization/topics): # --profile ollama-gpu Local Ollama with NVIDIA GPU @@ -45,16 +46,9 @@ services: REDIS_HOST: redis CELERY_BROKER_URL: redis://redis:6379/1 CELERY_RESULT_BACKEND: redis://redis:6379/1 - # Specialized models via gpu/cpu container (aliased as "transcription") - TRANSCRIPT_BACKEND: modal - TRANSCRIPT_URL: http://transcription:8000 - TRANSCRIPT_MODAL_API_KEY: selfhosted - DIARIZATION_BACKEND: modal - DIARIZATION_URL: http://transcription:8000 - TRANSLATION_BACKEND: modal - TRANSLATE_URL: http://transcription:8000 - PADDING_BACKEND: modal - PADDING_URL: http://transcription:8000 + # ML backend config comes from env_file (server/.env), set per-mode by setup script + # HF_TOKEN needed for in-process pyannote diarization (--cpu mode) + HF_TOKEN: ${HF_TOKEN:-} # 
WebRTC: fixed UDP port range for ICE candidates (mapped above) WEBRTC_PORT_RANGE: "51000-51100" depends_on: @@ -79,15 +73,8 @@ services: REDIS_HOST: redis CELERY_BROKER_URL: redis://redis:6379/1 CELERY_RESULT_BACKEND: redis://redis:6379/1 - TRANSCRIPT_BACKEND: modal - TRANSCRIPT_URL: http://transcription:8000 - TRANSCRIPT_MODAL_API_KEY: selfhosted - DIARIZATION_BACKEND: modal - DIARIZATION_URL: http://transcription:8000 - TRANSLATION_BACKEND: modal - TRANSLATE_URL: http://transcription:8000 - PADDING_BACKEND: modal - PADDING_URL: http://transcription:8000 + # ML backend config comes from env_file (server/.env), set per-mode by setup script + HF_TOKEN: ${HF_TOKEN:-} depends_on: postgres: condition: service_healthy @@ -165,7 +152,10 @@ services: # =========================================================== # Specialized model containers (transcription, diarization, translation) - # Both gpu and cpu get alias "transcription" so server config never changes. + # Only the gpu profile is activated by the setup script (--gpu mode). + # The cpu service definition is kept for manual/standalone use but is + # NOT activated by --cpu mode (which uses in-process local backends). + # Both services get alias "transcription" so server config never changes. 
# =========================================================== gpu: diff --git a/docs/create-docs.sh b/docs/create-docs.sh index 456efd96..ff59320d 100755 --- a/docs/create-docs.sh +++ b/docs/create-docs.sh @@ -254,15 +254,15 @@ Reflector can run completely offline: Control where each step happens: ```yaml -# All local processing -TRANSCRIPT_BACKEND=local -DIARIZATION_BACKEND=local -TRANSLATION_BACKEND=local +# All in-process processing +TRANSCRIPT_BACKEND=whisper +DIARIZATION_BACKEND=pyannote +TRANSLATION_BACKEND=marian # Hybrid approach -TRANSCRIPT_BACKEND=modal # Fast GPU processing -DIARIZATION_BACKEND=local # Sensitive speaker data -TRANSLATION_BACKEND=modal # Non-sensitive translation +TRANSCRIPT_BACKEND=modal # Fast GPU processing +DIARIZATION_BACKEND=pyannote # Sensitive speaker data +TRANSLATION_BACKEND=modal # Non-sensitive translation ``` ### Storage Options diff --git a/docsv2/selfhosted-production.md b/docsv2/selfhosted-production.md index 42d3b4d9..cc0a425d 100644 --- a/docsv2/selfhosted-production.md +++ b/docsv2/selfhosted-production.md @@ -53,9 +53,12 @@ cd reflector # Same but without a domain (self-signed cert, access via IP): ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy -# CPU-only (same, but slower): +# CPU-only (in-process ML, no GPU container): ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy +# Remote GPU service (your own hosted GPU, no local ML container): +./scripts/setup-selfhosted.sh --hosted --garage --caddy + # With password authentication (single admin user): ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --password mysecretpass @@ -65,14 +68,15 @@ cd reflector That's it. The script generates env files, secrets, starts all containers, waits for health checks, and prints the URL. -## Specialized Models (Required) +## ML Processing Modes (Required) -Pick `--gpu` or `--cpu`. 
This determines how **transcription, diarization, translation, and audio padding** run: +Pick `--gpu`, `--cpu`, or `--hosted`. This determines how **transcription, diarization, translation, and audio padding** run: | Flag | What it does | Requires | |------|-------------|----------| -| `--gpu` | NVIDIA GPU acceleration for ML models | NVIDIA GPU + drivers + `nvidia-container-toolkit` | -| `--cpu` | CPU-only (slower but works without GPU) | 8+ cores, 32GB+ RAM recommended | +| `--gpu` | NVIDIA GPU container for ML models | NVIDIA GPU + drivers + `nvidia-container-toolkit` | +| `--cpu` | In-process CPU processing on server/worker (no ML container) | 8+ cores, 16GB+ RAM (32GB recommended for large files) | +| `--hosted` | Remote GPU service URL (no local ML container) | A running GPU service instance (e.g. `gpu/self_hosted/`) | ## Local LLM (Optional) @@ -130,9 +134,11 @@ Browse all available models at https://ollama.com/library. - **`--gpu --ollama-gpu`**: Best for servers with NVIDIA GPU. Fully self-contained, no external API keys needed. - **`--cpu --ollama-cpu`**: No GPU available but want everything self-contained. Slower but works. +- **`--hosted --ollama-cpu`**: Remote GPU for ML, local CPU for LLM. Great when you have a separate GPU server. - **`--gpu --ollama-cpu`**: GPU for transcription, CPU for LLM. Saves GPU VRAM for ML models. - **`--gpu`**: Have NVIDIA GPU but prefer a cloud LLM (faster/better summaries with GPT-4, Claude, etc.). - **`--cpu`**: No GPU, prefer cloud LLM. Slowest transcription but best summary quality. +- **`--hosted`**: Remote GPU, cloud LLM. No local ML at all. ## Other Optional Flags @@ -160,7 +166,7 @@ Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse pr 4. **Generate `www/.env`** — Auto-detects server IP, sets URLs 5. **Storage setup** — Either initializes Garage (bucket, keys, permissions) or prompts for external S3 credentials 6. 
**Caddyfile** — Generates domain-specific (Let's Encrypt) or IP-specific (self-signed) configuration -7. **Build & start** — Always builds GPU/CPU model image from source. With `--build`, also builds backend and frontend from source; otherwise pulls prebuilt images from the registry +7. **Build & start** — For `--gpu`, builds the GPU model image from source. For `--cpu` and `--hosted`, no ML container is built. With `--build`, also builds backend and frontend from source; otherwise pulls prebuilt images from the registry 8. **Auto-detects video platforms** — If `DAILY_API_KEY` is found in `server/.env`, generates `.env.hatchet` (dashboard URL/cookie config), starts Hatchet workflow engine, and generates an API token. If any video platform is configured, enables the Rooms feature 9. **Health checks** — Waits for each service, pulls Ollama model if needed, warns about missing LLM config @@ -181,7 +187,7 @@ Without `--caddy` or `--domain`, no ports are exposed. Point your own reverse pr | `ADMIN_PASSWORD_HASH` | PBKDF2 hash for password auth | *(unset)* | | `WEBRTC_HOST` | IP advertised in WebRTC ICE candidates | Auto-detected (server IP) | | `TRANSCRIPT_URL` | Specialized model endpoint | `http://transcription:8000` | -| `PADDING_BACKEND` | Audio padding backend (`local` or `modal`) | `modal` (selfhosted), `local` (default) | +| `PADDING_BACKEND` | Audio padding backend (`pyav` or `modal`) | `modal` (selfhosted), `pyav` (default) | | `PADDING_URL` | Audio padding endpoint (when `PADDING_BACKEND=modal`) | `http://transcription:8000` | | `LLM_URL` | OpenAI-compatible LLM endpoint | Auto-set for Ollama modes | | `LLM_API_KEY` | LLM API key | `not-needed` for Ollama | @@ -604,10 +610,9 @@ The setup script is idempotent — it won't overwrite existing secrets or env va │ │ │ v v v ┌───────────┐ ┌─────────┐ ┌─────────┐ - │transcription│ │postgres │ │ redis │ - │(gpu/cpu) │ │ :5432 │ │ :6379 │ - │ :8000 │ └─────────┘ └─────────┘ - └───────────┘ + │ ML models │ │postgres │ │ 
redis │ + │ (varies) │ │ :5432 │ │ :6379 │ + └───────────┘ └─────────┘ └─────────┘ │ ┌─────┴─────┐ ┌─────────┐ │ ollama │ │ garage │ @@ -622,6 +627,11 @@ The setup script is idempotent — it won't overwrite existing secrets or env va │ │ :8888 │──│ -cpu / -llm │ │ │ └─────────┘ └───────────────┘ │ └───────────────────────────────────┘ + +ML models box varies by mode: + --gpu: Local GPU container (transcription:8000) + --cpu: In-process on server/worker (no container) + --hosted: Remote GPU service (user URL) ``` All services communicate over Docker's internal network. Only Caddy (if enabled) exposes ports to the internet. Hatchet services are only started when `DAILY_API_KEY` is configured. diff --git a/gpu/self_hosted/pyproject.toml b/gpu/self_hosted/pyproject.toml index 48237c66..3eb72105 100644 --- a/gpu/self_hosted/pyproject.toml +++ b/gpu/self_hosted/pyproject.toml @@ -11,10 +11,11 @@ dependencies = [ "faster-whisper>=1.1.0", "librosa==0.10.1", "numpy<2", - "silero-vad==5.1.0", + "silero-vad==5.1.2", "transformers>=4.35.0", "sentencepiece", - "pyannote.audio==3.1.0", + "pyannote.audio==3.4.0", + "pytorch-lightning<2.6", "torchaudio>=2.3.0", "av>=13.1.0", ] diff --git a/gpu/self_hosted/uv.lock b/gpu/self_hosted/uv.lock index f3fdbd60..1e679429 100644 --- a/gpu/self_hosted/uv.lock +++ b/gpu/self_hosted/uv.lock @@ -1742,7 +1742,7 @@ wheels = [ [[package]] name = "pyannote-audio" -version = "3.1.0" +version = "3.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asteroid-filterbanks" }, @@ -1765,9 +1765,9 @@ dependencies = [ { name = "torchaudio" }, { name = "torchmetrics" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/55/7253267c35e2aa9188b1d86cba121eb5bdd91ed12d3194488625a008cae7/pyannote.audio-3.1.0.tar.gz", hash = "sha256:da04705443d3b74607e034d3ca88f8b572c7e9672dd9a4199cab65a0dbc33fad", size = 14812058, upload-time = "2023-11-16T12:26:38.939Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ec/1e/efe9619c38f1281ddf21640654d8ea9e3f67c459b76f78657b26d8557bbe/pyannote_audio-3.4.0.tar.gz", hash = "sha256:d523d883cb8d37cb6daf99f3ba83f9138bb193646ad71e6eae7deb89d8ddd642", size = 804850, upload-time = "2025-09-09T07:04:51.17Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/37/158859ce4c45b5ba2dca40b53b0c10d36f935b7f6d4e737298397167c8b1/pyannote.audio-3.1.0-py2.py3-none-any.whl", hash = "sha256:66ab485728c6e141760e80555cb7a083e7be824cd528cc79b9e6f7d6421a91ae", size = 208592, upload-time = "2023-11-16T12:26:36.726Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/620c6f711b723653092fd063bfee82a6af5ea3a4d3c42efc53ce623a7f4d/pyannote_audio-3.4.0-py2.py3-none-any.whl", hash = "sha256:36e38f058059f46da3478dda581cda53d9d85a21173a3e70bbdbc3ba93b5e1b7", size = 897789, upload-time = "2025-09-09T07:04:49.464Z" }, ] [[package]] @@ -2075,6 +2075,7 @@ dependencies = [ { name = "librosa" }, { name = "numpy" }, { name = "pyannote-audio" }, + { name = "pytorch-lightning" }, { name = "sentencepiece" }, { name = "silero-vad" }, { name = "torch" }, @@ -2090,9 +2091,10 @@ requires-dist = [ { name = "faster-whisper", specifier = ">=1.1.0" }, { name = "librosa", specifier = "==0.10.1" }, { name = "numpy", specifier = "<2" }, - { name = "pyannote-audio", specifier = "==3.1.0" }, + { name = "pyannote-audio", specifier = "==3.4.0" }, + { name = "pytorch-lightning", specifier = "<2.6" }, { name = "sentencepiece" }, - { name = "silero-vad", specifier = "==5.1.0" }, + { name = "silero-vad", specifier = "==5.1.2" }, { name = "torch", specifier = ">=2.3.0" }, { name = "torchaudio", specifier = ">=2.3.0" }, { name = "transformers", specifier = ">=4.35.0" }, @@ -2472,16 +2474,16 @@ wheels = [ [[package]] name = "silero-vad" -version = "5.1" +version = "5.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "onnxruntime" }, { name = "torch" }, { name = "torchaudio" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/7c/5d/b912e45d21b8b61859a552554893222d2cdebfd0f9afa7e8ba69c7a3441a/silero_vad-5.1.tar.gz", hash = "sha256:c644275ba5df06cee596cc050ba0bd1e0f5237d1abfa44d58dd4618f6e77434d", size = 3996829, upload-time = "2024-07-09T13:19:24.181Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/b4/d0311b2e6220a11f8f4699f4a278cb088131573286cdfe804c87c7eb5123/silero_vad-5.1.2.tar.gz", hash = "sha256:c442971160026d2d7aa0ad83f0c7ee86c89797a65289fe625c8ea59fc6fb828d", size = 5098526, upload-time = "2024-10-09T09:50:47.019Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/be/0fdbc72030b93d6f55107490d5d2185ddf0dbabdc921f589649d3e92ccd5/silero_vad-5.1-py3-none-any.whl", hash = "sha256:ecb50b484f538f7a962ce5cd3c07120d9db7b9d5a0c5861ccafe459856f22c8f", size = 3939986, upload-time = "2024-07-09T13:19:21.383Z" }, + { url = "https://files.pythonhosted.org/packages/98/f7/5ae11d13fbb733cd3bfd7ff1c3a3902e6f55437df4b72307c1f168146268/silero_vad-5.1.2-py3-none-any.whl", hash = "sha256:93b41953d7774b165407fda6b533c119c5803864e367d5034dc626c82cfdf661", size = 5026737, upload-time = "2024-10-09T09:50:44.355Z" }, ] [[package]] diff --git a/scripts/setup-selfhosted.sh b/scripts/setup-selfhosted.sh index e925c39c..9d7f7858 100755 --- a/scripts/setup-selfhosted.sh +++ b/scripts/setup-selfhosted.sh @@ -4,11 +4,12 @@ # Single script to configure and launch everything on one server. 
# # Usage: -# ./scripts/setup-selfhosted.sh <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASSWORD] [--build] +# ./scripts/setup-selfhosted.sh <--gpu|--cpu|--hosted> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASSWORD] [--build] # -# Specialized models (pick ONE — required): -# --gpu NVIDIA GPU for transcription/diarization/translation -# --cpu CPU-only for transcription/diarization/translation (slower) +# ML processing modes (pick ONE — required): +# --gpu NVIDIA GPU container for transcription/diarization/translation +# --cpu In-process CPU processing (no ML container, slower) +# --hosted Remote GPU service URL (no ML container) # # Local LLM (optional — for summarization & topic detection): # --ollama-gpu Local Ollama with NVIDIA GPU acceleration @@ -29,6 +30,7 @@ # ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy # ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --garage --caddy --domain reflector.example.com # ./scripts/setup-selfhosted.sh --cpu --ollama-cpu --garage --caddy +# ./scripts/setup-selfhosted.sh --hosted --garage --caddy # ./scripts/setup-selfhosted.sh --gpu --ollama-gpu --llm-model mistral --garage --caddy # ./scripts/setup-selfhosted.sh --gpu --garage --caddy --password mysecretpass # ./scripts/setup-selfhosted.sh --gpu --garage --caddy @@ -183,11 +185,14 @@ for i in "${!ARGS[@]}"; do arg="${ARGS[$i]}" case "$arg" in --gpu) - [[ -n "$MODEL_MODE" ]] && { err "Cannot combine --gpu and --cpu. Pick one."; exit 1; } + [[ -n "$MODEL_MODE" ]] && { err "Cannot combine --gpu, --cpu, and --hosted. Pick one."; exit 1; } MODEL_MODE="gpu" ;; --cpu) - [[ -n "$MODEL_MODE" ]] && { err "Cannot combine --gpu and --cpu. Pick one."; exit 1; } + [[ -n "$MODEL_MODE" ]] && { err "Cannot combine --gpu, --cpu, and --hosted. 
Pick one."; exit 1; } MODEL_MODE="cpu" ;; + --hosted) + [[ -n "$MODEL_MODE" ]] && { err "Cannot combine --gpu, --cpu, and --hosted. Pick one."; exit 1; } + MODEL_MODE="hosted" ;; --ollama-gpu) [[ -n "$OLLAMA_MODE" ]] && { err "Cannot combine --ollama-gpu and --ollama-cpu. Pick one."; exit 1; } OLLAMA_MODE="ollama-gpu" ;; @@ -224,20 +229,21 @@ for i in "${!ARGS[@]}"; do SKIP_NEXT=true ;; *) err "Unknown argument: $arg" - err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]" + err "Usage: $0 <--gpu|--cpu|--hosted> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]" exit 1 ;; esac done if [[ -z "$MODEL_MODE" ]]; then - err "No model mode specified. You must choose --gpu or --cpu." + err "No model mode specified. You must choose --gpu, --cpu, or --hosted." err "" - err "Usage: $0 <--gpu|--cpu> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]" + err "Usage: $0 <--gpu|--cpu|--hosted> [--ollama-gpu|--ollama-cpu] [--llm-model MODEL] [--garage] [--caddy] [--domain DOMAIN] [--password PASS] [--build]" err "" - err "Specialized models (required):" - err " --gpu NVIDIA GPU for transcription/diarization/translation" - err " --cpu CPU-only (slower but works without GPU)" + err "ML processing modes (required):" + err " --gpu NVIDIA GPU container for transcription/diarization/translation" + err " --cpu In-process CPU processing (no ML container, slower)" + err " --hosted Remote GPU service URL (no ML container)" err "" err "Local LLM (optional):" err " --ollama-gpu Local Ollama with GPU (for summarization/topics)" @@ -255,7 +261,9 @@ if [[ -z "$MODEL_MODE" ]]; then fi # Build profiles list — one profile per feature -COMPOSE_PROFILES=("$MODEL_MODE") +# Only --gpu needs a compose profile; --cpu and --hosted use in-process/remote backends +COMPOSE_PROFILES=() +[[ 
"$MODEL_MODE" == "gpu" ]] && COMPOSE_PROFILES+=("gpu") [[ -n "$OLLAMA_MODE" ]] && COMPOSE_PROFILES+=("$OLLAMA_MODE") [[ "$USE_GARAGE" == "true" ]] && COMPOSE_PROFILES+=("garage") [[ "$USE_CADDY" == "true" ]] && COMPOSE_PROFILES+=("caddy") @@ -422,43 +430,102 @@ step_server_env() { env_set "$SERVER_ENV" "WEBRTC_HOST" "$PRIMARY_IP" fi - # Specialized models (always via gpu/cpu container aliased as "transcription") - env_set "$SERVER_ENV" "TRANSCRIPT_BACKEND" "modal" - env_set "$SERVER_ENV" "TRANSCRIPT_URL" "http://transcription:8000" - env_set "$SERVER_ENV" "TRANSCRIPT_MODAL_API_KEY" "selfhosted" + # Specialized models — backend configuration per mode env_set "$SERVER_ENV" "DIARIZATION_ENABLED" "true" - env_set "$SERVER_ENV" "DIARIZATION_BACKEND" "modal" - env_set "$SERVER_ENV" "DIARIZATION_URL" "http://transcription:8000" - env_set "$SERVER_ENV" "TRANSLATION_BACKEND" "modal" - env_set "$SERVER_ENV" "TRANSLATE_URL" "http://transcription:8000" - env_set "$SERVER_ENV" "PADDING_BACKEND" "modal" - env_set "$SERVER_ENV" "PADDING_URL" "http://transcription:8000" + case "$MODEL_MODE" in + gpu) + # GPU container aliased as "transcription" on docker network + env_set "$SERVER_ENV" "TRANSCRIPT_BACKEND" "modal" + env_set "$SERVER_ENV" "TRANSCRIPT_URL" "http://transcription:8000" + env_set "$SERVER_ENV" "TRANSCRIPT_MODAL_API_KEY" "selfhosted" + env_set "$SERVER_ENV" "DIARIZATION_BACKEND" "modal" + env_set "$SERVER_ENV" "DIARIZATION_URL" "http://transcription:8000" + env_set "$SERVER_ENV" "TRANSLATION_BACKEND" "modal" + env_set "$SERVER_ENV" "TRANSLATE_URL" "http://transcription:8000" + env_set "$SERVER_ENV" "PADDING_BACKEND" "modal" + env_set "$SERVER_ENV" "PADDING_URL" "http://transcription:8000" + ok "ML backends: GPU container (modal)" + ;; + cpu) + # In-process backends — no ML service container needed + env_set "$SERVER_ENV" "TRANSCRIPT_BACKEND" "whisper" + env_set "$SERVER_ENV" "DIARIZATION_BACKEND" "pyannote" + env_set "$SERVER_ENV" "TRANSLATION_BACKEND" "marian" + 
env_set "$SERVER_ENV" "PADDING_BACKEND" "pyav" + ok "ML backends: in-process CPU (whisper/pyannote/marian/pyav)" + ;; + hosted) + # Remote GPU service — user provides URL + local gpu_url="" + if env_has_key "$SERVER_ENV" "TRANSCRIPT_URL"; then + gpu_url=$(env_get "$SERVER_ENV" "TRANSCRIPT_URL") + fi + if [[ -z "$gpu_url" ]] && [[ -t 0 ]]; then + echo "" + info "Enter the URL of your remote GPU service (e.g. https://gpu.example.com)" + read -rp " GPU service URL: " gpu_url + fi + if [[ -z "$gpu_url" ]]; then + err "GPU service URL required for --hosted mode." + err "Set TRANSCRIPT_URL in server/.env or provide it interactively." + exit 1 + fi + env_set "$SERVER_ENV" "TRANSCRIPT_BACKEND" "modal" + env_set "$SERVER_ENV" "TRANSCRIPT_URL" "$gpu_url" + env_set "$SERVER_ENV" "DIARIZATION_BACKEND" "modal" + env_set "$SERVER_ENV" "DIARIZATION_URL" "$gpu_url" + env_set "$SERVER_ENV" "TRANSLATION_BACKEND" "modal" + env_set "$SERVER_ENV" "TRANSLATE_URL" "$gpu_url" + env_set "$SERVER_ENV" "PADDING_BACKEND" "modal" + env_set "$SERVER_ENV" "PADDING_URL" "$gpu_url" + # API key for remote service + local gpu_api_key="" + if env_has_key "$SERVER_ENV" "TRANSCRIPT_MODAL_API_KEY"; then + gpu_api_key=$(env_get "$SERVER_ENV" "TRANSCRIPT_MODAL_API_KEY") + fi + if [[ -z "$gpu_api_key" ]] && [[ -t 0 ]]; then + read -rp " GPU service API key (or Enter to skip): " gpu_api_key + fi + if [[ -n "$gpu_api_key" ]]; then + env_set "$SERVER_ENV" "TRANSCRIPT_MODAL_API_KEY" "$gpu_api_key" + fi + ok "ML backends: remote hosted ($gpu_url)" + ;; + esac # HuggingFace token for gated models (pyannote diarization) - # Written to root .env so docker compose picks it up for gpu/cpu containers - local root_env="$ROOT_DIR/.env" - local current_hf_token="${HF_TOKEN:-}" - if [[ -f "$root_env" ]] && env_has_key "$root_env" "HF_TOKEN"; then - current_hf_token=$(env_get "$root_env" "HF_TOKEN") - fi - if [[ -z "$current_hf_token" ]]; then - echo "" - warn "HF_TOKEN not set. 
Diarization will use a public model fallback." - warn "For best results, get a token at https://huggingface.co/settings/tokens" - warn "and accept pyannote licenses at https://huggingface.co/pyannote/speaker-diarization-3.1" - if [[ -t 0 ]]; then - read -rp " HuggingFace token (or press Enter to skip): " current_hf_token + # --gpu: written to root .env (docker compose passes to GPU container) + # --cpu: written to both root .env and server/.env (in-process pyannote needs it) + # --hosted: not needed (remote service handles its own auth) + if [[ "$MODEL_MODE" != "hosted" ]]; then + local root_env="$ROOT_DIR/.env" + local current_hf_token="${HF_TOKEN:-}" + if [[ -f "$root_env" ]] && env_has_key "$root_env" "HF_TOKEN"; then + current_hf_token=$(env_get "$root_env" "HF_TOKEN") + fi + if [[ -z "$current_hf_token" ]]; then + echo "" + warn "HF_TOKEN not set. Diarization will use a public model fallback." + warn "For best results, get a token at https://huggingface.co/settings/tokens" + warn "and accept pyannote licenses at https://huggingface.co/pyannote/speaker-diarization-3.1" + if [[ -t 0 ]]; then + read -rp " HuggingFace token (or press Enter to skip): " current_hf_token + fi + fi + if [[ -n "$current_hf_token" ]]; then + touch "$root_env" + env_set "$root_env" "HF_TOKEN" "$current_hf_token" + export HF_TOKEN="$current_hf_token" + # In CPU mode, server process needs HF_TOKEN directly + if [[ "$MODEL_MODE" == "cpu" ]]; then + env_set "$SERVER_ENV" "HF_TOKEN" "$current_hf_token" + fi + ok "HF_TOKEN configured" + else + touch "$root_env" + env_set "$root_env" "HF_TOKEN" "" + ok "HF_TOKEN skipped (using public model fallback)" fi - fi - if [[ -n "$current_hf_token" ]]; then - touch "$root_env" - env_set "$root_env" "HF_TOKEN" "$current_hf_token" - export HF_TOKEN="$current_hf_token" - ok "HF_TOKEN configured" - else - touch "$root_env" - env_set "$root_env" "HF_TOKEN" "" - ok "HF_TOKEN skipped (using public model fallback)" fi # LLM configuration @@ -799,11 +866,12 @@ 
CADDYEOF step_services() { info "Step 6: Starting Docker services" - # Build GPU/CPU image from source (always needed — no prebuilt image) - local build_svc="$MODEL_MODE" - info "Building $build_svc image (first build downloads ML models, may take a while)..." - compose_cmd build "$build_svc" - ok "$build_svc image built" + # Build GPU image from source (only for --gpu mode) + if [[ "$MODEL_MODE" == "gpu" ]]; then + info "Building gpu image (first build downloads ML models, may take a while)..." + compose_cmd build gpu + ok "gpu image built" + fi # Build or pull backend and frontend images if [[ "$BUILD_IMAGES" == "true" ]]; then @@ -871,25 +939,29 @@ step_services() { step_health() { info "Step 7: Health checks" - # Specialized model service (gpu or cpu) - local model_svc="$MODEL_MODE" - - info "Waiting for $model_svc service (first start downloads ~1GB of models)..." - local model_ok=false - for i in $(seq 1 120); do - if curl -sf http://localhost:8000/docs > /dev/null 2>&1; then - model_ok=true - break + # Specialized model service (only for --gpu mode) + if [[ "$MODEL_MODE" == "gpu" ]]; then + info "Waiting for gpu service (first start downloads ~1GB of models)..." + local model_ok=false + for i in $(seq 1 120); do + if curl -sf http://localhost:8000/docs > /dev/null 2>&1; then + model_ok=true + break + fi + echo -ne "\r Waiting for gpu service... ($i/120)" + sleep 5 + done + echo "" + if [[ "$model_ok" == "true" ]]; then + ok "gpu service healthy (transcription + diarization)" + else + warn "gpu service not ready yet — it will keep loading in the background" + warn "Check with: docker compose -f docker-compose.selfhosted.yml logs gpu" fi - echo -ne "\r Waiting for $model_svc service... 
($i/120)" - sleep 5 - done - echo "" - if [[ "$model_ok" == "true" ]]; then - ok "$model_svc service healthy (transcription + diarization)" - else - warn "$model_svc service not ready yet — it will keep loading in the background" - warn "Check with: docker compose -f docker-compose.selfhosted.yml logs $model_svc" + elif [[ "$MODEL_MODE" == "cpu" ]]; then + ok "CPU mode — ML processing runs in-process on server/worker (no separate service)" + elif [[ "$MODEL_MODE" == "hosted" ]]; then + ok "Hosted mode — ML processing via remote GPU service (no local health check)" fi # Ollama (if applicable) diff --git a/server/.env.example b/server/.env.example index 08b9b392..45dc5a37 100644 --- a/server/.env.example +++ b/server/.env.example @@ -89,11 +89,11 @@ LLM_CONTEXT_WINDOW=16000 ## ======================================================= ## Audio Padding ## -## backends: local (in-process PyAV), modal (HTTP API client) -## Default is "local" — no external service needed. +## backends: pyav (in-process PyAV), modal (HTTP API client) +## Default is "pyav" — no external service needed. ## Set to "modal" when using Modal.com or self-hosted gpu/self_hosted/ container. 
## ======================================================= -#PADDING_BACKEND=local +#PADDING_BACKEND=pyav #PADDING_BACKEND=modal #PADDING_URL=https://xxxxx--reflector-padding-web.modal.run #PADDING_MODAL_API_KEY=xxxxx @@ -101,8 +101,8 @@ LLM_CONTEXT_WINDOW=16000 ## ======================================================= ## Diarization ## -## Only available on modal -## To allow diarization, you need to expose expose the files to be dowloded by the pipeline +## backends: modal (HTTP API), pyannote (in-process pyannote.audio) +## To allow diarization, you need to expose the files to be downloaded by the pipeline ## ======================================================= DIARIZATION_ENABLED=false DIARIZATION_BACKEND=modal diff --git a/server/.env.selfhosted.example b/server/.env.selfhosted.example index eb96599f..3fe3c6de 100644 --- a/server/.env.selfhosted.example +++ b/server/.env.selfhosted.example @@ -32,26 +32,46 @@ AUTH_BACKEND=none # ======================================================= # Specialized Models (Transcription, Diarization, Translation) -# These run in the gpu/cpu container — NOT an LLM. -# The "modal" backend means "HTTP API client" — it talks to -# the self-hosted container, not Modal.com cloud. +# These do NOT use an LLM. 
Configured per mode by the setup script: +# +# --gpu mode: modal backends → GPU container (http://transcription:8000) +# --cpu mode: whisper/pyannote/marian/pyav → in-process ML on server/worker +# --hosted mode: modal backends → user-provided remote GPU service URL # ======================================================= + +# --- --gpu mode (default) --- TRANSCRIPT_BACKEND=modal TRANSCRIPT_URL=http://transcription:8000 TRANSCRIPT_MODAL_API_KEY=selfhosted - DIARIZATION_ENABLED=true DIARIZATION_BACKEND=modal DIARIZATION_URL=http://transcription:8000 - TRANSLATION_BACKEND=modal TRANSLATE_URL=http://transcription:8000 - PADDING_BACKEND=modal PADDING_URL=http://transcription:8000 -# HuggingFace token — optional, for gated models (e.g. pyannote). -# Falls back to public S3 model bundle if not set. +# --- --cpu mode (set by setup script) --- +# TRANSCRIPT_BACKEND=whisper +# DIARIZATION_BACKEND=pyannote +# TRANSLATION_BACKEND=marian +# PADDING_BACKEND=pyav + +# --- --hosted mode (set by setup script) --- +# TRANSCRIPT_BACKEND=modal +# TRANSCRIPT_URL=https://your-gpu-service.example.com +# DIARIZATION_BACKEND=modal +# DIARIZATION_URL=https://your-gpu-service.example.com +# ... (all URLs point to one remote service) + +# Whisper model sizes for local transcription (--cpu mode) +# Options: "tiny", "base", "small", "medium", "large-v2" +# WHISPER_CHUNK_MODEL=tiny +# WHISPER_FILE_MODEL=tiny + +# HuggingFace token — for gated models (e.g. pyannote diarization). +# Required for --gpu and --cpu modes; falls back to public S3 bundle if not set. +# Not needed for --hosted mode (remote service handles its own auth). 
# HF_TOKEN=hf_xxxxx # ======================================================= diff --git a/server/Dockerfile b/server/Dockerfile index 66f40ffe..b4fc8c4a 100644 --- a/server/Dockerfile +++ b/server/Dockerfile @@ -6,7 +6,7 @@ ENV PYTHONUNBUFFERED=1 \ # builder install base dependencies WORKDIR /tmp -RUN apt-get update && apt-get install -y curl && apt-get clean +RUN apt-get update && apt-get install -y curl ffmpeg && apt-get clean ADD https://astral.sh/uv/install.sh /uv-installer.sh RUN sh /uv-installer.sh && rm /uv-installer.sh ENV PATH="/root/.local/bin/:$PATH" diff --git a/server/pyproject.toml b/server/pyproject.toml index 5c3c077b..62b8335a 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -71,9 +71,12 @@ local = [ "faster-whisper>=0.10.0", ] silero-vad = [ - "silero-vad>=5.1.2", + "silero-vad==5.1.2", "torch>=2.8.0", "torchaudio>=2.8.0", + "pyannote.audio==3.4.0", + "pytorch-lightning<2.6", + "librosa==0.10.1", ] [tool.uv] diff --git a/server/reflector/auth/__init__.py b/server/reflector/auth/__init__.py index bc45695f..1abf2fcb 100644 --- a/server/reflector/auth/__init__.py +++ b/server/reflector/auth/__init__.py @@ -14,6 +14,7 @@ current_user = auth_module.current_user current_user_optional = auth_module.current_user_optional parse_ws_bearer_token = auth_module.parse_ws_bearer_token current_user_ws_optional = auth_module.current_user_ws_optional +verify_raw_token = auth_module.verify_raw_token # Optional router (e.g. 
for /auth/login in password backend) router = getattr(auth_module, "router", None) diff --git a/server/reflector/auth/auth_jwt.py b/server/reflector/auth/auth_jwt.py index a2383825..0b255a63 100644 --- a/server/reflector/auth/auth_jwt.py +++ b/server/reflector/auth/auth_jwt.py @@ -144,3 +144,8 @@ async def current_user_ws_optional(websocket: "WebSocket") -> Optional[UserInfo] if not token: return None return await _authenticate_user(token, None, JWTAuth()) + + +async def verify_raw_token(token: str) -> Optional[UserInfo]: + """Verify a raw JWT token string (used for query-param auth fallback).""" + return await _authenticate_user(token, None, JWTAuth()) diff --git a/server/reflector/auth/auth_none.py b/server/reflector/auth/auth_none.py index 33c3b81b..b719df6b 100644 --- a/server/reflector/auth/auth_none.py +++ b/server/reflector/auth/auth_none.py @@ -27,3 +27,8 @@ def parse_ws_bearer_token(websocket): async def current_user_ws_optional(websocket): return None + + +async def verify_raw_token(token): + """Verify a raw JWT token string (used for query-param auth fallback).""" + return None diff --git a/server/reflector/auth/auth_password.py b/server/reflector/auth/auth_password.py index 12a7d9b5..4c8029d2 100644 --- a/server/reflector/auth/auth_password.py +++ b/server/reflector/auth/auth_password.py @@ -168,6 +168,11 @@ async def current_user_ws_optional(websocket: "WebSocket") -> Optional[UserInfo] return await _authenticate_user(token, None) +async def verify_raw_token(token: str) -> Optional[UserInfo]: + """Verify a raw JWT token string (used for query-param auth fallback).""" + return await _authenticate_user(token, None) + + # --- Login router --- router = APIRouter(prefix="/auth", tags=["auth"]) diff --git a/server/reflector/processors/__init__.py b/server/reflector/processors/__init__.py index ab0ad312..5f2d2496 100644 --- a/server/reflector/processors/__init__.py +++ b/server/reflector/processors/__init__.py @@ -4,6 +4,8 @@ from .audio_diarization_auto 
import AudioDiarizationAutoProcessor # noqa: F401 from .audio_downscale import AudioDownscaleProcessor # noqa: F401 from .audio_file_writer import AudioFileWriterProcessor # noqa: F401 from .audio_merge import AudioMergeProcessor # noqa: F401 +from .audio_padding import AudioPaddingProcessor # noqa: F401 +from .audio_padding_auto import AudioPaddingAutoProcessor # noqa: F401 from .audio_transcript import AudioTranscriptProcessor # noqa: F401 from .audio_transcript_auto import AudioTranscriptAutoProcessor # noqa: F401 from .base import ( # noqa: F401 diff --git a/server/reflector/processors/_audio_download.py b/server/reflector/processors/_audio_download.py new file mode 100644 index 00000000..8b466268 --- /dev/null +++ b/server/reflector/processors/_audio_download.py @@ -0,0 +1,86 @@ +""" +Shared audio download utility for local processors. + +Downloads audio from a URL to a temporary file for in-process ML inference. +""" + +import asyncio +import os +import tempfile +from pathlib import Path + +import requests + +from reflector.logger import logger + +S3_TIMEOUT = 60 + + +async def download_audio_to_temp(url: str) -> Path: + """Download audio from URL to a temporary file. + + The caller is responsible for deleting the temp file after use. + + Args: + url: Presigned URL or public URL to download audio from. + + Returns: + Path to the downloaded temporary file. 
+ """ + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, _download_blocking, url) + + +def _download_blocking(url: str) -> Path: + """Blocking download implementation.""" + log = logger.bind(url=url[:80]) + log.info("Downloading audio to temp file") + + response = requests.get(url, stream=True, timeout=S3_TIMEOUT) + response.raise_for_status() + + # Determine extension from content-type or URL + ext = _detect_extension(url, response.headers.get("content-type", "")) + + fd, tmp_path = tempfile.mkstemp(suffix=ext) + try: + total_bytes = 0 + with os.fdopen(fd, "wb") as f: + for chunk in response.iter_content(chunk_size=8192): + if chunk: + f.write(chunk) + total_bytes += len(chunk) + log.info("Audio downloaded", bytes=total_bytes, path=tmp_path) + return Path(tmp_path) + except Exception: + # Clean up on failure + try: + os.unlink(tmp_path) + except OSError: + pass + raise + + +def _detect_extension(url: str, content_type: str) -> str: + """Detect audio file extension from URL or content-type.""" + # Try URL path first + path = url.split("?")[0] # Strip query params + for ext in (".wav", ".mp3", ".mp4", ".m4a", ".webm", ".ogg", ".flac"): + if path.lower().endswith(ext): + return ext + + # Try content-type + ct_map = { + "audio/wav": ".wav", + "audio/x-wav": ".wav", + "audio/mpeg": ".mp3", + "audio/mp4": ".m4a", + "audio/webm": ".webm", + "audio/ogg": ".ogg", + "audio/flac": ".flac", + } + for ct, ext in ct_map.items(): + if ct in content_type.lower(): + return ext + + return ".audio" diff --git a/server/reflector/processors/_marian_translator_service.py b/server/reflector/processors/_marian_translator_service.py new file mode 100644 index 00000000..ca93c111 --- /dev/null +++ b/server/reflector/processors/_marian_translator_service.py @@ -0,0 +1,76 @@ +""" +MarianMT translation service. + +Singleton service that loads HuggingFace MarianMT translation models +and reuses them across all MarianMT translator processor instances. 
+ +Ported from gpu/self_hosted/app/services/translator.py for in-process use. +""" + +import logging +import threading + +from transformers import MarianMTModel, MarianTokenizer, pipeline + +logger = logging.getLogger(__name__) + + +class MarianTranslatorService: + """MarianMT text translation service for in-process use.""" + + def __init__(self): + self._pipeline = None + self._current_pair = None + self._lock = threading.Lock() + + def load(self, source_language: str = "en", target_language: str = "fr"): + """Load the translation model for a specific language pair.""" + model_name = self._resolve_model_name(source_language, target_language) + logger.info( + "Loading MarianMT model: %s (%s -> %s)", + model_name, + source_language, + target_language, + ) + tokenizer = MarianTokenizer.from_pretrained(model_name) + model = MarianMTModel.from_pretrained(model_name) + self._pipeline = pipeline("translation", model=model, tokenizer=tokenizer) + self._current_pair = (source_language.lower(), target_language.lower()) + + def _resolve_model_name(self, src: str, tgt: str) -> str: + """Resolve language pair to MarianMT model name.""" + pair = (src.lower(), tgt.lower()) + mapping = { + ("en", "fr"): "Helsinki-NLP/opus-mt-en-fr", + ("fr", "en"): "Helsinki-NLP/opus-mt-fr-en", + ("en", "es"): "Helsinki-NLP/opus-mt-en-es", + ("es", "en"): "Helsinki-NLP/opus-mt-es-en", + ("en", "de"): "Helsinki-NLP/opus-mt-en-de", + ("de", "en"): "Helsinki-NLP/opus-mt-de-en", + } + return mapping.get(pair, "Helsinki-NLP/opus-mt-en-fr") + + def translate(self, text: str, source_language: str, target_language: str) -> dict: + """Translate text between languages. + + Args: + text: Text to translate. + source_language: Source language code (e.g. "en"). + target_language: Target language code (e.g. "fr"). + + Returns: + dict with "text" key containing {source_language: original, target_language: translated}. 
+ """ + pair = (source_language.lower(), target_language.lower()) + if self._pipeline is None or self._current_pair != pair: + self.load(source_language, target_language) + with self._lock: + results = self._pipeline( + text, src_lang=source_language, tgt_lang=target_language + ) + translated = results[0]["translation_text"] if results else "" + return {"text": {source_language: text, target_language: translated}} + + +# Module-level singleton — shared across all MarianMT translator processors +translator_service = MarianTranslatorService() diff --git a/server/reflector/processors/_pyannote_diarization_service.py b/server/reflector/processors/_pyannote_diarization_service.py new file mode 100644 index 00000000..1f1f1d2d --- /dev/null +++ b/server/reflector/processors/_pyannote_diarization_service.py @@ -0,0 +1,133 @@ +""" +Pyannote diarization service using pyannote.audio. + +Singleton service that loads the pyannote speaker diarization model once +and reuses it across all pyannote diarization processor instances. + +Ported from gpu/self_hosted/app/services/diarizer.py for in-process use. 
+""" + +import logging +import tarfile +import threading +from pathlib import Path +from urllib.request import urlopen + +import torch +import torchaudio +import yaml +from pyannote.audio import Pipeline + +from reflector.settings import settings + +logger = logging.getLogger(__name__) + +S3_BUNDLE_URL = "https://reflector-public.s3.us-east-1.amazonaws.com/pyannote-speaker-diarization-3.1.tar.gz" +BUNDLE_CACHE_DIR = Path.home() / ".cache" / "pyannote-bundle" + + +def _ensure_model(cache_dir: Path) -> str: + """Download and extract S3 model bundle if not cached.""" + model_dir = cache_dir / "pyannote-speaker-diarization-3.1" + config_path = model_dir / "config.yaml" + + if config_path.exists(): + logger.info("Using cached model bundle at %s", model_dir) + return str(model_dir) + + cache_dir.mkdir(parents=True, exist_ok=True) + tarball_path = cache_dir / "model.tar.gz" + + logger.info("Downloading model bundle from %s", S3_BUNDLE_URL) + with urlopen(S3_BUNDLE_URL) as response, open(tarball_path, "wb") as f: + while chunk := response.read(8192): + f.write(chunk) + + logger.info("Extracting model bundle") + with tarfile.open(tarball_path, "r:gz") as tar: + tar.extractall(path=cache_dir, filter="data") + tarball_path.unlink() + + _patch_config(model_dir, cache_dir) + return str(model_dir) + + +def _patch_config(model_dir: Path, cache_dir: Path) -> None: + """Rewrite config.yaml to reference local pytorch_model.bin paths.""" + config_path = model_dir / "config.yaml" + with open(config_path) as f: + config = yaml.safe_load(f) + + config["pipeline"]["params"]["segmentation"] = str( + cache_dir / "pyannote-segmentation-3.0" / "pytorch_model.bin" + ) + config["pipeline"]["params"]["embedding"] = str( + cache_dir / "pyannote-wespeaker-voxceleb-resnet34-LM" / "pytorch_model.bin" + ) + + with open(config_path, "w") as f: + yaml.dump(config, f) + + logger.info("Patched config.yaml with local model paths") + + +class PyannoteDiarizationService: + """Pyannote speaker diarization 
service for in-process use.""" + + def __init__(self): + self._pipeline = None + self._device = "cpu" + self._lock = threading.Lock() + + def load(self): + self._device = "cuda" if torch.cuda.is_available() else "cpu" + hf_token = settings.HF_TOKEN + + if hf_token: + logger.info("Loading pyannote model from HuggingFace (HF_TOKEN set)") + self._pipeline = Pipeline.from_pretrained( + "pyannote/speaker-diarization-3.1", + use_auth_token=hf_token, + ) + else: + logger.info("HF_TOKEN not set — loading model from S3 bundle") + model_path = _ensure_model(BUNDLE_CACHE_DIR) + config_path = Path(model_path) / "config.yaml" + self._pipeline = Pipeline.from_pretrained(str(config_path)) + + self._pipeline.to(torch.device(self._device)) + + def diarize_file(self, file_path: str, timestamp: float = 0.0) -> dict: + """Run speaker diarization on an audio file. + + Args: + file_path: Path to the audio file. + timestamp: Offset to add to all segment timestamps. + + Returns: + dict with "diarization" key containing list of + {"start": float, "end": float, "speaker": int} segments. 
+ """ + if self._pipeline is None: + self.load() + waveform, sample_rate = torchaudio.load(file_path) + with self._lock: + diarization = self._pipeline( + {"waveform": waveform, "sample_rate": sample_rate} + ) + segments = [] + for diarization_segment, _, speaker in diarization.itertracks(yield_label=True): + segments.append( + { + "start": round(timestamp + diarization_segment.start, 3), + "end": round(timestamp + diarization_segment.end, 3), + "speaker": int(speaker[-2:]) + if speaker and speaker[-2:].isdigit() + else 0, + } + ) + return {"diarization": segments} + + +# Module-level singleton — shared across all pyannote diarization processors +diarization_service = PyannoteDiarizationService() diff --git a/server/reflector/processors/audio_diarization_pyannote.py b/server/reflector/processors/audio_diarization_pyannote.py new file mode 100644 index 00000000..0e174acb --- /dev/null +++ b/server/reflector/processors/audio_diarization_pyannote.py @@ -0,0 +1,37 @@ +""" +Pyannote audio diarization processor using pyannote.audio in-process. + +Downloads audio from URL, runs pyannote diarization locally, +and returns speaker segments. No HTTP backend needed. 
+""" + +import asyncio +import os + +from reflector.processors._audio_download import download_audio_to_temp +from reflector.processors._pyannote_diarization_service import diarization_service +from reflector.processors.audio_diarization import AudioDiarizationProcessor +from reflector.processors.audio_diarization_auto import AudioDiarizationAutoProcessor +from reflector.processors.types import AudioDiarizationInput + + +class AudioDiarizationPyannoteProcessor(AudioDiarizationProcessor): + INPUT_TYPE = AudioDiarizationInput + + async def _diarize(self, data: AudioDiarizationInput): + """Run pyannote diarization on audio from URL.""" + tmp_path = await download_audio_to_temp(data.audio_url) + try: + loop = asyncio.get_event_loop() + result = await loop.run_in_executor( + None, diarization_service.diarize_file, str(tmp_path) + ) + return result["diarization"] + finally: + try: + os.unlink(tmp_path) + except OSError: + pass + + +AudioDiarizationAutoProcessor.register("pyannote", AudioDiarizationPyannoteProcessor) diff --git a/server/reflector/processors/audio_padding.py b/server/reflector/processors/audio_padding.py new file mode 100644 index 00000000..fb95aa0f --- /dev/null +++ b/server/reflector/processors/audio_padding.py @@ -0,0 +1,23 @@ +""" +Base class for audio padding processors. 
+""" + +from pydantic import BaseModel + + +class PaddingResponse(BaseModel): + size: int + cancelled: bool = False + + +class AudioPaddingProcessor: + """Base class for audio padding processors.""" + + async def pad_track( + self, + track_url: str, + output_url: str, + start_time_seconds: float, + track_index: int, + ) -> PaddingResponse: + raise NotImplementedError diff --git a/server/reflector/processors/audio_padding_auto.py b/server/reflector/processors/audio_padding_auto.py index 2e191470..e3ebe1d4 100644 --- a/server/reflector/processors/audio_padding_auto.py +++ b/server/reflector/processors/audio_padding_auto.py @@ -1,9 +1,10 @@ import importlib +from reflector.processors.audio_padding import AudioPaddingProcessor from reflector.settings import settings -class AudioPaddingAutoProcessor: +class AudioPaddingAutoProcessor(AudioPaddingProcessor): _registry = {} @classmethod diff --git a/server/reflector/processors/audio_padding_modal.py b/server/reflector/processors/audio_padding_modal.py index 289058f1..825dc95f 100644 --- a/server/reflector/processors/audio_padding_modal.py +++ b/server/reflector/processors/audio_padding_modal.py @@ -6,19 +6,14 @@ import asyncio import os import httpx -from pydantic import BaseModel from reflector.hatchet.constants import TIMEOUT_AUDIO from reflector.logger import logger +from reflector.processors.audio_padding import AudioPaddingProcessor, PaddingResponse from reflector.processors.audio_padding_auto import AudioPaddingAutoProcessor -class PaddingResponse(BaseModel): - size: int - cancelled: bool = False - - -class AudioPaddingModalProcessor: +class AudioPaddingModalProcessor(AudioPaddingProcessor): """Audio padding processor using Modal.com CPU backend via HTTP.""" def __init__( diff --git a/server/reflector/processors/audio_padding_local.py b/server/reflector/processors/audio_padding_pyav.py similarity index 92% rename from server/reflector/processors/audio_padding_local.py rename to 
server/reflector/processors/audio_padding_pyav.py index e6646660..f97255a7 100644 --- a/server/reflector/processors/audio_padding_local.py +++ b/server/reflector/processors/audio_padding_pyav.py @@ -1,5 +1,5 @@ """ -Local audio padding processor using PyAV. +PyAV audio padding processor. Pads audio tracks with silence directly in-process (no HTTP). Reuses the shared PyAV utilities from reflector.utils.audio_padding. @@ -12,15 +12,15 @@ import tempfile import av from reflector.logger import logger +from reflector.processors.audio_padding import AudioPaddingProcessor, PaddingResponse from reflector.processors.audio_padding_auto import AudioPaddingAutoProcessor -from reflector.processors.audio_padding_modal import PaddingResponse from reflector.utils.audio_padding import apply_audio_padding_to_file S3_TIMEOUT = 60 -class AudioPaddingLocalProcessor: - """Audio padding processor using local PyAV (no HTTP backend).""" +class AudioPaddingPyavProcessor(AudioPaddingProcessor): + """Audio padding processor using PyAV (no HTTP backend).""" async def pad_track( self, @@ -29,7 +29,7 @@ class AudioPaddingLocalProcessor: start_time_seconds: float, track_index: int, ) -> PaddingResponse: - """Pad audio track with silence locally via PyAV. + """Pad audio track with silence via PyAV. 
Args: track_url: Presigned GET URL for source audio track @@ -130,4 +130,4 @@ class AudioPaddingLocalProcessor: log.warning("Failed to cleanup temp directory", error=str(e)) -AudioPaddingAutoProcessor.register("local", AudioPaddingLocalProcessor) +AudioPaddingAutoProcessor.register("pyav", AudioPaddingPyavProcessor) diff --git a/server/reflector/processors/audio_transcript_whisper.py b/server/reflector/processors/audio_transcript_whisper.py index 73278577..c30ef3cc 100644 --- a/server/reflector/processors/audio_transcript_whisper.py +++ b/server/reflector/processors/audio_transcript_whisper.py @@ -3,13 +3,17 @@ from faster_whisper import WhisperModel from reflector.processors.audio_transcript import AudioTranscriptProcessor from reflector.processors.audio_transcript_auto import AudioTranscriptAutoProcessor from reflector.processors.types import AudioFile, Transcript, Word +from reflector.settings import settings class AudioTranscriptWhisperProcessor(AudioTranscriptProcessor): def __init__(self): super().__init__() self.model = WhisperModel( - "tiny", device="cpu", compute_type="float32", num_workers=12 + settings.WHISPER_CHUNK_MODEL, + device="cpu", + compute_type="float32", + num_workers=12, ) async def _transcript(self, data: AudioFile): diff --git a/server/reflector/processors/file_diarization_pyannote.py b/server/reflector/processors/file_diarization_pyannote.py new file mode 100644 index 00000000..bc595f2f --- /dev/null +++ b/server/reflector/processors/file_diarization_pyannote.py @@ -0,0 +1,39 @@ +""" +Pyannote file diarization processor using pyannote.audio in-process. + +Downloads audio from URL, runs pyannote diarization locally, +and returns speaker segments. No HTTP backend needed. 
+""" + +import asyncio +import os + +from reflector.processors._audio_download import download_audio_to_temp +from reflector.processors._pyannote_diarization_service import diarization_service +from reflector.processors.file_diarization import ( + FileDiarizationInput, + FileDiarizationOutput, + FileDiarizationProcessor, +) +from reflector.processors.file_diarization_auto import FileDiarizationAutoProcessor + + +class FileDiarizationPyannoteProcessor(FileDiarizationProcessor): + async def _diarize(self, data: FileDiarizationInput): + """Run pyannote diarization on file from URL.""" + self.logger.info(f"Starting pyannote diarization from {data.audio_url}") + tmp_path = await download_audio_to_temp(data.audio_url) + try: + loop = asyncio.get_event_loop() + result = await loop.run_in_executor( + None, diarization_service.diarize_file, str(tmp_path) + ) + return FileDiarizationOutput(diarization=result["diarization"]) + finally: + try: + os.unlink(tmp_path) + except OSError: + pass + + +FileDiarizationAutoProcessor.register("pyannote", FileDiarizationPyannoteProcessor) diff --git a/server/reflector/processors/file_transcript_whisper.py b/server/reflector/processors/file_transcript_whisper.py new file mode 100644 index 00000000..a919b42f --- /dev/null +++ b/server/reflector/processors/file_transcript_whisper.py @@ -0,0 +1,275 @@ +""" +Local file transcription processor using faster-whisper with Silero VAD pipeline. + +Downloads audio from URL, segments it using Silero VAD, transcribes each +segment with faster-whisper, and merges results. No HTTP backend needed. + +VAD pipeline ported from gpu/self_hosted/app/services/transcriber.py. 
+""" + +import asyncio +import os +import shutil +import subprocess +import threading +from typing import Generator + +import numpy as np +from silero_vad import VADIterator, load_silero_vad + +from reflector.processors._audio_download import download_audio_to_temp +from reflector.processors.file_transcript import ( + FileTranscriptInput, + FileTranscriptProcessor, +) +from reflector.processors.file_transcript_auto import FileTranscriptAutoProcessor +from reflector.processors.types import Transcript, Word +from reflector.settings import settings + +SAMPLE_RATE = 16000 + +VAD_CONFIG = { + "batch_max_duration": 30.0, + "silence_padding": 0.5, + "window_size": 512, +} + + +class FileTranscriptWhisperProcessor(FileTranscriptProcessor): + """Transcribe complete audio files using local faster-whisper with VAD.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._model = None + self._lock = threading.Lock() + + def _ensure_model(self): + """Lazy-load the whisper model on first use.""" + if self._model is not None: + return + + import faster_whisper + import torch + + device = "cuda" if torch.cuda.is_available() else "cpu" + compute_type = "float16" if device == "cuda" else "int8" + model_name = settings.WHISPER_FILE_MODEL + + self.logger.info( + "Loading whisper model", + model=model_name, + device=device, + compute_type=compute_type, + ) + self._model = faster_whisper.WhisperModel( + model_name, + device=device, + compute_type=compute_type, + num_workers=1, + ) + + async def _transcript(self, data: FileTranscriptInput): + """Download file, run VAD segmentation, transcribe each segment.""" + tmp_path = await download_audio_to_temp(data.audio_url) + try: + loop = asyncio.get_event_loop() + result = await loop.run_in_executor( + None, + self._transcribe_file_blocking, + str(tmp_path), + data.language, + ) + return result + finally: + try: + os.unlink(tmp_path) + except OSError: + pass + + def _transcribe_file_blocking(self, file_path: str, language: 
str) -> Transcript: + """Blocking transcription with VAD pipeline.""" + self._ensure_model() + + audio_array = _load_audio_via_ffmpeg(file_path, SAMPLE_RATE) + + # VAD segmentation → batch merging + merged_batches: list[tuple[float, float]] = [] + batch_start = None + batch_end = None + max_duration = VAD_CONFIG["batch_max_duration"] + + for seg_start, seg_end in _vad_segments(audio_array): + if batch_start is None: + batch_start, batch_end = seg_start, seg_end + continue + if seg_end - batch_start <= max_duration: + batch_end = seg_end + else: + merged_batches.append((batch_start, batch_end)) + batch_start, batch_end = seg_start, seg_end + + if batch_start is not None and batch_end is not None: + merged_batches.append((batch_start, batch_end)) + + # If no speech detected, try transcribing the whole file + if not merged_batches: + return self._transcribe_whole_file(file_path, language) + + # Transcribe each batch + all_words = [] + for start_time, end_time in merged_batches: + s_idx = int(start_time * SAMPLE_RATE) + e_idx = int(end_time * SAMPLE_RATE) + segment = audio_array[s_idx:e_idx] + segment = _pad_audio(segment, SAMPLE_RATE) + + with self._lock: + segments, _ = self._model.transcribe( + segment, + language=language, + beam_size=5, + word_timestamps=True, + vad_filter=True, + vad_parameters={"min_silence_duration_ms": 500}, + ) + segments = list(segments) + + for seg in segments: + for w in seg.words: + all_words.append( + { + "word": w.word, + "start": round(float(w.start) + start_time, 2), + "end": round(float(w.end) + start_time, 2), + } + ) + + all_words = _enforce_word_timing_constraints(all_words) + + words = [ + Word(text=w["word"], start=w["start"], end=w["end"]) for w in all_words + ] + words.sort(key=lambda w: w.start) + return Transcript(words=words) + + def _transcribe_whole_file(self, file_path: str, language: str) -> Transcript: + """Fallback: transcribe entire file without VAD segmentation.""" + with self._lock: + segments, _ = 
self._model.transcribe( + file_path, + language=language, + beam_size=5, + word_timestamps=True, + vad_filter=True, + vad_parameters={"min_silence_duration_ms": 500}, + ) + segments = list(segments) + + words = [] + for seg in segments: + for w in seg.words: + words.append( + Word( + text=w.word, + start=round(float(w.start), 2), + end=round(float(w.end), 2), + ) + ) + return Transcript(words=words) + + +# --- VAD helpers (ported from gpu/self_hosted/app/services/transcriber.py) --- +# IMPORTANT: This VAD segment logic is duplicated for deployment isolation. +# If you modify this, consider updating the GPU service copy as well: +# - gpu/self_hosted/app/services/transcriber.py +# - gpu/modal_deployments/reflector_transcriber.py +# - gpu/modal_deployments/reflector_transcriber_parakeet.py + + +def _load_audio_via_ffmpeg( + input_path: str, sample_rate: int = SAMPLE_RATE +) -> np.ndarray: + """Load audio file via ffmpeg, converting to mono float32 at target sample rate.""" + ffmpeg_bin = shutil.which("ffmpeg") or "ffmpeg" + cmd = [ + ffmpeg_bin, + "-nostdin", + "-threads", + "1", + "-i", + input_path, + "-f", + "f32le", + "-acodec", + "pcm_f32le", + "-ac", + "1", + "-ar", + str(sample_rate), + "pipe:1", + ] + proc = subprocess.run( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True + ) + return np.frombuffer(proc.stdout, dtype=np.float32) + + +def _vad_segments( + audio_array: np.ndarray, + sample_rate: int = SAMPLE_RATE, + window_size: int = VAD_CONFIG["window_size"], +) -> Generator[tuple[float, float], None, None]: + """Detect speech segments using Silero VAD.""" + vad_model = load_silero_vad(onnx=False) + iterator = VADIterator(vad_model, sampling_rate=sample_rate) + start = None + + for i in range(0, len(audio_array), window_size): + chunk = audio_array[i : i + window_size] + if len(chunk) < window_size: + chunk = np.pad(chunk, (0, window_size - len(chunk)), mode="constant") + speech = iterator(chunk) + if not speech: + continue + if "start" in 
speech: + start = speech["start"] + continue + if "end" in speech and start is not None: + end = speech["end"] + yield (start / float(SAMPLE_RATE), end / float(SAMPLE_RATE)) + start = None + + # Handle case where audio ends while speech is still active + if start is not None: + audio_duration = len(audio_array) / float(sample_rate) + yield (start / float(SAMPLE_RATE), audio_duration) + + iterator.reset_states() + + +def _pad_audio(audio_array: np.ndarray, sample_rate: int = SAMPLE_RATE) -> np.ndarray: + """Pad short audio with silence for VAD compatibility.""" + audio_duration = len(audio_array) / sample_rate + if audio_duration < VAD_CONFIG["silence_padding"]: + silence_samples = int(sample_rate * VAD_CONFIG["silence_padding"]) + silence = np.zeros(silence_samples, dtype=np.float32) + return np.concatenate([audio_array, silence]) + return audio_array + + +def _enforce_word_timing_constraints(words: list[dict]) -> list[dict]: + """Ensure no word end time exceeds the next word's start time.""" + if len(words) <= 1: + return words + enforced: list[dict] = [] + for i, word in enumerate(words): + current = dict(word) + if i < len(words) - 1: + next_start = words[i + 1]["start"] + if current["end"] > next_start: + current["end"] = next_start + enforced.append(current) + return enforced + + +FileTranscriptAutoProcessor.register("whisper", FileTranscriptWhisperProcessor) diff --git a/server/reflector/processors/transcript_translator_marian.py b/server/reflector/processors/transcript_translator_marian.py new file mode 100644 index 00000000..d7ef9648 --- /dev/null +++ b/server/reflector/processors/transcript_translator_marian.py @@ -0,0 +1,50 @@ +""" +MarianMT transcript translator processor using HuggingFace MarianMT in-process. + +Translates transcript text using HuggingFace MarianMT models +locally. No HTTP backend needed. 
+""" + +import asyncio + +from reflector.processors._marian_translator_service import translator_service +from reflector.processors.transcript_translator import TranscriptTranslatorProcessor +from reflector.processors.transcript_translator_auto import ( + TranscriptTranslatorAutoProcessor, +) +from reflector.processors.types import TranslationLanguages + + +class TranscriptTranslatorMarianProcessor(TranscriptTranslatorProcessor): + """Translate transcript text using MarianMT models.""" + + async def _translate(self, text: str) -> str | None: + source_language = self.get_pref("audio:source_language", "en") + target_language = self.get_pref("audio:target_language", "en") + + languages = TranslationLanguages() + assert languages.is_supported(target_language) + + self.logger.debug(f"MarianMT translate {text=}") + + loop = asyncio.get_event_loop() + result = await loop.run_in_executor( + None, + translator_service.translate, + text, + source_language, + target_language, + ) + + if target_language in result["text"]: + translation = result["text"][target_language] + else: + translation = None + + self.logger.debug(f"Translation result: {text=}, {translation=}") + return translation + + +TranscriptTranslatorAutoProcessor.register( + "marian", TranscriptTranslatorMarianProcessor +) diff --git a/server/reflector/settings.py b/server/reflector/settings.py index 47845f58..3a608aef 100644 --- a/server/reflector/settings.py +++ b/server/reflector/settings.py @@ -40,11 +40,19 @@ class Settings(BaseSettings): # backends: silero, frames AUDIO_CHUNKER_BACKEND: str = "frames" + # HuggingFace token for gated models (pyannote diarization in --cpu mode) + HF_TOKEN: str | None = None + # Audio Transcription # backends: # - whisper: in-process model loading (no HTTP, runs in same process) # - modal: HTTP API client (works with Modal.com OR self-hosted gpu/self_hosted/) TRANSCRIPT_BACKEND: str = "whisper" + + # Whisper model sizes for local transcription + # Options: "tiny", "base", 
"small", "medium", "large-v2" + WHISPER_CHUNK_MODEL: str = "tiny" + WHISPER_FILE_MODEL: str = "tiny" TRANSCRIPT_URL: str | None = None TRANSCRIPT_TIMEOUT: int = 90 TRANSCRIPT_FILE_TIMEOUT: int = 600 @@ -100,7 +108,7 @@ class Settings(BaseSettings): ) # Diarization - # backend: modal — HTTP API client (works with Modal.com OR self-hosted gpu/self_hosted/) + # backends: modal — HTTP API client, pyannote — in-process pyannote.audio DIARIZATION_ENABLED: bool = True DIARIZATION_BACKEND: str = "modal" DIARIZATION_URL: str | None = None @@ -111,9 +119,9 @@ class Settings(BaseSettings): # Audio Padding # backends: - # - local: in-process PyAV padding (no HTTP, runs in same process) + # - pyav: in-process PyAV padding (no HTTP, runs in same process) # - modal: HTTP API client (works with Modal.com OR self-hosted gpu/self_hosted/) - PADDING_BACKEND: str = "local" + PADDING_BACKEND: str = "pyav" PADDING_URL: str | None = None PADDING_MODAL_API_KEY: str | None = None diff --git a/server/reflector/views/transcripts_audio.py b/server/reflector/views/transcripts_audio.py index 027b6d1b..f6dc2c2c 100644 --- a/server/reflector/views/transcripts_audio.py +++ b/server/reflector/views/transcripts_audio.py @@ -13,7 +13,6 @@ from fastapi import APIRouter, Depends, HTTPException, Request, Response, status import reflector.auth as auth from reflector.db.transcripts import AudioWaveform, transcripts_controller from reflector.settings import settings -from reflector.views.transcripts import ALGORITHM from ._range_requests_response import range_requests_response @@ -36,16 +35,23 @@ async def transcript_get_audio_mp3( ): user_id = user["sub"] if user else None if not user_id and token: - unauthorized_exception = HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid or expired token", - headers={"WWW-Authenticate": "Bearer"}, - ) try: - payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[ALGORITHM]) - user_id: str = payload.get("sub") - except jwt.PyJWTError: - 
raise unauthorized_exception + token_user = await auth.verify_raw_token(token) + except Exception: + token_user = None + # Fallback: try as internal HS256 token (created by _generate_local_audio_link) + if not token_user: + try: + payload = jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"]) + user_id = payload.get("sub") + except jwt.PyJWTError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + user_id = token_user["sub"] transcript = await transcripts_controller.get_by_id_for_http( transcript_id, user_id=user_id diff --git a/server/tests/test_processors_cpu.py b/server/tests/test_processors_cpu.py new file mode 100644 index 00000000..164e4578 --- /dev/null +++ b/server/tests/test_processors_cpu.py @@ -0,0 +1,450 @@ +""" +Tests for in-process processor backends (--cpu mode). + +All ML model calls are mocked — no actual model loading needed. +Tests verify processor registration, wiring, error handling, and data flow. 
+""" + +import os +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from reflector.processors.file_diarization import ( + FileDiarizationInput, + FileDiarizationOutput, +) +from reflector.processors.types import ( + AudioDiarizationInput, + TitleSummaryWithId, + Transcript, + Word, +) + +# ── Registration Tests ────────────────────────────────────────────────── + + +def test_audio_diarization_pyannote_registers(): + """Verify AudioDiarizationPyannoteProcessor registers with 'pyannote' backend.""" + # Importing the module triggers registration + import reflector.processors.audio_diarization_pyannote # noqa: F401 + from reflector.processors.audio_diarization_auto import ( + AudioDiarizationAutoProcessor, + ) + + assert "pyannote" in AudioDiarizationAutoProcessor._registry + + +def test_file_diarization_pyannote_registers(): + """Verify FileDiarizationPyannoteProcessor registers with 'pyannote' backend.""" + import reflector.processors.file_diarization_pyannote # noqa: F401 + from reflector.processors.file_diarization_auto import FileDiarizationAutoProcessor + + assert "pyannote" in FileDiarizationAutoProcessor._registry + + +def test_transcript_translator_marian_registers(): + """Verify TranscriptTranslatorMarianProcessor registers with 'marian' backend.""" + import reflector.processors.transcript_translator_marian # noqa: F401 + from reflector.processors.transcript_translator_auto import ( + TranscriptTranslatorAutoProcessor, + ) + + assert "marian" in TranscriptTranslatorAutoProcessor._registry + + +def test_file_transcript_whisper_registers(): + """Verify FileTranscriptWhisperProcessor registers with 'whisper' backend.""" + import reflector.processors.file_transcript_whisper # noqa: F401 + from reflector.processors.file_transcript_auto import FileTranscriptAutoProcessor + + assert "whisper" in FileTranscriptAutoProcessor._registry + + +# ── Audio Download Utility Tests 
──────────────────────────────────────── + + +@pytest.mark.asyncio +async def test_download_audio_to_temp_success(): + """Verify download_audio_to_temp downloads to a temp file and returns path.""" + from reflector.processors._audio_download import download_audio_to_temp + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.headers = {"content-type": "audio/wav"} + mock_response.iter_content.return_value = [b"fake audio data"] + mock_response.raise_for_status = MagicMock() + + with patch("reflector.processors._audio_download.requests.get") as mock_get: + mock_get.return_value = mock_response + + result = await download_audio_to_temp("https://example.com/test.wav") + + assert isinstance(result, Path) + assert result.exists() + assert result.read_bytes() == b"fake audio data" + assert result.suffix == ".wav" + + # Cleanup + os.unlink(result) + + +@pytest.mark.asyncio +async def test_download_audio_to_temp_cleanup_on_error(): + """Verify a mid-write download failure propagates (exercises the cleanup path).""" + from reflector.processors._audio_download import download_audio_to_temp + + mock_response = MagicMock() + mock_response.headers = {"content-type": "audio/wav"} + mock_response.raise_for_status = MagicMock() + + def fail_iter(*args, **kwargs): + raise ConnectionError("Download interrupted") + + mock_response.iter_content = fail_iter + + with patch("reflector.processors._audio_download.requests.get") as mock_get: + mock_get.return_value = mock_response + + with pytest.raises(ConnectionError, match="Download interrupted"): + await download_audio_to_temp("https://example.com/test.wav") + + +def test_detect_extension_from_url(): + """Verify extension detection from URL path.""" + from reflector.processors._audio_download import _detect_extension + + assert _detect_extension("https://example.com/test.wav", "") == ".wav" + assert _detect_extension("https://example.com/test.mp3?signed=1", "") == ".mp3" + assert 
_detect_extension("https://example.com/test.webm", "") == ".webm" + + +def test_detect_extension_from_content_type(): + """Verify extension detection from content-type header.""" + from reflector.processors._audio_download import _detect_extension + + assert _detect_extension("https://s3.aws/uuid", "audio/mpeg") == ".mp3" + assert _detect_extension("https://s3.aws/uuid", "audio/wav") == ".wav" + assert _detect_extension("https://s3.aws/uuid", "audio/webm") == ".webm" + + +def test_detect_extension_fallback(): + """Verify fallback extension when neither URL nor content-type is recognized.""" + from reflector.processors._audio_download import _detect_extension + + assert ( + _detect_extension("https://s3.aws/uuid", "application/octet-stream") == ".audio" + ) + + +# ── Audio Diarization Pyannote Processor Tests ────────────────────────── + + +@pytest.mark.asyncio +async def test_audio_diarization_pyannote_diarize(): + """Verify pyannote audio diarization downloads, diarizes, and cleans up.""" + from reflector.processors.audio_diarization_pyannote import ( + AudioDiarizationPyannoteProcessor, + ) + + mock_diarization_result = { + "diarization": [ + {"start": 0.0, "end": 2.5, "speaker": 0}, + {"start": 2.5, "end": 5.0, "speaker": 1}, + ] + } + + # Create a temp file to simulate download + tmp = tempfile.NamedTemporaryFile(suffix=".wav", delete=False) + tmp.write(b"fake audio") + tmp.close() + tmp_path = Path(tmp.name) + + processor = AudioDiarizationPyannoteProcessor() + + with ( + patch( + "reflector.processors.audio_diarization_pyannote.download_audio_to_temp", + new_callable=AsyncMock, + return_value=tmp_path, + ), + patch( + "reflector.processors.audio_diarization_pyannote.diarization_service" + ) as mock_svc, + ): + mock_svc.diarize_file.return_value = mock_diarization_result + + data = AudioDiarizationInput( + audio_url="https://example.com/test.wav", + topics=[ + TitleSummaryWithId( + id="topic-1", + title="Test Topic", + summary="A test topic", + timestamp=0.0, 
+ duration=5.0, + transcript=Transcript( + words=[Word(text="hello", start=0.0, end=1.0)] + ), + ) + ], + ) + result = await processor._diarize(data) + + assert result == mock_diarization_result["diarization"] + mock_svc.diarize_file.assert_called_once() + + +# ── File Diarization Pyannote Processor Tests ─────────────────────────── + + +@pytest.mark.asyncio +async def test_file_diarization_pyannote_diarize(): + """Verify pyannote file diarization returns FileDiarizationOutput.""" + from reflector.processors.file_diarization_pyannote import ( + FileDiarizationPyannoteProcessor, + ) + + mock_diarization_result = { + "diarization": [ + {"start": 0.0, "end": 3.0, "speaker": 0}, + {"start": 3.0, "end": 6.0, "speaker": 1}, + ] + } + + tmp = tempfile.NamedTemporaryFile(suffix=".wav", delete=False) + tmp.write(b"fake audio") + tmp.close() + tmp_path = Path(tmp.name) + + processor = FileDiarizationPyannoteProcessor() + + with ( + patch( + "reflector.processors.file_diarization_pyannote.download_audio_to_temp", + new_callable=AsyncMock, + return_value=tmp_path, + ), + patch( + "reflector.processors.file_diarization_pyannote.diarization_service" + ) as mock_svc, + ): + mock_svc.diarize_file.return_value = mock_diarization_result + + data = FileDiarizationInput(audio_url="https://example.com/test.wav") + result = await processor._diarize(data) + + assert isinstance(result, FileDiarizationOutput) + assert len(result.diarization) == 2 + assert result.diarization[0]["start"] == 0.0 + assert result.diarization[1]["speaker"] == 1 + + +# ── Transcript Translator Marian Processor Tests ─────────────────────── + + +@pytest.mark.asyncio +async def test_transcript_translator_marian_translate(): + """Verify MarianMT translator calls service and extracts translation.""" + from reflector.processors.transcript_translator_marian import ( + TranscriptTranslatorMarianProcessor, + ) + + mock_result = {"text": {"en": "Hello world", "fr": "Bonjour le monde"}} + + processor = 
TranscriptTranslatorMarianProcessor() + + def fake_get_pref(key, default=None): + prefs = {"audio:source_language": "en", "audio:target_language": "fr"} + return prefs.get(key, default) + + with ( + patch.object(processor, "get_pref", side_effect=fake_get_pref), + patch( + "reflector.processors.transcript_translator_marian.translator_service" + ) as mock_svc, + ): + mock_svc.translate.return_value = mock_result + + result = await processor._translate("Hello world") + + assert result == "Bonjour le monde" + mock_svc.translate.assert_called_once_with("Hello world", "en", "fr") + + +@pytest.mark.asyncio +async def test_transcript_translator_marian_no_translation(): + """Verify translator returns None when target language not in result.""" + from reflector.processors.transcript_translator_marian import ( + TranscriptTranslatorMarianProcessor, + ) + + mock_result = {"text": {"en": "Hello world"}} + + processor = TranscriptTranslatorMarianProcessor() + + def fake_get_pref(key, default=None): + prefs = {"audio:source_language": "en", "audio:target_language": "fr"} + return prefs.get(key, default) + + with ( + patch.object(processor, "get_pref", side_effect=fake_get_pref), + patch( + "reflector.processors.transcript_translator_marian.translator_service" + ) as mock_svc, + ): + mock_svc.translate.return_value = mock_result + + result = await processor._translate("Hello world") + + assert result is None + + +# ── File Transcript Whisper Processor Tests ───────────────────────────── + + +@pytest.mark.asyncio +async def test_file_transcript_whisper_transcript(): + """Verify whisper file processor downloads, transcribes, and returns Transcript.""" + from reflector.processors.file_transcript import FileTranscriptInput + from reflector.processors.file_transcript_whisper import ( + FileTranscriptWhisperProcessor, + ) + + tmp = tempfile.NamedTemporaryFile(suffix=".wav", delete=False) + tmp.write(b"fake audio") + tmp.close() + tmp_path = Path(tmp.name) + + processor = 
FileTranscriptWhisperProcessor() + + # Mock the blocking transcription method + mock_transcript = Transcript( + words=[ + Word(text="Hello", start=0.0, end=0.5), + Word(text=" world", start=0.5, end=1.0), + ] + ) + + with ( + patch( + "reflector.processors.file_transcript_whisper.download_audio_to_temp", + new_callable=AsyncMock, + return_value=tmp_path, + ), + patch.object( + processor, + "_transcribe_file_blocking", + return_value=mock_transcript, + ), + ): + data = FileTranscriptInput( + audio_url="https://example.com/test.wav", language="en" + ) + result = await processor._transcript(data) + + assert isinstance(result, Transcript) + assert len(result.words) == 2 + assert result.words[0].text == "Hello" + + +# ── VAD Helper Tests ──────────────────────────────────────────────────── + + +def test_enforce_word_timing_constraints(): + """Verify word timing enforcement prevents overlapping times.""" + from reflector.processors.file_transcript_whisper import ( + _enforce_word_timing_constraints, + ) + + words = [ + {"word": "hello", "start": 0.0, "end": 1.5}, + {"word": "world", "start": 1.0, "end": 2.0}, # overlaps with previous + {"word": "test", "start": 2.0, "end": 3.0}, + ] + + result = _enforce_word_timing_constraints(words) + + assert result[0]["end"] == 1.0 # Clamped to next word's start + assert result[1]["end"] == 2.0 # Clamped to next word's start + assert result[2]["end"] == 3.0 # Last word unchanged + + +def test_enforce_word_timing_constraints_empty(): + """Verify timing enforcement handles empty and single-word lists.""" + from reflector.processors.file_transcript_whisper import ( + _enforce_word_timing_constraints, + ) + + assert _enforce_word_timing_constraints([]) == [] + assert _enforce_word_timing_constraints([{"word": "a", "start": 0, "end": 1}]) == [ + {"word": "a", "start": 0, "end": 1} + ] + + +def test_pad_audio_short(): + """Verify short audio gets padded with silence.""" + import numpy as np + + from 
reflector.processors.file_transcript_whisper import _pad_audio + + short_audio = np.zeros(100, dtype=np.float32) # Very short + result = _pad_audio(short_audio, sample_rate=16000) + + # Should be padded to at least silence_padding duration + assert len(result) > len(short_audio) + + +def test_pad_audio_long(): + """Verify long audio is not padded.""" + import numpy as np + + from reflector.processors.file_transcript_whisper import _pad_audio + + long_audio = np.zeros(32000, dtype=np.float32) # 2 seconds + result = _pad_audio(long_audio, sample_rate=16000) + + assert len(result) == len(long_audio) + + +# ── Translator Service Tests ──────────────────────────────────────────── + + +def test_translator_service_resolve_model(): + """Verify model resolution for known and unknown language pairs.""" + from reflector.processors._marian_translator_service import MarianTranslatorService + + svc = MarianTranslatorService() + + assert svc._resolve_model_name("en", "fr") == "Helsinki-NLP/opus-mt-en-fr" + assert svc._resolve_model_name("es", "en") == "Helsinki-NLP/opus-mt-es-en" + assert svc._resolve_model_name("en", "de") == "Helsinki-NLP/opus-mt-en-de" + # Unknown pair falls back to en->fr + assert svc._resolve_model_name("ja", "ko") == "Helsinki-NLP/opus-mt-en-fr" + + +# ── Diarization Service Tests ─────────────────────────────────────────── + + +def test_diarization_service_singleton(): + """Verify diarization_service is a module-level singleton.""" + from reflector.processors._pyannote_diarization_service import ( + PyannoteDiarizationService, + diarization_service, + ) + + assert isinstance(diarization_service, PyannoteDiarizationService) + assert diarization_service._pipeline is None # Not loaded until first use + + +def test_translator_service_singleton(): + """Verify translator_service is a module-level singleton.""" + from reflector.processors._marian_translator_service import ( + MarianTranslatorService, + translator_service, + ) + + assert 
isinstance(translator_service, MarianTranslatorService) + assert translator_service._pipeline is None # Not loaded until first use diff --git a/server/tests/test_transcripts_audio_token_auth.py b/server/tests/test_transcripts_audio_token_auth.py new file mode 100644 index 00000000..0cc11dd5 --- /dev/null +++ b/server/tests/test_transcripts_audio_token_auth.py @@ -0,0 +1,327 @@ +"""Tests for audio mp3 endpoint token query-param authentication. + +Covers both password (HS256) and JWT/Authentik (RS256) auth backends, +verifying that private transcripts can be accessed via ?token= query param. +""" + +import shutil +from datetime import datetime, timedelta, timezone +from pathlib import Path +from unittest.mock import patch + +import jwt +import pytest +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +OWNER_USER_ID = "test-owner-user-id" + + +def _create_hs256_token(user_id: str, secret: str, expired: bool = False) -> str: + """Create an HS256 JWT like the password auth backend does.""" + delta = timedelta(minutes=-5) if expired else timedelta(hours=24) + payload = { + "sub": user_id, + "email": "test@example.com", + "exp": datetime.now(timezone.utc) + delta, + } + return jwt.encode(payload, secret, algorithm="HS256") + + +def _generate_rsa_keypair(): + """Generate a fresh RSA keypair for tests.""" + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + public_pem = private_key.public_key().public_bytes( + serialization.Encoding.PEM, serialization.PublicFormat.SubjectPublicKeyInfo + ) + return private_key, public_pem.decode() + + +def _create_rs256_token( + authentik_uid: str, + private_key, + audience: str, + expired: bool = False, +) -> str: + """Create an RS256 JWT like Authentik would issue.""" + delta = 
timedelta(minutes=-5) if expired else timedelta(hours=1) + payload = { + "sub": authentik_uid, + "email": "authentik-user@example.com", + "aud": audience, + "exp": datetime.now(timezone.utc) + delta, + } + return jwt.encode(payload, private_key, algorithm="RS256") + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +async def private_transcript(tmpdir): + """Create a private transcript owned by OWNER_USER_ID with an mp3 file. + + Created directly via the controller (not HTTP) so no auth override + leaks into the test scope. + """ + from reflector.db.transcripts import SourceKind, transcripts_controller + from reflector.settings import settings + + settings.DATA_DIR = Path(tmpdir) + + transcript = await transcripts_controller.add( + "Private audio test", + source_kind=SourceKind.FILE, + user_id=OWNER_USER_ID, + share_mode="private", + ) + await transcripts_controller.update(transcript, {"status": "ended"}) + + # Copy a real mp3 to the expected location + audio_filename = transcript.audio_mp3_filename + mp3_source = Path(__file__).parent / "records" / "test_mathieu_hello.mp3" + audio_filename.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(mp3_source, audio_filename) + + yield transcript + + +# --------------------------------------------------------------------------- +# Core access control tests +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_audio_mp3_private_no_auth_returns_403(private_transcript, client): + """Without auth, accessing a private transcript's audio returns 403.""" + response = await client.get(f"/transcripts/{private_transcript.id}/audio/mp3") + assert response.status_code == 403 + + +@pytest.mark.asyncio +async def test_audio_mp3_with_bearer_header(private_transcript, client): + """Owner accessing audio via Authorization header 
works.""" + from reflector.app import app + from reflector.auth import current_user_optional + + # Temporarily override to simulate the owner being authenticated + app.dependency_overrides[current_user_optional] = lambda: { + "sub": OWNER_USER_ID, + "email": "test@example.com", + } + try: + response = await client.get(f"/transcripts/{private_transcript.id}/audio/mp3") + finally: + del app.dependency_overrides[current_user_optional] + + assert response.status_code == 200 + assert response.headers["content-type"] == "audio/mpeg" + + +@pytest.mark.asyncio +async def test_audio_mp3_public_transcript_no_auth_ok(tmpdir, client): + """Public transcripts are accessible without any auth.""" + from reflector.db.transcripts import SourceKind, transcripts_controller + from reflector.settings import settings + + settings.DATA_DIR = Path(tmpdir) + + transcript = await transcripts_controller.add( + "Public audio test", + source_kind=SourceKind.FILE, + user_id=OWNER_USER_ID, + share_mode="public", + ) + await transcripts_controller.update(transcript, {"status": "ended"}) + + audio_filename = transcript.audio_mp3_filename + mp3_source = Path(__file__).parent / "records" / "test_mathieu_hello.mp3" + audio_filename.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(mp3_source, audio_filename) + + response = await client.get(f"/transcripts/{transcript.id}/audio/mp3") + assert response.status_code == 200 + assert response.headers["content-type"] == "audio/mpeg" + + +# --------------------------------------------------------------------------- +# Password auth backend tests (?token= with HS256) +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_audio_mp3_password_token_query_param(private_transcript, client): + """Password backend: valid HS256 ?token= grants access to private audio.""" + from reflector.auth.auth_password import UserInfo + from reflector.settings import settings + + token = 
_create_hs256_token(OWNER_USER_ID, settings.SECRET_KEY) + + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.return_value = UserInfo(sub=OWNER_USER_ID, email="test@example.com") + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3?token={token}" + ) + + assert response.status_code == 200 + assert response.headers["content-type"] == "audio/mpeg" + + +@pytest.mark.asyncio +async def test_audio_mp3_password_expired_token_returns_401(private_transcript, client): + """Password backend: expired HS256 ?token= returns 401.""" + from reflector.settings import settings + + expired_token = _create_hs256_token( + OWNER_USER_ID, settings.SECRET_KEY, expired=True + ) + + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.side_effect = jwt.ExpiredSignatureError("token expired") + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3" f"?token={expired_token}" + ) + + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_audio_mp3_password_wrong_user_returns_403(private_transcript, client): + """Password backend: valid token for a different user returns 403.""" + from reflector.auth.auth_password import UserInfo + from reflector.settings import settings + + token = _create_hs256_token("other-user-id", settings.SECRET_KEY) + + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.return_value = UserInfo( + sub="other-user-id", email="other@example.com" + ) + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3?token={token}" + ) + + assert response.status_code == 403 + + +@pytest.mark.asyncio +async def test_audio_mp3_invalid_token_returns_401(private_transcript, client): + """Garbage token string returns 401.""" + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.return_value = None + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3" 
"?token=not-a-real-token" + ) + + assert response.status_code == 401 + + +# --------------------------------------------------------------------------- +# JWT/Authentik auth backend tests (?token= with RS256) +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_audio_mp3_authentik_token_query_param(private_transcript, client): + """Authentik backend: valid RS256 ?token= grants access to private audio.""" + from reflector.auth.auth_password import UserInfo + + private_key, _ = _generate_rsa_keypair() + token = _create_rs256_token("authentik-abc123", private_key, "test-audience") + + with patch("reflector.auth.verify_raw_token") as mock_verify: + # Authentik flow maps authentik_uid -> internal user id + mock_verify.return_value = UserInfo( + sub=OWNER_USER_ID, email="authentik-user@example.com" + ) + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3?token={token}" + ) + + assert response.status_code == 200 + assert response.headers["content-type"] == "audio/mpeg" + + +@pytest.mark.asyncio +async def test_audio_mp3_authentik_expired_token_returns_401( + private_transcript, client +): + """Authentik backend: expired RS256 ?token= returns 401.""" + private_key, _ = _generate_rsa_keypair() + expired_token = _create_rs256_token( + "authentik-abc123", private_key, "test-audience", expired=True + ) + + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.side_effect = jwt.ExpiredSignatureError("token expired") + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3" f"?token={expired_token}" + ) + + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_audio_mp3_authentik_wrong_user_returns_403(private_transcript, client): + """Authentik backend: valid RS256 token for different user returns 403.""" + from reflector.auth.auth_password import UserInfo + + private_key, _ = _generate_rsa_keypair() + token = 
_create_rs256_token("authentik-other", private_key, "test-audience") + + with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.return_value = UserInfo( + sub="different-user-id", email="other@example.com" + ) + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3?token={token}" + ) + + assert response.status_code == 403 + + +# --------------------------------------------------------------------------- +# _generate_local_audio_link produces HS256 tokens — must be verifiable +# by any auth backend +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_local_audio_link_token_works_with_authentik_backend( + private_transcript, client +): + """_generate_local_audio_link creates an HS256 token via create_access_token. + + When the Authentik (RS256) auth backend is active, verify_raw_token uses + JWTAuth which expects RS256 + public key, so it rejects the HS256 token + created by _generate_local_audio_link. + + The endpoint then falls back to decoding the token as an internal HS256 + JWT (settings.SECRET_KEY), so the internal audio URL generated for the + diarization pipeline remains usable under the JWT auth backend. + """ + from urllib.parse import parse_qs, urlparse + + # Generate the internal audio link (uses create_access_token → HS256) + url = private_transcript._generate_local_audio_link() + parsed = urlparse(url) + token = parse_qs(parsed.query)["token"][0] + + # Simulate what happens when the JWT/Authentik backend tries to verify + # this HS256 token: JWTAuth.verify_token expects RS256, so it raises.
+ with patch("reflector.auth.verify_raw_token") as mock_verify: + mock_verify.side_effect = jwt.exceptions.InvalidAlgorithmError( + "the specified alg value is not allowed" + ) + response = await client.get( + f"/transcripts/{private_transcript.id}/audio/mp3?token={token}" + ) + + # The Authentik backend rejects the HS256 token, but the endpoint's + # internal HS256 fallback (SECRET_KEY) accepts it, so access succeeds. + assert response.status_code == 200 diff --git a/server/uv.lock b/server/uv.lock index ae785ed3..12192a95 100644 --- a/server/uv.lock +++ b/server/uv.lock @@ -244,6 +244,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "antlr4-python3-runtime" +version = "4.9.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034, upload-time = "2021-11-06T17:52:23.524Z" } + [[package]] name = "anyio" version = "4.9.0" @@ -270,6 +276,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/f5/c36551e93acba41a59939ae6a0fb77ddb3f2e8e8caa716410c65f7341f72/asgi_lifespan-2.1.0-py3-none-any.whl", hash = "sha256:ed840706680e28428c01e14afb3875d7d76d3206f3d5b2f2294e059b5c23804f", size = 10895, upload-time = "2023-03-28T17:35:47.772Z" }, ] +[[package]] +name = "asteroid-filterbanks" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = 
"platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/fa/5c2be1f96dc179f83cdd3bb267edbd1f47d08f756785c016d5c2163901a7/asteroid-filterbanks-0.4.0.tar.gz", hash = "sha256:415f89d1dcf2b13b35f03f7a9370968ac4e6fa6800633c522dac992b283409b9", size = 24599, upload-time = "2021-04-09T20:03:07.456Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/7c/83ff6046176a675e6a1e8aeefed8892cd97fe7c46af93cc540d1b24b8323/asteroid_filterbanks-0.4.0-py3-none-any.whl", hash = "sha256:4932ac8b6acc6e08fb87cbe8ece84215b5a74eee284fe83acf3540a72a02eaf5", size = 29912, upload-time = "2021-04-09T20:03:05.817Z" }, +] + [[package]] name = "async-timeout" version = "5.0.1" @@ -312,6 +333,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] +[[package]] +name = "audioread" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/4a/874ecf9b472f998130c2b5e145dcdb9f6131e84786111489103b66772143/audioread-3.1.0.tar.gz", hash = "sha256:1c4ab2f2972764c896a8ac61ac53e261c8d29f0c6ccd652f84e18f08a4cab190", size = 20082, upload-time = "2025-10-26T19:44:13.484Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/16/fbe8e1e185a45042f7cd3a282def5bb8d95bb69ab9e9ef6a5368aa17e426/audioread-3.1.0-py3-none-any.whl", hash = "sha256:b30d1df6c5d3de5dcef0fb0e256f6ea17bdcf5f979408df0297d8a408e2971b4", size = 23143, upload-time = "2025-10-26T19:44:12.016Z" }, +] + 
[[package]] name = "av" version = "16.1.0" @@ -645,6 +675,56 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, ] +[[package]] +name = "colorlog" +version = "6.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/61/f083b5ac52e505dfc1c624eafbf8c7589a0d7f32daa398d2e7590efa5fda/colorlog-6.10.1.tar.gz", hash = "sha256:eb4ae5cb65fe7fec7773c2306061a8e63e02efc2c72eba9d27b0fa23c94f1321", size = 17162, upload-time = "2025-10-16T16:14:11.978Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743, upload-time = "2025-10-16T16:14:10.512Z" }, +] + +[[package]] +name = "contourpy" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, + { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, + { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, + { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, + { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, +] + [[package]] name = "coverage" version = "7.9.2" @@ -749,6 +829,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/4c/0ecd260233290bee4b2facec4d8e755e57d8781d68f276e1248433993c9f/ctranslate2-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:511cdf810a5bf6a2cec735799e5cd47966e63f8f7688fdee1b97fed621abda00", size = 19470040, upload-time = "2025-04-08T19:49:55.274Z" }, ] +[[package]] +name = "cycler" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, +] + [[package]] name = "databases" version = "0.8.0" @@ -799,6 +888,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/d5/98748d9860e767a1248b5e31ffa7ce8cb7006e97bf8abbf3d891d0a8ba4e/debugpy-1.8.15-py2.py3-none-any.whl", hash = 
"sha256:bce2e6c5ff4f2e00b98d45e7e01a49c7b489ff6df5f12d881c67d2f1ac635f3d", size = 5282697, upload-time = "2025-07-15T16:44:07.996Z" }, ] +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + [[package]] name = "defusedxml" version = "0.7.1" @@ -861,6 +959,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] +[[package]] +name = "docopt" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901, upload-time = "2014-06-16T11:18:57.406Z" } + +[[package]] +name = "einops" +version = "0.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/77/850bef8d72ffb9219f0b1aac23fbc1bf7d038ee6ea666f331fa273031aa2/einops-0.8.2.tar.gz", hash = "sha256:609da665570e5e265e27283aab09e7f279ade90c4f01bcfca111f3d3e13f2827", size = 56261, upload-time = "2026-01-26T04:13:17.638Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/2a/09/f8d8f8f31e4483c10a906437b4ce31bdf3d6d417b73fe33f1a8b59e34228/einops-0.8.2-py3-none-any.whl", hash = "sha256:54058201ac7087911181bfec4af6091bb59380360f069276601256a76af08193", size = 65638, upload-time = "2026-01-26T04:13:18.546Z" }, +] + [[package]] name = "email-validator" version = "2.2.0" @@ -998,6 +1111,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, ] +[[package]] +name = "fonttools" +version = "4.61.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" }, + { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" }, + { url = "https://files.pythonhosted.org/packages/79/61/1ca198af22f7dd22c17ab86e9024ed3c06299cfdb08170640e9996d501a0/fonttools-4.61.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75c1a6dfac6abd407634420c93864a1e274ebc1c7531346d9254c0d8f6ca00f9", size = 5036039, upload-time = "2025-12-12T17:29:53.659Z" }, + { url = "https://files.pythonhosted.org/packages/99/cc/fa1801e408586b5fce4da9f5455af8d770f4fc57391cd5da7256bb364d38/fonttools-4.61.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0de30bfe7745c0d1ffa2b0b7048fb7123ad0d71107e10ee090fa0b16b9452e87", size = 5034714, upload-time = "2025-12-12T17:29:55.592Z" }, + { url = "https://files.pythonhosted.org/packages/bf/aa/b7aeafe65adb1b0a925f8f25725e09f078c635bc22754f3fecb7456955b0/fonttools-4.61.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58b0ee0ab5b1fc9921eccfe11d1435added19d6494dde14e323f25ad2bc30c56", size = 5158648, upload-time = "2025-12-12T17:29:57.861Z" }, + { url = "https://files.pythonhosted.org/packages/99/f9/08ea7a38663328881384c6e7777bbefc46fd7d282adfd87a7d2b84ec9d50/fonttools-4.61.1-cp311-cp311-win32.whl", hash = "sha256:f79b168428351d11e10c5aeb61a74e1851ec221081299f4cf56036a95431c43a", size = 2280681, upload-time = "2025-12-12T17:29:59.943Z" }, + { url = "https://files.pythonhosted.org/packages/07/ad/37dd1ae5fa6e01612a1fbb954f0927681f282925a86e86198ccd7b15d515/fonttools-4.61.1-cp311-cp311-win_amd64.whl", hash = "sha256:fe2efccb324948a11dd09d22136fe2ac8a97d6c1347cf0b58a911dcd529f66b7", size = 2331951, upload-time = "2025-12-12T17:30:02.254Z" }, + { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time 
= "2025-12-12T17:30:04.225Z" }, + { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" }, + { url = "https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = 
"2025-12-12T17:30:18.058Z" }, + { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, +] + [[package]] name = "frozenlist" version = "1.7.0" @@ -1050,6 +1188,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, ] +[package.optional-dependencies] +http = [ + { name = "aiohttp" }, +] + [[package]] name = "google-crc32c" version = "1.7.1" @@ -1315,6 +1458,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] +[[package]] +name = "hyperpyyaml" +version = "1.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "ruamel-yaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/cf/000268ec653f354d464e6101505672365e8cecdcb216034a4bb0dbf9dca0/hyperpyyaml-1.2.3.tar.gz", hash = "sha256:4b135800ae13b846ae90aeb1c25e65e4d7a0138bd4935d3222734828d9f218f6", size = 17355, upload-time = "2026-01-01T20:01:49.427Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7d/a5/98e20cc4365e293dfc1382e2f8dbf44b1cb14d75658ab5147cb0d6d8718e/hyperpyyaml-1.2.3-py3-none-any.whl", hash = "sha256:0088c8ce97dc7c7d3460112b6ccc92dca46fade4d0320bc39f58b1fdda60f7f1", size = 16456, upload-time = "2026-01-01T20:01:48.462Z" }, +] + [[package]] name = "icalendar" version = "6.3.1" @@ -1457,6 +1613,55 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, ] +[[package]] +name = "julius" +version = "0.2.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/19/c9e1596b5572c786b93428d0904280e964c930fae7e6c9368ed9e1b63922/julius-0.2.7.tar.gz", hash = "sha256:3c0f5f5306d7d6016fcc95196b274cae6f07e2c9596eed314e4e7641554fbb08", size = 59640, upload-time = "2022-09-19T16:13:34.2Z" } + +[[package]] +name = "kiwisolver" +version = "1.4.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, + { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" }, + { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" }, + { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" }, + { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, + { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, + { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, + { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, + { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, + { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, + { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, +] + [[package]] name = "kombu" version = "5.5.4" @@ -1472,6 +1677,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" }, ] +[[package]] +name = "lazy-loader" +version = "0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1", size = 15431, upload-time = "2024-04-05T13:03:12.261Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/60/d497a310bde3f01cb805196ac61b7ad6dc5dcf8dce66634dc34364b20b4f/lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc", size = 12097, upload-time = "2024-04-05T13:03:10.514Z" }, +] + [[package]] name = "levenshtein" version = "0.27.1" @@ -1519,6 +1736,64 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/1e/408fd10217eac0e43aea0604be22b4851a09e03d761d44d4ea12089dd70e/levenshtein-0.27.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7987ef006a3cf56a4532bd4c90c2d3b7b4ca9ad3bf8ae1ee5713c4a3bdfda913", size = 98045, upload-time = "2025-03-02T19:44:44.527Z" }, ] +[[package]] +name = "librosa" +version = "0.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioread" }, + { name = "decorator" }, + { 
name = "joblib" }, + { name = "lazy-loader" }, + { name = "msgpack" }, + { name = "numba" }, + { name = "numpy" }, + { name = "pooch" }, + { name = "scikit-learn" }, + { name = "scipy" }, + { name = "soundfile" }, + { name = "soxr" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9a/c4/22a644b91098223d653993388daaf9af28175f2f39073269efa6f7c71caf/librosa-0.10.1.tar.gz", hash = "sha256:832f7d150d6dd08ed2aa08c0567a4be58330635c32ddd2208de9bc91300802c7", size = 311110, upload-time = "2023-08-16T13:52:20.7Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/a2/4f639c1168d7aada749a896afb4892a831e2041bebdcf636aebfe9e86556/librosa-0.10.1-py3-none-any.whl", hash = "sha256:7ab91d9f5fcb75ea14848a05d3b1f825cf8d0c42ca160d19ae6874f2de2d8223", size = 253710, upload-time = "2023-08-16T13:52:19.141Z" }, +] + +[[package]] +name = "lightning" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fsspec", extra = ["http"] }, + { name = "lightning-utilities" }, + { name = "packaging" }, + { name = "pytorch-lightning" }, + { name = "pyyaml" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torchmetrics" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/ad/a1c91a795521be252209d45fb080f28a4f1e7244d3b37121fcc6e3e43034/lightning-2.6.1.tar.gz", hash = "sha256:859104b98c61add6fe60d0c623abf749baf25f2950a66ebdfb4bd18aa7decba9", size = 663175, upload-time = "2026-01-30T14:59:13.92Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/4a/6d/42640e15a8c34b57dc7ea922152440c0c6692214a08d5282b6e3eb46ddf4/lightning-2.6.1-py3-none-any.whl", hash = "sha256:30e1adac23004c713663928541bd72ecb1371b7abc9aff9f46b7fd2644988d30", size = 853631, upload-time = "2026-01-30T14:59:11.687Z" }, +] + +[[package]] +name = "lightning-utilities" +version = "0.15.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/45/7fa8f56b17dc0f0a41ec70dd307ecd6787254483549843bef4c30ab5adce/lightning_utilities-0.15.3.tar.gz", hash = "sha256:792ae0204c79f6859721ac7f386c237a33b0ed06ba775009cb894e010a842033", size = 33553, upload-time = "2026-02-22T14:48:53.348Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/f4/ead6e0e37209b07c9baa3e984ccdb0348ca370b77cea3aaea8ddbb097e00/lightning_utilities-0.15.3-py3-none-any.whl", hash = "sha256:6c55f1bee70084a1cbeaa41ada96e4b3a0fea5909e844dd335bd80f5a73c5f91", size = 31906, upload-time = "2026-02-22T14:48:52.488Z" }, +] + [[package]] name = "llama-cloud" version = "0.1.35" @@ -1744,6 +2019,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/24/8497595be04a8a0209536e9ce70d4132f8f8e001986f4c700414b3777758/llama_parse-0.6.43-py3-none-any.whl", hash = "sha256:fe435309638c4fdec4fec31f97c5031b743c92268962d03b99bd76704f566c32", size = 4944, upload-time = "2025-07-08T18:20:57.089Z" }, ] +[[package]] +name = "llvmlite" +version = "0.46.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/cd/08ae687ba099c7e3d21fe2ea536500563ef1943c5105bf6ab4ee3829f68e/llvmlite-0.46.0.tar.gz", hash = "sha256:227c9fd6d09dce2783c18b754b7cd9d9b3b3515210c46acc2d3c5badd9870ceb", size = 193456, upload-time = "2025-12-08T18:15:36.295Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7a/a1/2ad4b2367915faeebe8447f0a057861f646dbf5fbbb3561db42c65659cf3/llvmlite-0.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82f3d39b16f19aa1a56d5fe625883a6ab600d5cc9ea8906cca70ce94cabba067", size = 37232766, upload-time = "2025-12-08T18:14:48.836Z" }, + { url = "https://files.pythonhosted.org/packages/12/b5/99cf8772fdd846c07da4fd70f07812a3c8fd17ea2409522c946bb0f2b277/llvmlite-0.46.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a3df43900119803bbc52720e758c76f316a9a0f34612a886862dfe0a5591a17e", size = 56275175, upload-time = "2025-12-08T18:14:51.604Z" }, + { url = "https://files.pythonhosted.org/packages/38/f2/ed806f9c003563732da156139c45d970ee435bd0bfa5ed8de87ba972b452/llvmlite-0.46.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de183fefc8022d21b0aa37fc3e90410bc3524aed8617f0ff76732fc6c3af5361", size = 55128630, upload-time = "2025-12-08T18:14:55.107Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/8f5a37a65fc9b7b17408508145edd5f86263ad69c19d3574e818f533a0eb/llvmlite-0.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8b10bc585c58bdffec9e0c309bb7d51be1f2f15e169a4b4d42f2389e431eb93", size = 38138652, upload-time = "2025-12-08T18:14:58.171Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f8/4db016a5e547d4e054ff2f3b99203d63a497465f81ab78ec8eb2ff7b2304/llvmlite-0.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b9588ad4c63b4f0175a3984b85494f0c927c6b001e3a246a3a7fb3920d9a137", size = 37232767, upload-time = "2025-12-08T18:15:00.737Z" }, + { url = "https://files.pythonhosted.org/packages/aa/85/4890a7c14b4fa54400945cb52ac3cd88545bbdb973c440f98ca41591cdc5/llvmlite-0.46.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3535bd2bb6a2d7ae4012681ac228e5132cdb75fefb1bcb24e33f2f3e0c865ed4", size = 56275176, upload-time = "2025-12-08T18:15:03.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/07/3d31d39c1a1a08cd5337e78299fca77e6aebc07c059fbd0033e3edfab45c/llvmlite-0.46.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cbfd366e60ff87ea6cc62f50bc4cd800ebb13ed4c149466f50cf2163a473d1e", size = 55128630, upload-time = "2025-12-08T18:15:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6b/d139535d7590a1bba1ceb68751bef22fadaa5b815bbdf0e858e3875726b2/llvmlite-0.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:398b39db462c39563a97b912d4f2866cd37cba60537975a09679b28fbbc0fb38", size = 38138940, upload-time = "2025-12-08T18:15:10.162Z" }, +] + [[package]] name = "mako" version = "1.3.10" @@ -1808,6 +2099,42 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/be/2f/5108cb3ee4ba6501748c4908b908e55f42a5b66245b4cfe0c99326e1ef6e/marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73", size = 50964, upload-time = "2025-12-22T06:53:51.801Z" }, ] +[[package]] +name = "matplotlib" +version = "3.10.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "contourpy" }, + { name = "cycler" }, + { name = "fonttools" }, + { name = "kiwisolver" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pillow" }, + { name = "pyparsing" }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, + { 
url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, + { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, + { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, + { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -1826,6 +2153,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 
173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = 
"2025-10-08T09:14:56.328Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" }, + { url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, +] + [[package]] name = "multidict" version = "6.6.3" @@ -1913,6 +2266,26 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c2/7e/9af5a710a1236e4772de8dfcc6af942a561327bb9f42b5b4a24d0cf100fd/nltk-3.9.3-py3-none-any.whl", hash = "sha256:60b3db6e9995b3dd976b1f0fa7dec22069b2677e759c28eb69b62ddd44870522", size = 1525385, upload-time = "2026-02-24T12:05:46.54Z" }, ] +[[package]] +name = "numba" +version = "0.64.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "llvmlite" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/c9/a0fb41787d01d621046138da30f6c2100d80857bf34b3390dd68040f27a3/numba-0.64.0.tar.gz", hash = "sha256:95e7300af648baa3308127b1955b52ce6d11889d16e8cfe637b4f85d2fca52b1", size = 2765679, upload-time = "2026-02-18T18:41:20.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/a3/1a4286a1c16136c8896d8e2090d950e79b3ec626d3a8dc9620f6234d5a38/numba-0.64.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:766156ee4b8afeeb2b2e23c81307c5d19031f18d5ce76ae2c5fb1429e72fa92b", size = 2682938, upload-time = "2026-02-18T18:40:52.897Z" }, + { url = "https://files.pythonhosted.org/packages/19/16/aa6e3ba3cd45435c117d1101b278b646444ed05b7c712af631b91353f573/numba-0.64.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d17071b4ffc9d39b75d8e6c101a36f0c81b646123859898c9799cb31807c8f78", size = 3747376, upload-time = "2026-02-18T18:40:54.925Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f1/dd2f25e18d75fdf897f730b78c5a7b00cc4450f2405564dbebfaf359f21f/numba-0.64.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ead5630434133bac87fa67526eacb264535e4e9a2d5ec780e0b4fc381a7d275", size = 3453292, upload-time = "2026-02-18T18:40:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/31/29/e09d5630578a50a2b3fa154990b6b839cf95327aa0709e2d50d0b6816cd1/numba-0.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2b1fd93e7aaac07d6fbaed059c00679f591f2423885c206d8c1b55d65ca3f2d", size = 2749824, upload-time = 
"2026-02-18T18:40:58.392Z" }, + { url = "https://files.pythonhosted.org/packages/70/a6/9fc52cb4f0d5e6d8b5f4d81615bc01012e3cf24e1052a60f17a68deb8092/numba-0.64.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69440a8e8bc1a81028446f06b363e28635aa67bd51b1e498023f03b812e0ce68", size = 2683418, upload-time = "2026-02-18T18:40:59.886Z" }, + { url = "https://files.pythonhosted.org/packages/9b/89/1a74ea99b180b7a5587b0301ed1b183a2937c4b4b67f7994689b5d36fc34/numba-0.64.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13721011f693ba558b8dd4e4db7f2640462bba1b855bdc804be45bbeb55031a", size = 3804087, upload-time = "2026-02-18T18:41:01.699Z" }, + { url = "https://files.pythonhosted.org/packages/91/e1/583c647404b15f807410510fec1eb9b80cb8474165940b7749f026f21cbc/numba-0.64.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0b180b1133f2b5d8b3f09d96b6d7a9e51a7da5dda3c09e998b5bcfac85d222c", size = 3504309, upload-time = "2026-02-18T18:41:03.252Z" }, + { url = "https://files.pythonhosted.org/packages/85/23/0fce5789b8a5035e7ace21216a468143f3144e02013252116616c58339aa/numba-0.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:e63dc94023b47894849b8b106db28ccb98b49d5498b98878fac1a38f83ac007a", size = 2752740, upload-time = "2026-02-18T18:41:05.097Z" }, +] + [[package]] name = "numpy" version = "2.3.1" @@ -1949,6 +2322,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/6b/1c6b515a83d5564b1698a61efa245727c8feecf308f4091f565988519d20/numpy-2.3.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e610832418a2bc09d974cc9fecebfa51e9532d6190223bc5ef6a7402ebf3b5cb", size = 12927246, upload-time = "2025-06-21T12:27:38.618Z" }, ] +[[package]] +name = "omegaconf" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "antlr4-python3-runtime" }, + { name = "pyyaml" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120, upload-time = "2022-12-08T20:59:22.753Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500, upload-time = "2022-12-08T20:59:19.686Z" }, +] + [[package]] name = "onnxruntime" version = "1.22.1" @@ -1991,6 +2377,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8a/91/1f1cf577f745e956b276a8b1d3d76fa7a6ee0c2b05db3b001b900f2c71db/openai-1.97.0-py3-none-any.whl", hash = "sha256:a1c24d96f4609f3f7f51c9e1c2606d97cc6e334833438659cfd687e9c972c610", size = 764953, upload-time = "2025-07-16T16:37:33.135Z" }, ] +[[package]] +name = "optuna" +version = "4.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alembic" }, + { name = "colorlog" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "sqlalchemy" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/b2/b5e12de7b4486556fe2257611b55dbabf30d0300bdb031831aa943ad20e4/optuna-4.7.0.tar.gz", hash = "sha256:d91817e2079825557bd2e97de2e8c9ae260bfc99b32712502aef8a5095b2d2c0", size = 479740, upload-time = "2026-01-19T05:45:52.604Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/d1/6c8a4fbb38a9e3565f5c36b871262a85ecab3da48120af036b1e4937a15c/optuna-4.7.0-py3-none-any.whl", hash = "sha256:e41ec84018cecc10eabf28143573b1f0bde0ba56dba8151631a590ecbebc1186", size = 413894, upload-time = "2026-01-19T05:45:50.815Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -2092,6 +2496,29 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "pooch" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "platformdirs" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/43/85ef45e8b36c6a48546af7b266592dc32d7f67837a6514d111bced6d7d75/pooch-1.9.0.tar.gz", hash = "sha256:de46729579b9857ffd3e741987a2f6d5e0e03219892c167c6578c0091fb511ed", size = 61788, upload-time = "2026-01-30T19:15:09.649Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/2d/d4bf65e47cea8ff2c794a600c4fd1273a7902f268757c531e0ee9f18aa58/pooch-1.9.0-py3-none-any.whl", hash = "sha256:f265597baa9f760d25ceb29d0beb8186c243d6607b0f60b83ecf14078dbc703b", size = 67175, upload-time = "2026-01-30T19:15:08.36Z" }, +] + +[[package]] +name = "primepy" +version = "1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/77/0cfa1b4697cfb5336f3a96e8bc73327f64610be3a64c97275f1801afb395/primePy-1.3.tar.gz", hash = "sha256:25fd7e25344b0789a5984c75d89f054fcf1f180bef20c998e4befbac92de4669", size = 3914, upload-time = "2018-05-29T17:18:18.683Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/c1/bb7e334135859c3a92ec399bc89293ea73f28e815e35b43929c8db6af030/primePy-1.3-py3-none-any.whl", hash = "sha256:5ed443718765be9bf7e2ff4c56cdff71b42140a15b39d054f9d99f0009e2317a", size = 4040, upload-time = "2018-05-29T17:18:17.53Z" }, +] + [[package]] name = "prometheus-client" version = "0.22.1" @@ -2229,6 +2656,108 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, ] +[[package]] +name = "pyannote-audio" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asteroid-filterbanks" }, + { name = "einops" }, + { name = "huggingface-hub" }, + { name = "lightning" }, + { name = "omegaconf" }, + { name = "pyannote-core" }, + { name = "pyannote-database" }, + { name = "pyannote-metrics" }, + { name = "pyannote-pipeline" }, + { name = "pytorch-metric-learning" }, + { name = "rich" }, + { name = "semver" }, + { name = "soundfile" }, + { name = "speechbrain" }, + { name = "tensorboardx" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torch-audiomentations" }, + { name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'darwin')" }, + { name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_python_implementation == 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'CPython' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = 
"torchmetrics" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ec/1e/efe9619c38f1281ddf21640654d8ea9e3f67c459b76f78657b26d8557bbe/pyannote_audio-3.4.0.tar.gz", hash = "sha256:d523d883cb8d37cb6daf99f3ba83f9138bb193646ad71e6eae7deb89d8ddd642", size = 804850, upload-time = "2025-09-09T07:04:51.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/13/620c6f711b723653092fd063bfee82a6af5ea3a4d3c42efc53ce623a7f4d/pyannote_audio-3.4.0-py2.py3-none-any.whl", hash = "sha256:36e38f058059f46da3478dda581cda53d9d85a21173a3e70bbdbc3ba93b5e1b7", size = 897789, upload-time = "2025-09-09T07:04:49.464Z" }, +] + +[[package]] +name = "pyannote-core" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "scipy" }, + { name = "sortedcontainers" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/03/feaf7534206f02c75baf151ce4b8c322b402a6f477c2be82f69d9269cbe6/pyannote.core-5.0.0.tar.gz", hash = "sha256:1a55bcc8bd680ba6be5fa53efa3b6f3d2cdd67144c07b6b4d8d66d5cb0d2096f", size = 59247, upload-time = "2022-12-15T13:02:05.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/c4/370bc8ba66815a5832ece753a1009388bb07ea353d21c83f2d5a1a436f2c/pyannote.core-5.0.0-py3-none-any.whl", hash = "sha256:04920a6754492242ce0dc6017545595ab643870fe69a994f20c1a5f2da0544d0", size = 58475, upload-time = "2022-12-15T13:02:03.265Z" }, +] + +[[package]] +name = "pyannote-database" +version = "5.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pandas" }, + { name = "pyannote-core" }, + { name = "pyyaml" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/ae/de36413d69a46be87cb612ebbcdc4eacbeebce3bc809124603e44a88fe26/pyannote.database-5.1.3.tar.gz", hash = "sha256:0eaf64c1cc506718de60d2d702f1359b1ae7ff252ee3e4799f1c5e378cd52c31", size = 49957, upload-time = 
"2025-01-15T20:28:26.437Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/64/92d51a3a05615ba58be8ba62a43f9f9f952d9f3646f7e4fb7826e5a3a24e/pyannote.database-5.1.3-py3-none-any.whl", hash = "sha256:37887844c7dfbcc075cb591eddc00aff45fae1ed905344e1f43e0090e63bd40a", size = 48127, upload-time = "2025-01-15T20:28:25.326Z" }, +] + +[[package]] +name = "pyannote-metrics" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docopt" }, + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "pyannote-core" }, + { name = "pyannote-database" }, + { name = "scikit-learn" }, + { name = "scipy" }, + { name = "sympy" }, + { name = "tabulate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/2b/6c5f01d3c49aa1c160765946e23782ca6436ae8b9bc514b56319ff5f16e7/pyannote.metrics-3.2.1.tar.gz", hash = "sha256:08024255a3550e96a8e9da4f5f4af326886548480de891414567c8900920ee5c", size = 49086, upload-time = "2022-06-20T14:10:34.618Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/7d/035b370ab834b30e849fe9cd092b7bd7f321fcc4a2c56b84e96476b7ede5/pyannote.metrics-3.2.1-py3-none-any.whl", hash = "sha256:46be797cdade26c82773e5018659ae610145260069c7c5bf3d3c8a029ade8e22", size = 51386, upload-time = "2022-06-20T14:10:32.621Z" }, +] + +[[package]] +name = "pyannote-pipeline" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docopt" }, + { name = "filelock" }, + { name = "optuna" }, + { name = "pyannote-core" }, + { name = "pyannote-database" }, + { name = "pyyaml" }, + { name = "scikit-learn" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/04/4bcfe0dd588577a188328b806f3a7213d8cead0ce5fe5784d01fd57df93f/pyannote.pipeline-3.0.1.tar.gz", hash = "sha256:021794e26a2cf5d8fb5bb1835951e71f5fac33eb14e23dfb7468e16b1b805151", size = 34486, upload-time = "2023-09-22T20:16:49.951Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/83/42/1bf7cbf061ed05c580bfb63bffdd3f3474cbd5c02bee4fac518eea9e9d9e/pyannote.pipeline-3.0.1-py3-none-any.whl", hash = "sha256:819bde4c4dd514f740f2373dfec794832b9fc8e346a35e43a7681625ee187393", size = 31517, upload-time = "2023-09-22T20:16:48.153Z" }, +] + [[package]] name = "pycparser" version = "2.22" @@ -2434,6 +2963,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/81/ef2b1dfd1862567d573a4fdbc9f969067621764fbb74338496840a1d2977/pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6", size = 57268, upload-time = "2025-09-17T00:32:19.474Z" }, ] +[[package]] +name = "pyparsing" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, +] + [[package]] name = "pypdf" version = "6.7.5" @@ -2622,6 +3160,42 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, ] +[[package]] +name = "pytorch-lightning" +version = "2.5.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fsspec", extra = ["http"] }, + { name = "lightning-utilities" }, + { name = "packaging" }, + { 
name = "pyyaml" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torchmetrics" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/1f/94a441d30779e1ffa5f7dc2ac5fa374c142d8b96c347a49a30226264124e/pytorch_lightning-2.5.6.tar.gz", hash = "sha256:c428faaceef74be50b870814d0d7e9f9c6ee748b8769a2afd3366bc69daf3a0f", size = 642830, upload-time = "2025-11-05T20:53:04.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/e4/32ed2f33c1b634f7c2895369222f4f8cb345044f4642bbff718e7dd1e0b7/pytorch_lightning-2.5.6-py3-none-any.whl", hash = "sha256:037bad1e2fd94d5eb6c5144f045fd4c1070c3d38fc9c14d9f3774a3a9be54dff", size = 831555, upload-time = "2025-11-05T20:53:03.316Z" }, +] + +[[package]] +name = "pytorch-metric-learning" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "scikit-learn" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/80/6e61b1a91debf4c1b47d441f9a9d7fe2aabcdd9575ed70b2811474eb95c3/pytorch-metric-learning-2.9.0.tar.gz", hash = "sha256:27a626caf5e2876a0fd666605a78cb67ef7597e25d7a68c18053dd503830701f", size = 84530, upload-time = "2025-08-17T17:11:19.501Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/46/7d/73ef5052f57b7720cad00e16598db3592a5ef4826745ffca67a2f085d4dc/pytorch_metric_learning-2.9.0-py3-none-any.whl", hash = "sha256:d51646006dc87168f00cf954785db133a4c5aac81253877248737aa42ef6432a", size = 127801, upload-time = "2025-08-17T17:11:18.185Z" }, +] + [[package]] name = "pytz" version = "2025.2" @@ -2800,6 +3374,9 @@ local = [ { name = "faster-whisper" }, ] silero-vad = [ + { name = "librosa" }, + { name = "pyannote-audio" }, + { name = "pytorch-lightning" }, { name = "silero-vad" }, { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, @@ -2872,7 +3449,10 @@ evaluation = [ ] local = [{ name = "faster-whisper", specifier = ">=0.10.0" }] silero-vad = [ - { name = "silero-vad", specifier = ">=5.1.2" }, + { name = "librosa", specifier = "==0.10.1" }, + { name = "pyannote-audio", specifier = "==3.4.0" }, + { name = "pytorch-lightning", specifier = "<2.6" }, + { name = "silero-vad", specifier = "==5.1.2" }, { name = "torch", specifier = ">=2.8.0", index = "https://download.pytorch.org/whl/cpu" }, { name = "torchaudio", specifier = ">=2.8.0", index = "https://download.pytorch.org/whl/cpu" }, ] @@ -3062,6 +3642,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c8/ed/9de62c2150ca8e2e5858acf3f4f4d0d180a38feef9fdab4078bea63d8dba/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c", size = 555334, upload-time = "2025-07-01T15:56:51.703Z" }, ] +[[package]] +name = "ruamel-yaml" +version = "0.18.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ruamel-yaml-clib", 
marker = "platform_python_implementation == 'CPython'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/2b/7a1f1ebcd6b3f14febdc003e658778d81e76b40df2267904ee6b13f0c5c6/ruamel_yaml-0.18.17.tar.gz", hash = "sha256:9091cd6e2d93a3a4b157ddb8fabf348c3de7f1fb1381346d985b6b247dcd8d3c", size = 149602, upload-time = "2025-12-17T20:02:55.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/fe/b6045c782f1fd1ae317d2a6ca1884857ce5c20f59befe6ab25a8603c43a7/ruamel_yaml-0.18.17-py3-none-any.whl", hash = "sha256:9c8ba9eb3e793efdf924b60d521820869d5bf0cb9c6f1b82d82de8295e290b9d", size = 121594, upload-time = "2025-12-17T20:02:07.657Z" }, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/97/60fda20e2fb54b83a61ae14648b0817c8f5d84a3821e40bfbdae1437026a/ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600", size = 225794, upload-time = "2025-11-16T16:12:59.761Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/80/8ce7b9af532aa94dd83360f01ce4716264db73de6bc8efd22c32341f6658/ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd", size = 147998, upload-time = "2025-11-16T16:13:13.241Z" }, + { url = "https://files.pythonhosted.org/packages/53/09/de9d3f6b6701ced5f276d082ad0f980edf08ca67114523d1b9264cd5e2e0/ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137", size = 132743, upload-time = "2025-11-16T16:13:14.265Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f7/73a9b517571e214fe5c246698ff3ed232f1ef863c8ae1667486625ec688a/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401", size = 731459, upload-time = "2025-11-16T20:22:44.338Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a2/0dc0013169800f1c331a6f55b1282c1f4492a6d32660a0cf7b89e6684919/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262", size = 749289, upload-time = "2025-11-16T16:13:15.633Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ed/3fb20a1a96b8dc645d88c4072df481fe06e0289e4d528ebbdcc044ebc8b3/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f", size = 777630, upload-time = "2025-11-16T16:13:16.898Z" }, + { url = "https://files.pythonhosted.org/packages/60/50/6842f4628bc98b7aa4733ab2378346e1441e150935ad3b9f3c3c429d9408/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d", size = 744368, upload-time = "2025-11-16T16:13:18.117Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b0/128ae8e19a7d794c2e36130a72b3bb650ce1dd13fb7def6cf10656437dcf/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922", size = 745233, upload-time = "2025-11-16T20:22:45.833Z" }, + { url = "https://files.pythonhosted.org/packages/75/05/91130633602d6ba7ce3e07f8fc865b40d2a09efd4751c740df89eed5caf9/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490", size = 770963, upload-time = "2025-11-16T16:13:19.344Z" }, + { url = "https://files.pythonhosted.org/packages/fd/4b/fd4542e7f33d7d1bc64cc9ac9ba574ce8cf145569d21f5f20133336cdc8c/ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", 
hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c", size = 102640, upload-time = "2025-11-16T16:13:20.498Z" }, + { url = "https://files.pythonhosted.org/packages/bb/eb/00ff6032c19c7537371e3119287999570867a0eafb0154fccc80e74bf57a/ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e", size = 121996, upload-time = "2025-11-16T16:13:21.855Z" }, + { url = "https://files.pythonhosted.org/packages/72/4b/5fde11a0722d676e469d3d6f78c6a17591b9c7e0072ca359801c4bd17eee/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff", size = 149088, upload-time = "2025-11-16T16:13:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/85/82/4d08ac65ecf0ef3b046421985e66301a242804eb9a62c93ca3437dc94ee0/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2", size = 134553, upload-time = "2025-11-16T16:13:24.151Z" }, + { url = "https://files.pythonhosted.org/packages/b9/cb/22366d68b280e281a932403b76da7a988108287adff2bfa5ce881200107a/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1", size = 737468, upload-time = "2025-11-16T20:22:47.335Z" }, + { url = "https://files.pythonhosted.org/packages/71/73/81230babf8c9e33770d43ed9056f603f6f5f9665aea4177a2c30ae48e3f3/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60", size = 753349, upload-time = "2025-11-16T16:13:26.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/62/150c841f24cda9e30f588ef396ed83f64cfdc13b92d2f925bb96df337ba9/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9", size = 788211, upload-time = "2025-11-16T16:13:27.441Z" }, + { url = "https://files.pythonhosted.org/packages/30/93/e79bd9cbecc3267499d9ead919bd61f7ddf55d793fb5ef2b1d7d92444f35/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642", size = 743203, upload-time = "2025-11-16T16:13:28.671Z" }, + { url = "https://files.pythonhosted.org/packages/8d/06/1eb640065c3a27ce92d76157f8efddb184bd484ed2639b712396a20d6dce/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690", size = 747292, upload-time = "2025-11-16T20:22:48.584Z" }, + { url = "https://files.pythonhosted.org/packages/a5/21/ee353e882350beab65fcc47a91b6bdc512cace4358ee327af2962892ff16/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a", size = 771624, upload-time = "2025-11-16T16:13:29.853Z" }, + { url = "https://files.pythonhosted.org/packages/57/34/cc1b94057aa867c963ecf9ea92ac59198ec2ee3a8d22a126af0b4d4be712/ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144", size = 100342, upload-time = "2025-11-16T16:13:31.067Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e5/8925a4208f131b218f9a7e459c0d6fcac8324ae35da269cb437894576366/ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc", size = 119013, upload-time = "2025-11-16T16:13:32.164Z" }, +] + [[package]] name = "s3transfer" version = "0.13.0" @@ 
-3096,6 +3716,72 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878, upload-time = "2025-02-26T09:15:14.99Z" }, ] +[[package]] +name = "scikit-learn" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/92/53ea2181da8ac6bf27170191028aee7251f8f841f8d3edbfdcaf2008fde9/scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da", size = 8595835, upload-time = "2025-12-10T07:07:39.385Z" }, + { url = "https://files.pythonhosted.org/packages/01/18/d154dc1638803adf987910cdd07097d9c526663a55666a97c124d09fb96a/scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1", size = 8080381, upload-time = "2025-12-10T07:07:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/44/226142fcb7b7101e64fdee5f49dbe6288d4c7af8abf593237b70fca080a4/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b", size = 8799632, upload-time = "2025-12-10T07:07:43.899Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/4d/4a67f30778a45d542bbea5db2dbfa1e9e100bf9ba64aefe34215ba9f11f6/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1", size = 9103788, upload-time = "2025-12-10T07:07:45.982Z" }, + { url = "https://files.pythonhosted.org/packages/89/3c/45c352094cfa60050bcbb967b1faf246b22e93cb459f2f907b600f2ceda5/scikit_learn-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57b1b610bd1f40ba43970e11ce62821c2e6569e4d74023db19c6b26f246cb3b", size = 8081706, upload-time = "2025-12-10T07:07:48.111Z" }, + { url = "https://files.pythonhosted.org/packages/3d/46/5416595bb395757f754feb20c3d776553a386b661658fb21b7c814e89efe/scikit_learn-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:2838551e011a64e3053ad7618dda9310175f7515f1742fa2d756f7c874c05961", size = 7688451, upload-time = "2025-12-10T07:07:49.873Z" }, + { url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" }, + { url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c4/0ab22726a04ede56f689476b760f98f8f46607caecff993017ac1b64aa5d/scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809", size = 8019359, upload-time = "2025-12-10T07:07:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/344a67811cfd561d7335c1b96ca21455e7e472d281c3c279c4d3f2300236/scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb", size = 7641898, upload-time = "2025-12-10T07:08:01.36Z" }, +] + +[[package]] +name = "scipy" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, + { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = 
"2026-02-23T00:16:09.456Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, + { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, + { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 36607512, upload-time = "2026-02-23T00:17:23.424Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, +] + +[[package]] +name = "semver" +version = "3.0.4" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/72/d1/d3159231aec234a59dd7d601e9dd9fe96f3afff15efd33c1070019b26132/semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602", size = 269730, upload-time = "2025-01-24T13:19:27.617Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/24/4d91e05817e92e3a61c8a21e08fd0f390f5301f1c448b137c57c4bc6e543/semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746", size = 17912, upload-time = "2025-01-24T13:19:24.949Z" }, +] + [[package]] name = "sentencepiece" version = "0.2.1" @@ -3158,7 +3844,7 @@ wheels = [ [[package]] name = "silero-vad" -version = "6.0.0" +version = "5.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "onnxruntime" }, @@ -3167,9 +3853,9 @@ dependencies = [ { name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'darwin')" }, { name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_python_implementation == 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'CPython' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/79/ff5b13ca491a2eef2a43cd989ac9a87fa2131c246d467d909f2568c56955/silero_vad-6.0.0.tar.gz", hash = "sha256:4d202cb662112d9cba0e3fbc9f2c67e2e265c853f319adf20e348d108c797b76", size = 14567206, upload-time = "2025-08-26T07:10:02.571Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/b4/d0311b2e6220a11f8f4699f4a278cb088131573286cdfe804c87c7eb5123/silero_vad-5.1.2.tar.gz", hash = "sha256:c442971160026d2d7aa0ad83f0c7ee86c89797a65289fe625c8ea59fc6fb828d", size = 5098526, upload-time = "2024-10-09T09:50:47.019Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/6a/a0a024878a1933a2326c42a3ce24fff6c0bf4882655f156c960ba50c2ed4/silero_vad-6.0.0-py3-none-any.whl", hash = "sha256:37d29be8944d2a2e6f1cc38a066076f13e78e6fc1b567a1beddcca72096f077f", size = 6119146, upload-time = "2025-08-26T07:10:00.637Z" }, + { url = "https://files.pythonhosted.org/packages/98/f7/5ae11d13fbb733cd3bfd7ff1c3a3902e6f55437df4b72307c1f168146268/silero_vad-5.1.2-py3-none-any.whl", hash = "sha256:93b41953d7774b165407fda6b533c119c5803864e367d5034dc626c82cfdf661", size = 5026737, upload-time = "2024-10-09T09:50:44.355Z" }, ] [[package]] @@ -3199,6 +3885,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, ] +[[package]] +name = "soundfile" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation == 'PyPy'" }, + { name = "cffi", version = "2.0.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation != 'PyPy'" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hash = "sha256:b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b", size = 46156, upload-time = "2025-01-25T09:17:04.831Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/64/28/e2a36573ccbcf3d57c00626a21fe51989380636e821b341d36ccca0c1c3a/soundfile-0.13.1-py2.py3-none-any.whl", hash = "sha256:a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445", size = 25751, upload-time = "2025-01-25T09:16:44.235Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/73e97a5b3cc46bba7ff8650a1504348fa1863a6f9d57d7001c6b67c5f20e/soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33", size = 1142250, upload-time = "2025-01-25T09:16:47.583Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e5/58fd1a8d7b26fc113af244f966ee3aecf03cb9293cb935daaddc1e455e18/soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:743f12c12c4054921e15736c6be09ac26b3b3d603aef6fd69f9dde68748f2593", size = 1101406, upload-time = "2025-01-25T09:16:49.662Z" }, + { url = "https://files.pythonhosted.org/packages/58/ae/c0e4a53d77cf6e9a04179535766b3321b0b9ced5f70522e4caf9329f0046/soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9c9e855f5a4d06ce4213f31918653ab7de0c5a8d8107cd2427e44b42df547deb", size = 1235729, upload-time = "2025-01-25T09:16:53.018Z" }, + { url = "https://files.pythonhosted.org/packages/57/5e/70bdd9579b35003a489fc850b5047beeda26328053ebadc1fb60f320f7db/soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:03267c4e493315294834a0870f31dbb3b28a95561b80b134f0bd3cf2d5f0e618", size = 1313646, upload-time = "2025-01-25T09:16:54.872Z" }, + { url = "https://files.pythonhosted.org/packages/fe/df/8c11dc4dfceda14e3003bb81a0d0edcaaf0796dd7b4f826ea3e532146bba/soundfile-0.13.1-py2.py3-none-win32.whl", hash = "sha256:c734564fab7c5ddf8e9be5bf70bab68042cd17e9c214c06e365e20d64f9a69d5", size = 899881, upload-time = "2025-01-25T09:16:56.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl", hash = "sha256:1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9", size = 1019162, upload-time = "2025-01-25T09:16:59.573Z" }, +] + [[package]] name = "soupsieve" version = "2.7" @@ -3208,6 +3914,50 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, ] +[[package]] +name = "soxr" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/7e/f4b461944662ad75036df65277d6130f9411002bfb79e9df7dff40a31db9/soxr-1.0.0.tar.gz", hash = "sha256:e07ee6c1d659bc6957034f4800c60cb8b98de798823e34d2a2bba1caa85a4509", size = 171415, upload-time = "2025-09-07T13:22:21.317Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/ce/a3262bc8733d3a4ce5f660ed88c3d97f4b12658b0909e71334cba1721dcb/soxr-1.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:28e19d74a5ef45c0d7000f3c70ec1719e89077379df2a1215058914d9603d2d8", size = 206739, upload-time = "2025-09-07T13:21:54.572Z" }, + { url = "https://files.pythonhosted.org/packages/64/dc/e8cbd100b652697cc9865dbed08832e7e135ff533f453eb6db9e6168d153/soxr-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8dc69fc18884e53b72f6141fdf9d80997edbb4fec9dc2942edcb63abbe0d023", size = 165233, upload-time = "2025-09-07T13:21:55.887Z" }, + { url = "https://files.pythonhosted.org/packages/75/12/4b49611c9ba5e9fe6f807d0a83352516808e8e573f8b4e712fc0c17f3363/soxr-1.0.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f15450e6f65f22f02fcd4c5a9219c873b1e583a73e232805ff160c759a6b586", 
size = 208867, upload-time = "2025-09-07T13:21:57.076Z" }, + { url = "https://files.pythonhosted.org/packages/cc/70/92146ab970a3ef8c43ac160035b1e52fde5417f89adb10572f7e788d9596/soxr-1.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f73f57452f9df37b4de7a4052789fcbd474a5b28f38bba43278ae4b489d4384", size = 242633, upload-time = "2025-09-07T13:21:58.621Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a7/628479336206959463d08260bffed87905e7ba9e3bd83ca6b405a0736e94/soxr-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f417c3d69236051cf5a1a7bad7c4bff04eb3d8fcaa24ac1cb06e26c8d48d8dc", size = 173814, upload-time = "2025-09-07T13:21:59.798Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c7/f92b81f1a151c13afb114f57799b86da9330bec844ea5a0d3fe6a8732678/soxr-1.0.0-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:abecf4e39017f3fadb5e051637c272ae5778d838e5c3926a35db36a53e3a607f", size = 205508, upload-time = "2025-09-07T13:22:01.252Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1d/c945fea9d83ea1f2be9d116b3674dbaef26ed090374a77c394b31e3b083b/soxr-1.0.0-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:e973d487ee46aa8023ca00a139db6e09af053a37a032fe22f9ff0cc2e19c94b4", size = 163568, upload-time = "2025-09-07T13:22:03.558Z" }, + { url = "https://files.pythonhosted.org/packages/b5/80/10640970998a1d2199bef6c4d92205f36968cddaf3e4d0e9fe35ddd405bd/soxr-1.0.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e8ce273cca101aff3d8c387db5a5a41001ba76ef1837883438d3c652507a9ccc", size = 204707, upload-time = "2025-09-07T13:22:05.125Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/2726603c13c2126cb8ded9e57381b7377f4f0df6ba4408e1af5ddbfdc3dd/soxr-1.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8f2a69686f2856d37823bbb7b78c3d44904f311fe70ba49b893af11d6b6047b", size = 238032, upload-time = "2025-09-07T13:22:06.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/04/530252227f4d0721a5524a936336485dfb429bb206a66baf8e470384f4a2/soxr-1.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:2a3b77b115ae7c478eecdbd060ed4f61beda542dfb70639177ac263aceda42a2", size = 172070, upload-time = "2025-09-07T13:22:07.62Z" }, +] + +[[package]] +name = "speechbrain" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "hyperpyyaml" }, + { name = "joblib" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "scipy" }, + { name = "sentencepiece" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'darwin')" }, + { name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_python_implementation == 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'CPython' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/10/87e666544a4e0cec7cbdc09f26948994831ae0f8bbc58de3bf53b68285ff/speechbrain-1.0.3.tar.gz", hash = "sha256:fcab3c6e90012cecb1eed40ea235733b550137e73da6bfa2340ba191ec714052", size = 747735, upload-time = "2025-04-07T17:17:06.749Z" } +wheels 
= [ + { url = "https://files.pythonhosted.org/packages/58/13/e61f1085aebee17d5fc2df19fcc5177c10379be52578afbecdd615a831c9/speechbrain-1.0.3-py3-none-any.whl", hash = "sha256:9859d4c1b1fb3af3b85523c0c89f52e45a04f305622ed55f31aa32dd2fba19e9", size = 864091, upload-time = "2025-04-07T17:17:04.706Z" }, +] + [[package]] name = "sqlalchemy" version = "1.4.54" @@ -3289,6 +4039,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] +[[package]] +name = "tabulate" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, +] + [[package]] name = "tenacity" version = "9.1.2" @@ -3298,6 +4057,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, ] +[[package]] +name = "tensorboardx" +version = "2.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "packaging" }, + { name = "protobuf" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2b/c5/d4cc6e293fb837aaf9f76dd7745476aeba8ef7ef5146c3b3f9ee375fe7a5/tensorboardx-2.6.4.tar.gz", hash = "sha256:b163ccb7798b31100b9f5fa4d6bc22dad362d7065c2f24b51e50731adde86828", size = 4769801, upload-time = "2025-06-10T22:37:07.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/1d/b5d63f1a6b824282b57f7b581810d20b7a28ca951f2d5b59f1eb0782c12b/tensorboardx-2.6.4-py3-none-any.whl", hash = "sha256:5970cf3a1f0a6a6e8b180ccf46f3fe832b8a25a70b86e5a237048a7c0beb18e2", size = 87201, upload-time = "2025-06-10T22:37:05.44Z" }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + [[package]] name = "tiktoken" version = "0.9.0" @@ -3438,6 +4220,40 @@ wheels = [ { url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl", hash = "sha256:99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434" }, ] +[[package]] +name = "torch-audiomentations" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "julius" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = 
"https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torch-pitch-shift" }, + { name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'darwin')" }, + { name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_python_implementation == 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'CPython' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/8d/2f8fd7e34c75f5ee8de4310c3bd3f22270acd44d1f809e2fe7c12fbf35f8/torch_audiomentations-0.12.0.tar.gz", hash = "sha256:b02d4c5eb86376986a53eb405cca5e34f370ea9284411237508e720c529f7888", size = 52094, upload-time = "2025-01-15T09:07:01.071Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/9d/1ee04f49c15d2d632f6f7102061d7c07652858e6d91b58a091531034e84f/torch_audiomentations-0.12.0-py3-none-any.whl", hash = "sha256:1b80b91d2016ccf83979622cac8f702072a79b7dcc4c2bee40f00b26433a786b", size = 48506, upload-time = "2025-01-15T09:06:59.687Z" }, +] + +[[package]] +name = "torch-pitch-shift" +version = "1.2.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "primepy" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = 
"platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, + { name = "torchaudio", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine == 'aarch64' and platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'darwin')" }, + { name = "torchaudio", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_python_implementation == 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'CPython' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/79/a6/722a832bca75d5079f6731e005b3d0c2eec7c6c6863d030620952d143d57/torch_pitch_shift-1.2.5.tar.gz", hash = "sha256:6e1c7531f08d0f407a4c55e5ff8385a41355c5c5d27ab7fa08632e51defbd0ed", size = 4725, upload-time = "2024-09-25T19:10:12.922Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/4c/96ac2a09efb56cc3c41fb3ce9b6f4d8c0604499f7481d4a13a7b03e21382/torch_pitch_shift-1.2.5-py3-none-any.whl", hash = "sha256:6f8500cbc13f1c98b11cde1805ce5084f82cdd195c285f34287541f168a7c6a7", size = 5005, upload-time = "2024-09-25T19:10:11.521Z" }, +] + [[package]] name = "torchaudio" version = "2.8.0" @@ -3485,6 +4301,22 @@ wheels = [ { url = "https://download.pytorch.org/whl/cpu/torchaudio-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl", hash = "sha256:9b302192b570657c1cc787a4d487ae4bbb7f2aab1c01b1fcc46757e7f86f391e" }, ] +[[package]] +name = "torchmetrics" +version = "1.8.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "lightning-utilities" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "torch", version = "2.8.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = 
"platform_python_implementation != 'PyPy' and sys_platform == 'darwin'" }, + { name = "torch", version = "2.8.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_python_implementation == 'PyPy' or sys_platform != 'darwin'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/2e/48a887a59ecc4a10ce9e8b35b3e3c5cef29d902c4eac143378526e7485cb/torchmetrics-1.8.2.tar.gz", hash = "sha256:cf64a901036bf107f17a524009eea7781c9c5315d130713aeca5747a686fe7a5", size = 580679, upload-time = "2025-09-03T14:00:54.077Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/21/aa0f434434c48490f91b65962b1ce863fdcce63febc166ca9fe9d706c2b6/torchmetrics-1.8.2-py3-none-any.whl", hash = "sha256:08382fd96b923e39e904c4d570f3d49e2cc71ccabd2a94e0f895d1f0dac86242", size = 983161, upload-time = "2025-09-03T14:00:51.921Z" }, +] + [[package]] name = "tqdm" version = "4.67.1" diff --git a/www/app/(app)/transcripts/useMp3.ts b/www/app/(app)/transcripts/useMp3.ts index 7321a0c7..fef630c3 100644 --- a/www/app/(app)/transcripts/useMp3.ts +++ b/www/app/(app)/transcripts/useMp3.ts @@ -78,7 +78,9 @@ const useMp3 = (transcriptId: string, waiting?: boolean): Mp3Response => { // Audio is not deleted, proceed to load it audioElement = document.createElement("audio"); - const audioUrl = `${API_URL}/v1/transcripts/${transcriptId}/audio/mp3`; + const audioUrl = accessTokenInfo + ? 
`${API_URL}/v1/transcripts/${transcriptId}/audio/mp3?token=${encodeURIComponent(accessTokenInfo)}` + : `${API_URL}/v1/transcripts/${transcriptId}/audio/mp3`; audioElement.src = audioUrl; audioElement.crossOrigin = "anonymous"; audioElement.preload = "auto"; diff --git a/www/public/service-worker.js b/www/public/service-worker.js index e798e369..7a46dca3 100644 --- a/www/public/service-worker.js +++ b/www/public/service-worker.js @@ -1,5 +1,13 @@ let authToken = null; +self.addEventListener("install", () => { + self.skipWaiting(); +}); + +self.addEventListener("activate", (event) => { + event.waitUntil(self.clients.claim()); +}); + self.addEventListener("message", (event) => { if (event.data && event.data.type === "SET_AUTH_TOKEN") { authToken = event.data.token; @@ -7,8 +15,8 @@ self.addEventListener("message", (event) => { }); self.addEventListener("fetch", function (event) { - // Check if the request is for a media file - if (/\/v1\/transcripts\/.*\/audio\/mp3$/.test(event.request.url)) { + // Check if the request is for a media file (allow optional query params) + if (/\/v1\/transcripts\/.*\/audio\/mp3(\?|$)/.test(event.request.url)) { // Modify the request to add the Authorization header const modifiedHeaders = new Headers(event.request.headers); if (authToken) {