Mirror of https://github.com/Monadical-SAS/reflector.git
Synced 2025-12-21 20:59:05 +00:00

Compare commits
11 Commits
mathieu/sq...v0.15.0
| Author | SHA1 | Date |
|---|---|---|
| | c086b91445 | |
| | 9a258abc02 | |
| | af86c47f1d | |
| | 5f6910e513 | |
| | 9a71af145e | |
| | eef6dc3903 | |
| | 1dee255fed | |
| | 5d98754305 | |
| | 969bd84fcc | |
| | 36608849ec | |
| | 5bf64b5a41 | |
.github/workflows/docker-frontend.yml (vendored, new file, 57 lines)
@@ -0,0 +1,57 @@
name: Build and Push Frontend Docker Image

on:
  push:
    branches:
      - main
    paths:
      - 'www/**'
      - '.github/workflows/docker-frontend.yml'
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}-frontend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./www
          file: ./www/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64
CHANGELOG.md (24 lines changed)
@@ -1,5 +1,29 @@
# Changelog

+## [0.15.0](https://github.com/Monadical-SAS/reflector/compare/v0.14.0...v0.15.0) (2025-10-20)
+
+### Features
+
+* api tokens ([#705](https://github.com/Monadical-SAS/reflector/issues/705)) ([9a258ab](https://github.com/Monadical-SAS/reflector/commit/9a258abc0209b0ac3799532a507ea6a9125d703a))
+
+## [0.14.0](https://github.com/Monadical-SAS/reflector/compare/v0.13.1...v0.14.0) (2025-10-08)
+
+### Features
+
+* Add calendar event data to transcript webhook payload ([#689](https://github.com/Monadical-SAS/reflector/issues/689)) ([5f6910e](https://github.com/Monadical-SAS/reflector/commit/5f6910e5131b7f28f86c9ecdcc57fed8412ee3cd))
+* container build for www / github ([#672](https://github.com/Monadical-SAS/reflector/issues/672)) ([969bd84](https://github.com/Monadical-SAS/reflector/commit/969bd84fcc14851d1a101412a0ba115f1b7cde82))
+* docker-compose for production frontend ([#664](https://github.com/Monadical-SAS/reflector/issues/664)) ([5bf64b5](https://github.com/Monadical-SAS/reflector/commit/5bf64b5a41f64535e22849b4bb11734d4dbb4aae))
+
+### Bug Fixes
+
+* restore feature boolean logic ([#671](https://github.com/Monadical-SAS/reflector/issues/671)) ([3660884](https://github.com/Monadical-SAS/reflector/commit/36608849ec64e953e3be456172502762e3c33df9))
+* security review ([#656](https://github.com/Monadical-SAS/reflector/issues/656)) ([5d98754](https://github.com/Monadical-SAS/reflector/commit/5d98754305c6c540dd194dda268544f6d88bfaf8))
+* update transcript list on reprocess ([#676](https://github.com/Monadical-SAS/reflector/issues/676)) ([9a71af1](https://github.com/Monadical-SAS/reflector/commit/9a71af145ee9b833078c78d0c684590ab12e9f0e))
+* upgrade nemo toolkit ([#678](https://github.com/Monadical-SAS/reflector/issues/678)) ([eef6dc3](https://github.com/Monadical-SAS/reflector/commit/eef6dc39037329b65804297786d852dddb0557f9))
+
## [0.13.1](https://github.com/Monadical-SAS/reflector/compare/v0.13.0...v0.13.1) (2025-09-22)
@@ -151,7 +151,7 @@ All endpoints prefixed `/v1/`:

**Frontend** (`www/.env`):
- `NEXTAUTH_URL`, `NEXTAUTH_SECRET` - Authentication configuration
-- `NEXT_PUBLIC_REFLECTOR_API_URL` - Backend API endpoint
+- `REFLECTOR_API_URL` - Backend API endpoint
- `REFLECTOR_DOMAIN_CONFIG` - Feature flags and domain settings

## Testing Strategy
README.md (25 lines changed)
@@ -168,6 +168,13 @@ You can manually process an audio file by calling the process tool:
uv run python -m reflector.tools.process path/to/audio.wav
```
+
+## Build-time env variables
+
+Next.js projects usually rely on NEXT_PUBLIC_-prefixed build-time variables. We don't use those because we need to serve a customizable prebuilt Docker container.
+
+Instead, all variables are runtime. Variables needed by the frontend are served to the frontend app at initial render.
+
+It also means there's no static prebuild and no static files to serve for js/html.
+
## Feature Flags

@@ -177,24 +184,24 @@ Reflector uses environment variable-based feature flags to control application f

| Feature Flag | Environment Variable |
|-------------|---------------------|
-| `requireLogin` | `NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN` |
+| `requireLogin` | `FEATURE_REQUIRE_LOGIN` |
-| `privacy` | `NEXT_PUBLIC_FEATURE_PRIVACY` |
+| `privacy` | `FEATURE_PRIVACY` |
-| `browse` | `NEXT_PUBLIC_FEATURE_BROWSE` |
+| `browse` | `FEATURE_BROWSE` |
-| `sendToZulip` | `NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP` |
+| `sendToZulip` | `FEATURE_SEND_TO_ZULIP` |
-| `rooms` | `NEXT_PUBLIC_FEATURE_ROOMS` |
+| `rooms` | `FEATURE_ROOMS` |

### Setting Feature Flags

-Feature flags are controlled via environment variables using the pattern `NEXT_PUBLIC_FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.
+Feature flags are controlled via environment variables using the pattern `FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.

**Examples:**
```bash
# Enable user authentication requirement
-NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN=true
+FEATURE_REQUIRE_LOGIN=true

# Disable browse functionality
-NEXT_PUBLIC_FEATURE_BROWSE=false
+FEATURE_BROWSE=false

# Enable Zulip integration
-NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP=true
+FEATURE_SEND_TO_ZULIP=true
```
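The mapping from camelCase flag names to `FEATURE_*` variables is mechanical. A hedged, illustrative sketch of the convention follows, shown in Python for brevity; the real implementation lives in the Next.js app under `www/` and is not part of this diff:

```python
import os
import re


def feature_env_name(flag: str) -> str:
    """requireLogin -> FEATURE_REQUIRE_LOGIN, sendToZulip -> FEATURE_SEND_TO_ZULIP."""
    screaming = re.sub(r"(?<!^)(?=[A-Z])", "_", flag).upper()
    return f"FEATURE_{screaming}"


def feature_enabled(flag: str, default: bool = False) -> bool:
    # Flags are plain runtime env vars now (no NEXT_PUBLIC_ prefix, no build-time baking).
    raw = os.environ.get(feature_env_name(flag))
    return default if raw is None else raw.strip().lower() in {"1", "true", "yes", "on"}


assert feature_env_name("sendToZulip") == "FEATURE_SEND_TO_ZULIP"
```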
docker-compose.prod.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
# Production Docker Compose configuration for Frontend
# Usage: docker compose -f docker-compose.prod.yml up -d

services:
  web:
    build:
      context: ./www
      dockerfile: Dockerfile
    image: reflector-frontend:latest
    environment:
      - KV_URL=${KV_URL:-redis://redis:6379}
      - SITE_URL=${SITE_URL}
      - API_URL=${API_URL}
      - WEBSOCKET_URL=${WEBSOCKET_URL}
      - NEXTAUTH_URL=${NEXTAUTH_URL:-http://localhost:3000}
      - NEXTAUTH_SECRET=${NEXTAUTH_SECRET:-changeme-in-production}
      - AUTHENTIK_ISSUER=${AUTHENTIK_ISSUER}
      - AUTHENTIK_CLIENT_ID=${AUTHENTIK_CLIENT_ID}
      - AUTHENTIK_CLIENT_SECRET=${AUTHENTIK_CLIENT_SECRET}
      - AUTHENTIK_REFRESH_TOKEN_URL=${AUTHENTIK_REFRESH_TOKEN_URL}
      - SENTRY_DSN=${SENTRY_DSN}
      - SENTRY_IGNORE_API_RESOLUTION_ERROR=${SENTRY_IGNORE_API_RESOLUTION_ERROR:-1}
    depends_on:
      - redis
    restart: unless-stopped

  redis:
    image: redis:7.2-alpine
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 3s
      retries: 3
    volumes:
      - redis_data:/data

volumes:
  redis_data:
@@ -39,7 +39,7 @@ services:
    ports:
      - 6379:6379
  web:
-    image: node:18
+    image: node:22-alpine
    ports:
      - "3000:3000"
    command: sh -c "corepack enable && pnpm install && pnpm dev"
@@ -50,6 +50,8 @@ services:
      - /app/node_modules
    env_file:
      - ./www/.env.local
+    environment:
+      - NODE_ENV=development

  postgres:
    image: postgres:17
@@ -77,7 +77,7 @@ image = (
    .pip_install(
        "hf_transfer==0.1.9",
        "huggingface_hub[hf-xet]==0.31.2",
-        "nemo_toolkit[asr]==2.3.0",
+        "nemo_toolkit[asr]==2.5.0",
        "cuda-python==12.8.0",
        "fastapi==0.115.12",
        "numpy<2",
@@ -1,3 +1,29 @@
+## API Key Management
+
+### Finding Your User ID
+
+```bash
+# Get your OAuth sub (user ID) - requires authentication
+curl -H "Authorization: Bearer <your_jwt>" http://localhost:1250/v1/me
+# Returns: {"sub": "your-oauth-sub-here", "email": "...", ...}
+```
+
+### Creating API Keys
+
+```bash
+curl -X POST http://localhost:1250/v1/user/api-keys \
+  -H "Authorization: Bearer <your_jwt>" \
+  -H "Content-Type: application/json" \
+  -d '{"name": "My API Key"}'
+```
+
+### Using API Keys
+
+```bash
+# Use X-API-Key header instead of Authorization
+curl -H "X-API-Key: <your_api_key>" http://localhost:1250/v1/transcripts
+```
+
## AWS S3/SQS usage clarification

Whereby.com uploads recordings directly to our S3 bucket when meetings end.
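The curl snippets above map directly onto any HTTP client. As a hedged illustration (not part of the diff), here is the same flow with Python's `requests`, assuming the local server on port 1250 as in the docs above:

```python
import requests

API = "http://localhost:1250/v1"
JWT = "<your_jwt>"  # obtained from the normal OAuth login flow

# Create a key while authenticated with a JWT; the plaintext key is only returned once.
resp = requests.post(
    f"{API}/user/api-keys",
    headers={"Authorization": f"Bearer {JWT}", "Content-Type": "application/json"},
    json={"name": "My API Key"},
)
resp.raise_for_status()
api_key = resp.json()["key"]

# Later requests authenticate with the X-API-Key header instead of a JWT.
transcripts = requests.get(f"{API}/transcripts", headers={"X-API-Key": api_key})
print(transcripts.status_code, transcripts.json())
```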
@@ -14,7 +14,7 @@ Webhooks are configured at the room level with two fields:

### `transcript.completed`

-Triggered when a transcript has been fully processed, including transcription, diarization, summarization, and topic detection.
+Triggered when a transcript has been fully processed, including transcription, diarization, summarization, topic detection and calendar event integration.

### `test`

@@ -128,6 +128,27 @@ This event includes a convenient URL for accessing the transcript:
    "room": {
      "id": "room-789",
      "name": "Product Team Room"
+    },
+    "calendar_event": {
+      "id": "calendar-event-123",
+      "ics_uid": "event-123",
+      "title": "Q3 Product Planning Meeting",
+      "start_time": "2025-08-27T12:00:00Z",
+      "end_time": "2025-08-27T12:30:00Z",
+      "description": "Team discussed Q3 product roadmap, prioritizing mobile app features and API improvements.",
+      "location": "Conference Room 1",
+      "attendees": [
+        {
+          "id": "participant-1",
+          "name": "John Doe",
+          "speaker": "Speaker 1"
+        },
+        {
+          "id": "participant-2",
+          "name": "Jane Smith",
+          "speaker": "Speaker 2"
+        }
+      ]
    }
  }
}
```
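To illustrate consuming the extended payload, here is a hedged sketch of a minimal FastAPI receiver that reads the new `calendar_event` field; the route path is hypothetical and any signature verification is omitted. Per the worker code later in this diff, `calendar_event` sits at the top level of the JSON body and is an empty object when no event is linked:

```python
from fastapi import FastAPI, Request

app = FastAPI()


@app.post("/hooks/reflector")  # hypothetical receiver endpoint, not part of this PR
async def transcript_completed(request: Request):
    payload = await request.json()
    event = payload.get("calendar_event") or {}
    if event:
        print("Meeting:", event.get("title"), event.get("start_time"))
        for attendee in event.get("attendees", []):
            print(" -", attendee.get("name"), "as", attendee.get("speaker"))
    return {"ok": True}
```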
@@ -27,7 +27,7 @@ AUTH_JWT_AUDIENCE=
#TRANSCRIPT_MODAL_API_KEY=xxxxx

TRANSCRIPT_BACKEND=modal
-TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
+TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
TRANSCRIPT_MODAL_API_KEY=

## =======================================================
server/migrations/versions/9e3f7b2a4c8e_add_user_api_keys.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""add user api keys

Revision ID: 9e3f7b2a4c8e
Revises: dc035ff72fd5
Create Date: 2025-10-17 00:00:00.000000

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "9e3f7b2a4c8e"
down_revision: Union[str, None] = "dc035ff72fd5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "user_api_key",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("user_id", sa.String(), nullable=False),
        sa.Column("key_hash", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )

    with op.batch_alter_table("user_api_key", schema=None) as batch_op:
        batch_op.create_index("idx_user_api_key_hash", ["key_hash"], unique=True)
        batch_op.create_index("idx_user_api_key_user_id", ["user_id"], unique=False)


def downgrade() -> None:
    op.drop_table("user_api_key")
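Applying the new revision is the usual `alembic upgrade head`. A hedged programmatic equivalent, assuming the repo's standard `alembic.ini` lives under `server/` (this diff does not show it):

```python
# Programmatic equivalent of `alembic upgrade head`, shown for illustration only.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed path; run from the server/ directory
command.upgrade(cfg, "head")  # applies 9e3f7b2a4c8e (add user api keys) and its ancestors
```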
@@ -112,6 +112,7 @@ source = ["reflector"]
[tool.pytest_env]
ENVIRONMENT = "pytest"
DATABASE_URL = "postgresql://test_user:test_password@localhost:15432/reflector_test"
+AUTH_BACKEND = "jwt"

[tool.pytest.ini_options]
addopts = "-ra -q --disable-pytest-warnings --cov --cov-report html -v"
@@ -26,6 +26,8 @@ from reflector.views.transcripts_upload import router as transcripts_upload_rout
from reflector.views.transcripts_webrtc import router as transcripts_webrtc_router
from reflector.views.transcripts_websocket import router as transcripts_websocket_router
from reflector.views.user import router as user_router
+from reflector.views.user_api_keys import router as user_api_keys_router
+from reflector.views.user_websocket import router as user_ws_router
from reflector.views.whereby import router as whereby_router
from reflector.views.zulip import router as zulip_router

@@ -65,6 +67,12 @@ app.add_middleware(
    allow_headers=["*"],
)


+@app.get("/health")
+async def health():
+    return {"status": "healthy"}


# metrics
instrumentator = Instrumentator(
    excluded_handlers=["/docs", "/metrics"],
@@ -84,6 +92,8 @@ app.include_router(transcripts_websocket_router, prefix="/v1")
app.include_router(transcripts_webrtc_router, prefix="/v1")
app.include_router(transcripts_process_router, prefix="/v1")
app.include_router(user_router, prefix="/v1")
+app.include_router(user_api_keys_router, prefix="/v1")
+app.include_router(user_ws_router, prefix="/v1")
app.include_router(zulip_router, prefix="/v1")
app.include_router(whereby_router, prefix="/v1")
add_pagination(app)
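A hedged usage note for the new unauthenticated health endpoint; port 1250 is the local API port used elsewhere in this repo's docs, adjust for your deployment:

```python
import requests

# Liveness probe target added in app.py; no auth required.
r = requests.get("http://localhost:1250/health")
assert r.status_code == 200 and r.json() == {"status": "healthy"}
```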
@@ -1,14 +1,16 @@
|
|||||||
from typing import Annotated, Optional
|
from typing import Annotated, List, Optional
|
||||||
|
|
||||||
from fastapi import Depends, HTTPException
|
from fastapi import Depends, HTTPException
|
||||||
from fastapi.security import OAuth2PasswordBearer
|
from fastapi.security import APIKeyHeader, OAuth2PasswordBearer
|
||||||
from jose import JWTError, jwt
|
from jose import JWTError, jwt
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from reflector.db.user_api_keys import user_api_keys_controller
|
||||||
from reflector.logger import logger
|
from reflector.logger import logger
|
||||||
from reflector.settings import settings
|
from reflector.settings import settings
|
||||||
|
|
||||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
|
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
|
||||||
|
api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
|
||||||
|
|
||||||
jwt_public_key = open(f"reflector/auth/jwt/keys/{settings.AUTH_JWT_PUBLIC_KEY}").read()
|
jwt_public_key = open(f"reflector/auth/jwt/keys/{settings.AUTH_JWT_PUBLIC_KEY}").read()
|
||||||
jwt_algorithm = settings.AUTH_JWT_ALGORITHM
|
jwt_algorithm = settings.AUTH_JWT_ALGORITHM
|
||||||
@@ -26,7 +28,7 @@ class JWTException(Exception):
|
|||||||
|
|
||||||
class UserInfo(BaseModel):
|
class UserInfo(BaseModel):
|
||||||
sub: str
|
sub: str
|
||||||
email: str
|
email: Optional[str] = None
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
return getattr(self, key)
|
return getattr(self, key)
|
||||||
@@ -58,34 +60,53 @@ def authenticated(token: Annotated[str, Depends(oauth2_scheme)]):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def current_user(
|
async def _authenticate_user(
|
||||||
token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
jwt_token: Optional[str],
|
||||||
jwtauth: JWTAuth = Depends(),
|
api_key: Optional[str],
|
||||||
):
|
jwtauth: JWTAuth,
|
||||||
if token is None:
|
) -> UserInfo | None:
|
||||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
user_infos: List[UserInfo] = []
|
||||||
try:
|
if api_key:
|
||||||
payload = jwtauth.verify_token(token)
|
user_api_key = await user_api_keys_controller.verify_key(api_key)
|
||||||
sub = payload["sub"]
|
if user_api_key:
|
||||||
email = payload["email"]
|
user_infos.append(UserInfo(sub=user_api_key.user_id, email=None))
|
||||||
return UserInfo(sub=sub, email=email)
|
|
||||||
except JWTError as e:
|
|
||||||
logger.error(f"JWT error: {e}")
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid authentication")
|
|
||||||
|
|
||||||
|
if jwt_token:
|
||||||
|
try:
|
||||||
|
payload = jwtauth.verify_token(jwt_token)
|
||||||
|
sub = payload["sub"]
|
||||||
|
email = payload["email"]
|
||||||
|
user_infos.append(UserInfo(sub=sub, email=email))
|
||||||
|
except JWTError as e:
|
||||||
|
logger.error(f"JWT error: {e}")
|
||||||
|
raise HTTPException(status_code=401, detail="Invalid authentication")
|
||||||
|
|
||||||
def current_user_optional(
|
if len(user_infos) == 0:
|
||||||
token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
|
||||||
jwtauth: JWTAuth = Depends(),
|
|
||||||
):
|
|
||||||
# we accept no token, but if one is provided, it must be a valid one.
|
|
||||||
if token is None:
|
|
||||||
return None
|
return None
|
||||||
try:
|
|
||||||
payload = jwtauth.verify_token(token)
|
if len(set([x.sub for x in user_infos])) > 1:
|
||||||
sub = payload["sub"]
|
raise JWTException(
|
||||||
email = payload["email"]
|
status_code=401,
|
||||||
return UserInfo(sub=sub, email=email)
|
detail="Invalid authentication: more than one user provided",
|
||||||
except JWTError as e:
|
)
|
||||||
logger.error(f"JWT error: {e}")
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid authentication")
|
return user_infos[0]
|
||||||
|
|
||||||
|
|
||||||
|
async def current_user(
|
||||||
|
jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||||
|
api_key: Annotated[Optional[str], Depends(api_key_header)],
|
||||||
|
jwtauth: JWTAuth = Depends(),
|
||||||
|
):
|
||||||
|
user = await _authenticate_user(jwt_token, api_key, jwtauth)
|
||||||
|
if user is None:
|
||||||
|
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
async def current_user_optional(
|
||||||
|
jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||||
|
api_key: Annotated[Optional[str], Depends(api_key_header)],
|
||||||
|
jwtauth: JWTAuth = Depends(),
|
||||||
|
):
|
||||||
|
return await _authenticate_user(jwt_token, api_key, jwtauth)
|
||||||
|
|||||||
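With this change, route dependencies don't have to care which credential was supplied. A hypothetical route (not part of this diff) illustrating the contract; note that `email` may be `None` when the caller authenticated with an API key:

```python
from typing import Annotated

from fastapi import APIRouter, Depends

import reflector.auth as auth

router = APIRouter()


@router.get("/whoami")  # illustrative endpoint, not added by this PR
async def whoami(user: Annotated[auth.UserInfo, Depends(auth.current_user)]):
    # Populated from either a Bearer JWT or an X-API-Key header.
    return {"sub": user.sub, "email": user.email}
```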
@@ -29,6 +29,7 @@ import reflector.db.meetings # noqa
|
|||||||
import reflector.db.recordings # noqa
|
import reflector.db.recordings # noqa
|
||||||
import reflector.db.rooms # noqa
|
import reflector.db.rooms # noqa
|
||||||
import reflector.db.transcripts # noqa
|
import reflector.db.transcripts # noqa
|
||||||
|
import reflector.db.user_api_keys # noqa
|
||||||
|
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
if "postgres" not in settings.DATABASE_URL:
|
if "postgres" not in settings.DATABASE_URL:
|
||||||
|
@@ -104,6 +104,11 @@ class CalendarEventController:
        results = await get_database().fetch_all(query)
        return [CalendarEvent(**result) for result in results]

+    async def get_by_id(self, event_id: str) -> CalendarEvent | None:
+        query = calendar_events.select().where(calendar_events.c.id == event_id)
+        result = await get_database().fetch_one(query)
+        return CalendarEvent(**result) if result else None
+
    async def get_by_ics_uid(self, room_id: str, ics_uid: str) -> CalendarEvent | None:
        query = calendar_events.select().where(
            sa.and_(
@@ -647,6 +647,19 @@ class TranscriptController:
        query = transcripts.delete().where(transcripts.c.recording_id == recording_id)
        await get_database().execute(query)

+    @staticmethod
+    def user_can_mutate(transcript: Transcript, user_id: str | None) -> bool:
+        """
+        Returns True if the given user is allowed to modify the transcript.
+
+        Policy:
+        - Anonymous transcripts (user_id is None) cannot be modified via API
+        - Only the owner (matching user_id) can modify their transcript
+        """
+        if transcript.user_id is None:
+            return False
+        return user_id and transcript.user_id == user_id
+
    @asynccontextmanager
    async def transaction(self):
        """
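A quick, hedged illustration of the policy above. The stand-in objects only carry the `user_id` attribute the check actually reads (the real `Transcript` model has many more fields), and the import assumes the server package is importable with its settings configured:

```python
from types import SimpleNamespace

from reflector.db.transcripts import transcripts_controller

anonymous = SimpleNamespace(user_id=None)   # transcript with no owner
owned = SimpleNamespace(user_id="user-1")   # transcript owned by user-1

assert not transcripts_controller.user_can_mutate(anonymous, "user-1")  # anonymous: never mutable via API
assert not transcripts_controller.user_can_mutate(owned, "user-2")      # non-owner: rejected
assert transcripts_controller.user_can_mutate(owned, "user-1")          # owner: allowed
```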
server/reflector/db/user_api_keys.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import hmac
import secrets
from datetime import datetime, timezone
from hashlib import sha256

import sqlalchemy
from pydantic import BaseModel, Field

from reflector.db import get_database, metadata
from reflector.settings import settings
from reflector.utils import generate_uuid4
from reflector.utils.string import NonEmptyString

user_api_keys = sqlalchemy.Table(
    "user_api_key",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
    sqlalchemy.Column("user_id", sqlalchemy.String, nullable=False),
    sqlalchemy.Column("key_hash", sqlalchemy.String, nullable=False),
    sqlalchemy.Column("name", sqlalchemy.String, nullable=True),
    sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True), nullable=False),
    sqlalchemy.Index("idx_user_api_key_hash", "key_hash", unique=True),
    sqlalchemy.Index("idx_user_api_key_user_id", "user_id"),
)


class UserApiKey(BaseModel):
    id: NonEmptyString = Field(default_factory=generate_uuid4)
    user_id: NonEmptyString
    key_hash: NonEmptyString
    name: NonEmptyString | None = None
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))


class UserApiKeyController:
    @staticmethod
    def generate_key() -> NonEmptyString:
        return secrets.token_urlsafe(48)

    @staticmethod
    def hash_key(key: NonEmptyString) -> str:
        return hmac.new(
            settings.SECRET_KEY.encode(), key.encode(), digestmod=sha256
        ).hexdigest()

    @classmethod
    async def create_key(
        cls,
        user_id: NonEmptyString,
        name: NonEmptyString | None = None,
    ) -> tuple[UserApiKey, NonEmptyString]:
        plaintext = cls.generate_key()
        api_key = UserApiKey(
            user_id=user_id,
            key_hash=cls.hash_key(plaintext),
            name=name,
        )
        query = user_api_keys.insert().values(**api_key.model_dump())
        await get_database().execute(query)
        return api_key, plaintext

    @classmethod
    async def verify_key(cls, plaintext_key: NonEmptyString) -> UserApiKey | None:
        key_hash = cls.hash_key(plaintext_key)
        query = user_api_keys.select().where(
            user_api_keys.c.key_hash == key_hash,
        )
        result = await get_database().fetch_one(query)
        return UserApiKey(**result) if result else None

    @staticmethod
    async def list_by_user_id(user_id: NonEmptyString) -> list[UserApiKey]:
        query = (
            user_api_keys.select()
            .where(user_api_keys.c.user_id == user_id)
            .order_by(user_api_keys.c.created_at.desc())
        )
        results = await get_database().fetch_all(query)
        return [UserApiKey(**r) for r in results]

    @staticmethod
    async def delete_key(key_id: NonEmptyString, user_id: NonEmptyString) -> bool:
        query = user_api_keys.delete().where(
            (user_api_keys.c.id == key_id) & (user_api_keys.c.user_id == user_id)
        )
        result = await get_database().execute(query)
        return result > 0


user_api_keys_controller = UserApiKeyController()
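Key material never hits the database in plaintext: `hash_key` is a keyed HMAC-SHA256 over `settings.SECRET_KEY`, so lookup works by recomputing the hash. A hedged round-trip sketch (requires the app settings and database to be initialized; not a test from this PR):

```python
import asyncio

from reflector.db.user_api_keys import UserApiKeyController, user_api_keys_controller


async def demo() -> None:
    api_key, plaintext = await user_api_keys_controller.create_key(
        user_id="user-123", name="ci"
    )
    # Only the HMAC digest is stored; verify_key recomputes it from the plaintext.
    assert api_key.key_hash == UserApiKeyController.hash_key(plaintext)
    found = await user_api_keys_controller.verify_key(plaintext)
    assert found is not None and found.user_id == "user-123"


asyncio.run(demo())
```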
@@ -131,7 +131,7 @@ class PipelineMainFile(PipelineMainBase):

        self.logger.info("File pipeline complete")

-        await transcripts_controller.set_status(transcript.id, "ended")
+        await self.set_status(transcript.id, "ended")

    async def extract_and_write_audio(
        self, file_path: Path, transcript: Transcript
@@ -85,6 +85,20 @@ def broadcast_to_sockets(func):
                message=resp.model_dump(mode="json"),
            )

+            transcript = await transcripts_controller.get_by_id(self.transcript_id)
+            if transcript and transcript.user_id:
+                # Emit only relevant events to the user room to avoid noisy updates.
+                # Allowed: STATUS, FINAL_TITLE, DURATION. All are prefixed with TRANSCRIPT_
+                allowed_user_events = {"STATUS", "FINAL_TITLE", "DURATION"}
+                if resp.event in allowed_user_events:
+                    await self.ws_manager.send_json(
+                        room_id=f"user:{transcript.user_id}",
+                        message={
+                            "event": f"TRANSCRIPT_{resp.event}",
+                            "data": {"id": self.transcript_id, **resp.data},
+                        },
+                    )
+
        return wrapper
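For reference, a hedged sketch (values invented) of what a subscriber of the `user:{user_id}` room receives when one of the allowed events fires:

```python
# Illustrative only: the exact resp.data keys depend on the pipeline event being forwarded.
forwarded = {
    "event": "TRANSCRIPT_STATUS",  # resp.event "STATUS" with the TRANSCRIPT_ prefix applied
    "data": {"id": "transcript-abc123", "status": "ended"},
}
```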
@@ -199,6 +199,8 @@ async def rooms_get(
    room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
    if not room:
        raise HTTPException(status_code=404, detail="Room not found")
+    if not room.is_shared and (user_id is None or room.user_id != user_id):
+        raise HTTPException(status_code=403, detail="Room access denied")
    return room


@@ -229,9 +231,9 @@ async def rooms_get_by_name(
@router.post("/rooms", response_model=Room)
async def rooms_create(
    room: CreateRoom,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]

    return await rooms_controller.add(
        name=room.name,
@@ -256,12 +258,14 @@ async def rooms_create(
async def rooms_update(
    room_id: str,
    info: UpdateRoom,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
    if not room:
        raise HTTPException(status_code=404, detail="Room not found")
+    if room.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")
    values = info.dict(exclude_unset=True)
    await rooms_controller.update(room, values)
    return room
@@ -270,12 +274,14 @@ async def rooms_update(
@router.delete("/rooms/{room_id}", response_model=DeletionStatus)
async def rooms_delete(
    room_id: str,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
-    room = await rooms_controller.get_by_id(room_id, user_id=user_id)
+    room = await rooms_controller.get_by_id(room_id)
    if not room:
        raise HTTPException(status_code=404, detail="Room not found")
+    if room.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")
    await rooms_controller.remove_by_id(room.id, user_id=user_id)
    return DeletionStatus(status="ok")

@@ -339,16 +345,16 @@ async def rooms_create_meeting(
@router.post("/rooms/{room_id}/webhook/test", response_model=WebhookTestResult)
async def rooms_test_webhook(
    room_id: str,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
    """Test webhook configuration by sending a sample payload."""
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]

    room = await rooms_controller.get_by_id(room_id)
    if not room:
        raise HTTPException(status_code=404, detail="Room not found")

-    if user_id and room.user_id != user_id:
+    if room.user_id != user_id:
        raise HTTPException(
            status_code=403, detail="Not authorized to test this room's webhook"
        )
@@ -9,8 +9,6 @@ from pydantic import BaseModel, Field, constr, field_serializer

import reflector.auth as auth
from reflector.db import get_database
-from reflector.db.meetings import meetings_controller
-from reflector.db.rooms import rooms_controller
from reflector.db.search import (
    DEFAULT_SEARCH_LIMIT,
    SearchLimit,
@@ -34,6 +32,7 @@ from reflector.db.transcripts import (
from reflector.processors.types import Transcript as ProcessorTranscript
from reflector.processors.types import Word
from reflector.settings import settings
+from reflector.ws_manager import get_ws_manager
from reflector.zulip import (
    InvalidMessageError,
    get_zulip_message,
@@ -213,7 +212,7 @@ async def transcripts_create(
    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
):
    user_id = user["sub"] if user else None
-    return await transcripts_controller.add(
+    transcript = await transcripts_controller.add(
        info.name,
        source_kind=info.source_kind or SourceKind.LIVE,
        source_language=info.source_language,
@@ -221,6 +220,14 @@ async def transcripts_create(
        user_id=user_id,
    )

+    if user_id:
+        await get_ws_manager().send_json(
+            room_id=f"user:{user_id}",
+            message={"event": "TRANSCRIPT_CREATED", "data": {"id": transcript.id}},
+        )
+
+    return transcript


# ==============================================================
# Single transcript
@@ -344,12 +351,14 @@ async def transcript_get(
async def transcript_update(
    transcript_id: str,
    info: UpdateTranscript,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if not transcripts_controller.user_can_mutate(transcript, user_id):
+        raise HTTPException(status_code=403, detail="Not authorized")
    values = info.dict(exclude_unset=True)
    updated_transcript = await transcripts_controller.update(transcript, values)
    return updated_transcript
@@ -358,20 +367,20 @@ async def transcript_update(
@router.delete("/transcripts/{transcript_id}", response_model=DeletionStatus)
async def transcript_delete(
    transcript_id: str,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id(transcript_id)
    if not transcript:
        raise HTTPException(status_code=404, detail="Transcript not found")
-    if transcript.meeting_id:
-        meeting = await meetings_controller.get_by_id(transcript.meeting_id)
-        room = await rooms_controller.get_by_id(meeting.room_id)
-        if room.is_shared:
-            user_id = None
+    if not transcripts_controller.user_can_mutate(transcript, user_id):
+        raise HTTPException(status_code=403, detail="Not authorized")

    await transcripts_controller.remove_by_id(transcript.id, user_id=user_id)
+    await get_ws_manager().send_json(
+        room_id=f"user:{user_id}",
+        message={"event": "TRANSCRIPT_DELETED", "data": {"id": transcript.id}},
+    )
    return DeletionStatus(status="ok")


@@ -443,15 +452,16 @@ async def transcript_post_to_zulip(
    stream: str,
    topic: str,
    include_topics: bool,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
    if not transcript:
        raise HTTPException(status_code=404, detail="Transcript not found")
+    if not transcripts_controller.user_can_mutate(transcript, user_id):
+        raise HTTPException(status_code=403, detail="Not authorized")
    content = get_zulip_message(transcript, include_topics)

    message_updated = False
@@ -56,12 +56,14 @@ async def transcript_get_participants(
async def transcript_add_participant(
    transcript_id: str,
    participant: CreateParticipant,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
) -> Participant:
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if transcript.user_id is not None and transcript.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")

    # ensure the speaker is unique
    if participant.speaker is not None and transcript.participants is not None:
@@ -101,12 +103,14 @@ async def transcript_update_participant(
    transcript_id: str,
    participant_id: str,
    participant: UpdateParticipant,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
) -> Participant:
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if transcript.user_id is not None and transcript.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")

    # ensure the speaker is unique
    for p in transcript.participants:
@@ -138,11 +142,13 @@ async def transcript_update_participant(
async def transcript_delete_participant(
    transcript_id: str,
    participant_id: str,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
) -> DeletionStatus:
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if transcript.user_id is not None and transcript.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")
    await transcripts_controller.delete_participant(transcript, participant_id)
    return DeletionStatus(status="ok")
@@ -35,12 +35,14 @@ class SpeakerMerge(BaseModel):
async def transcript_assign_speaker(
    transcript_id: str,
    assignment: SpeakerAssignment,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
) -> SpeakerAssignmentStatus:
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if transcript.user_id is not None and transcript.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")

    if not transcript:
        raise HTTPException(status_code=404, detail="Transcript not found")
@@ -113,12 +115,14 @@ async def transcript_assign_speaker(
async def transcript_merge_speaker(
    transcript_id: str,
    merge: SpeakerMerge,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
) -> SpeakerAssignmentStatus:
-    user_id = user["sub"] if user else None
+    user_id = user["sub"]
    transcript = await transcripts_controller.get_by_id_for_http(
        transcript_id, user_id=user_id
    )
+    if transcript.user_id is not None and transcript.user_id != user_id:
+        raise HTTPException(status_code=403, detail="Not authorized")

    if not transcript:
        raise HTTPException(status_code=404, detail="Transcript not found")
@@ -4,8 +4,11 @@ Transcripts websocket API

"""

-from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
+from typing import Optional
+
+from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketDisconnect
+
+import reflector.auth as auth
from reflector.db.transcripts import transcripts_controller
from reflector.ws_manager import get_ws_manager

@@ -21,10 +24,12 @@ async def transcript_get_websocket_events(transcript_id: str):
async def transcript_events_websocket(
    transcript_id: str,
    websocket: WebSocket,
-    # user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+    user: Optional[auth.UserInfo] = Depends(auth.current_user_optional),
):
-    # user_id = user["sub"] if user else None
+    user_id = user["sub"] if user else None
-    transcript = await transcripts_controller.get_by_id(transcript_id)
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
    if not transcript:
        raise HTTPException(status_code=404, detail="Transcript not found")
server/reflector/views/user_api_keys.py (new file, 62 lines)
@@ -0,0 +1,62 @@
from datetime import datetime
from typing import Annotated

import structlog
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel

import reflector.auth as auth
from reflector.db.user_api_keys import user_api_keys_controller
from reflector.utils.string import NonEmptyString

router = APIRouter()
logger = structlog.get_logger(__name__)


class CreateApiKeyRequest(BaseModel):
    name: NonEmptyString | None = None


class ApiKeyResponse(BaseModel):
    id: NonEmptyString
    user_id: NonEmptyString
    name: NonEmptyString | None
    created_at: datetime


class CreateApiKeyResponse(ApiKeyResponse):
    key: NonEmptyString


@router.post("/user/api-keys", response_model=CreateApiKeyResponse)
async def create_api_key(
    req: CreateApiKeyRequest,
    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
    api_key_model, plaintext = await user_api_keys_controller.create_key(
        user_id=user["sub"],
        name=req.name,
    )
    return CreateApiKeyResponse(
        **api_key_model.model_dump(),
        key=plaintext,
    )


@router.get("/user/api-keys", response_model=list[ApiKeyResponse])
async def list_api_keys(
    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
    api_keys = await user_api_keys_controller.list_by_user_id(user["sub"])
    return [ApiKeyResponse(**k.model_dump()) for k in api_keys]


@router.delete("/user/api-keys/{key_id}")
async def delete_api_key(
    key_id: NonEmptyString,
    user: Annotated[auth.UserInfo, Depends(auth.current_user)],
):
    deleted = await user_api_keys_controller.delete_key(key_id, user["sub"])
    if not deleted:
        raise HTTPException(status_code=404)
    return {"status": "ok"}
server/reflector/views/user_websocket.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from typing import Optional

from fastapi import APIRouter, WebSocket

from reflector.auth.auth_jwt import JWTAuth  # type: ignore
from reflector.ws_manager import get_ws_manager

router = APIRouter()

# Close code for unauthorized WebSocket connections
UNAUTHORISED = 4401


@router.websocket("/events")
async def user_events_websocket(websocket: WebSocket):
    # Browser can't send Authorization header for WS; use subprotocol: ["bearer", token]
    raw_subprotocol = websocket.headers.get("sec-websocket-protocol") or ""
    parts = [p.strip() for p in raw_subprotocol.split(",") if p.strip()]
    token: Optional[str] = None
    negotiated_subprotocol: Optional[str] = None
    if len(parts) >= 2 and parts[0].lower() == "bearer":
        negotiated_subprotocol = "bearer"
        token = parts[1]

    user_id: Optional[str] = None
    if not token:
        await websocket.close(code=UNAUTHORISED)
        return

    try:
        payload = JWTAuth().verify_token(token)
        user_id = payload.get("sub")
    except Exception:
        await websocket.close(code=UNAUTHORISED)
        return

    if not user_id:
        await websocket.close(code=UNAUTHORISED)
        return

    room_id = f"user:{user_id}"
    ws_manager = get_ws_manager()

    await ws_manager.add_user_to_room(
        room_id, websocket, subprotocol=negotiated_subprotocol
    )

    try:
        while True:
            await websocket.receive()
    finally:
        if room_id:
            await ws_manager.remove_user_from_room(room_id, websocket)
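Because browsers can't attach an Authorization header to a WebSocket upgrade, the endpoint above expects the JWT in the `Sec-WebSocket-Protocol` header as `bearer, <token>`. A hedged client sketch using the third-party `websockets` package; the host/port is an assumption, while the `/v1/events` path follows from how the router is mounted in app.py:

```python
import asyncio

import websockets  # pip install websockets


async def listen(token: str) -> None:
    uri = "ws://localhost:1250/v1/events"  # user_ws_router is mounted under /v1
    # The ["bearer", token] subprotocol pair mirrors what the endpoint parses.
    async with websockets.connect(uri, subprotocols=["bearer", token]) as ws:
        async for message in ws:
            # Expect events such as TRANSCRIPT_CREATED / TRANSCRIPT_DELETED / TRANSCRIPT_STATUS
            print("event:", message)


asyncio.run(listen("<your_jwt>"))
```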
@@ -11,6 +11,8 @@ import structlog
from celery import shared_task
from celery.utils.log import get_task_logger

+from reflector.db.calendar_events import calendar_events_controller
+from reflector.db.meetings import meetings_controller
from reflector.db.rooms import rooms_controller
from reflector.db.transcripts import transcripts_controller
from reflector.pipelines.main_live_pipeline import asynctask
@@ -84,6 +86,18 @@ async def send_transcript_webhook(
        }
    )

+    # Fetch meeting and calendar event if they exist
+    calendar_event = None
+    try:
+        if transcript.meeting_id:
+            meeting = await meetings_controller.get_by_id(transcript.meeting_id)
+            if meeting and meeting.calendar_event_id:
+                calendar_event = await calendar_events_controller.get_by_id(
+                    meeting.calendar_event_id
+                )
+    except Exception as e:
+        logger.error("Error fetching meeting or calendar event", error=str(e))
+
    # Build webhook payload
    frontend_url = f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
    participants = [
@@ -116,6 +130,33 @@ async def send_transcript_webhook(
        },
    }

+    # Always include calendar_event field, even if no event is present
+    payload_data["calendar_event"] = {}
+
+    # Add calendar event data if present
+    if calendar_event:
+        calendar_data = {
+            "id": calendar_event.id,
+            "ics_uid": calendar_event.ics_uid,
+            "title": calendar_event.title,
+            "start_time": calendar_event.start_time.isoformat()
+            if calendar_event.start_time
+            else None,
+            "end_time": calendar_event.end_time.isoformat()
+            if calendar_event.end_time
+            else None,
+        }
+
+        # Add optional fields only if they exist
+        if calendar_event.description:
+            calendar_data["description"] = calendar_event.description
+        if calendar_event.location:
+            calendar_data["location"] = calendar_event.location
+        if calendar_event.attendees:
+            calendar_data["attendees"] = calendar_event.attendees
+
+        payload_data["calendar_event"] = calendar_data
+
    # Convert to JSON
    payload_json = json.dumps(payload_data, separators=(",", ":"))
    payload_bytes = payload_json.encode("utf-8")
|||||||
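For reference, a hypothetical sketch of how the webhook body might look after this change. Only the `calendar_event` keys come from the hunk above; the other top-level fields are elided and every value is illustrative:

```python
# Illustrative payload shape only; field values are made up, and the rest of
# the payload (transcript data, participants, etc.) is not shown in this hunk.
payload_data = {
    # ... transcript fields, participants, room info ...
    "calendar_event": {
        "id": "evt_123",
        "ics_uid": "uid-123@example.com",
        "title": "Weekly sync",
        "start_time": "2025-10-08T15:00:00+00:00",
        "end_time": "2025-10-08T15:30:00+00:00",
        # "description", "location", "attendees" are added only when set
    },
}
```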
@@ -65,8 +65,13 @@ class WebsocketManager:
         self.tasks: dict = {}
         self.pubsub_client = pubsub_client

-    async def add_user_to_room(self, room_id: str, websocket: WebSocket) -> None:
-        await websocket.accept()
+    async def add_user_to_room(
+        self, room_id: str, websocket: WebSocket, subprotocol: str | None = None
+    ) -> None:
+        if subprotocol:
+            await websocket.accept(subprotocol=subprotocol)
+        else:
+            await websocket.accept()

         if room_id in self.rooms:
             self.rooms[room_id].append(websocket)
@@ -1,4 +1,5 @@
 import os
+from contextlib import asynccontextmanager
 from tempfile import NamedTemporaryFile
 from unittest.mock import patch

@@ -337,6 +338,166 @@ async def client():
|
|||||||
yield ac
|
yield ac
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
async def ws_manager_in_memory(monkeypatch):
|
||||||
|
"""Replace Redis-based WS manager with an in-memory implementation for tests."""
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
|
||||||
|
from reflector.ws_manager import WebsocketManager
|
||||||
|
|
||||||
|
class _InMemorySubscriber:
|
||||||
|
def __init__(self, queue: asyncio.Queue):
|
||||||
|
self.queue = queue
|
||||||
|
|
||||||
|
async def get_message(self, ignore_subscribe_messages: bool = True):
|
||||||
|
try:
|
||||||
|
return await asyncio.wait_for(self.queue.get(), timeout=0.05)
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
class InMemoryPubSubManager:
|
||||||
|
def __init__(self):
|
||||||
|
self.queues: dict[str, asyncio.Queue] = {}
|
||||||
|
self.connected = False
|
||||||
|
|
||||||
|
async def connect(self) -> None:
|
||||||
|
self.connected = True
|
||||||
|
|
||||||
|
async def disconnect(self) -> None:
|
||||||
|
self.connected = False
|
||||||
|
|
||||||
|
async def send_json(self, room_id: str, message: dict) -> None:
|
||||||
|
if room_id not in self.queues:
|
||||||
|
self.queues[room_id] = asyncio.Queue()
|
||||||
|
payload = json.dumps(message).encode("utf-8")
|
||||||
|
await self.queues[room_id].put(
|
||||||
|
{"channel": room_id.encode("utf-8"), "data": payload}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def subscribe(self, room_id: str):
|
||||||
|
if room_id not in self.queues:
|
||||||
|
self.queues[room_id] = asyncio.Queue()
|
||||||
|
return _InMemorySubscriber(self.queues[room_id])
|
||||||
|
|
||||||
|
async def unsubscribe(self, room_id: str) -> None:
|
||||||
|
# keep queue for potential later resubscribe within same test
|
||||||
|
pass
|
||||||
|
|
||||||
|
pubsub = InMemoryPubSubManager()
|
||||||
|
ws_manager = WebsocketManager(pubsub_client=pubsub)
|
||||||
|
|
||||||
|
def _get_ws_manager():
|
||||||
|
return ws_manager
|
||||||
|
|
||||||
|
# Patch all places that imported get_ws_manager at import time
|
||||||
|
monkeypatch.setattr("reflector.ws_manager.get_ws_manager", _get_ws_manager)
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"reflector.pipelines.main_live_pipeline.get_ws_manager", _get_ws_manager
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"reflector.views.transcripts_websocket.get_ws_manager", _get_ws_manager
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"reflector.views.user_websocket.get_ws_manager", _get_ws_manager
|
||||||
|
)
|
||||||
|
monkeypatch.setattr("reflector.views.transcripts.get_ws_manager", _get_ws_manager)
|
||||||
|
|
||||||
|
# Websocket auth: avoid OAuth2 on websocket dependencies; allow anonymous
|
||||||
|
import reflector.auth as auth
|
||||||
|
|
||||||
|
# Ensure FastAPI uses our override for routes that captured the original callable
|
||||||
|
from reflector.app import app as fastapi_app
|
||||||
|
|
||||||
|
try:
|
||||||
|
fastapi_app.dependency_overrides[auth.current_user_optional] = lambda: None
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Stub Redis cache used by profanity filter to avoid external Redis
|
||||||
|
from reflector import redis_cache as rc
|
||||||
|
|
||||||
|
class _FakeRedis:
|
||||||
|
def __init__(self):
|
||||||
|
self._data = {}
|
||||||
|
|
||||||
|
def get(self, key):
|
||||||
|
value = self._data.get(key)
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
if isinstance(value, bytes):
|
||||||
|
return value
|
||||||
|
return str(value).encode("utf-8")
|
||||||
|
|
||||||
|
def setex(self, key, duration, value):
|
||||||
|
# ignore duration for tests
|
||||||
|
if isinstance(value, bytes):
|
||||||
|
self._data[key] = value
|
||||||
|
else:
|
||||||
|
self._data[key] = str(value).encode("utf-8")
|
||||||
|
|
||||||
|
fake_redises: dict[int, _FakeRedis] = {}
|
||||||
|
|
||||||
|
def _get_redis_client(db=0):
|
||||||
|
if db not in fake_redises:
|
||||||
|
fake_redises[db] = _FakeRedis()
|
||||||
|
return fake_redises[db]
|
||||||
|
|
||||||
|
monkeypatch.setattr(rc, "get_redis_client", _get_redis_client)
|
||||||
|
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def authenticated_client():
|
||||||
|
async with authenticated_client_ctx():
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def authenticated_client2():
|
||||||
|
async with authenticated_client2_ctx():
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def authenticated_client_ctx():
|
||||||
|
from reflector.app import app
|
||||||
|
from reflector.auth import current_user, current_user_optional
|
||||||
|
|
||||||
|
app.dependency_overrides[current_user] = lambda: {
|
||||||
|
"sub": "randomuserid",
|
||||||
|
"email": "test@mail.com",
|
||||||
|
}
|
||||||
|
app.dependency_overrides[current_user_optional] = lambda: {
|
||||||
|
"sub": "randomuserid",
|
||||||
|
"email": "test@mail.com",
|
||||||
|
}
|
||||||
|
yield
|
||||||
|
del app.dependency_overrides[current_user]
|
||||||
|
del app.dependency_overrides[current_user_optional]
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def authenticated_client2_ctx():
|
||||||
|
from reflector.app import app
|
||||||
|
from reflector.auth import current_user, current_user_optional
|
||||||
|
|
||||||
|
app.dependency_overrides[current_user] = lambda: {
|
||||||
|
"sub": "randomuserid2",
|
||||||
|
"email": "test@mail.com",
|
||||||
|
}
|
||||||
|
app.dependency_overrides[current_user_optional] = lambda: {
|
||||||
|
"sub": "randomuserid2",
|
||||||
|
"email": "test@mail.com",
|
||||||
|
}
|
||||||
|
yield
|
||||||
|
del app.dependency_overrides[current_user]
|
||||||
|
del app.dependency_overrides[current_user_optional]
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
def fake_mp3_upload():
|
def fake_mp3_upload():
|
||||||
with patch(
|
with patch(
|
||||||
|
|||||||
@@ -11,14 +11,21 @@ from reflector.db.rooms import rooms_controller
 @pytest.fixture
 async def authenticated_client(client):
     from reflector.app import app
-    from reflector.auth import current_user_optional
+    from reflector.auth import current_user, current_user_optional

+    app.dependency_overrides[current_user] = lambda: {
+        "sub": "test-user",
+        "email": "test@example.com",
+    }
     app.dependency_overrides[current_user_optional] = lambda: {
         "sub": "test-user",
         "email": "test@example.com",
     }
-    yield client
-    del app.dependency_overrides[current_user_optional]
+    try:
+        yield client
+    finally:
+        del app.dependency_overrides[current_user]
+        del app.dependency_overrides[current_user_optional]


 @pytest.mark.asyncio
|||||||
384
server/tests/test_security_permissions.py
Normal file
384
server/tests/test_security_permissions.py
Normal file
@@ -0,0 +1,384 @@
|
|||||||
|
import asyncio
|
||||||
|
import shutil
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx_ws import aconnect_ws
|
||||||
|
from uvicorn import Config, Server
|
||||||
|
|
||||||
|
from reflector import zulip as zulip_module
|
||||||
|
from reflector.app import app
|
||||||
|
from reflector.db import get_database
|
||||||
|
from reflector.db.meetings import meetings_controller
|
||||||
|
from reflector.db.rooms import Room, rooms_controller
|
||||||
|
from reflector.db.transcripts import (
|
||||||
|
SourceKind,
|
||||||
|
TranscriptTopic,
|
||||||
|
transcripts_controller,
|
||||||
|
)
|
||||||
|
from reflector.processors.types import Word
|
||||||
|
from reflector.settings import settings
|
||||||
|
from reflector.views.transcripts import create_access_token
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_delete_transcript_in_shared_room(client):
|
||||||
|
# Create a shared room with a fake owner id so meeting has a room_id
|
||||||
|
room = await rooms_controller.add(
|
||||||
|
name="shared-room-test",
|
||||||
|
user_id="owner-1",
|
||||||
|
zulip_auto_post=False,
|
||||||
|
zulip_stream="",
|
||||||
|
zulip_topic="",
|
||||||
|
is_locked=False,
|
||||||
|
room_mode="normal",
|
||||||
|
recording_type="cloud",
|
||||||
|
recording_trigger="automatic-2nd-participant",
|
||||||
|
is_shared=True,
|
||||||
|
webhook_url="",
|
||||||
|
webhook_secret="",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a meeting for that room (so transcript.meeting_id links to the shared room)
|
||||||
|
meeting = await meetings_controller.create(
|
||||||
|
id="meeting-sec-test",
|
||||||
|
room_name="room-sec-test",
|
||||||
|
room_url="room-url",
|
||||||
|
host_room_url="host-url",
|
||||||
|
start_date=Room.model_fields["created_at"].default_factory(),
|
||||||
|
end_date=Room.model_fields["created_at"].default_factory(),
|
||||||
|
room=room,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a transcript owned by someone else and link it to meeting
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="to-delete",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id="owner-2",
|
||||||
|
meeting_id=meeting.id,
|
||||||
|
room_id=room.id,
|
||||||
|
share_mode="private",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Anonymous DELETE should be rejected
|
||||||
|
del_resp = await client.delete(f"/transcripts/{t.id}")
|
||||||
|
assert del_resp.status_code == 401, del_resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_mutate_participants_on_public_transcript(client):
|
||||||
|
# Create a public transcript with no owner
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="public-transcript",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id=None,
|
||||||
|
share_mode="public",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Anonymous POST participant must be rejected
|
||||||
|
resp = await client.post(
|
||||||
|
f"/transcripts/{t.id}/participants",
|
||||||
|
json={"name": "AnonUser", "speaker": 0},
|
||||||
|
)
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_update_and_delete_room(client):
|
||||||
|
# Create room as owner id "owner-3" via controller
|
||||||
|
room = await rooms_controller.add(
|
||||||
|
name="room-anon-update-delete",
|
||||||
|
user_id="owner-3",
|
||||||
|
zulip_auto_post=False,
|
||||||
|
zulip_stream="",
|
||||||
|
zulip_topic="",
|
||||||
|
is_locked=False,
|
||||||
|
room_mode="normal",
|
||||||
|
recording_type="cloud",
|
||||||
|
recording_trigger="automatic-2nd-participant",
|
||||||
|
is_shared=False,
|
||||||
|
webhook_url="",
|
||||||
|
webhook_secret="",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Anonymous PATCH via API (no auth)
|
||||||
|
resp = await client.patch(
|
||||||
|
f"/rooms/{room.id}",
|
||||||
|
json={
|
||||||
|
"name": "room-anon-updated",
|
||||||
|
"zulip_auto_post": False,
|
||||||
|
"zulip_stream": "",
|
||||||
|
"zulip_topic": "",
|
||||||
|
"is_locked": False,
|
||||||
|
"room_mode": "normal",
|
||||||
|
"recording_type": "cloud",
|
||||||
|
"recording_trigger": "automatic-2nd-participant",
|
||||||
|
"is_shared": False,
|
||||||
|
"webhook_url": "",
|
||||||
|
"webhook_secret": "",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
# Expect authentication required
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
# Anonymous DELETE via API
|
||||||
|
del_resp = await client.delete(f"/rooms/{room.id}")
|
||||||
|
assert del_resp.status_code == 401, del_resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_post_transcript_to_zulip(client, monkeypatch):
|
||||||
|
# Create a public transcript with some content
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="zulip-public",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id=None,
|
||||||
|
share_mode="public",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock send/update calls
|
||||||
|
def _fake_send_message_to_zulip(stream, topic, content):
|
||||||
|
return {"id": 12345}
|
||||||
|
|
||||||
|
async def _fake_update_message(message_id, stream, topic, content):
|
||||||
|
return {"result": "success"}
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
zulip_module, "send_message_to_zulip", _fake_send_message_to_zulip
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(zulip_module, "update_zulip_message", _fake_update_message)
|
||||||
|
|
||||||
|
# Anonymous POST to Zulip endpoint
|
||||||
|
resp = await client.post(
|
||||||
|
f"/transcripts/{t.id}/zulip",
|
||||||
|
params={"stream": "general", "topic": "Updates", "include_topics": False},
|
||||||
|
)
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_assign_speaker_on_public_transcript(client):
|
||||||
|
# Create public transcript
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="public-assign",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id=None,
|
||||||
|
share_mode="public",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add a topic with words to be reassigned
|
||||||
|
topic = TranscriptTopic(
|
||||||
|
title="T1",
|
||||||
|
summary="S1",
|
||||||
|
timestamp=0.0,
|
||||||
|
transcript="Hello",
|
||||||
|
words=[Word(start=0.0, end=1.0, text="Hello", speaker=0)],
|
||||||
|
)
|
||||||
|
transcript = await transcripts_controller.get_by_id(t.id)
|
||||||
|
await transcripts_controller.upsert_topic(transcript, topic)
|
||||||
|
|
||||||
|
# Anonymous assign speaker over time range covering the word
|
||||||
|
resp = await client.patch(
|
||||||
|
f"/transcripts/{t.id}/speaker/assign",
|
||||||
|
json={
|
||||||
|
"speaker": 1,
|
||||||
|
"timestamp_from": 0.0,
|
||||||
|
"timestamp_to": 1.0,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
# Minimal server fixture for websocket tests
|
||||||
|
@pytest.fixture
|
||||||
|
def appserver_ws_simple(setup_database):
|
||||||
|
host = "127.0.0.1"
|
||||||
|
port = 1256
|
||||||
|
server_started = threading.Event()
|
||||||
|
server_exception = None
|
||||||
|
server_instance = None
|
||||||
|
|
||||||
|
def run_server():
|
||||||
|
nonlocal server_exception, server_instance
|
||||||
|
try:
|
||||||
|
loop = asyncio.new_event_loop()
|
||||||
|
asyncio.set_event_loop(loop)
|
||||||
|
|
||||||
|
config = Config(app=app, host=host, port=port, loop=loop)
|
||||||
|
server_instance = Server(config)
|
||||||
|
|
||||||
|
async def start_server():
|
||||||
|
database = get_database()
|
||||||
|
await database.connect()
|
||||||
|
try:
|
||||||
|
await server_instance.serve()
|
||||||
|
finally:
|
||||||
|
await database.disconnect()
|
||||||
|
|
||||||
|
server_started.set()
|
||||||
|
loop.run_until_complete(start_server())
|
||||||
|
except Exception as e:
|
||||||
|
server_exception = e
|
||||||
|
server_started.set()
|
||||||
|
finally:
|
||||||
|
loop.close()
|
||||||
|
|
||||||
|
server_thread = threading.Thread(target=run_server, daemon=True)
|
||||||
|
server_thread.start()
|
||||||
|
|
||||||
|
server_started.wait(timeout=30)
|
||||||
|
if server_exception:
|
||||||
|
raise server_exception
|
||||||
|
|
||||||
|
time.sleep(0.5)
|
||||||
|
|
||||||
|
yield host, port
|
||||||
|
|
||||||
|
if server_instance:
|
||||||
|
server_instance.should_exit = True
|
||||||
|
server_thread.join(timeout=30)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_websocket_denies_anonymous_on_private_transcript(appserver_ws_simple):
|
||||||
|
host, port = appserver_ws_simple
|
||||||
|
|
||||||
|
# Create a private transcript owned by someone
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="private-ws",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id="owner-x",
|
||||||
|
share_mode="private",
|
||||||
|
)
|
||||||
|
|
||||||
|
base_url = f"http://{host}:{port}/v1"
|
||||||
|
# Anonymous connect should be denied
|
||||||
|
with pytest.raises(Exception):
|
||||||
|
async with aconnect_ws(f"{base_url}/transcripts/{t.id}/events") as ws:
|
||||||
|
await ws.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_update_public_transcript(client):
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="update-me",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id=None,
|
||||||
|
share_mode="public",
|
||||||
|
)
|
||||||
|
|
||||||
|
resp = await client.patch(
|
||||||
|
f"/transcripts/{t.id}",
|
||||||
|
json={"title": "New Title From Anonymous"},
|
||||||
|
)
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_get_nonshared_room_by_id(client):
|
||||||
|
room = await rooms_controller.add(
|
||||||
|
name="private-room-exposed",
|
||||||
|
user_id="owner-z",
|
||||||
|
zulip_auto_post=False,
|
||||||
|
zulip_stream="",
|
||||||
|
zulip_topic="",
|
||||||
|
is_locked=False,
|
||||||
|
room_mode="normal",
|
||||||
|
recording_type="cloud",
|
||||||
|
recording_trigger="automatic-2nd-participant",
|
||||||
|
is_shared=False,
|
||||||
|
webhook_url="",
|
||||||
|
webhook_secret="",
|
||||||
|
)
|
||||||
|
|
||||||
|
resp = await client.get(f"/rooms/{room.id}")
|
||||||
|
assert resp.status_code == 403, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_call_rooms_webhook_test(client):
|
||||||
|
room = await rooms_controller.add(
|
||||||
|
name="room-webhook-test",
|
||||||
|
user_id="owner-y",
|
||||||
|
zulip_auto_post=False,
|
||||||
|
zulip_stream="",
|
||||||
|
zulip_topic="",
|
||||||
|
is_locked=False,
|
||||||
|
room_mode="normal",
|
||||||
|
recording_type="cloud",
|
||||||
|
recording_trigger="automatic-2nd-participant",
|
||||||
|
is_shared=False,
|
||||||
|
webhook_url="http://localhost.invalid/webhook",
|
||||||
|
webhook_secret="secret",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Anonymous caller
|
||||||
|
resp = await client.post(f"/rooms/{room.id}/webhook/test")
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_anonymous_cannot_create_room(client):
|
||||||
|
payload = {
|
||||||
|
"name": "room-create-auth-required",
|
||||||
|
"zulip_auto_post": False,
|
||||||
|
"zulip_stream": "",
|
||||||
|
"zulip_topic": "",
|
||||||
|
"is_locked": False,
|
||||||
|
"room_mode": "normal",
|
||||||
|
"recording_type": "cloud",
|
||||||
|
"recording_trigger": "automatic-2nd-participant",
|
||||||
|
"is_shared": False,
|
||||||
|
"webhook_url": "",
|
||||||
|
"webhook_secret": "",
|
||||||
|
}
|
||||||
|
resp = await client.post("/rooms", json=payload)
|
||||||
|
assert resp.status_code == 401, resp.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_list_search_401_when_public_mode_false(client, monkeypatch):
|
||||||
|
monkeypatch.setattr(settings, "PUBLIC_MODE", False)
|
||||||
|
|
||||||
|
resp = await client.get("/transcripts")
|
||||||
|
assert resp.status_code == 401
|
||||||
|
|
||||||
|
resp = await client.get("/transcripts/search", params={"q": "hello"})
|
||||||
|
assert resp.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_audio_mp3_requires_token_for_owned_transcript(
|
||||||
|
client, tmpdir, monkeypatch
|
||||||
|
):
|
||||||
|
# Use temp data dir
|
||||||
|
monkeypatch.setattr(settings, "DATA_DIR", Path(tmpdir).as_posix())
|
||||||
|
|
||||||
|
# Create owner transcript and attach a local mp3
|
||||||
|
t = await transcripts_controller.add(
|
||||||
|
name="owned-audio",
|
||||||
|
source_kind=SourceKind.LIVE,
|
||||||
|
user_id="owner-a",
|
||||||
|
share_mode="private",
|
||||||
|
)
|
||||||
|
|
||||||
|
tr = await transcripts_controller.get_by_id(t.id)
|
||||||
|
await transcripts_controller.update(tr, {"status": "ended"})
|
||||||
|
|
||||||
|
# copy fixture audio to transcript path
|
||||||
|
audio_path = Path(__file__).parent / "records" / "test_mathieu_hello.mp3"
|
||||||
|
tr.audio_mp3_filename.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
shutil.copy(audio_path, tr.audio_mp3_filename)
|
||||||
|
|
||||||
|
# Anonymous GET without token should be 403 or 404 depending on access; we call mp3
|
||||||
|
resp = await client.get(f"/transcripts/{t.id}/audio/mp3")
|
||||||
|
assert resp.status_code == 403
|
||||||
|
|
||||||
|
# With token should succeed
|
||||||
|
token = create_access_token(
|
||||||
|
{"sub": tr.user_id}, expires_delta=__import__("datetime").timedelta(minutes=15)
|
||||||
|
)
|
||||||
|
resp2 = await client.get(f"/transcripts/{t.id}/audio/mp3", params={"token": token})
|
||||||
|
assert resp2.status_code == 200
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
@@ -19,7 +17,7 @@ async def test_transcript_create(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_get_update_name(client):
|
async def test_transcript_get_update_name(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["name"] == "test"
|
assert response.json()["name"] == "test"
|
||||||
@@ -40,7 +38,7 @@ async def test_transcript_get_update_name(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_get_update_locked(client):
|
async def test_transcript_get_update_locked(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["locked"] is False
|
assert response.json()["locked"] is False
|
||||||
@@ -61,7 +59,7 @@ async def test_transcript_get_update_locked(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_get_update_summary(client):
|
async def test_transcript_get_update_summary(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["long_summary"] is None
|
assert response.json()["long_summary"] is None
|
||||||
@@ -89,7 +87,7 @@ async def test_transcript_get_update_summary(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_get_update_title(client):
|
async def test_transcript_get_update_title(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["title"] is None
|
assert response.json()["title"] is None
|
||||||
@@ -127,56 +125,6 @@ async def test_transcripts_list_anonymous(client):
|
|||||||
settings.PUBLIC_MODE = False
|
settings.PUBLIC_MODE = False
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
|
||||||
async def authenticated_client_ctx():
|
|
||||||
from reflector.app import app
|
|
||||||
from reflector.auth import current_user, current_user_optional
|
|
||||||
|
|
||||||
app.dependency_overrides[current_user] = lambda: {
|
|
||||||
"sub": "randomuserid",
|
|
||||||
"email": "test@mail.com",
|
|
||||||
}
|
|
||||||
app.dependency_overrides[current_user_optional] = lambda: {
|
|
||||||
"sub": "randomuserid",
|
|
||||||
"email": "test@mail.com",
|
|
||||||
}
|
|
||||||
yield
|
|
||||||
del app.dependency_overrides[current_user]
|
|
||||||
del app.dependency_overrides[current_user_optional]
|
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
|
||||||
async def authenticated_client2_ctx():
|
|
||||||
from reflector.app import app
|
|
||||||
from reflector.auth import current_user, current_user_optional
|
|
||||||
|
|
||||||
app.dependency_overrides[current_user] = lambda: {
|
|
||||||
"sub": "randomuserid2",
|
|
||||||
"email": "test@mail.com",
|
|
||||||
}
|
|
||||||
app.dependency_overrides[current_user_optional] = lambda: {
|
|
||||||
"sub": "randomuserid2",
|
|
||||||
"email": "test@mail.com",
|
|
||||||
}
|
|
||||||
yield
|
|
||||||
del app.dependency_overrides[current_user]
|
|
||||||
del app.dependency_overrides[current_user_optional]
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def authenticated_client():
|
|
||||||
async with authenticated_client_ctx():
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def authenticated_client2():
|
|
||||||
async with authenticated_client2_ctx():
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcripts_list_authenticated(authenticated_client, client):
|
async def test_transcripts_list_authenticated(authenticated_client, client):
|
||||||
# XXX this test is a bit fragile, as it depends on the storage which
|
# XXX this test is a bit fragile, as it depends on the storage which
|
||||||
@@ -199,7 +147,7 @@ async def test_transcripts_list_authenticated(authenticated_client, client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_delete(client):
|
async def test_transcript_delete(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "testdel1"})
|
response = await client.post("/transcripts", json={"name": "testdel1"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["name"] == "testdel1"
|
assert response.json()["name"] == "testdel1"
|
||||||
@@ -214,7 +162,7 @@ async def test_transcript_delete(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_mark_reviewed(client):
|
async def test_transcript_mark_reviewed(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["name"] == "test"
|
assert response.json()["name"] == "test"
|
||||||
|
|||||||
@@ -111,7 +111,9 @@ async def test_transcript_audio_download_range_with_seek(
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_delete_with_audio(fake_transcript, client):
|
async def test_transcript_delete_with_audio(
|
||||||
|
authenticated_client, fake_transcript, client
|
||||||
|
):
|
||||||
response = await client.delete(f"/transcripts/{fake_transcript.id}")
|
response = await client.delete(f"/transcripts/{fake_transcript.id}")
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["status"] == "ok"
|
assert response.json()["status"] == "ok"
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import pytest
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_participants(client):
|
async def test_transcript_participants(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["participants"] == []
|
assert response.json()["participants"] == []
|
||||||
@@ -39,7 +39,7 @@ async def test_transcript_participants(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_participants_same_speaker(client):
|
async def test_transcript_participants_same_speaker(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["participants"] == []
|
assert response.json()["participants"] == []
|
||||||
@@ -62,7 +62,7 @@ async def test_transcript_participants_same_speaker(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_participants_update_name(client):
|
async def test_transcript_participants_update_name(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["participants"] == []
|
assert response.json()["participants"] == []
|
||||||
@@ -100,7 +100,7 @@ async def test_transcript_participants_update_name(client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_participants_update_speaker(client):
|
async def test_transcript_participants_update_speaker(authenticated_client, client):
|
||||||
response = await client.post("/transcripts", json={"name": "test"})
|
response = await client.post("/transcripts", json={"name": "test"})
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.json()["participants"] == []
|
assert response.json()["participants"] == []
|
||||||
|
|||||||
@@ -2,7 +2,9 @@ import pytest
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_reassign_speaker(fake_transcript_with_topics, client):
|
async def test_transcript_reassign_speaker(
|
||||||
|
authenticated_client, fake_transcript_with_topics, client
|
||||||
|
):
|
||||||
transcript_id = fake_transcript_with_topics.id
|
transcript_id = fake_transcript_with_topics.id
|
||||||
|
|
||||||
# check the transcript exists
|
# check the transcript exists
|
||||||
@@ -114,7 +116,9 @@ async def test_transcript_reassign_speaker(fake_transcript_with_topics, client):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_merge_speaker(fake_transcript_with_topics, client):
|
async def test_transcript_merge_speaker(
|
||||||
|
authenticated_client, fake_transcript_with_topics, client
|
||||||
|
):
|
||||||
transcript_id = fake_transcript_with_topics.id
|
transcript_id = fake_transcript_with_topics.id
|
||||||
|
|
||||||
# check the transcript exists
|
# check the transcript exists
|
||||||
@@ -181,7 +185,7 @@ async def test_transcript_merge_speaker(fake_transcript_with_topics, client):
|
|||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_reassign_with_participant(
|
async def test_transcript_reassign_with_participant(
|
||||||
fake_transcript_with_topics, client
|
authenticated_client, fake_transcript_with_topics, client
|
||||||
):
|
):
|
||||||
transcript_id = fake_transcript_with_topics.id
|
transcript_id = fake_transcript_with_topics.id
|
||||||
|
|
||||||
@@ -347,7 +351,9 @@ async def test_transcript_reassign_with_participant(
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_transcript_reassign_edge_cases(fake_transcript_with_topics, client):
|
async def test_transcript_reassign_edge_cases(
|
||||||
|
authenticated_client, fake_transcript_with_topics, client
|
||||||
|
):
|
||||||
transcript_id = fake_transcript_with_topics.id
|
transcript_id = fake_transcript_with_topics.id
|
||||||
|
|
||||||
# check the transcript exists
|
# check the transcript exists
|
||||||
|
|||||||
70
server/tests/test_user_api_keys.py
Normal file
70
server/tests/test_user_api_keys.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from reflector.db.user_api_keys import user_api_keys_controller
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_api_key_creation_and_verification():
|
||||||
|
api_key_model, plaintext = await user_api_keys_controller.create_key(
|
||||||
|
user_id="test_user",
|
||||||
|
name="Test API Key",
|
||||||
|
)
|
||||||
|
|
||||||
|
verified = await user_api_keys_controller.verify_key(plaintext)
|
||||||
|
assert verified is not None
|
||||||
|
assert verified.user_id == "test_user"
|
||||||
|
assert verified.name == "Test API Key"
|
||||||
|
|
||||||
|
invalid = await user_api_keys_controller.verify_key("fake_key")
|
||||||
|
assert invalid is None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_api_key_hashing():
|
||||||
|
_, plaintext = await user_api_keys_controller.create_key(
|
||||||
|
user_id="test_user_2",
|
||||||
|
)
|
||||||
|
|
||||||
|
api_keys = await user_api_keys_controller.list_by_user_id("test_user_2")
|
||||||
|
assert len(api_keys) == 1
|
||||||
|
assert api_keys[0].key_hash != plaintext
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_generate_api_key_uniqueness():
|
||||||
|
key1 = user_api_keys_controller.generate_key()
|
||||||
|
key2 = user_api_keys_controller.generate_key()
|
||||||
|
assert key1 != key2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_hash_api_key_deterministic():
|
||||||
|
key = "test_key_123"
|
||||||
|
hash1 = user_api_keys_controller.hash_key(key)
|
||||||
|
hash2 = user_api_keys_controller.hash_key(key)
|
||||||
|
assert hash1 == hash2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_by_user_id_empty():
|
||||||
|
api_keys = await user_api_keys_controller.list_by_user_id("nonexistent_user")
|
||||||
|
assert api_keys == []
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_by_user_id_multiple():
|
||||||
|
user_id = "multi_key_user"
|
||||||
|
|
||||||
|
_, plaintext1 = await user_api_keys_controller.create_key(
|
||||||
|
user_id=user_id,
|
||||||
|
name="API Key 1",
|
||||||
|
)
|
||||||
|
_, plaintext2 = await user_api_keys_controller.create_key(
|
||||||
|
user_id=user_id,
|
||||||
|
name="API Key 2",
|
||||||
|
)
|
||||||
|
|
||||||
|
api_keys = await user_api_keys_controller.list_by_user_id(user_id)
|
||||||
|
assert len(api_keys) == 2
|
||||||
|
names = {k.name for k in api_keys}
|
||||||
|
assert names == {"API Key 1", "API Key 2"}
|
||||||
156
server/tests/test_user_websocket_auth.py
Normal file
156
server/tests/test_user_websocket_auth.py
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
import asyncio
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx_ws import aconnect_ws
|
||||||
|
from uvicorn import Config, Server
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def appserver_ws_user(setup_database):
|
||||||
|
from reflector.app import app
|
||||||
|
from reflector.db import get_database
|
||||||
|
|
||||||
|
host = "127.0.0.1"
|
||||||
|
port = 1257
|
||||||
|
server_started = threading.Event()
|
||||||
|
server_exception = None
|
||||||
|
server_instance = None
|
||||||
|
|
||||||
|
def run_server():
|
||||||
|
nonlocal server_exception, server_instance
|
||||||
|
try:
|
||||||
|
loop = asyncio.new_event_loop()
|
||||||
|
asyncio.set_event_loop(loop)
|
||||||
|
|
||||||
|
config = Config(app=app, host=host, port=port, loop=loop)
|
||||||
|
server_instance = Server(config)
|
||||||
|
|
||||||
|
async def start_server():
|
||||||
|
database = get_database()
|
||||||
|
await database.connect()
|
||||||
|
try:
|
||||||
|
await server_instance.serve()
|
||||||
|
finally:
|
||||||
|
await database.disconnect()
|
||||||
|
|
||||||
|
server_started.set()
|
||||||
|
loop.run_until_complete(start_server())
|
||||||
|
except Exception as e:
|
||||||
|
server_exception = e
|
||||||
|
server_started.set()
|
||||||
|
finally:
|
||||||
|
loop.close()
|
||||||
|
|
||||||
|
server_thread = threading.Thread(target=run_server, daemon=True)
|
||||||
|
server_thread.start()
|
||||||
|
|
||||||
|
server_started.wait(timeout=30)
|
||||||
|
if server_exception:
|
||||||
|
raise server_exception
|
||||||
|
|
||||||
|
time.sleep(0.5)
|
||||||
|
|
||||||
|
yield host, port
|
||||||
|
|
||||||
|
if server_instance:
|
||||||
|
server_instance.should_exit = True
|
||||||
|
server_thread.join(timeout=30)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def patch_jwt_verification(monkeypatch):
|
||||||
|
"""Patch JWT verification to accept HS256 tokens signed with SECRET_KEY for tests."""
|
||||||
|
from jose import jwt
|
||||||
|
|
||||||
|
from reflector.settings import settings
|
||||||
|
|
||||||
|
def _verify_token(self, token: str):
|
||||||
|
# Do not validate audience in tests
|
||||||
|
return jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"]) # type: ignore[arg-type]
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"reflector.auth.auth_jwt.JWTAuth.verify_token", _verify_token, raising=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_dummy_jwt(sub: str = "user123") -> str:
|
||||||
|
# Create a short HS256 JWT using the app secret to pass verification in tests
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
|
||||||
|
from jose import jwt
|
||||||
|
|
||||||
|
from reflector.settings import settings
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
"sub": sub,
|
||||||
|
"email": f"{sub}@example.com",
|
||||||
|
"exp": datetime.now(timezone.utc) + timedelta(minutes=5),
|
||||||
|
}
|
||||||
|
# Note: production uses RS256 public key verification; tests can sign with SECRET_KEY
|
||||||
|
return jwt.encode(payload, settings.SECRET_KEY, algorithm="HS256")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_user_ws_rejects_missing_subprotocol(appserver_ws_user):
|
||||||
|
host, port = appserver_ws_user
|
||||||
|
base_ws = f"http://{host}:{port}/v1/events"
|
||||||
|
# No subprotocol/header with token
|
||||||
|
with pytest.raises(Exception):
|
||||||
|
async with aconnect_ws(base_ws) as ws: # type: ignore
|
||||||
|
# Should close during handshake; if not, close explicitly
|
||||||
|
await ws.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_user_ws_rejects_invalid_token(appserver_ws_user):
|
||||||
|
host, port = appserver_ws_user
|
||||||
|
base_ws = f"http://{host}:{port}/v1/events"
|
||||||
|
|
||||||
|
# Send wrong token via WebSocket subprotocols
|
||||||
|
protocols = ["bearer", "totally-invalid-token"]
|
||||||
|
with pytest.raises(Exception):
|
||||||
|
async with aconnect_ws(base_ws, subprotocols=protocols) as ws: # type: ignore
|
||||||
|
await ws.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_user_ws_accepts_valid_token_and_receives_events(appserver_ws_user):
|
||||||
|
host, port = appserver_ws_user
|
||||||
|
base_ws = f"http://{host}:{port}/v1/events"
|
||||||
|
|
||||||
|
token = _make_dummy_jwt("user-abc")
|
||||||
|
subprotocols = ["bearer", token]
|
||||||
|
|
||||||
|
# Connect and then trigger an event via HTTP create
|
||||||
|
async with aconnect_ws(base_ws, subprotocols=subprotocols) as ws:
|
||||||
|
# Emit an event to the user's room via a standard HTTP action
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
from reflector.app import app
|
||||||
|
from reflector.auth import current_user, current_user_optional
|
||||||
|
|
||||||
|
# Override auth dependencies so HTTP request is performed as the same user
|
||||||
|
app.dependency_overrides[current_user] = lambda: {
|
||||||
|
"sub": "user-abc",
|
||||||
|
"email": "user-abc@example.com",
|
||||||
|
}
|
||||||
|
app.dependency_overrides[current_user_optional] = lambda: {
|
||||||
|
"sub": "user-abc",
|
||||||
|
"email": "user-abc@example.com",
|
||||||
|
}
|
||||||
|
|
||||||
|
async with AsyncClient(app=app, base_url=f"http://{host}:{port}/v1") as ac:
|
||||||
|
# Create a transcript as this user so that the server publishes TRANSCRIPT_CREATED to user room
|
||||||
|
resp = await ac.post("/transcripts", json={"name": "WS Test"})
|
||||||
|
assert resp.status_code == 200
|
||||||
|
|
||||||
|
# Receive the published event
|
||||||
|
msg = await ws.receive_json()
|
||||||
|
assert msg["event"] == "TRANSCRIPT_CREATED"
|
||||||
|
assert "id" in msg["data"]
|
||||||
|
|
||||||
|
# Clean overrides
|
||||||
|
del app.dependency_overrides[current_user]
|
||||||
|
del app.dependency_overrides[current_user_optional]
|
||||||
14 www/.dockerignore Normal file
@@ -0,0 +1,14 @@
+.env
+.env.*
+.env.local
+.env.development
+.env.production
+node_modules
+.next
+.git
+.gitignore
+*.md
+.DS_Store
+coverage
+.pnpm-store
+*.log
@@ -1,9 +1,5 @@
-# Environment
-ENVIRONMENT=development
-NEXT_PUBLIC_ENV=development
-
 # Site Configuration
-NEXT_PUBLIC_SITE_URL=http://localhost:3000
+SITE_URL=http://localhost:3000

 # Nextauth envs
 # not used in app code but in lib code
@@ -18,16 +14,16 @@ AUTHENTIK_CLIENT_ID=your-client-id-here
 AUTHENTIK_CLIENT_SECRET=your-client-secret-here

 # Feature Flags
-# NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN=true
-# NEXT_PUBLIC_FEATURE_PRIVACY=false
-# NEXT_PUBLIC_FEATURE_BROWSE=true
-# NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP=true
-# NEXT_PUBLIC_FEATURE_ROOMS=true
+# FEATURE_REQUIRE_LOGIN=true
+# FEATURE_PRIVACY=false
+# FEATURE_BROWSE=true
+# FEATURE_SEND_TO_ZULIP=true
+# FEATURE_ROOMS=true

 # API URLs
-NEXT_PUBLIC_API_URL=http://127.0.0.1:1250
-NEXT_PUBLIC_WEBSOCKET_URL=ws://127.0.0.1:1250
-NEXT_PUBLIC_AUTH_CALLBACK_URL=http://localhost:3000/auth-callback
+API_URL=http://127.0.0.1:1250
+WEBSOCKET_URL=ws://127.0.0.1:1250
+AUTH_CALLBACK_URL=http://localhost:3000/auth-callback

 # Sentry
 # SENTRY_DSN=https://your-dsn@sentry.io/project-id
81 www/DOCKER_README.md Normal file
@@ -0,0 +1,81 @@
+# Docker Production Build Guide
+
+## Overview
+
+The Docker image builds without any environment variables and requires all configuration to be provided at runtime.
+
+## Environment Variables (ALL Runtime)
+
+### Required Runtime Variables
+
+```bash
+API_URL          # Backend API URL (e.g., https://api.example.com)
+WEBSOCKET_URL    # WebSocket URL (e.g., wss://api.example.com)
+NEXTAUTH_URL     # NextAuth base URL (e.g., https://app.example.com)
+NEXTAUTH_SECRET  # Random secret for NextAuth (generate with: openssl rand -base64 32)
+KV_URL           # Redis URL (e.g., redis://redis:6379)
+```
+
+### Optional Runtime Variables
+
+```bash
+SITE_URL                     # Frontend URL (defaults to NEXTAUTH_URL)
+
+AUTHENTIK_ISSUER             # OAuth issuer URL
+AUTHENTIK_CLIENT_ID          # OAuth client ID
+AUTHENTIK_CLIENT_SECRET      # OAuth client secret
+AUTHENTIK_REFRESH_TOKEN_URL  # OAuth token refresh URL
+
+FEATURE_REQUIRE_LOGIN=false  # Require authentication
+FEATURE_PRIVACY=true         # Enable privacy features
+FEATURE_BROWSE=true          # Enable browsing features
+FEATURE_SEND_TO_ZULIP=false  # Enable Zulip integration
+FEATURE_ROOMS=true           # Enable rooms feature
+
+SENTRY_DSN                   # Sentry error tracking
+AUTH_CALLBACK_URL            # OAuth callback URL
+```
+
+## Building the Image
+
+### Option 1: Using Docker Compose
+
+1. Build the image (no environment variables needed):
+
+```bash
+docker compose -f docker-compose.prod.yml build
+```
+
+2. Create a `.env` file with runtime variables
+
+3. Run with environment variables:
+
+```bash
+docker compose -f docker-compose.prod.yml --env-file .env up -d
+```
+
+### Option 2: Using Docker CLI
+
+1. Build the image (no build args):
+
+```bash
+docker build -t reflector-frontend:latest ./www
+```
+
+2. Run with environment variables:
+
+```bash
+docker run -d \
+  -p 3000:3000 \
+  -e API_URL=https://api.example.com \
+  -e WEBSOCKET_URL=wss://api.example.com \
+  -e NEXTAUTH_URL=https://app.example.com \
+  -e NEXTAUTH_SECRET=your-secret \
+  -e KV_URL=redis://redis:6379 \
+  -e AUTHENTIK_ISSUER=https://auth.example.com/application/o/reflector \
+  -e AUTHENTIK_CLIENT_ID=your-client-id \
+  -e AUTHENTIK_CLIENT_SECRET=your-client-secret \
+  -e AUTHENTIK_REFRESH_TOKEN_URL=https://auth.example.com/application/o/token/ \
+  -e FEATURE_REQUIRE_LOGIN=true \
+  reflector-frontend:latest
+```
@@ -24,7 +24,8 @@ COPY --link . .
 ENV NEXT_TELEMETRY_DISABLED 1

 # If using npm comment out above and use below instead
-RUN pnpm build
+# next.js has the feature of excluding build step planned https://github.com/vercel/next.js/discussions/46544
+RUN pnpm build-production
 # RUN npm run build

 # Production image, copy all the files and run next
@@ -51,6 +52,10 @@ USER nextjs
 EXPOSE 3000

 ENV PORT 3000
-ENV HOSTNAME localhost
+ENV HOSTNAME 0.0.0.0

+HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
+  CMD wget --no-verbose --tries=1 --spider http://127.0.0.1:3000/api/health \
+  || exit 1
+
 CMD ["node", "server.js"]
|||||||
@@ -200,7 +200,13 @@ export default function ICSSettings({
|
|||||||
<HStack gap={0} position="relative" width="100%">
|
<HStack gap={0} position="relative" width="100%">
|
||||||
<Input
|
<Input
|
||||||
ref={roomUrlInputRef}
|
ref={roomUrlInputRef}
|
||||||
value={roomAbsoluteUrl(parseNonEmptyString(roomName))}
|
value={roomAbsoluteUrl(
|
||||||
|
parseNonEmptyString(
|
||||||
|
roomName,
|
||||||
|
true,
|
||||||
|
"panic! roomName is required",
|
||||||
|
),
|
||||||
|
)}
|
||||||
readOnly
|
readOnly
|
||||||
onClick={handleRoomUrlClick}
|
onClick={handleRoomUrlClick}
|
||||||
cursor="pointer"
|
cursor="pointer"
|
||||||
|
|||||||
@@ -274,15 +274,31 @@ export function RoomTable({
|
|||||||
<IconButton
|
<IconButton
|
||||||
aria-label="Force sync calendar"
|
aria-label="Force sync calendar"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
handleForceSync(parseNonEmptyString(room.name))
|
handleForceSync(
|
||||||
|
parseNonEmptyString(
|
||||||
|
room.name,
|
||||||
|
true,
|
||||||
|
"panic! room.name is required",
|
||||||
|
),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
size="sm"
|
size="sm"
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
disabled={syncingRooms.has(
|
disabled={syncingRooms.has(
|
||||||
parseNonEmptyString(room.name),
|
parseNonEmptyString(
|
||||||
|
room.name,
|
||||||
|
true,
|
||||||
|
"panic! room.name is required",
|
||||||
|
),
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{syncingRooms.has(parseNonEmptyString(room.name)) ? (
|
{syncingRooms.has(
|
||||||
|
parseNonEmptyString(
|
||||||
|
room.name,
|
||||||
|
true,
|
||||||
|
"panic! room.name is required",
|
||||||
|
),
|
||||||
|
) ? (
|
||||||
<Spinner size="sm" />
|
<Spinner size="sm" />
|
||||||
) : (
|
) : (
|
||||||
<CalendarSyncIcon />
|
<CalendarSyncIcon />
|
||||||
@@ -297,7 +313,13 @@ export function RoomTable({
|
|||||||
<IconButton
|
<IconButton
|
||||||
aria-label="Copy URL"
|
aria-label="Copy URL"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
onCopyUrl(parseNonEmptyString(room.name))
|
onCopyUrl(
|
||||||
|
parseNonEmptyString(
|
||||||
|
room.name,
|
||||||
|
true,
|
||||||
|
"panic! room.name is required",
|
||||||
|
),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
size="sm"
|
size="sm"
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
|
|||||||
@@ -833,7 +833,13 @@ export default function RoomsList() {
|
|||||||
<Field.Root>
|
<Field.Root>
|
||||||
<ICSSettings
|
<ICSSettings
|
||||||
roomName={
|
roomName={
|
||||||
room.name ? parseNonEmptyString(room.name) : null
|
room.name
|
||||||
|
? parseNonEmptyString(
|
||||||
|
room.name,
|
||||||
|
true,
|
||||||
|
"panic! room.name required",
|
||||||
|
)
|
||||||
|
: null
|
||||||
}
|
}
|
||||||
icsUrl={room.icsUrl}
|
icsUrl={room.icsUrl}
|
||||||
icsEnabled={room.icsEnabled}
|
icsEnabled={room.icsEnabled}
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {

   useEffect(() => {
     document.onkeyup = (e) => {
-      if (e.key === "a" && process.env.NEXT_PUBLIC_ENV === "development") {
+      if (e.key === "a" && process.env.NODE_ENV === "development") {
         const segments: GetTranscriptSegmentTopic[] = [
           {
             speaker: 1,
@@ -201,7 +201,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {

         setFinalSummary({ summary: "This is the final summary" });
       }
-      if (e.key === "z" && process.env.NEXT_PUBLIC_ENV === "development") {
+      if (e.key === "z" && process.env.NODE_ENV === "development") {
         setTranscriptTextLive(
           "This text is in English, and it is a pretty long sentence to test the limits",
         );
|||||||
@@ -261,7 +261,11 @@ export default function Room(details: RoomDetails) {
|
|||||||
const params = use(details.params);
|
const params = use(details.params);
|
||||||
const wherebyLoaded = useWhereby();
|
const wherebyLoaded = useWhereby();
|
||||||
const wherebyRef = useRef<HTMLElement>(null);
|
const wherebyRef = useRef<HTMLElement>(null);
|
||||||
const roomName = parseNonEmptyString(params.roomName);
|
const roomName = parseNonEmptyString(
|
||||||
|
params.roomName,
|
||||||
|
true,
|
||||||
|
"panic! params.roomName is required",
|
||||||
|
);
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
const auth = useAuth();
|
const auth = useAuth();
|
||||||
const status = auth.status;
|
const status = auth.status;
|
||||||
@@ -308,7 +312,14 @@ export default function Room(details: RoomDetails) {
|
|||||||
|
|
||||||
const handleMeetingSelect = (selectedMeeting: Meeting) => {
|
const handleMeetingSelect = (selectedMeeting: Meeting) => {
|
||||||
router.push(
|
router.push(
|
||||||
roomMeetingUrl(roomName, parseNonEmptyString(selectedMeeting.id)),
|
roomMeetingUrl(
|
||||||
|
roomName,
|
||||||
|
parseNonEmptyString(
|
||||||
|
selectedMeeting.id,
|
||||||
|
true,
|
||||||
|
"panic! selectedMeeting.id is required",
|
||||||
|
),
|
||||||
|
),
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
38 www/app/api/health/route.ts Normal file
@@ -0,0 +1,38 @@
+import { NextResponse } from "next/server";
+
+export async function GET() {
+  const health = {
+    status: "healthy",
+    timestamp: new Date().toISOString(),
+    uptime: process.uptime(),
+    environment: process.env.NODE_ENV,
+    checks: {
+      redis: await checkRedis(),
+    },
+  };
+
+  const allHealthy = Object.values(health.checks).every((check) => check);
+
+  return NextResponse.json(health, {
+    status: allHealthy ? 200 : 503,
+  });
+}
+
+async function checkRedis(): Promise<boolean> {
+  try {
+    if (!process.env.KV_URL) {
+      return false;
+    }
+
+    const { tokenCacheRedis } = await import("../../lib/redisClient");
+    const testKey = `health:check:${Date.now()}`;
+    await tokenCacheRedis.setex(testKey, 10, "OK");
+    const value = await tokenCacheRedis.get(testKey);
+    await tokenCacheRedis.del(testKey);
+
+    return value === "OK";
+  } catch (error) {
+    console.error("Redis health check failed:", error);
+    return false;
+  }
+}
@@ -6,7 +6,10 @@ import ErrorMessage from "./(errors)/errorMessage";
 import { RecordingConsentProvider } from "./recordingConsentContext";
 import { ErrorBoundary } from "@sentry/nextjs";
 import { Providers } from "./providers";
-import { assertExistsAndNonEmptyString } from "./lib/utils";
+import { getNextEnvVar } from "./lib/nextBuild";
+import { getClientEnv } from "./lib/clientEnv";
+
+export const dynamic = "force-dynamic";

 const poppins = Poppins({
   subsets: ["latin"],
@@ -21,13 +24,11 @@ export const viewport: Viewport = {
   maximumScale: 1,
 };

-const NEXT_PUBLIC_SITE_URL = assertExistsAndNonEmptyString(
-  process.env.NEXT_PUBLIC_SITE_URL,
-  "NEXT_PUBLIC_SITE_URL required",
-);
+const SITE_URL = getNextEnvVar("SITE_URL");
+const env = getClientEnv();

 export const metadata: Metadata = {
-  metadataBase: new URL(NEXT_PUBLIC_SITE_URL),
+  metadataBase: new URL(SITE_URL),
   title: {
     template: "%s – Reflector",
     default: "Reflector - AI-Powered Meeting Transcriptions by Monadical",
@@ -74,15 +75,16 @@ export default async function RootLayout({
 }) {
   return (
     <html lang="en" className={poppins.className} suppressHydrationWarning>
-      <body className={"h-[100svh] w-[100svw] overflow-x-hidden relative"}>
-        <RecordingConsentProvider>
-          <ErrorBoundary fallback={<p>"something went really wrong"</p>}>
-            <ErrorProvider>
-              <ErrorMessage />
-              <Providers>{children}</Providers>
-            </ErrorProvider>
-          </ErrorBoundary>
-        </RecordingConsentProvider>
+      <body
+        className={"h-[100svh] w-[100svw] overflow-x-hidden relative"}
+        data-env={JSON.stringify(env)}
+      >
+        <ErrorBoundary fallback={<p>"something went really wrong"</p>}>
+          <ErrorProvider>
+            <ErrorMessage />
+            <Providers>{children}</Providers>
+          </ErrorProvider>
+        </ErrorBoundary>
       </body>
     </html>
   );
180
www/app/lib/UserEventsProvider.tsx
Normal file
@@ -0,0 +1,180 @@
+"use client";
+
+import React, { useEffect, useRef } from "react";
+import { useQueryClient } from "@tanstack/react-query";
+import { WEBSOCKET_URL } from "./apiClient";
+import { useAuth } from "./AuthProvider";
+import { z } from "zod";
+import { invalidateTranscriptLists, TRANSCRIPT_SEARCH_URL } from "./apiHooks";
+
+const UserEvent = z.object({
+  event: z.string(),
+});
+
+type UserEvent = z.TypeOf<typeof UserEvent>;
+
+class UserEventsStore {
+  private socket: WebSocket | null = null;
+  private listeners: Set<(event: MessageEvent) => void> = new Set();
+  private closeTimeoutId: number | null = null;
+  private isConnecting = false;
+
+  ensureConnection(url: string, subprotocols?: string[]) {
+    if (typeof window === "undefined") return;
+    if (this.closeTimeoutId !== null) {
+      clearTimeout(this.closeTimeoutId);
+      this.closeTimeoutId = null;
+    }
+    if (this.isConnecting) return;
+    if (
+      this.socket &&
+      (this.socket.readyState === WebSocket.OPEN ||
+        this.socket.readyState === WebSocket.CONNECTING)
+    ) {
+      return;
+    }
+    this.isConnecting = true;
+    const ws = new WebSocket(url, subprotocols || []);
+    this.socket = ws;
+    ws.onmessage = (event: MessageEvent) => {
+      this.listeners.forEach((listener) => {
+        try {
+          listener(event);
+        } catch (err) {
+          console.error("UserEvents listener error", err);
+        }
+      });
+    };
+    ws.onopen = () => {
+      if (this.socket === ws) this.isConnecting = false;
+    };
+    ws.onclose = () => {
+      if (this.socket === ws) {
+        this.socket = null;
+        this.isConnecting = false;
+      }
+    };
+    ws.onerror = () => {
+      if (this.socket === ws) this.isConnecting = false;
+    };
+  }
+
+  subscribe(listener: (event: MessageEvent) => void): () => void {
+    this.listeners.add(listener);
+    if (this.closeTimeoutId !== null) {
+      clearTimeout(this.closeTimeoutId);
+      this.closeTimeoutId = null;
+    }
+    return () => {
+      this.listeners.delete(listener);
+      if (this.listeners.size === 0) {
+        this.closeTimeoutId = window.setTimeout(() => {
+          if (this.socket) {
+            try {
+              this.socket.close();
+            } catch (err) {
+              console.warn("Error closing user events socket", err);
+            }
+          }
+          this.socket = null;
+          this.closeTimeoutId = null;
+        }, 1000);
+      }
+    };
+  }
+}
+
+const sharedStore = new UserEventsStore();
+
+export function UserEventsProvider({
+  children,
+}: {
+  children: React.ReactNode;
+}) {
+  const auth = useAuth();
+  const queryClient = useQueryClient();
+  const tokenRef = useRef<string | null>(null);
+  const detachRef = useRef<(() => void) | null>(null);
+
+  useEffect(() => {
+    // Only tear down when the user is truly unauthenticated
+    if (auth.status === "unauthenticated") {
+      if (detachRef.current) {
+        try {
+          detachRef.current();
+        } catch (err) {
+          console.warn("Error detaching UserEvents listener", err);
+        }
+        detachRef.current = null;
+      }
+      tokenRef.current = null;
+      return;
+    }
+
+    // During loading/refreshing, keep the existing connection intact
+    if (auth.status !== "authenticated") {
+      return;
+    }
+
+    // Authenticated: pin the initial token for the lifetime of this WS connection
+    if (!tokenRef.current && auth.accessToken) {
+      tokenRef.current = auth.accessToken;
+    }
+    const pinnedToken = tokenRef.current;
+    const url = `${WEBSOCKET_URL}/v1/events`;
+
+    // Ensure a single shared connection
+    sharedStore.ensureConnection(
+      url,
+      pinnedToken ? ["bearer", pinnedToken] : undefined,
+    );
+
+    // Subscribe once; avoid re-subscribing during transient status changes
+    if (!detachRef.current) {
+      const onMessage = (event: MessageEvent) => {
+        try {
+          const msg = UserEvent.parse(JSON.parse(event.data));
+          const eventName = msg.event;
+
+          const invalidateList = () => invalidateTranscriptLists(queryClient);
+
+          switch (eventName) {
+            case "TRANSCRIPT_CREATED":
+            case "TRANSCRIPT_DELETED":
+            case "TRANSCRIPT_STATUS":
+            case "TRANSCRIPT_FINAL_TITLE":
+            case "TRANSCRIPT_DURATION":
+              invalidateList().then(() => {});
+              break;
+
+            default:
+              // Ignore other content events for list updates
+              break;
+          }
+        } catch (err) {
+          console.warn("Invalid user event message", event.data);
+        }
+      };
+
+      const unsubscribe = sharedStore.subscribe(onMessage);
+      detachRef.current = unsubscribe;
+    }
+  }, [auth.status, queryClient]);
+
+  // On unmount, detach the listener and clear the pinned token
+  useEffect(() => {
+    return () => {
+      if (detachRef.current) {
+        try {
+          detachRef.current();
+        } catch (err) {
+          console.warn("Error detaching UserEvents listener on unmount", err);
+        }
+        detachRef.current = null;
+      }
+      tokenRef.current = null;
+    };
+  }, []);
+
+  return <>{children}</>;
+}
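Note: the provider above validates only the `event` field of each websocket frame; a sketch of a frame that triggers a transcript-list refetch (the event name comes from the switch above, any other fields would simply be ignored).

// sketch: what the server would send over /v1/events
const frame = JSON.stringify({ event: "TRANSCRIPT_CREATED" });
// UserEvent.parse(JSON.parse(frame)) succeeds, so the handler above calls
// invalidateTranscriptLists(queryClient) and the transcript search query refetches.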
@@ -3,21 +3,19 @@
 import createClient from "openapi-fetch";
 import type { paths } from "../reflector-api";
 import createFetchClient from "openapi-react-query";
-import { assertExistsAndNonEmptyString, parseNonEmptyString } from "./utils";
+import { parseNonEmptyString } from "./utils";
 import { isBuildPhase } from "./next";
 import { getSession } from "next-auth/react";
 import { assertExtendedToken } from "./types";
+import { getClientEnv } from "./clientEnv";

 export const API_URL = !isBuildPhase
-  ? assertExistsAndNonEmptyString(
-      process.env.NEXT_PUBLIC_API_URL,
-      "NEXT_PUBLIC_API_URL required",
-    )
+  ? getClientEnv().API_URL
   : "http://localhost";

-// TODO decide strict validation or not
-export const WEBSOCKET_URL =
-  process.env.NEXT_PUBLIC_WEBSOCKET_URL || "ws://127.0.0.1:1250";
+export const WEBSOCKET_URL = !isBuildPhase
+  ? getClientEnv().WEBSOCKET_URL || "ws://127.0.0.1:1250"
+  : "ws://localhost";

 export const client = createClient<paths>({
   baseUrl: API_URL,
@@ -44,7 +42,7 @@ client.use({
     if (token !== null) {
       request.headers.set(
         "Authorization",
-        `Bearer ${parseNonEmptyString(token)}`,
+        `Bearer ${parseNonEmptyString(token, true, "panic! token is required")}`,
       );
     }
     // XXX Only set Content-Type if not already set (FormData will set its own boundary)
@@ -2,7 +2,7 @@

 import { $api } from "./apiClient";
 import { useError } from "../(errors)/errorContext";
-import { useQueryClient } from "@tanstack/react-query";
+import { QueryClient, useQueryClient } from "@tanstack/react-query";
 import type { components } from "../reflector-api";
 import { useAuth } from "./AuthProvider";

@@ -40,6 +40,13 @@ export function useRoomsList(page: number = 1) {

 type SourceKind = components["schemas"]["SourceKind"];

+export const TRANSCRIPT_SEARCH_URL = "/v1/transcripts/search" as const;
+
+export const invalidateTranscriptLists = (queryClient: QueryClient) =>
+  queryClient.invalidateQueries({
+    queryKey: ["get", TRANSCRIPT_SEARCH_URL],
+  });
+
 export function useTranscriptsSearch(
   q: string = "",
   options: {
@@ -51,7 +58,7 @@ export function useTranscriptsSearch(
 ) {
   return $api.useQuery(
     "get",
-    "/v1/transcripts/search",
+    TRANSCRIPT_SEARCH_URL,
     {
       params: {
         query: {
@@ -76,7 +83,7 @@ export function useTranscriptDelete() {
   return $api.useMutation("delete", "/v1/transcripts/{transcript_id}", {
     onSuccess: () => {
       return queryClient.invalidateQueries({
-        queryKey: ["get", "/v1/transcripts/search"],
+        queryKey: ["get", TRANSCRIPT_SEARCH_URL],
       });
     },
     onError: (error) => {
@@ -613,7 +620,7 @@ export function useTranscriptCreate() {
   return $api.useMutation("post", "/v1/transcripts", {
     onSuccess: () => {
       return queryClient.invalidateQueries({
-        queryKey: ["get", "/v1/transcripts/search"],
+        queryKey: ["get", TRANSCRIPT_SEARCH_URL],
      });
     },
     onError: (error) => {
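Note: with TRANSCRIPT_SEARCH_URL and invalidateTranscriptLists exported, other code can refresh the transcript list without repeating the query key; a small sketch (the hook name is illustrative):

import { useQueryClient } from "@tanstack/react-query";
import { invalidateTranscriptLists } from "./apiHooks";

// sketch: any component can force a transcript-list refetch through the shared helper
export function useRefreshTranscriptList() {
  const queryClient = useQueryClient();
  return () => invalidateTranscriptLists(queryClient);
}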
@@ -18,26 +18,25 @@ import {
   deleteTokenCache,
 } from "./redisTokenCache";
 import { tokenCacheRedis, redlock } from "./redisClient";
-import { isBuildPhase } from "./next";
 import { sequenceThrows } from "./errorUtils";
 import { featureEnabled } from "./features";
+import { getNextEnvVar } from "./nextBuild";

 const TOKEN_CACHE_TTL = REFRESH_ACCESS_TOKEN_BEFORE;
-const getAuthentikClientId = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_CLIENT_ID,
-    "AUTHENTIK_CLIENT_ID required",
-  );
-const getAuthentikClientSecret = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_CLIENT_SECRET,
-    "AUTHENTIK_CLIENT_SECRET required",
-  );
+const getAuthentikClientId = () => getNextEnvVar("AUTHENTIK_CLIENT_ID");
+const getAuthentikClientSecret = () => getNextEnvVar("AUTHENTIK_CLIENT_SECRET");
 const getAuthentikRefreshTokenUrl = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_REFRESH_TOKEN_URL,
-    "AUTHENTIK_REFRESH_TOKEN_URL required",
-  );
+  getNextEnvVar("AUTHENTIK_REFRESH_TOKEN_URL");
+
+const getAuthentikIssuer = () => {
+  const stringUrl = getNextEnvVar("AUTHENTIK_ISSUER");
+  try {
+    new URL(stringUrl);
+  } catch (e) {
+    throw new Error("AUTHENTIK_ISSUER is not a valid URL: " + stringUrl);
+  }
+  return stringUrl;
+};

 export const authOptions = (): AuthOptions =>
   featureEnabled("requireLogin")
@@ -45,16 +44,17 @@ export const authOptions = (): AuthOptions =>
         providers: [
           AuthentikProvider({
             ...(() => {
-              const [clientId, clientSecret] = sequenceThrows(
+              const [clientId, clientSecret, issuer] = sequenceThrows(
                 getAuthentikClientId,
                 getAuthentikClientSecret,
+                getAuthentikIssuer,
               );
               return {
                 clientId,
                 clientSecret,
+                issuer,
               };
             })(),
-            issuer: process.env.AUTHENTIK_ISSUER,
             authorization: {
               params: {
                 scope: "openid email profile offline_access",
91
www/app/lib/clientEnv.ts
Normal file
@@ -0,0 +1,91 @@
+import {
+  assertExists,
+  assertExistsAndNonEmptyString,
+  NonEmptyString,
+  parseNonEmptyString,
+} from "./utils";
+import { isBuildPhase } from "./next";
+import { getNextEnvVar } from "./nextBuild";
+
+export const FEATURE_REQUIRE_LOGIN_ENV_NAME = "FEATURE_REQUIRE_LOGIN" as const;
+export const FEATURE_PRIVACY_ENV_NAME = "FEATURE_PRIVACY" as const;
+export const FEATURE_BROWSE_ENV_NAME = "FEATURE_BROWSE" as const;
+export const FEATURE_SEND_TO_ZULIP_ENV_NAME = "FEATURE_SEND_TO_ZULIP" as const;
+export const FEATURE_ROOMS_ENV_NAME = "FEATURE_ROOMS" as const;
+
+const FEATURE_ENV_NAMES = [
+  FEATURE_REQUIRE_LOGIN_ENV_NAME,
+  FEATURE_PRIVACY_ENV_NAME,
+  FEATURE_BROWSE_ENV_NAME,
+  FEATURE_SEND_TO_ZULIP_ENV_NAME,
+  FEATURE_ROOMS_ENV_NAME,
+] as const;
+
+export type FeatureEnvName = (typeof FEATURE_ENV_NAMES)[number];
+
+export type EnvFeaturePartial = {
+  [key in FeatureEnvName]: boolean | null;
+};
+
+// CONTRACT: isomorphic with JSON.stringify
+export type ClientEnvCommon = EnvFeaturePartial & {
+  API_URL: NonEmptyString;
+  WEBSOCKET_URL: NonEmptyString | null;
+};
+
+let clientEnv: ClientEnvCommon | null = null;
+export const getClientEnvClient = (): ClientEnvCommon => {
+  if (typeof window === "undefined") {
+    throw new Error(
+      "getClientEnv() called during SSR - this should only be called in browser environment",
+    );
+  }
+  if (clientEnv) return clientEnv;
+  clientEnv = assertExists(
+    JSON.parse(
+      assertExistsAndNonEmptyString(
+        document.body.dataset.env,
+        "document.body.dataset.env is missing",
+      ),
+    ),
+    "document.body.dataset.env is parsed to nullish",
+  );
+  return clientEnv!;
+};
+
+const parseBooleanString = (str: string | undefined): boolean | null => {
+  if (str === undefined) return null;
+  return str === "true";
+};
+
+export const getClientEnvServer = (): ClientEnvCommon => {
+  if (typeof window !== "undefined") {
+    throw new Error(
+      "getClientEnv() not called during SSR - this should only be called in server environment",
+    );
+  }
+  if (clientEnv) return clientEnv;
+
+  const features = FEATURE_ENV_NAMES.reduce((acc, x) => {
+    acc[x] = parseBooleanString(process.env[x]);
+    return acc;
+  }, {} as EnvFeaturePartial);
+
+  if (isBuildPhase) {
+    return {
+      API_URL: getNextEnvVar("API_URL"),
+      WEBSOCKET_URL: getNextEnvVar("WEBSOCKET_URL"),
+      ...features,
+    };
+  }
+
+  clientEnv = {
+    API_URL: getNextEnvVar("API_URL"),
+    WEBSOCKET_URL: getNextEnvVar("WEBSOCKET_URL"),
+    ...features,
+  };
+  return clientEnv;
+};
+
+export const getClientEnv =
+  typeof window === "undefined" ? getClientEnvServer : getClientEnvClient;
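Note: a minimal sketch of consuming the injected configuration from a client component; the component is illustrative, the API is the one defined above.

"use client";
import { getClientEnv } from "./clientEnv";

export function ApiUrlLabel() {
  // In the browser this reads document.body.dataset.env, which layout.tsx
  // serialized on the server via data-env={JSON.stringify(env)}.
  const env = getClientEnv();
  return <span>{env.API_URL}</span>;
}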
@@ -1,3 +1,13 @@
+import {
+  FEATURE_BROWSE_ENV_NAME,
+  FEATURE_PRIVACY_ENV_NAME,
+  FEATURE_REQUIRE_LOGIN_ENV_NAME,
+  FEATURE_ROOMS_ENV_NAME,
+  FEATURE_SEND_TO_ZULIP_ENV_NAME,
+  FeatureEnvName,
+  getClientEnv,
+} from "./clientEnv";
+
 export const FEATURES = [
   "requireLogin",
   "privacy",
@@ -18,38 +28,30 @@ export const DEFAULT_FEATURES: Features = {
   rooms: true,
 } as const;

-function parseBooleanEnv(
-  value: string | undefined,
-  defaultValue: boolean = false,
-): boolean {
-  if (!value) return defaultValue;
-  return value.toLowerCase() === "true";
-}
+export const ENV_TO_FEATURE: {
+  [k in FeatureEnvName]: FeatureName;
+} = {
+  FEATURE_REQUIRE_LOGIN: "requireLogin",
+  FEATURE_PRIVACY: "privacy",
+  FEATURE_BROWSE: "browse",
+  FEATURE_SEND_TO_ZULIP: "sendToZulip",
+  FEATURE_ROOMS: "rooms",
+} as const;

-// WARNING: keep process.env.* as-is, next.js won't see them if you generate dynamically
-const features: Features = {
-  requireLogin: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN,
-    DEFAULT_FEATURES.requireLogin,
-  ),
-  privacy: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_PRIVACY,
-    DEFAULT_FEATURES.privacy,
-  ),
-  browse: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_BROWSE,
-    DEFAULT_FEATURES.browse,
-  ),
-  sendToZulip: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP,
-    DEFAULT_FEATURES.sendToZulip,
-  ),
-  rooms: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_ROOMS,
-    DEFAULT_FEATURES.rooms,
-  ),
+export const FEATURE_TO_ENV: {
+  [k in FeatureName]: FeatureEnvName;
+} = {
+  requireLogin: "FEATURE_REQUIRE_LOGIN",
+  privacy: "FEATURE_PRIVACY",
+  browse: "FEATURE_BROWSE",
+  sendToZulip: "FEATURE_SEND_TO_ZULIP",
+  rooms: "FEATURE_ROOMS",
 };
+
+const features = getClientEnv();

 export const featureEnabled = (featureName: FeatureName): boolean => {
-  return features[featureName];
+  const isSet = features[FEATURE_TO_ENV[featureName]];
+  if (isSet === null) return DEFAULT_FEATURES[featureName];
+  return isSet;
 };
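Note: feature flags now resolve from the runtime env carried by getClientEnv() rather than build-time NEXT_PUBLIC_* variables; a sketch of the lookup, assuming FEATURE_BROWSE=false is set in the server environment:

// getClientEnv().FEATURE_BROWSE === false  ->  featureEnabled("browse") is false
// if FEATURE_BROWSE is unset, the value is null and DEFAULT_FEATURES.browse (true) applies
featureEnabled("browse");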
17
www/app/lib/nextBuild.ts
Normal file
@@ -0,0 +1,17 @@
+import { isBuildPhase } from "./next";
+import { assertExistsAndNonEmptyString, NonEmptyString } from "./utils";
+
+const _getNextEnvVar = (name: string, e?: string): NonEmptyString =>
+  isBuildPhase
+    ? (() => {
+        throw new Error(
+          "panic! getNextEnvVar called during build phase; we don't support build envs",
+        );
+      })()
+    : assertExistsAndNonEmptyString(
+        process.env[name],
+        `${name} is required; ${e}`,
+      );
+
+export const getNextEnvVar = (name: string, e?: string): NonEmptyString =>
+  _getNextEnvVar(name, e);
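Note: a usage sketch; the hint text is illustrative. getNextEnvVar reads the variable at request time and throws during the build phase, so values are no longer baked into the bundle.

// returns a NonEmptyString or throws "SITE_URL is required; set it on the frontend container"
const SITE_URL = getNextEnvVar("SITE_URL", "set it on the frontend container");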
@@ -1,7 +1,3 @@
-export function isDevelopment() {
-  return process.env.NEXT_PUBLIC_ENV === "development";
-}
-
 // Function to calculate WCAG contrast ratio
 export const getContrastRatio = (
   foreground: [number, number, number],
@@ -145,8 +141,15 @@ export const parseMaybeNonEmptyString = (
   s = trim ? s.trim() : s;
   return s.length > 0 ? (s as NonEmptyString) : null;
 };
-export const parseNonEmptyString = (s: string, trim = true): NonEmptyString =>
-  assertExists(parseMaybeNonEmptyString(s, trim), "Expected non-empty string");
+export const parseNonEmptyString = (
+  s: string,
+  trim = true,
+  e?: string,
+): NonEmptyString =>
+  assertExists(
+    parseMaybeNonEmptyString(s, trim),
+    "Expected non-empty string" + (e ? `: ${e}` : ""),
+  );

 export const assertExists = <T>(
   value: T | null | undefined,
@@ -173,4 +176,8 @@ export const assertExistsAndNonEmptyString = (
   value: string | null | undefined,
   err?: string,
 ): NonEmptyString =>
-  parseNonEmptyString(assertExists(value, err || "Expected non-empty string"));
+  parseNonEmptyString(
+    assertExists(value, err || "Expected non-empty string"),
+    true,
+    err,
+  );
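Note: the new third argument only augments the thrown message; behaviour is otherwise unchanged. A quick sketch:

parseNonEmptyString("  hello  ");          // -> "hello"
parseNonEmptyString("", true, "roomName"); // throws: Expected non-empty string: roomName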
@@ -10,6 +10,8 @@ import { QueryClientProvider } from "@tanstack/react-query";
 import { queryClient } from "./lib/queryClient";
 import { AuthProvider } from "./lib/AuthProvider";
 import { SessionProvider as SessionProviderNextAuth } from "next-auth/react";
+import { RecordingConsentProvider } from "./recordingConsentContext";
+import { UserEventsProvider } from "./lib/UserEventsProvider";

 const WherebyProvider = dynamic(
   () =>
@@ -26,10 +28,14 @@ export function Providers({ children }: { children: React.ReactNode }) {
     <SessionProviderNextAuth>
       <AuthProvider>
         <ChakraProvider value={system}>
-          <WherebyProvider>
-            {children}
-            <Toaster />
-          </WherebyProvider>
+          <RecordingConsentProvider>
+            <UserEventsProvider>
+              <WherebyProvider>
+                {children}
+                <Toaster />
+              </WherebyProvider>
+            </UserEventsProvider>
+          </RecordingConsentProvider>
         </ChakraProvider>
       </AuthProvider>
     </SessionProviderNextAuth>
191
www/app/reflector-api.d.ts
vendored
@@ -4,6 +4,23 @@
  */

 export interface paths {
+  "/health": {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    /** Health */
+    get: operations["health"];
+    put?: never;
+    post?: never;
+    delete?: never;
+    options?: never;
+    head?: never;
+    patch?: never;
+    trace?: never;
+  };
   "/metrics": {
     parameters: {
       query?: never;
@@ -587,6 +604,41 @@ export interface paths {
     patch?: never;
     trace?: never;
   };
+  "/v1/user/tokens": {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    /** List Tokens */
+    get: operations["v1_list_tokens"];
+    put?: never;
+    /** Create Token */
+    post: operations["v1_create_token"];
+    delete?: never;
+    options?: never;
+    head?: never;
+    patch?: never;
+    trace?: never;
+  };
+  "/v1/user/tokens/{token_id}": {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    get?: never;
+    put?: never;
+    post?: never;
+    /** Delete Token */
+    delete: operations["v1_delete_token"];
+    options?: never;
+    head?: never;
+    patch?: never;
+    trace?: never;
+  };
   "/v1/zulip/streams": {
     parameters: {
       query?: never;
@@ -759,6 +811,27 @@ export interface components {
       */
     allow_duplicated: boolean | null;
    };
+    /** CreateTokenRequest */
+    CreateTokenRequest: {
+      /** Name */
+      name?: string | null;
+    };
+    /** CreateTokenResponse */
+    CreateTokenResponse: {
+      /** Id */
+      id: string;
+      /** User Id */
+      user_id: string;
+      /** Name */
+      name: string | null;
+      /**
+       * Created At
+       * Format: date-time
+       */
+      created_at: string;
+      /** Token */
+      token: string;
+    };
     /** CreateTranscript */
     CreateTranscript: {
       /** Name */
@@ -1352,6 +1425,20 @@ export interface components {
      * @enum {string}
      */
     SyncStatus: "success" | "unchanged" | "error" | "skipped";
+    /** TokenResponse */
+    TokenResponse: {
+      /** Id */
+      id: string;
+      /** User Id */
+      user_id: string;
+      /** Name */
+      name: string | null;
+      /**
+       * Created At
+       * Format: date-time
+       */
+      created_at: string;
+    };
     /** Topic */
     Topic: {
       /** Name */
@@ -1509,6 +1596,26 @@ export interface components {
 }
 export type $defs = Record<string, never>;
 export interface operations {
+  health: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody?: never;
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": unknown;
+        };
+      };
+    };
+  };
   metrics: {
     parameters: {
       query?: never;
@@ -2899,6 +3006,90 @@ export interface operations {
       };
     };
   };
+  v1_list_tokens: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody?: never;
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["TokenResponse"][];
+        };
+      };
+    };
+  };
+  v1_create_token: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody: {
+      content: {
+        "application/json": components["schemas"]["CreateTokenRequest"];
+      };
+    };
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["CreateTokenResponse"];
+        };
+      };
+      /** @description Validation Error */
+      422: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["HTTPValidationError"];
+        };
+      };
+    };
+  };
+  v1_delete_token: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path: {
+        token_id: string;
+      };
+      cookie?: never;
+    };
+    requestBody?: never;
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": unknown;
+        };
+      };
+      /** @description Validation Error */
+      422: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["HTTPValidationError"];
+        };
+      };
+    };
+  };
   v1_zulip_get_streams: {
     parameters: {
       query?: never;
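Note: the regenerated types make the new token endpoints callable through the typed openapi-fetch client from apiClient.ts; a minimal sketch (token name, import path, and error handling are illustrative):

import { client } from "./lib/apiClient";

// create a personal API token; `data` is typed as CreateTokenResponse and the
// plaintext token is only returned by this call
const { data } = await client.POST("/v1/user/tokens", {
  body: { name: "ci-export" },
});

// list existing tokens, then revoke one
const { data: tokens } = await client.GET("/v1/user/tokens");
await client.DELETE("/v1/user/tokens/{token_id}", {
  params: { path: { token_id: tokens![0].id } },
});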
www/package.json
@@ -5,6 +5,7 @@
   "scripts": {
     "dev": "next dev",
     "build": "next build",
+    "build-production": "next build --experimental-build-mode compile",
     "start": "next start",
     "lint": "next lint",
     "format": "prettier --write .",