mirror of https://github.com/Monadical-SAS/reflector.git
synced 2025-12-20 20:29:06 +00:00

Compare commits

23 Commits

| SHA1 |
|---|
| dc4b737daa |
| 0baff7abf7 |
| 962c40e2b6 |
| 3c4b9f2103 |
| c6c035aacf |
| c086b91445 |
| 9a258abc02 |
| af86c47f1d |
| 5f6910e513 |
| 9a71af145e |
| eef6dc3903 |
| 1dee255fed |
| 5d98754305 |
| 969bd84fcc |
| 36608849ec |
| 5bf64b5a41 |
| 0aaa42528a |
| 565a62900f |
| 27016e6051 |
| 6ddfee0b4e |
| 47716f6e5d |
| 0abcebfc94 |
| 2b723da08b |
2 .github/workflows/deploy.yml (vendored)
@@ -1,4 +1,4 @@
-name: Deploy to Amazon ECS
+name: Build container/push to container registry

 on: [workflow_dispatch]
57 .github/workflows/docker-frontend.yml (vendored, new file)
@@ -0,0 +1,57 @@
name: Build and Push Frontend Docker Image

on:
  push:
    branches:
      - main
    paths:
      - 'www/**'
      - '.github/workflows/docker-frontend.yml'
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}-frontend

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./www
          file: ./www/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64,linux/arm64
50 CHANGELOG.md
@@ -1,5 +1,55 @@
# Changelog

## [0.16.0](https://github.com/Monadical-SAS/reflector/compare/v0.15.0...v0.16.0) (2025-10-24)

### Features

* search date filter ([#710](https://github.com/Monadical-SAS/reflector/issues/710)) ([962c40e](https://github.com/Monadical-SAS/reflector/commit/962c40e2b6428ac42fd10aea926782d7a6f3f902))

## [0.15.0](https://github.com/Monadical-SAS/reflector/compare/v0.14.0...v0.15.0) (2025-10-20)

### Features

* api tokens ([#705](https://github.com/Monadical-SAS/reflector/issues/705)) ([9a258ab](https://github.com/Monadical-SAS/reflector/commit/9a258abc0209b0ac3799532a507ea6a9125d703a))

## [0.14.0](https://github.com/Monadical-SAS/reflector/compare/v0.13.1...v0.14.0) (2025-10-08)

### Features

* Add calendar event data to transcript webhook payload ([#689](https://github.com/Monadical-SAS/reflector/issues/689)) ([5f6910e](https://github.com/Monadical-SAS/reflector/commit/5f6910e5131b7f28f86c9ecdcc57fed8412ee3cd))
* container build for www / github ([#672](https://github.com/Monadical-SAS/reflector/issues/672)) ([969bd84](https://github.com/Monadical-SAS/reflector/commit/969bd84fcc14851d1a101412a0ba115f1b7cde82))
* docker-compose for production frontend ([#664](https://github.com/Monadical-SAS/reflector/issues/664)) ([5bf64b5](https://github.com/Monadical-SAS/reflector/commit/5bf64b5a41f64535e22849b4bb11734d4dbb4aae))

### Bug Fixes

* restore feature boolean logic ([#671](https://github.com/Monadical-SAS/reflector/issues/671)) ([3660884](https://github.com/Monadical-SAS/reflector/commit/36608849ec64e953e3be456172502762e3c33df9))
* security review ([#656](https://github.com/Monadical-SAS/reflector/issues/656)) ([5d98754](https://github.com/Monadical-SAS/reflector/commit/5d98754305c6c540dd194dda268544f6d88bfaf8))
* update transcript list on reprocess ([#676](https://github.com/Monadical-SAS/reflector/issues/676)) ([9a71af1](https://github.com/Monadical-SAS/reflector/commit/9a71af145ee9b833078c78d0c684590ab12e9f0e))
* upgrade nemo toolkit ([#678](https://github.com/Monadical-SAS/reflector/issues/678)) ([eef6dc3](https://github.com/Monadical-SAS/reflector/commit/eef6dc39037329b65804297786d852dddb0557f9))

## [0.13.1](https://github.com/Monadical-SAS/reflector/compare/v0.13.0...v0.13.1) (2025-09-22)

### Bug Fixes

* TypeError on not all arguments converted during string formatting in logger ([#667](https://github.com/Monadical-SAS/reflector/issues/667)) ([565a629](https://github.com/Monadical-SAS/reflector/commit/565a62900f5a02fc946b68f9269a42190ed70ab6))

## [0.13.0](https://github.com/Monadical-SAS/reflector/compare/v0.12.1...v0.13.0) (2025-09-19)

### Features

* room form edit with enter ([#662](https://github.com/Monadical-SAS/reflector/issues/662)) ([47716f6](https://github.com/Monadical-SAS/reflector/commit/47716f6e5ddee952609d2fa0ffabdfa865286796))

### Bug Fixes

* invalid cleanup call ([#660](https://github.com/Monadical-SAS/reflector/issues/660)) ([0abcebf](https://github.com/Monadical-SAS/reflector/commit/0abcebfc9491f87f605f21faa3e53996fafedd9a))

## [0.12.1](https://github.com/Monadical-SAS/reflector/compare/v0.12.0...v0.12.1) (2025-09-17)
@@ -151,7 +151,7 @@ All endpoints prefixed `/v1/`:

 **Frontend** (`www/.env`):
 - `NEXTAUTH_URL`, `NEXTAUTH_SECRET` - Authentication configuration
-- `NEXT_PUBLIC_REFLECTOR_API_URL` - Backend API endpoint
+- `REFLECTOR_API_URL` - Backend API endpoint
 - `REFLECTOR_DOMAIN_CONFIG` - Feature flags and domain settings

 ## Testing Strategy
25 README.md
@@ -168,6 +168,13 @@ You can manually process an audio file by calling the process tool:
uv run python -m reflector.tools.process path/to/audio.wav
```

## Build-time env variables

Next.js projects usually rely on NEXT_PUBLIC_-prefixed build-time variables. We don't use those, because we need to serve a customizable, prebuilt Docker container.

Instead, all variables are runtime. Variables needed by the frontend are served to the frontend app at initial render.

This also means there is no static prebuild and no static js/html files to serve.

## Feature Flags
@@ -177,24 +184,24 @@ Reflector uses environment variable-based feature flags to control application f

 | Feature Flag | Environment Variable |
 |-------------|---------------------|
-| `requireLogin` | `NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN` |
-| `privacy` | `NEXT_PUBLIC_FEATURE_PRIVACY` |
-| `browse` | `NEXT_PUBLIC_FEATURE_BROWSE` |
-| `sendToZulip` | `NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP` |
-| `rooms` | `NEXT_PUBLIC_FEATURE_ROOMS` |
+| `requireLogin` | `FEATURE_REQUIRE_LOGIN` |
+| `privacy` | `FEATURE_PRIVACY` |
+| `browse` | `FEATURE_BROWSE` |
+| `sendToZulip` | `FEATURE_SEND_TO_ZULIP` |
+| `rooms` | `FEATURE_ROOMS` |

 ### Setting Feature Flags

-Feature flags are controlled via environment variables using the pattern `NEXT_PUBLIC_FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.
+Feature flags are controlled via environment variables using the pattern `FEATURE_{FEATURE_NAME}` where `{FEATURE_NAME}` is the SCREAMING_SNAKE_CASE version of the feature name.

 **Examples:**
 ```bash
 # Enable user authentication requirement
-NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN=true
+FEATURE_REQUIRE_LOGIN=true

 # Disable browse functionality
-NEXT_PUBLIC_FEATURE_BROWSE=false
+FEATURE_BROWSE=false

 # Enable Zulip integration
-NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP=true
+FEATURE_SEND_TO_ZULIP=true
 ```
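For illustration, here is a minimal Python sketch of the `FEATURE_{FEATURE_NAME}` naming convention described above. The real flag lookup lives in the Next.js frontend, so the helper names and the truthy-value handling below are assumptions, not the project's implementation.

```python
import os

def feature_env_name(feature: str) -> str:
    # camelCase -> SCREAMING_SNAKE_CASE, e.g. "sendToZulip" -> "FEATURE_SEND_TO_ZULIP"
    snake = "".join("_" + c if c.isupper() else c for c in feature).upper()
    return f"FEATURE_{snake}"

def feature_enabled(feature: str, default: bool = False) -> bool:
    # Assumed truthy parsing for the sketch; the actual boolean logic is in the frontend.
    value = os.environ.get(feature_env_name(feature))
    if value is None:
        return default
    return value.strip().lower() in {"1", "true", "yes", "on"}

# Example: with FEATURE_SEND_TO_ZULIP=true set, feature_enabled("sendToZulip") is True.
```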
39 docker-compose.prod.yml (new file)
@@ -0,0 +1,39 @@
# Production Docker Compose configuration for Frontend
# Usage: docker compose -f docker-compose.prod.yml up -d

services:
  web:
    build:
      context: ./www
      dockerfile: Dockerfile
    image: reflector-frontend:latest
    environment:
      - KV_URL=${KV_URL:-redis://redis:6379}
      - SITE_URL=${SITE_URL}
      - API_URL=${API_URL}
      - WEBSOCKET_URL=${WEBSOCKET_URL}
      - NEXTAUTH_URL=${NEXTAUTH_URL:-http://localhost:3000}
      - NEXTAUTH_SECRET=${NEXTAUTH_SECRET:-changeme-in-production}
      - AUTHENTIK_ISSUER=${AUTHENTIK_ISSUER}
      - AUTHENTIK_CLIENT_ID=${AUTHENTIK_CLIENT_ID}
      - AUTHENTIK_CLIENT_SECRET=${AUTHENTIK_CLIENT_SECRET}
      - AUTHENTIK_REFRESH_TOKEN_URL=${AUTHENTIK_REFRESH_TOKEN_URL}
      - SENTRY_DSN=${SENTRY_DSN}
      - SENTRY_IGNORE_API_RESOLUTION_ERROR=${SENTRY_IGNORE_API_RESOLUTION_ERROR:-1}
    depends_on:
      - redis
    restart: unless-stopped

  redis:
    image: redis:7.2-alpine
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 3s
      retries: 3
    volumes:
      - redis_data:/data

volumes:
  redis_data:
@@ -39,7 +39,7 @@ services:
     ports:
       - 6379:6379
   web:
-    image: node:18
+    image: node:22-alpine
     ports:
       - "3000:3000"
     command: sh -c "corepack enable && pnpm install && pnpm dev"
@@ -50,6 +50,8 @@ services:
       - /app/node_modules
+    env_file:
+      - ./www/.env.local
     environment:
       - NODE_ENV=development

   postgres:
     image: postgres:17
@@ -77,7 +77,7 @@ image = (
     .pip_install(
         "hf_transfer==0.1.9",
         "huggingface_hub[hf-xet]==0.31.2",
-        "nemo_toolkit[asr]==2.3.0",
+        "nemo_toolkit[asr]==2.5.0",
         "cuda-python==12.8.0",
         "fastapi==0.115.12",
         "numpy<2",
@@ -1,3 +1,29 @@
## API Key Management

### Finding Your User ID

```bash
# Get your OAuth sub (user ID) - requires authentication
curl -H "Authorization: Bearer <your_jwt>" http://localhost:1250/v1/me
# Returns: {"sub": "your-oauth-sub-here", "email": "...", ...}
```

### Creating API Keys

```bash
curl -X POST http://localhost:1250/v1/user/api-keys \
  -H "Authorization: Bearer <your_jwt>" \
  -H "Content-Type: application/json" \
  -d '{"name": "My API Key"}'
```

### Using API Keys

```bash
# Use X-API-Key header instead of Authorization
curl -H "X-API-Key: <your_api_key>" http://localhost:1250/v1/transcripts
```
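The same create-then-use flow in Python, as a sketch mirroring the curl examples above (the `localhost:1250` base URL and the `<your_jwt>` placeholder come from the docs; adjust both to your deployment):

```python
import requests

BASE_URL = "http://localhost:1250"
jwt = "<your_jwt>"

# Create a key while authenticated with the JWT; the plaintext key is only
# returned once, in the "key" field of the response.
created = requests.post(
    f"{BASE_URL}/v1/user/api-keys",
    headers={"Authorization": f"Bearer {jwt}", "Content-Type": "application/json"},
    json={"name": "My API Key"},
).json()
api_key = created["key"]

# Subsequent calls can use the X-API-Key header instead of a Bearer token.
transcripts = requests.get(
    f"{BASE_URL}/v1/transcripts",
    headers={"X-API-Key": api_key},
).json()
print(transcripts)
```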
## AWS S3/SQS usage clarification

Whereby.com uploads recordings directly to our S3 bucket when meetings end.
@@ -14,7 +14,7 @@ Webhooks are configured at the room level with two fields:

 ### `transcript.completed`

-Triggered when a transcript has been fully processed, including transcription, diarization, summarization, and topic detection.
+Triggered when a transcript has been fully processed, including transcription, diarization, summarization, topic detection and calendar event integration.

 ### `test`

@@ -128,6 +128,27 @@ This event includes a convenient URL for accessing the transcript:
  "room": {
    "id": "room-789",
    "name": "Product Team Room"
  },
  "calendar_event": {
    "id": "calendar-event-123",
    "ics_uid": "event-123",
    "title": "Q3 Product Planning Meeting",
    "start_time": "2025-08-27T12:00:00Z",
    "end_time": "2025-08-27T12:30:00Z",
    "description": "Team discussed Q3 product roadmap, prioritizing mobile app features and API improvements.",
    "location": "Conference Room 1",
    "attendees": [
      {
        "id": "participant-1",
        "name": "John Doe",
        "speaker": "Speaker 1"
      },
      {
        "id": "participant-2",
        "name": "Jane Smith",
        "speaker": "Speaker 2"
      }
    ]
  }
}
```
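A minimal receiver sketch for the payload excerpt above. The route path is hypothetical and signature/event headers are not covered here; only the payload fields shown in these docs (`room`, `calendar_event`, `attendees`) are assumed, and `calendar_event` is always present but may be an empty object when no calendar event is linked.

```python
from fastapi import FastAPI, Request

app = FastAPI()

@app.post("/webhooks/reflector")  # hypothetical endpoint on the receiving side
async def reflector_webhook(request: Request):
    payload = await request.json()
    # calendar_event is always included; empty dict means no linked event.
    event = payload.get("calendar_event") or {}
    if event:
        attendees = ", ".join(a.get("name", "?") for a in event.get("attendees", []))
        print(f"{event.get('title')} ({event.get('start_time')}): {attendees}")
    return {"ok": True}
```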
@@ -27,7 +27,7 @@ AUTH_JWT_AUDIENCE=
 #TRANSCRIPT_MODAL_API_KEY=xxxxx

 TRANSCRIPT_BACKEND=modal
-TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
+TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web.modal.run
 TRANSCRIPT_MODAL_API_KEY=

 ## =======================================================
38 server/migrations/versions/9e3f7b2a4c8e_add_user_api_keys.py (new file)
@@ -0,0 +1,38 @@
"""add user api keys

Revision ID: 9e3f7b2a4c8e
Revises: dc035ff72fd5
Create Date: 2025-10-17 00:00:00.000000

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "9e3f7b2a4c8e"
down_revision: Union[str, None] = "dc035ff72fd5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "user_api_key",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("user_id", sa.String(), nullable=False),
        sa.Column("key_hash", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )

    with op.batch_alter_table("user_api_key", schema=None) as batch_op:
        batch_op.create_index("idx_user_api_key_hash", ["key_hash"], unique=True)
        batch_op.create_index("idx_user_api_key_user_id", ["user_id"], unique=False)


def downgrade() -> None:
    op.drop_table("user_api_key")
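If you prefer running the migration programmatically rather than via the alembic CLI, a sketch (the `alembic.ini` path is an assumption; it is expected to sit next to the `migrations/` directory in `server/`):

```python
from alembic import command
from alembic.config import Config

# Apply all pending migrations, including 9e3f7b2a4c8e (user_api_key table).
cfg = Config("alembic.ini")
command.upgrade(cfg, "head")
```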
@@ -112,6 +112,7 @@ source = ["reflector"]
 [tool.pytest_env]
 ENVIRONMENT = "pytest"
 DATABASE_URL = "postgresql://test_user:test_password@localhost:15432/reflector_test"
+AUTH_BACKEND = "jwt"

 [tool.pytest.ini_options]
 addopts = "-ra -q --disable-pytest-warnings --cov --cov-report html -v"
@@ -26,6 +26,8 @@ from reflector.views.transcripts_upload import router as transcripts_upload_rout
 from reflector.views.transcripts_webrtc import router as transcripts_webrtc_router
 from reflector.views.transcripts_websocket import router as transcripts_websocket_router
 from reflector.views.user import router as user_router
+from reflector.views.user_api_keys import router as user_api_keys_router
+from reflector.views.user_websocket import router as user_ws_router
 from reflector.views.whereby import router as whereby_router
 from reflector.views.zulip import router as zulip_router

@@ -65,6 +67,12 @@ app.add_middleware(
     allow_headers=["*"],
 )


+@app.get("/health")
+async def health():
+    return {"status": "healthy"}
+

 # metrics
 instrumentator = Instrumentator(
     excluded_handlers=["/docs", "/metrics"],

@@ -84,6 +92,8 @@ app.include_router(transcripts_websocket_router, prefix="/v1")
 app.include_router(transcripts_webrtc_router, prefix="/v1")
 app.include_router(transcripts_process_router, prefix="/v1")
 app.include_router(user_router, prefix="/v1")
+app.include_router(user_api_keys_router, prefix="/v1")
+app.include_router(user_ws_router, prefix="/v1")
 app.include_router(zulip_router, prefix="/v1")
 app.include_router(whereby_router, prefix="/v1")
 add_pagination(app)
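The new `/health` route can be smoke-tested directly; a minimal sketch, assuming the server runs on the `localhost:1250` base URL used in the curl examples elsewhere in these docs:

```python
import requests

resp = requests.get("http://localhost:1250/health", timeout=5)
assert resp.status_code == 200 and resp.json() == {"status": "healthy"}
```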
@@ -1,14 +1,16 @@
|
||||
from typing import Annotated, Optional
|
||||
from typing import Annotated, List, Optional
|
||||
|
||||
from fastapi import Depends, HTTPException
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from fastapi.security import APIKeyHeader, OAuth2PasswordBearer
|
||||
from jose import JWTError, jwt
|
||||
from pydantic import BaseModel
|
||||
|
||||
from reflector.db.user_api_keys import user_api_keys_controller
|
||||
from reflector.logger import logger
|
||||
from reflector.settings import settings
|
||||
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
|
||||
api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
|
||||
|
||||
jwt_public_key = open(f"reflector/auth/jwt/keys/{settings.AUTH_JWT_PUBLIC_KEY}").read()
|
||||
jwt_algorithm = settings.AUTH_JWT_ALGORITHM
|
||||
@@ -26,7 +28,7 @@ class JWTException(Exception):
|
||||
|
||||
class UserInfo(BaseModel):
|
||||
sub: str
|
||||
email: str
|
||||
email: Optional[str] = None
|
||||
|
||||
def __getitem__(self, key):
|
||||
return getattr(self, key)
|
||||
@@ -58,34 +60,53 @@ def authenticated(token: Annotated[str, Depends(oauth2_scheme)]):
|
||||
return None
|
||||
|
||||
|
||||
def current_user(
|
||||
token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||
jwtauth: JWTAuth = Depends(),
|
||||
):
|
||||
if token is None:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
async def _authenticate_user(
|
||||
jwt_token: Optional[str],
|
||||
api_key: Optional[str],
|
||||
jwtauth: JWTAuth,
|
||||
) -> UserInfo | None:
|
||||
user_infos: List[UserInfo] = []
|
||||
if api_key:
|
||||
user_api_key = await user_api_keys_controller.verify_key(api_key)
|
||||
if user_api_key:
|
||||
user_infos.append(UserInfo(sub=user_api_key.user_id, email=None))
|
||||
|
||||
if jwt_token:
|
||||
try:
|
||||
payload = jwtauth.verify_token(token)
|
||||
payload = jwtauth.verify_token(jwt_token)
|
||||
sub = payload["sub"]
|
||||
email = payload["email"]
|
||||
return UserInfo(sub=sub, email=email)
|
||||
user_infos.append(UserInfo(sub=sub, email=email))
|
||||
except JWTError as e:
|
||||
logger.error(f"JWT error: {e}")
|
||||
raise HTTPException(status_code=401, detail="Invalid authentication")
|
||||
|
||||
|
||||
def current_user_optional(
|
||||
token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||
jwtauth: JWTAuth = Depends(),
|
||||
):
|
||||
# we accept no token, but if one is provided, it must be a valid one.
|
||||
if token is None:
|
||||
if len(user_infos) == 0:
|
||||
return None
|
||||
try:
|
||||
payload = jwtauth.verify_token(token)
|
||||
sub = payload["sub"]
|
||||
email = payload["email"]
|
||||
return UserInfo(sub=sub, email=email)
|
||||
except JWTError as e:
|
||||
logger.error(f"JWT error: {e}")
|
||||
raise HTTPException(status_code=401, detail="Invalid authentication")
|
||||
|
||||
if len(set([x.sub for x in user_infos])) > 1:
|
||||
raise JWTException(
|
||||
status_code=401,
|
||||
detail="Invalid authentication: more than one user provided",
|
||||
)
|
||||
|
||||
return user_infos[0]
|
||||
|
||||
|
||||
async def current_user(
|
||||
jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||
api_key: Annotated[Optional[str], Depends(api_key_header)],
|
||||
jwtauth: JWTAuth = Depends(),
|
||||
):
|
||||
user = await _authenticate_user(jwt_token, api_key, jwtauth)
|
||||
if user is None:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
return user
|
||||
|
||||
|
||||
async def current_user_optional(
|
||||
jwt_token: Annotated[Optional[str], Depends(oauth2_scheme)],
|
||||
api_key: Annotated[Optional[str], Depends(api_key_header)],
|
||||
jwtauth: JWTAuth = Depends(),
|
||||
):
|
||||
return await _authenticate_user(jwt_token, api_key, jwtauth)
|
||||
|
||||
@@ -29,6 +29,7 @@ import reflector.db.meetings  # noqa
 import reflector.db.recordings  # noqa
 import reflector.db.rooms  # noqa
 import reflector.db.transcripts  # noqa
+import reflector.db.user_api_keys  # noqa

 kwargs = {}
 if "postgres" not in settings.DATABASE_URL:
@@ -104,6 +104,11 @@ class CalendarEventController:
         results = await get_database().fetch_all(query)
         return [CalendarEvent(**result) for result in results]

+    async def get_by_id(self, event_id: str) -> CalendarEvent | None:
+        query = calendar_events.select().where(calendar_events.c.id == event_id)
+        result = await get_database().fetch_one(query)
+        return CalendarEvent(**result) if result else None
+
     async def get_by_ics_uid(self, room_id: str, ics_uid: str) -> CalendarEvent | None:
         query = calendar_events.select().where(
             sa.and_(
@@ -135,6 +135,8 @@ class SearchParameters(BaseModel):
     user_id: str | None = None
     room_id: str | None = None
     source_kind: SourceKind | None = None
+    from_datetime: datetime | None = None
+    to_datetime: datetime | None = None


 class SearchResultDB(BaseModel):

@@ -402,6 +404,14 @@ class SearchController:
             base_query = base_query.where(
                 transcripts.c.source_kind == params.source_kind
             )
+        if params.from_datetime:
+            base_query = base_query.where(
+                transcripts.c.created_at >= params.from_datetime
+            )
+        if params.to_datetime:
+            base_query = base_query.where(
+                transcripts.c.created_at <= params.to_datetime
+            )

         if params.query_text is not None:
             order_by = sqlalchemy.desc(sqlalchemy.text("rank"))
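A sketch of using the new date filters at the controller level (imports match the new test file at the end of this diff). The HTTP API exposes the same filters as timezone-aware `from`/`to` query parameters; the other `SearchParameters` defaults are assumed from the view code shown later in this diff.

```python
from datetime import datetime, timedelta, timezone

from reflector.db.search import SearchParameters, search_controller

async def search_last_week(user_id: str):
    now = datetime.now(timezone.utc)
    params = SearchParameters(
        query_text="roadmap",            # full-text query; dates narrow the window
        user_id=user_id,
        from_datetime=now - timedelta(days=7),
        to_datetime=now,
    )
    # Mirrors the view: returns matching results plus the total count.
    results, total = await search_controller.search_transcripts(params)
    return results, total
```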
@@ -647,6 +647,19 @@ class TranscriptController:
         query = transcripts.delete().where(transcripts.c.recording_id == recording_id)
         await get_database().execute(query)

+    @staticmethod
+    def user_can_mutate(transcript: Transcript, user_id: str | None) -> bool:
+        """
+        Returns True if the given user is allowed to modify the transcript.
+
+        Policy:
+        - Anonymous transcripts (user_id is None) cannot be modified via API
+        - Only the owner (matching user_id) can modify their transcript
+        """
+        if transcript.user_id is None:
+            return False
+        return user_id and transcript.user_id == user_id
+
     @asynccontextmanager
     async def transaction(self):
         """
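The ownership policy above reads as a small truth table; a sketch with hypothetical objects (not repo fixtures), relying on the fact that the helper only reads `transcript.user_id`:

```python
from types import SimpleNamespace

from reflector.db.transcripts import transcripts_controller

anon = SimpleNamespace(user_id=None)      # anonymous transcript
owned = SimpleNamespace(user_id="alice")  # owned by "alice"

assert transcripts_controller.user_can_mutate(anon, "alice") is False
assert transcripts_controller.user_can_mutate(owned, "alice") is True
assert not transcripts_controller.user_can_mutate(owned, "bob")
```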
90
server/reflector/db/user_api_keys.py
Normal file
90
server/reflector/db/user_api_keys.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import hmac
|
||||
import secrets
|
||||
from datetime import datetime, timezone
|
||||
from hashlib import sha256
|
||||
|
||||
import sqlalchemy
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from reflector.db import get_database, metadata
|
||||
from reflector.settings import settings
|
||||
from reflector.utils import generate_uuid4
|
||||
from reflector.utils.string import NonEmptyString
|
||||
|
||||
user_api_keys = sqlalchemy.Table(
|
||||
"user_api_key",
|
||||
metadata,
|
||||
sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
|
||||
sqlalchemy.Column("user_id", sqlalchemy.String, nullable=False),
|
||||
sqlalchemy.Column("key_hash", sqlalchemy.String, nullable=False),
|
||||
sqlalchemy.Column("name", sqlalchemy.String, nullable=True),
|
||||
sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True), nullable=False),
|
||||
sqlalchemy.Index("idx_user_api_key_hash", "key_hash", unique=True),
|
||||
sqlalchemy.Index("idx_user_api_key_user_id", "user_id"),
|
||||
)
|
||||
|
||||
|
||||
class UserApiKey(BaseModel):
|
||||
id: NonEmptyString = Field(default_factory=generate_uuid4)
|
||||
user_id: NonEmptyString
|
||||
key_hash: NonEmptyString
|
||||
name: NonEmptyString | None = None
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||
|
||||
|
||||
class UserApiKeyController:
|
||||
@staticmethod
|
||||
def generate_key() -> NonEmptyString:
|
||||
return secrets.token_urlsafe(48)
|
||||
|
||||
@staticmethod
|
||||
def hash_key(key: NonEmptyString) -> str:
|
||||
return hmac.new(
|
||||
settings.SECRET_KEY.encode(), key.encode(), digestmod=sha256
|
||||
).hexdigest()
|
||||
|
||||
@classmethod
|
||||
async def create_key(
|
||||
cls,
|
||||
user_id: NonEmptyString,
|
||||
name: NonEmptyString | None = None,
|
||||
) -> tuple[UserApiKey, NonEmptyString]:
|
||||
plaintext = cls.generate_key()
|
||||
api_key = UserApiKey(
|
||||
user_id=user_id,
|
||||
key_hash=cls.hash_key(plaintext),
|
||||
name=name,
|
||||
)
|
||||
query = user_api_keys.insert().values(**api_key.model_dump())
|
||||
await get_database().execute(query)
|
||||
return api_key, plaintext
|
||||
|
||||
@classmethod
|
||||
async def verify_key(cls, plaintext_key: NonEmptyString) -> UserApiKey | None:
|
||||
key_hash = cls.hash_key(plaintext_key)
|
||||
query = user_api_keys.select().where(
|
||||
user_api_keys.c.key_hash == key_hash,
|
||||
)
|
||||
result = await get_database().fetch_one(query)
|
||||
return UserApiKey(**result) if result else None
|
||||
|
||||
@staticmethod
|
||||
async def list_by_user_id(user_id: NonEmptyString) -> list[UserApiKey]:
|
||||
query = (
|
||||
user_api_keys.select()
|
||||
.where(user_api_keys.c.user_id == user_id)
|
||||
.order_by(user_api_keys.c.created_at.desc())
|
||||
)
|
||||
results = await get_database().fetch_all(query)
|
||||
return [UserApiKey(**r) for r in results]
|
||||
|
||||
@staticmethod
|
||||
async def delete_key(key_id: NonEmptyString, user_id: NonEmptyString) -> bool:
|
||||
query = user_api_keys.delete().where(
|
||||
(user_api_keys.c.id == key_id) & (user_api_keys.c.user_id == user_id)
|
||||
)
|
||||
result = await get_database().execute(query)
|
||||
return result > 0
|
||||
|
||||
|
||||
user_api_keys_controller = UserApiKeyController()
|
||||
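A standalone sketch of the key-hashing scheme used by the controller above: API keys are stored only as an HMAC-SHA256 digest of the plaintext, keyed with the server's `SECRET_KEY`, so verification is a deterministic re-hash and unique-index lookup. The secret and key values below are placeholders, not real configuration.

```python
import hmac
from hashlib import sha256

SECRET_KEY = "server-secret"  # placeholder for settings.SECRET_KEY

def hash_key(key: str) -> str:
    return hmac.new(SECRET_KEY.encode(), key.encode(), digestmod=sha256).hexdigest()

issued_plaintext = "rk_example_token"      # returned to the user once, never stored
stored_hash = hash_key(issued_plaintext)   # what goes into user_api_key.key_hash

# Later, an incoming X-API-Key header is verified by re-hashing and comparing.
assert hash_key("rk_example_token") == stored_hash
assert hash_key("wrong-key") != stored_hash
```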
@@ -131,7 +131,7 @@ class PipelineMainFile(PipelineMainBase):

         self.logger.info("File pipeline complete")

-        await transcripts_controller.set_status(transcript.id, "ended")
+        await self.set_status(transcript.id, "ended")

     async def extract_and_write_audio(
         self, file_path: Path, transcript: Transcript

@@ -426,7 +426,12 @@ async def task_pipeline_file_process(*, transcript_id: str):

         await pipeline.process(audio_file)

-    except Exception:
+    except Exception as e:
+        logger.error(
+            f"File pipeline failed for transcript {transcript_id}: {type(e).__name__}: {str(e)}",
+            exc_info=True,
+            transcript_id=transcript_id,
+        )
         await pipeline.set_status(transcript_id, "error")
         raise
@@ -85,6 +85,20 @@ def broadcast_to_sockets(func):
|
||||
message=resp.model_dump(mode="json"),
|
||||
)
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(self.transcript_id)
|
||||
if transcript and transcript.user_id:
|
||||
# Emit only relevant events to the user room to avoid noisy updates.
|
||||
# Allowed: STATUS, FINAL_TITLE, DURATION. All are prefixed with TRANSCRIPT_
|
||||
allowed_user_events = {"STATUS", "FINAL_TITLE", "DURATION"}
|
||||
if resp.event in allowed_user_events:
|
||||
await self.ws_manager.send_json(
|
||||
room_id=f"user:{transcript.user_id}",
|
||||
message={
|
||||
"event": f"TRANSCRIPT_{resp.event}",
|
||||
"data": {"id": self.transcript_id, **resp.data},
|
||||
},
|
||||
)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
|
||||
@@ -56,6 +56,16 @@ class FileTranscriptModalProcessor(FileTranscriptProcessor):
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
error_body = response.text
|
||||
self.logger.error(
|
||||
"Modal API error",
|
||||
audio_url=data.audio_url,
|
||||
status_code=response.status_code,
|
||||
error_body=error_body,
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
|
||||
@@ -34,8 +34,16 @@ TOPIC_PROMPT = dedent(
 class TopicResponse(BaseModel):
     """Structured response for topic detection"""

-    title: str = Field(description="A descriptive title for the topic being discussed")
-    summary: str = Field(description="A concise 1-2 sentence summary of the discussion")
+    title: str = Field(
+        description="A descriptive title for the topic being discussed",
+        validation_alias="Title",
+    )
+    summary: str = Field(
+        description="A concise 1-2 sentence summary of the discussion",
+        validation_alias="Summary",
+    )
+
+    model_config = {"populate_by_name": True}


 class TranscriptTopicDetectorProcessor(Processor):
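A minimal sketch of the aliasing behaviour introduced above (a stand-in class, not the repo's `TopicResponse`): `validation_alias` lets the model accept the capitalised keys that sometimes appear in model output, while `populate_by_name` keeps the lowercase field names working too.

```python
from pydantic import BaseModel, Field

class TopicSketch(BaseModel):
    title: str = Field(validation_alias="Title")
    summary: str = Field(validation_alias="Summary")
    model_config = {"populate_by_name": True}

# Both spellings validate to the same object.
print(TopicSketch.model_validate({"Title": "Roadmap", "Summary": "Discussed Q3."}))
print(TopicSketch(title="Roadmap", summary="Discussed Q3."))
```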
@@ -199,6 +199,8 @@ async def rooms_get(
|
||||
room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
if not room.is_shared and (user_id is None or room.user_id != user_id):
|
||||
raise HTTPException(status_code=403, detail="Room access denied")
|
||||
return room
|
||||
|
||||
|
||||
@@ -229,9 +231,9 @@ async def rooms_get_by_name(
|
||||
@router.post("/rooms", response_model=Room)
|
||||
async def rooms_create(
|
||||
room: CreateRoom,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
|
||||
return await rooms_controller.add(
|
||||
name=room.name,
|
||||
@@ -256,12 +258,14 @@ async def rooms_create(
|
||||
async def rooms_update(
|
||||
room_id: str,
|
||||
info: UpdateRoom,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
if room.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
values = info.dict(exclude_unset=True)
|
||||
await rooms_controller.update(room, values)
|
||||
return room
|
||||
@@ -270,12 +274,14 @@ async def rooms_update(
|
||||
@router.delete("/rooms/{room_id}", response_model=DeletionStatus)
|
||||
async def rooms_delete(
|
||||
room_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_id(room_id, user_id=user_id)
|
||||
user_id = user["sub"]
|
||||
room = await rooms_controller.get_by_id(room_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
if room.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
await rooms_controller.remove_by_id(room.id, user_id=user_id)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
@@ -339,16 +345,16 @@ async def rooms_create_meeting(
|
||||
@router.post("/rooms/{room_id}/webhook/test", response_model=WebhookTestResult)
|
||||
async def rooms_test_webhook(
|
||||
room_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
"""Test webhook configuration by sending a sample payload."""
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
|
||||
room = await rooms_controller.get_by_id(room_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
if user_id and room.user_id != user_id:
|
||||
if room.user_id != user_id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Not authorized to test this room's webhook"
|
||||
)
|
||||
|
||||
@@ -5,12 +5,10 @@ from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi_pagination import Page
|
||||
from fastapi_pagination.ext.databases import apaginate
|
||||
from jose import jwt
|
||||
from pydantic import BaseModel, Field, constr, field_serializer
|
||||
from pydantic import AwareDatetime, BaseModel, Field, constr, field_serializer
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_database
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.search import (
|
||||
DEFAULT_SEARCH_LIMIT,
|
||||
SearchLimit,
|
||||
@@ -34,6 +32,7 @@ from reflector.db.transcripts import (
|
||||
from reflector.processors.types import Transcript as ProcessorTranscript
|
||||
from reflector.processors.types import Word
|
||||
from reflector.settings import settings
|
||||
from reflector.ws_manager import get_ws_manager
|
||||
from reflector.zulip import (
|
||||
InvalidMessageError,
|
||||
get_zulip_message,
|
||||
@@ -134,6 +133,21 @@ SearchOffsetParam = Annotated[
|
||||
SearchOffsetBase, Query(description="Number of results to skip")
|
||||
]
|
||||
|
||||
SearchFromDatetimeParam = Annotated[
|
||||
AwareDatetime | None,
|
||||
Query(
|
||||
alias="from",
|
||||
description="Filter transcripts created on or after this datetime (ISO 8601 with timezone)",
|
||||
),
|
||||
]
|
||||
SearchToDatetimeParam = Annotated[
|
||||
AwareDatetime | None,
|
||||
Query(
|
||||
alias="to",
|
||||
description="Filter transcripts created on or before this datetime (ISO 8601 with timezone)",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
results: list[SearchResult]
|
||||
@@ -175,18 +189,23 @@ async def transcripts_search(
|
||||
offset: SearchOffsetParam = 0,
|
||||
room_id: Optional[str] = None,
|
||||
source_kind: Optional[SourceKind] = None,
|
||||
from_datetime: SearchFromDatetimeParam = None,
|
||||
to_datetime: SearchToDatetimeParam = None,
|
||||
user: Annotated[
|
||||
Optional[auth.UserInfo], Depends(auth.current_user_optional)
|
||||
] = None,
|
||||
):
|
||||
"""
|
||||
Full-text search across transcript titles and content.
|
||||
"""
|
||||
"""Full-text search across transcript titles and content."""
|
||||
if not user and not settings.PUBLIC_MODE:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
if from_datetime and to_datetime and from_datetime > to_datetime:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="'from' must be less than or equal to 'to'"
|
||||
)
|
||||
|
||||
search_params = SearchParameters(
|
||||
query_text=parse_search_query_param(q),
|
||||
limit=limit,
|
||||
@@ -194,6 +213,8 @@ async def transcripts_search(
|
||||
user_id=user_id,
|
||||
room_id=room_id,
|
||||
source_kind=source_kind,
|
||||
from_datetime=from_datetime,
|
||||
to_datetime=to_datetime,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(search_params)
|
||||
@@ -213,7 +234,7 @@ async def transcripts_create(
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
return await transcripts_controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
info.name,
|
||||
source_kind=info.source_kind or SourceKind.LIVE,
|
||||
source_language=info.source_language,
|
||||
@@ -221,6 +242,14 @@ async def transcripts_create(
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
if user_id:
|
||||
await get_ws_manager().send_json(
|
||||
room_id=f"user:{user_id}",
|
||||
message={"event": "TRANSCRIPT_CREATED", "data": {"id": transcript.id}},
|
||||
)
|
||||
|
||||
return transcript
|
||||
|
||||
|
||||
# ==============================================================
|
||||
# Single transcript
|
||||
@@ -344,12 +373,14 @@ async def transcript_get(
|
||||
async def transcript_update(
|
||||
transcript_id: str,
|
||||
info: UpdateTranscript,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if not transcripts_controller.user_can_mutate(transcript, user_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
values = info.dict(exclude_unset=True)
|
||||
updated_transcript = await transcripts_controller.update(transcript, values)
|
||||
return updated_transcript
|
||||
@@ -358,20 +389,20 @@ async def transcript_update(
|
||||
@router.delete("/transcripts/{transcript_id}", response_model=DeletionStatus)
|
||||
async def transcript_delete(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
if transcript.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(transcript.meeting_id)
|
||||
room = await rooms_controller.get_by_id(meeting.room_id)
|
||||
if room.is_shared:
|
||||
user_id = None
|
||||
if not transcripts_controller.user_can_mutate(transcript, user_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
await transcripts_controller.remove_by_id(transcript.id, user_id=user_id)
|
||||
await get_ws_manager().send_json(
|
||||
room_id=f"user:{user_id}",
|
||||
message={"event": "TRANSCRIPT_DELETED", "data": {"id": transcript.id}},
|
||||
)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@@ -443,15 +474,16 @@ async def transcript_post_to_zulip(
|
||||
stream: str,
|
||||
topic: str,
|
||||
include_topics: bool,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
if not transcripts_controller.user_can_mutate(transcript, user_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
content = get_zulip_message(transcript, include_topics)
|
||||
|
||||
message_updated = False
|
||||
|
||||
@@ -56,12 +56,14 @@ async def transcript_get_participants(
|
||||
async def transcript_add_participant(
|
||||
transcript_id: str,
|
||||
participant: CreateParticipant,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
) -> Participant:
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if transcript.user_id is not None and transcript.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
# ensure the speaker is unique
|
||||
if participant.speaker is not None and transcript.participants is not None:
|
||||
@@ -101,12 +103,14 @@ async def transcript_update_participant(
|
||||
transcript_id: str,
|
||||
participant_id: str,
|
||||
participant: UpdateParticipant,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
) -> Participant:
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if transcript.user_id is not None and transcript.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
# ensure the speaker is unique
|
||||
for p in transcript.participants:
|
||||
@@ -138,11 +142,13 @@ async def transcript_update_participant(
|
||||
async def transcript_delete_participant(
|
||||
transcript_id: str,
|
||||
participant_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
) -> DeletionStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if transcript.user_id is not None and transcript.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
await transcripts_controller.delete_participant(transcript, participant_id)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
@@ -35,12 +35,14 @@ class SpeakerMerge(BaseModel):
|
||||
async def transcript_assign_speaker(
|
||||
transcript_id: str,
|
||||
assignment: SpeakerAssignment,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
) -> SpeakerAssignmentStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if transcript.user_id is not None and transcript.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
@@ -113,12 +115,14 @@ async def transcript_assign_speaker(
|
||||
async def transcript_merge_speaker(
|
||||
transcript_id: str,
|
||||
merge: SpeakerMerge,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
) -> SpeakerAssignmentStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
user_id = user["sub"]
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if transcript.user_id is not None and transcript.user_id != user_id:
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
@@ -4,8 +4,11 @@ Transcripts websocket API
|
||||
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketDisconnect
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.ws_manager import get_ws_manager
|
||||
|
||||
@@ -21,10 +24,12 @@ async def transcript_get_websocket_events(transcript_id: str):
|
||||
async def transcript_events_websocket(
|
||||
transcript_id: str,
|
||||
websocket: WebSocket,
|
||||
# user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
user: Optional[auth.UserInfo] = Depends(auth.current_user_optional),
|
||||
):
|
||||
# user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ router = APIRouter()
 class UserInfo(BaseModel):
     sub: str
     email: Optional[str]
     email_verified: Optional[bool]


 @router.get("/me")
62
server/reflector/views/user_api_keys.py
Normal file
62
server/reflector/views/user_api_keys.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from datetime import datetime
|
||||
from typing import Annotated
|
||||
|
||||
import structlog
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db.user_api_keys import user_api_keys_controller
|
||||
from reflector.utils.string import NonEmptyString
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class CreateApiKeyRequest(BaseModel):
|
||||
name: NonEmptyString | None = None
|
||||
|
||||
|
||||
class ApiKeyResponse(BaseModel):
|
||||
id: NonEmptyString
|
||||
user_id: NonEmptyString
|
||||
name: NonEmptyString | None
|
||||
created_at: datetime
|
||||
|
||||
|
||||
class CreateApiKeyResponse(ApiKeyResponse):
|
||||
key: NonEmptyString
|
||||
|
||||
|
||||
@router.post("/user/api-keys", response_model=CreateApiKeyResponse)
|
||||
async def create_api_key(
|
||||
req: CreateApiKeyRequest,
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
api_key_model, plaintext = await user_api_keys_controller.create_key(
|
||||
user_id=user["sub"],
|
||||
name=req.name,
|
||||
)
|
||||
return CreateApiKeyResponse(
|
||||
**api_key_model.model_dump(),
|
||||
key=plaintext,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/user/api-keys", response_model=list[ApiKeyResponse])
|
||||
async def list_api_keys(
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
api_keys = await user_api_keys_controller.list_by_user_id(user["sub"])
|
||||
return [ApiKeyResponse(**k.model_dump()) for k in api_keys]
|
||||
|
||||
|
||||
@router.delete("/user/api-keys/{key_id}")
|
||||
async def delete_api_key(
|
||||
key_id: NonEmptyString,
|
||||
user: Annotated[auth.UserInfo, Depends(auth.current_user)],
|
||||
):
|
||||
deleted = await user_api_keys_controller.delete_key(key_id, user["sub"])
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404)
|
||||
return {"status": "ok"}
|
||||
53
server/reflector/views/user_websocket.py
Normal file
53
server/reflector/views/user_websocket.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, WebSocket
|
||||
|
||||
from reflector.auth.auth_jwt import JWTAuth # type: ignore
|
||||
from reflector.ws_manager import get_ws_manager
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Close code for unauthorized WebSocket connections
|
||||
UNAUTHORISED = 4401
|
||||
|
||||
|
||||
@router.websocket("/events")
|
||||
async def user_events_websocket(websocket: WebSocket):
|
||||
# Browser can't send Authorization header for WS; use subprotocol: ["bearer", token]
|
||||
raw_subprotocol = websocket.headers.get("sec-websocket-protocol") or ""
|
||||
parts = [p.strip() for p in raw_subprotocol.split(",") if p.strip()]
|
||||
token: Optional[str] = None
|
||||
negotiated_subprotocol: Optional[str] = None
|
||||
if len(parts) >= 2 and parts[0].lower() == "bearer":
|
||||
negotiated_subprotocol = "bearer"
|
||||
token = parts[1]
|
||||
|
||||
user_id: Optional[str] = None
|
||||
if not token:
|
||||
await websocket.close(code=UNAUTHORISED)
|
||||
return
|
||||
|
||||
try:
|
||||
payload = JWTAuth().verify_token(token)
|
||||
user_id = payload.get("sub")
|
||||
except Exception:
|
||||
await websocket.close(code=UNAUTHORISED)
|
||||
return
|
||||
|
||||
if not user_id:
|
||||
await websocket.close(code=UNAUTHORISED)
|
||||
return
|
||||
|
||||
room_id = f"user:{user_id}"
|
||||
ws_manager = get_ws_manager()
|
||||
|
||||
await ws_manager.add_user_to_room(
|
||||
room_id, websocket, subprotocol=negotiated_subprotocol
|
||||
)
|
||||
|
||||
try:
|
||||
while True:
|
||||
await websocket.receive()
|
||||
finally:
|
||||
if room_id:
|
||||
await ws_manager.remove_user_from_room(room_id, websocket)
|
||||
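A client-side sketch of the subprotocol-based auth used by the `/v1/events` websocket above: browsers cannot set an `Authorization` header on WebSockets, so the JWT is sent as the second entry of the `Sec-WebSocket-Protocol` list. The host/port and token below are placeholders; the message shape follows the `{"event": "TRANSCRIPT_...", "data": {...}}` payloads emitted elsewhere in this diff.

```python
import asyncio
import json

import websockets

async def listen(jwt: str):
    url = "ws://localhost:1250/v1/events"
    # ["bearer", <token>] matches the server's subprotocol parsing above.
    async with websockets.connect(url, subprotocols=["bearer", jwt]) as ws:
        async for message in ws:
            event = json.loads(message)
            print(event.get("event"), event.get("data"))

# asyncio.run(listen("<your_jwt>"))
```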
@@ -5,7 +5,6 @@ Deletes old anonymous transcripts and their associated meetings/recordings.
 Transcripts are the main entry point - any associated data is also removed.
 """

-import asyncio
 from datetime import datetime, timedelta, timezone
 from typing import TypedDict

@@ -152,5 +151,5 @@ async def cleanup_old_public_data(
     retry_kwargs={"max_retries": 3, "countdown": 300},
 )
 @asynctask
-def cleanup_old_public_data_task(days: int | None = None):
-    asyncio.run(cleanup_old_public_data(days=days))
+async def cleanup_old_public_data_task(days: int | None = None):
+    await cleanup_old_public_data(days=days)
@@ -213,7 +213,6 @@ async def process_meetings():
                 should_deactivate = True
                 logger_.info(
                     "Meeting deactivated - scheduled time ended with no participants",
-                    meeting.id,
                 )
             else:
                 logger_.debug("Meeting not yet started, keep it")

@@ -224,8 +223,8 @@ async def process_meetings():

             processed_count += 1

-        except Exception as e:
-            logger_.error(f"Error processing meeting", exc_info=True)
+        except Exception:
+            logger_.error("Error processing meeting", exc_info=True)
         finally:
             try:
                 lock.release()

@@ -233,7 +232,7 @@ async def process_meetings():
                 pass  # Lock already released or expired

     logger.info(
-        f"Processed meetings finished",
+        "Processed meetings finished",
         processed_count=processed_count,
         skipped_count=skipped_count,
     )
@@ -11,6 +11,8 @@ import structlog
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
from reflector.db.calendar_events import calendar_events_controller
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.pipelines.main_live_pipeline import asynctask
|
||||
@@ -84,6 +86,18 @@ async def send_transcript_webhook(
|
||||
}
|
||||
)
|
||||
|
||||
# Fetch meeting and calendar event if they exist
|
||||
calendar_event = None
|
||||
try:
|
||||
if transcript.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(transcript.meeting_id)
|
||||
if meeting and meeting.calendar_event_id:
|
||||
calendar_event = await calendar_events_controller.get_by_id(
|
||||
meeting.calendar_event_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Error fetching meeting or calendar event", error=str(e))
|
||||
|
||||
# Build webhook payload
|
||||
frontend_url = f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
|
||||
participants = [
|
||||
@@ -116,6 +130,33 @@ async def send_transcript_webhook(
|
||||
},
|
||||
}
|
||||
|
||||
# Always include calendar_event field, even if no event is present
|
||||
payload_data["calendar_event"] = {}
|
||||
|
||||
# Add calendar event data if present
|
||||
if calendar_event:
|
||||
calendar_data = {
|
||||
"id": calendar_event.id,
|
||||
"ics_uid": calendar_event.ics_uid,
|
||||
"title": calendar_event.title,
|
||||
"start_time": calendar_event.start_time.isoformat()
|
||||
if calendar_event.start_time
|
||||
else None,
|
||||
"end_time": calendar_event.end_time.isoformat()
|
||||
if calendar_event.end_time
|
||||
else None,
|
||||
}
|
||||
|
||||
# Add optional fields only if they exist
|
||||
if calendar_event.description:
|
||||
calendar_data["description"] = calendar_event.description
|
||||
if calendar_event.location:
|
||||
calendar_data["location"] = calendar_event.location
|
||||
if calendar_event.attendees:
|
||||
calendar_data["attendees"] = calendar_event.attendees
|
||||
|
||||
payload_data["calendar_event"] = calendar_data
|
||||
|
||||
# Convert to JSON
|
||||
payload_json = json.dumps(payload_data, separators=(",", ":"))
|
||||
payload_bytes = payload_json.encode("utf-8")
|
||||
|
||||
@@ -65,7 +65,12 @@ class WebsocketManager:
         self.tasks: dict = {}
         self.pubsub_client = pubsub_client

-    async def add_user_to_room(self, room_id: str, websocket: WebSocket) -> None:
+    async def add_user_to_room(
+        self, room_id: str, websocket: WebSocket, subprotocol: str | None = None
+    ) -> None:
+        if subprotocol:
+            await websocket.accept(subprotocol=subprotocol)
+        else:
+            await websocket.accept()

         if room_id in self.rooms:
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from tempfile import NamedTemporaryFile
|
||||
from unittest.mock import patch
|
||||
|
||||
@@ -337,6 +338,166 @@ async def client():
|
||||
yield ac
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def ws_manager_in_memory(monkeypatch):
|
||||
"""Replace Redis-based WS manager with an in-memory implementation for tests."""
|
||||
import asyncio
|
||||
import json
|
||||
|
||||
from reflector.ws_manager import WebsocketManager
|
||||
|
||||
class _InMemorySubscriber:
|
||||
def __init__(self, queue: asyncio.Queue):
|
||||
self.queue = queue
|
||||
|
||||
async def get_message(self, ignore_subscribe_messages: bool = True):
|
||||
try:
|
||||
return await asyncio.wait_for(self.queue.get(), timeout=0.05)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
class InMemoryPubSubManager:
|
||||
def __init__(self):
|
||||
self.queues: dict[str, asyncio.Queue] = {}
|
||||
self.connected = False
|
||||
|
||||
async def connect(self) -> None:
|
||||
self.connected = True
|
||||
|
||||
async def disconnect(self) -> None:
|
||||
self.connected = False
|
||||
|
||||
async def send_json(self, room_id: str, message: dict) -> None:
|
||||
if room_id not in self.queues:
|
||||
self.queues[room_id] = asyncio.Queue()
|
||||
payload = json.dumps(message).encode("utf-8")
|
||||
await self.queues[room_id].put(
|
||||
{"channel": room_id.encode("utf-8"), "data": payload}
|
||||
)
|
||||
|
||||
async def subscribe(self, room_id: str):
|
||||
if room_id not in self.queues:
|
||||
self.queues[room_id] = asyncio.Queue()
|
||||
return _InMemorySubscriber(self.queues[room_id])
|
||||
|
||||
async def unsubscribe(self, room_id: str) -> None:
|
||||
# keep queue for potential later resubscribe within same test
|
||||
pass
|
||||
|
||||
pubsub = InMemoryPubSubManager()
|
||||
ws_manager = WebsocketManager(pubsub_client=pubsub)
|
||||
|
||||
def _get_ws_manager():
|
||||
return ws_manager
|
||||
|
||||
# Patch all places that imported get_ws_manager at import time
|
||||
monkeypatch.setattr("reflector.ws_manager.get_ws_manager", _get_ws_manager)
|
||||
monkeypatch.setattr(
|
||||
"reflector.pipelines.main_live_pipeline.get_ws_manager", _get_ws_manager
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
"reflector.views.transcripts_websocket.get_ws_manager", _get_ws_manager
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
"reflector.views.user_websocket.get_ws_manager", _get_ws_manager
|
||||
)
|
||||
monkeypatch.setattr("reflector.views.transcripts.get_ws_manager", _get_ws_manager)
|
||||
|
||||
# Websocket auth: avoid OAuth2 on websocket dependencies; allow anonymous
|
||||
import reflector.auth as auth
|
||||
|
||||
# Ensure FastAPI uses our override for routes that captured the original callable
|
||||
from reflector.app import app as fastapi_app
|
||||
|
||||
try:
|
||||
fastapi_app.dependency_overrides[auth.current_user_optional] = lambda: None
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Stub Redis cache used by profanity filter to avoid external Redis
|
||||
from reflector import redis_cache as rc
|
||||
|
||||
class _FakeRedis:
|
||||
def __init__(self):
|
||||
self._data = {}
|
||||
|
||||
def get(self, key):
|
||||
value = self._data.get(key)
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, bytes):
|
||||
return value
|
||||
return str(value).encode("utf-8")
|
||||
|
||||
def setex(self, key, duration, value):
|
||||
# ignore duration for tests
|
||||
if isinstance(value, bytes):
|
||||
self._data[key] = value
|
||||
else:
|
||||
self._data[key] = str(value).encode("utf-8")
|
||||
|
||||
fake_redises: dict[int, _FakeRedis] = {}
|
||||
|
||||
def _get_redis_client(db=0):
|
||||
if db not in fake_redises:
|
||||
fake_redises[db] = _FakeRedis()
|
||||
return fake_redises[db]
|
||||
|
||||
monkeypatch.setattr(rc, "get_redis_client", _get_redis_client)
|
||||
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.mark.asyncio
|
||||
async def authenticated_client():
|
||||
async with authenticated_client_ctx():
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.mark.asyncio
|
||||
async def authenticated_client2():
|
||||
async with authenticated_client2_ctx():
|
||||
yield
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def authenticated_client_ctx():
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user, current_user_optional
|
||||
|
||||
app.dependency_overrides[current_user] = lambda: {
|
||||
"sub": "randomuserid",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "randomuserid",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
yield
|
||||
del app.dependency_overrides[current_user]
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def authenticated_client2_ctx():
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user, current_user_optional
|
||||
|
||||
app.dependency_overrides[current_user] = lambda: {
|
||||
"sub": "randomuserid2",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "randomuserid2",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
yield
|
||||
del app.dependency_overrides[current_user]
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def fake_mp3_upload():
|
||||
with patch(
|
||||
|
||||
@@ -11,13 +11,20 @@ from reflector.db.rooms import rooms_controller
@pytest.fixture
async def authenticated_client(client):
    from reflector.app import app
    from reflector.auth import current_user_optional
    from reflector.auth import current_user, current_user_optional

    app.dependency_overrides[current_user] = lambda: {
        "sub": "test-user",
        "email": "test@example.com",
    }
    app.dependency_overrides[current_user_optional] = lambda: {
        "sub": "test-user",
        "email": "test@example.com",
    }
    try:
        yield client
    finally:
        del app.dependency_overrides[current_user]
        del app.dependency_overrides[current_user_optional]
256 server/tests/test_search_date_filtering.py Normal file
@@ -0,0 +1,256 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.db.search import SearchParameters, search_controller
|
||||
from reflector.db.transcripts import SourceKind, transcripts
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestDateRangeIntegration:
|
||||
async def setup_test_transcripts(self):
|
||||
# Use a test user_id that will match in our search parameters
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
test_data = [
|
||||
{
|
||||
"id": "test-before-range",
|
||||
"created_at": datetime(2024, 1, 15, tzinfo=timezone.utc),
|
||||
"title": "Before Range Transcript",
|
||||
"user_id": test_user_id,
|
||||
},
|
||||
{
|
||||
"id": "test-start-boundary",
|
||||
"created_at": datetime(2024, 6, 1, tzinfo=timezone.utc),
|
||||
"title": "Start Boundary Transcript",
|
||||
"user_id": test_user_id,
|
||||
},
|
||||
{
|
||||
"id": "test-middle-range",
|
||||
"created_at": datetime(2024, 6, 15, tzinfo=timezone.utc),
|
||||
"title": "Middle Range Transcript",
|
||||
"user_id": test_user_id,
|
||||
},
|
||||
{
|
||||
"id": "test-end-boundary",
|
||||
"created_at": datetime(2024, 6, 30, 23, 59, 59, tzinfo=timezone.utc),
|
||||
"title": "End Boundary Transcript",
|
||||
"user_id": test_user_id,
|
||||
},
|
||||
{
|
||||
"id": "test-after-range",
|
||||
"created_at": datetime(2024, 12, 31, tzinfo=timezone.utc),
|
||||
"title": "After Range Transcript",
|
||||
"user_id": test_user_id,
|
||||
},
|
||||
]
|
||||
|
||||
for data in test_data:
|
||||
full_data = {
|
||||
"id": data["id"],
|
||||
"name": data["id"],
|
||||
"status": "ended",
|
||||
"locked": False,
|
||||
"duration": 60.0,
|
||||
"created_at": data["created_at"],
|
||||
"title": data["title"],
|
||||
"short_summary": "Test summary",
|
||||
"long_summary": "Test long summary",
|
||||
"share_mode": "public",
|
||||
"source_kind": SourceKind.FILE,
|
||||
"audio_deleted": False,
|
||||
"reviewed": False,
|
||||
"user_id": data["user_id"],
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**full_data))
|
||||
|
||||
return test_data
|
||||
|
||||
async def cleanup_test_transcripts(self, test_data):
|
||||
"""Clean up test transcripts."""
|
||||
for data in test_data:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == data["id"])
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_filter_with_from_datetime_only(self):
|
||||
"""Test filtering with only from_datetime parameter."""
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=datetime(2024, 6, 1, tzinfo=timezone.utc),
|
||||
to_datetime=None,
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
# Should include: start_boundary, middle, end_boundary, after
|
||||
result_ids = [r.id for r in results]
|
||||
assert "test-before-range" not in result_ids
|
||||
assert "test-start-boundary" in result_ids
|
||||
assert "test-middle-range" in result_ids
|
||||
assert "test-end-boundary" in result_ids
|
||||
assert "test-after-range" in result_ids
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_filter_with_to_datetime_only(self):
|
||||
"""Test filtering with only to_datetime parameter."""
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=None,
|
||||
to_datetime=datetime(2024, 6, 30, tzinfo=timezone.utc),
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
result_ids = [r.id for r in results]
|
||||
assert "test-before-range" in result_ids
|
||||
assert "test-start-boundary" in result_ids
|
||||
assert "test-middle-range" in result_ids
|
||||
assert "test-end-boundary" not in result_ids
|
||||
assert "test-after-range" not in result_ids
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_filter_with_both_datetimes(self):
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=datetime(2024, 6, 1, tzinfo=timezone.utc),
|
||||
to_datetime=datetime(
|
||||
2024, 7, 1, tzinfo=timezone.utc
|
||||
), # Inclusive of 6/30
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
result_ids = [r.id for r in results]
|
||||
assert "test-before-range" not in result_ids
|
||||
assert "test-start-boundary" in result_ids
|
||||
assert "test-middle-range" in result_ids
|
||||
assert "test-end-boundary" in result_ids
|
||||
assert "test-after-range" not in result_ids
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_date_filter_with_room_and_source_kind(self):
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=datetime(2024, 6, 1, tzinfo=timezone.utc),
|
||||
to_datetime=datetime(2024, 7, 1, tzinfo=timezone.utc),
|
||||
source_kind=SourceKind.FILE,
|
||||
room_id=None,
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
for result in results:
|
||||
assert result.source_kind == SourceKind.FILE
|
||||
assert result.created_at >= datetime(2024, 6, 1, tzinfo=timezone.utc)
|
||||
assert result.created_at <= datetime(2024, 7, 1, tzinfo=timezone.utc)
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_results_for_future_dates(self):
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=datetime(2099, 1, 1, tzinfo=timezone.utc),
|
||||
to_datetime=datetime(2099, 12, 31, tzinfo=timezone.utc),
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
assert results == []
|
||||
assert total == 0
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_date_only_input_handling(self):
|
||||
test_data = await self.setup_test_transcripts()
|
||||
test_user_id = "test-user-123"
|
||||
|
||||
try:
|
||||
# Pydantic will parse date-only strings to datetime at midnight
|
||||
from_dt = datetime(2024, 6, 15, 0, 0, 0, tzinfo=timezone.utc)
|
||||
to_dt = datetime(2024, 6, 16, 0, 0, 0, tzinfo=timezone.utc)
|
||||
|
||||
params = SearchParameters(
|
||||
query_text=None,
|
||||
from_datetime=from_dt,
|
||||
to_datetime=to_dt,
|
||||
user_id=test_user_id,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
|
||||
result_ids = [r.id for r in results]
|
||||
assert "test-middle-range" in result_ids
|
||||
assert "test-before-range" not in result_ids
|
||||
assert "test-after-range" not in result_ids
|
||||
|
||||
finally:
|
||||
await self.cleanup_test_transcripts(test_data)
|
||||
|
||||
|
||||
class TestDateValidationEdgeCases:
|
||||
"""Edge case tests for datetime validation."""
|
||||
|
||||
def test_timezone_aware_comparison(self):
|
||||
"""Test that timezone-aware comparisons work correctly."""
|
||||
# PST time (UTC-8)
|
||||
pst = timezone(timedelta(hours=-8))
|
||||
pst_dt = datetime(2024, 6, 15, 8, 0, 0, tzinfo=pst)
|
||||
|
||||
# UTC time equivalent (8AM PST = 4PM UTC)
|
||||
utc_dt = datetime(2024, 6, 15, 16, 0, 0, tzinfo=timezone.utc)
|
||||
|
||||
assert pst_dt == utc_dt
|
||||
|
||||
def test_mixed_timezone_input(self):
|
||||
"""Test handling mixed timezone inputs."""
|
||||
pst = timezone(timedelta(hours=-8))
|
||||
ist = timezone(timedelta(hours=5, minutes=30))
|
||||
|
||||
from_date = datetime(2024, 6, 15, 0, 0, 0, tzinfo=pst) # PST midnight
|
||||
to_date = datetime(2024, 6, 15, 23, 59, 59, tzinfo=ist) # IST end of day
|
||||
|
||||
assert from_date.tzinfo is not None
|
||||
assert to_date.tzinfo is not None
|
||||
assert from_date < to_date
|
||||
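Taken together, the assertions above pin down the comparison semantics: `from_datetime` is an inclusive lower bound and `to_datetime` an upper bound (the 2024-06-30 00:00 cutoff excludes the 23:59:59 transcript). A sketch of the corresponding filter on the `transcripts` table imported at the top of this file; the real `search_controller` query is not part of this diff, and the tests do not establish whether the upper bound is inclusive — `<=` merely satisfies them.

```python
# Sketch only: the date-filter semantics asserted by the tests above,
# expressed as SQLAlchemy filters. Not the actual search_controller code.
from datetime import datetime, timezone

from sqlalchemy import select

from reflector.db.transcripts import transcripts


def apply_date_filters(query, from_datetime=None, to_datetime=None):
    if from_datetime is not None:
        query = query.where(transcripts.c.created_at >= from_datetime)  # inclusive
    if to_datetime is not None:
        query = query.where(transcripts.c.created_at <= to_datetime)  # upper bound
    return query


# usage
query = apply_date_filters(
    select(transcripts),
    from_datetime=datetime(2024, 6, 1, tzinfo=timezone.utc),
    to_datetime=datetime(2024, 7, 1, tzinfo=timezone.utc),
)
```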
384 server/tests/test_security_permissions.py Normal file
@@ -0,0 +1,384 @@
|
||||
import asyncio
|
||||
import shutil
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from httpx_ws import aconnect_ws
|
||||
from uvicorn import Config, Server
|
||||
|
||||
from reflector import zulip as zulip_module
|
||||
from reflector.app import app
|
||||
from reflector.db import get_database
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import Room, rooms_controller
|
||||
from reflector.db.transcripts import (
|
||||
SourceKind,
|
||||
TranscriptTopic,
|
||||
transcripts_controller,
|
||||
)
|
||||
from reflector.processors.types import Word
|
||||
from reflector.settings import settings
|
||||
from reflector.views.transcripts import create_access_token
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_delete_transcript_in_shared_room(client):
|
||||
# Create a shared room with a fake owner id so meeting has a room_id
|
||||
room = await rooms_controller.add(
|
||||
name="shared-room-test",
|
||||
user_id="owner-1",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=True,
|
||||
webhook_url="",
|
||||
webhook_secret="",
|
||||
)
|
||||
|
||||
# Create a meeting for that room (so transcript.meeting_id links to the shared room)
|
||||
meeting = await meetings_controller.create(
|
||||
id="meeting-sec-test",
|
||||
room_name="room-sec-test",
|
||||
room_url="room-url",
|
||||
host_room_url="host-url",
|
||||
start_date=Room.model_fields["created_at"].default_factory(),
|
||||
end_date=Room.model_fields["created_at"].default_factory(),
|
||||
room=room,
|
||||
)
|
||||
|
||||
# Create a transcript owned by someone else and link it to meeting
|
||||
t = await transcripts_controller.add(
|
||||
name="to-delete",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id="owner-2",
|
||||
meeting_id=meeting.id,
|
||||
room_id=room.id,
|
||||
share_mode="private",
|
||||
)
|
||||
|
||||
# Anonymous DELETE should be rejected
|
||||
del_resp = await client.delete(f"/transcripts/{t.id}")
|
||||
assert del_resp.status_code == 401, del_resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_mutate_participants_on_public_transcript(client):
|
||||
# Create a public transcript with no owner
|
||||
t = await transcripts_controller.add(
|
||||
name="public-transcript",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id=None,
|
||||
share_mode="public",
|
||||
)
|
||||
|
||||
# Anonymous POST participant must be rejected
|
||||
resp = await client.post(
|
||||
f"/transcripts/{t.id}/participants",
|
||||
json={"name": "AnonUser", "speaker": 0},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_update_and_delete_room(client):
|
||||
# Create room as owner id "owner-3" via controller
|
||||
room = await rooms_controller.add(
|
||||
name="room-anon-update-delete",
|
||||
user_id="owner-3",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
webhook_url="",
|
||||
webhook_secret="",
|
||||
)
|
||||
|
||||
# Anonymous PATCH via API (no auth)
|
||||
resp = await client.patch(
|
||||
f"/rooms/{room.id}",
|
||||
json={
|
||||
"name": "room-anon-updated",
|
||||
"zulip_auto_post": False,
|
||||
"zulip_stream": "",
|
||||
"zulip_topic": "",
|
||||
"is_locked": False,
|
||||
"room_mode": "normal",
|
||||
"recording_type": "cloud",
|
||||
"recording_trigger": "automatic-2nd-participant",
|
||||
"is_shared": False,
|
||||
"webhook_url": "",
|
||||
"webhook_secret": "",
|
||||
},
|
||||
)
|
||||
# Expect authentication required
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
# Anonymous DELETE via API
|
||||
del_resp = await client.delete(f"/rooms/{room.id}")
|
||||
assert del_resp.status_code == 401, del_resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_post_transcript_to_zulip(client, monkeypatch):
|
||||
# Create a public transcript with some content
|
||||
t = await transcripts_controller.add(
|
||||
name="zulip-public",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id=None,
|
||||
share_mode="public",
|
||||
)
|
||||
|
||||
# Mock send/update calls
|
||||
def _fake_send_message_to_zulip(stream, topic, content):
|
||||
return {"id": 12345}
|
||||
|
||||
async def _fake_update_message(message_id, stream, topic, content):
|
||||
return {"result": "success"}
|
||||
|
||||
monkeypatch.setattr(
|
||||
zulip_module, "send_message_to_zulip", _fake_send_message_to_zulip
|
||||
)
|
||||
monkeypatch.setattr(zulip_module, "update_zulip_message", _fake_update_message)
|
||||
|
||||
# Anonymous POST to Zulip endpoint
|
||||
resp = await client.post(
|
||||
f"/transcripts/{t.id}/zulip",
|
||||
params={"stream": "general", "topic": "Updates", "include_topics": False},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_assign_speaker_on_public_transcript(client):
|
||||
# Create public transcript
|
||||
t = await transcripts_controller.add(
|
||||
name="public-assign",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id=None,
|
||||
share_mode="public",
|
||||
)
|
||||
|
||||
# Add a topic with words to be reassigned
|
||||
topic = TranscriptTopic(
|
||||
title="T1",
|
||||
summary="S1",
|
||||
timestamp=0.0,
|
||||
transcript="Hello",
|
||||
words=[Word(start=0.0, end=1.0, text="Hello", speaker=0)],
|
||||
)
|
||||
transcript = await transcripts_controller.get_by_id(t.id)
|
||||
await transcripts_controller.upsert_topic(transcript, topic)
|
||||
|
||||
# Anonymous assign speaker over time range covering the word
|
||||
resp = await client.patch(
|
||||
f"/transcripts/{t.id}/speaker/assign",
|
||||
json={
|
||||
"speaker": 1,
|
||||
"timestamp_from": 0.0,
|
||||
"timestamp_to": 1.0,
|
||||
},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
# Minimal server fixture for websocket tests
|
||||
@pytest.fixture
|
||||
def appserver_ws_simple(setup_database):
|
||||
host = "127.0.0.1"
|
||||
port = 1256
|
||||
server_started = threading.Event()
|
||||
server_exception = None
|
||||
server_instance = None
|
||||
|
||||
def run_server():
|
||||
nonlocal server_exception, server_instance
|
||||
try:
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
config = Config(app=app, host=host, port=port, loop=loop)
|
||||
server_instance = Server(config)
|
||||
|
||||
async def start_server():
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
try:
|
||||
await server_instance.serve()
|
||||
finally:
|
||||
await database.disconnect()
|
||||
|
||||
server_started.set()
|
||||
loop.run_until_complete(start_server())
|
||||
except Exception as e:
|
||||
server_exception = e
|
||||
server_started.set()
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
server_thread = threading.Thread(target=run_server, daemon=True)
|
||||
server_thread.start()
|
||||
|
||||
server_started.wait(timeout=30)
|
||||
if server_exception:
|
||||
raise server_exception
|
||||
|
||||
time.sleep(0.5)
|
||||
|
||||
yield host, port
|
||||
|
||||
if server_instance:
|
||||
server_instance.should_exit = True
|
||||
server_thread.join(timeout=30)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_websocket_denies_anonymous_on_private_transcript(appserver_ws_simple):
|
||||
host, port = appserver_ws_simple
|
||||
|
||||
# Create a private transcript owned by someone
|
||||
t = await transcripts_controller.add(
|
||||
name="private-ws",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id="owner-x",
|
||||
share_mode="private",
|
||||
)
|
||||
|
||||
base_url = f"http://{host}:{port}/v1"
|
||||
# Anonymous connect should be denied
|
||||
with pytest.raises(Exception):
|
||||
async with aconnect_ws(f"{base_url}/transcripts/{t.id}/events") as ws:
|
||||
await ws.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_update_public_transcript(client):
|
||||
t = await transcripts_controller.add(
|
||||
name="update-me",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id=None,
|
||||
share_mode="public",
|
||||
)
|
||||
|
||||
resp = await client.patch(
|
||||
f"/transcripts/{t.id}",
|
||||
json={"title": "New Title From Anonymous"},
|
||||
)
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_get_nonshared_room_by_id(client):
|
||||
room = await rooms_controller.add(
|
||||
name="private-room-exposed",
|
||||
user_id="owner-z",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
webhook_url="",
|
||||
webhook_secret="",
|
||||
)
|
||||
|
||||
resp = await client.get(f"/rooms/{room.id}")
|
||||
assert resp.status_code == 403, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_call_rooms_webhook_test(client):
|
||||
room = await rooms_controller.add(
|
||||
name="room-webhook-test",
|
||||
user_id="owner-y",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
webhook_url="http://localhost.invalid/webhook",
|
||||
webhook_secret="secret",
|
||||
)
|
||||
|
||||
# Anonymous caller
|
||||
resp = await client.post(f"/rooms/{room.id}/webhook/test")
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anonymous_cannot_create_room(client):
|
||||
payload = {
|
||||
"name": "room-create-auth-required",
|
||||
"zulip_auto_post": False,
|
||||
"zulip_stream": "",
|
||||
"zulip_topic": "",
|
||||
"is_locked": False,
|
||||
"room_mode": "normal",
|
||||
"recording_type": "cloud",
|
||||
"recording_trigger": "automatic-2nd-participant",
|
||||
"is_shared": False,
|
||||
"webhook_url": "",
|
||||
"webhook_secret": "",
|
||||
}
|
||||
resp = await client.post("/rooms", json=payload)
|
||||
assert resp.status_code == 401, resp.text
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_search_401_when_public_mode_false(client, monkeypatch):
|
||||
monkeypatch.setattr(settings, "PUBLIC_MODE", False)
|
||||
|
||||
resp = await client.get("/transcripts")
|
||||
assert resp.status_code == 401
|
||||
|
||||
resp = await client.get("/transcripts/search", params={"q": "hello"})
|
||||
assert resp.status_code == 401
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_audio_mp3_requires_token_for_owned_transcript(
|
||||
client, tmpdir, monkeypatch
|
||||
):
|
||||
# Use temp data dir
|
||||
monkeypatch.setattr(settings, "DATA_DIR", Path(tmpdir).as_posix())
|
||||
|
||||
# Create owner transcript and attach a local mp3
|
||||
t = await transcripts_controller.add(
|
||||
name="owned-audio",
|
||||
source_kind=SourceKind.LIVE,
|
||||
user_id="owner-a",
|
||||
share_mode="private",
|
||||
)
|
||||
|
||||
tr = await transcripts_controller.get_by_id(t.id)
|
||||
await transcripts_controller.update(tr, {"status": "ended"})
|
||||
|
||||
# copy fixture audio to transcript path
|
||||
audio_path = Path(__file__).parent / "records" / "test_mathieu_hello.mp3"
|
||||
tr.audio_mp3_filename.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy(audio_path, tr.audio_mp3_filename)
|
||||
|
||||
# Anonymous GET without token should be 403 or 404 depending on access; we call mp3
|
||||
resp = await client.get(f"/transcripts/{t.id}/audio/mp3")
|
||||
assert resp.status_code == 403
|
||||
|
||||
# With token should succeed
|
||||
token = create_access_token(
|
||||
{"sub": tr.user_id}, expires_delta=__import__("datetime").timedelta(minutes=15)
|
||||
)
|
||||
resp2 = await client.get(f"/transcripts/{t.id}/audio/mp3", params={"token": token})
|
||||
assert resp2.status_code == 200
|
||||
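The last test calls `create_access_token({"sub": ...}, expires_delta=...)` from `reflector.views.transcripts` and passes the result as a `token` query parameter. That helper's implementation is not shown in this diff; a minimal sketch consistent with the call signature — the HS256/`SECRET_KEY` choice mirrors the test-only JWT helper in `test_user_websocket_auth.py` below and is an assumption here, not the confirmed production scheme.

```python
# Hypothetical sketch of a create_access_token-style helper; claim names
# and signing algorithm are assumptions, only the call shape comes from
# the test above.
from datetime import datetime, timedelta, timezone

from jose import jwt

from reflector.settings import settings


def create_access_token(data: dict, expires_delta: timedelta) -> str:
    payload = dict(data)
    payload["exp"] = datetime.now(timezone.utc) + expires_delta
    return jwt.encode(payload, settings.SECRET_KEY, algorithm="HS256")
```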
@@ -1,5 +1,3 @@
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@@ -19,7 +17,7 @@ async def test_transcript_create(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_get_update_name(client):
|
||||
async def test_transcript_get_update_name(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["name"] == "test"
|
||||
@@ -40,7 +38,7 @@ async def test_transcript_get_update_name(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_get_update_locked(client):
|
||||
async def test_transcript_get_update_locked(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["locked"] is False
|
||||
@@ -61,7 +59,7 @@ async def test_transcript_get_update_locked(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_get_update_summary(client):
|
||||
async def test_transcript_get_update_summary(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["long_summary"] is None
|
||||
@@ -89,7 +87,7 @@ async def test_transcript_get_update_summary(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_get_update_title(client):
|
||||
async def test_transcript_get_update_title(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["title"] is None
|
||||
@@ -127,56 +125,6 @@ async def test_transcripts_list_anonymous(client):
|
||||
settings.PUBLIC_MODE = False
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def authenticated_client_ctx():
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user, current_user_optional
|
||||
|
||||
app.dependency_overrides[current_user] = lambda: {
|
||||
"sub": "randomuserid",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "randomuserid",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
yield
|
||||
del app.dependency_overrides[current_user]
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def authenticated_client2_ctx():
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user, current_user_optional
|
||||
|
||||
app.dependency_overrides[current_user] = lambda: {
|
||||
"sub": "randomuserid2",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "randomuserid2",
|
||||
"email": "test@mail.com",
|
||||
}
|
||||
yield
|
||||
del app.dependency_overrides[current_user]
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.mark.asyncio
|
||||
async def authenticated_client():
|
||||
async with authenticated_client_ctx():
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.mark.asyncio
|
||||
async def authenticated_client2():
|
||||
async with authenticated_client2_ctx():
|
||||
yield
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcripts_list_authenticated(authenticated_client, client):
|
||||
# XXX this test is a bit fragile, as it depends on the storage which
|
||||
@@ -199,7 +147,7 @@ async def test_transcripts_list_authenticated(authenticated_client, client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_delete(client):
|
||||
async def test_transcript_delete(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "testdel1"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["name"] == "testdel1"
|
||||
@@ -214,7 +162,7 @@ async def test_transcript_delete(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_mark_reviewed(client):
|
||||
async def test_transcript_mark_reviewed(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["name"] == "test"
|
||||
|
||||
@@ -111,7 +111,9 @@ async def test_transcript_audio_download_range_with_seek(
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_delete_with_audio(fake_transcript, client):
|
||||
async def test_transcript_delete_with_audio(
|
||||
authenticated_client, fake_transcript, client
|
||||
):
|
||||
response = await client.delete(f"/transcripts/{fake_transcript.id}")
|
||||
assert response.status_code == 200
|
||||
assert response.json()["status"] == "ok"
|
||||
|
||||
@@ -2,7 +2,7 @@ import pytest
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_participants(client):
|
||||
async def test_transcript_participants(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["participants"] == []
|
||||
@@ -39,7 +39,7 @@ async def test_transcript_participants(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_participants_same_speaker(client):
|
||||
async def test_transcript_participants_same_speaker(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["participants"] == []
|
||||
@@ -62,7 +62,7 @@ async def test_transcript_participants_same_speaker(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_participants_update_name(client):
|
||||
async def test_transcript_participants_update_name(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["participants"] == []
|
||||
@@ -100,7 +100,7 @@ async def test_transcript_participants_update_name(client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_participants_update_speaker(client):
|
||||
async def test_transcript_participants_update_speaker(authenticated_client, client):
|
||||
response = await client.post("/transcripts", json={"name": "test"})
|
||||
assert response.status_code == 200
|
||||
assert response.json()["participants"] == []
|
||||
|
||||
@@ -2,7 +2,9 @@ import pytest
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_reassign_speaker(fake_transcript_with_topics, client):
|
||||
async def test_transcript_reassign_speaker(
|
||||
authenticated_client, fake_transcript_with_topics, client
|
||||
):
|
||||
transcript_id = fake_transcript_with_topics.id
|
||||
|
||||
# check the transcript exists
|
||||
@@ -114,7 +116,9 @@ async def test_transcript_reassign_speaker(fake_transcript_with_topics, client):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_merge_speaker(fake_transcript_with_topics, client):
|
||||
async def test_transcript_merge_speaker(
|
||||
authenticated_client, fake_transcript_with_topics, client
|
||||
):
|
||||
transcript_id = fake_transcript_with_topics.id
|
||||
|
||||
# check the transcript exists
|
||||
@@ -181,7 +185,7 @@ async def test_transcript_merge_speaker(fake_transcript_with_topics, client):
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_reassign_with_participant(
|
||||
fake_transcript_with_topics, client
|
||||
authenticated_client, fake_transcript_with_topics, client
|
||||
):
|
||||
transcript_id = fake_transcript_with_topics.id
|
||||
|
||||
@@ -347,7 +351,9 @@ async def test_transcript_reassign_with_participant(
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcript_reassign_edge_cases(fake_transcript_with_topics, client):
|
||||
async def test_transcript_reassign_edge_cases(
|
||||
authenticated_client, fake_transcript_with_topics, client
|
||||
):
|
||||
transcript_id = fake_transcript_with_topics.id
|
||||
|
||||
# check the transcript exists
|
||||
|
||||
70 server/tests/test_user_api_keys.py Normal file
@@ -0,0 +1,70 @@
import pytest

from reflector.db.user_api_keys import user_api_keys_controller


@pytest.mark.asyncio
async def test_api_key_creation_and_verification():
    api_key_model, plaintext = await user_api_keys_controller.create_key(
        user_id="test_user",
        name="Test API Key",
    )

    verified = await user_api_keys_controller.verify_key(plaintext)
    assert verified is not None
    assert verified.user_id == "test_user"
    assert verified.name == "Test API Key"

    invalid = await user_api_keys_controller.verify_key("fake_key")
    assert invalid is None


@pytest.mark.asyncio
async def test_api_key_hashing():
    _, plaintext = await user_api_keys_controller.create_key(
        user_id="test_user_2",
    )

    api_keys = await user_api_keys_controller.list_by_user_id("test_user_2")
    assert len(api_keys) == 1
    assert api_keys[0].key_hash != plaintext


@pytest.mark.asyncio
async def test_generate_api_key_uniqueness():
    key1 = user_api_keys_controller.generate_key()
    key2 = user_api_keys_controller.generate_key()
    assert key1 != key2


@pytest.mark.asyncio
async def test_hash_api_key_deterministic():
    key = "test_key_123"
    hash1 = user_api_keys_controller.hash_key(key)
    hash2 = user_api_keys_controller.hash_key(key)
    assert hash1 == hash2


@pytest.mark.asyncio
async def test_get_by_user_id_empty():
    api_keys = await user_api_keys_controller.list_by_user_id("nonexistent_user")
    assert api_keys == []


@pytest.mark.asyncio
async def test_get_by_user_id_multiple():
    user_id = "multi_key_user"

    _, plaintext1 = await user_api_keys_controller.create_key(
        user_id=user_id,
        name="API Key 1",
    )
    _, plaintext2 = await user_api_keys_controller.create_key(
        user_id=user_id,
        name="API Key 2",
    )

    api_keys = await user_api_keys_controller.list_by_user_id(user_id)
    assert len(api_keys) == 2
    names = {k.name for k in api_keys}
    assert names == {"API Key 1", "API Key 2"}
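These tests constrain the controller's behavior — unique plaintext keys, deterministic hashing, plaintext never stored — without showing its implementation. A sketch of key generation and hashing that would satisfy them; the SHA-256 choice is an assumption, not taken from this diff.

```python
# Sketch only: one way to satisfy the assertions above. The real
# user_api_keys_controller implementation is not part of this diff.
import hashlib
import secrets


def generate_key() -> str:
    return secrets.token_urlsafe(32)  # unique on every call


def hash_key(plaintext: str) -> str:
    # deterministic, so verify_key(plaintext) can look up the stored key_hash
    return hashlib.sha256(plaintext.encode("utf-8")).hexdigest()
```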
156 server/tests/test_user_websocket_auth.py Normal file
@@ -0,0 +1,156 @@
|
||||
import asyncio
|
||||
import threading
|
||||
import time
|
||||
|
||||
import pytest
|
||||
from httpx_ws import aconnect_ws
|
||||
from uvicorn import Config, Server
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def appserver_ws_user(setup_database):
|
||||
from reflector.app import app
|
||||
from reflector.db import get_database
|
||||
|
||||
host = "127.0.0.1"
|
||||
port = 1257
|
||||
server_started = threading.Event()
|
||||
server_exception = None
|
||||
server_instance = None
|
||||
|
||||
def run_server():
|
||||
nonlocal server_exception, server_instance
|
||||
try:
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
config = Config(app=app, host=host, port=port, loop=loop)
|
||||
server_instance = Server(config)
|
||||
|
||||
async def start_server():
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
try:
|
||||
await server_instance.serve()
|
||||
finally:
|
||||
await database.disconnect()
|
||||
|
||||
server_started.set()
|
||||
loop.run_until_complete(start_server())
|
||||
except Exception as e:
|
||||
server_exception = e
|
||||
server_started.set()
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
server_thread = threading.Thread(target=run_server, daemon=True)
|
||||
server_thread.start()
|
||||
|
||||
server_started.wait(timeout=30)
|
||||
if server_exception:
|
||||
raise server_exception
|
||||
|
||||
time.sleep(0.5)
|
||||
|
||||
yield host, port
|
||||
|
||||
if server_instance:
|
||||
server_instance.should_exit = True
|
||||
server_thread.join(timeout=30)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def patch_jwt_verification(monkeypatch):
|
||||
"""Patch JWT verification to accept HS256 tokens signed with SECRET_KEY for tests."""
|
||||
from jose import jwt
|
||||
|
||||
from reflector.settings import settings
|
||||
|
||||
def _verify_token(self, token: str):
|
||||
# Do not validate audience in tests
|
||||
return jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"]) # type: ignore[arg-type]
|
||||
|
||||
monkeypatch.setattr(
|
||||
"reflector.auth.auth_jwt.JWTAuth.verify_token", _verify_token, raising=True
|
||||
)
|
||||
|
||||
|
||||
def _make_dummy_jwt(sub: str = "user123") -> str:
|
||||
# Create a short HS256 JWT using the app secret to pass verification in tests
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from jose import jwt
|
||||
|
||||
from reflector.settings import settings
|
||||
|
||||
payload = {
|
||||
"sub": sub,
|
||||
"email": f"{sub}@example.com",
|
||||
"exp": datetime.now(timezone.utc) + timedelta(minutes=5),
|
||||
}
|
||||
# Note: production uses RS256 public key verification; tests can sign with SECRET_KEY
|
||||
return jwt.encode(payload, settings.SECRET_KEY, algorithm="HS256")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_user_ws_rejects_missing_subprotocol(appserver_ws_user):
|
||||
host, port = appserver_ws_user
|
||||
base_ws = f"http://{host}:{port}/v1/events"
|
||||
# No subprotocol/header with token
|
||||
with pytest.raises(Exception):
|
||||
async with aconnect_ws(base_ws) as ws: # type: ignore
|
||||
# Should close during handshake; if not, close explicitly
|
||||
await ws.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_user_ws_rejects_invalid_token(appserver_ws_user):
|
||||
host, port = appserver_ws_user
|
||||
base_ws = f"http://{host}:{port}/v1/events"
|
||||
|
||||
# Send wrong token via WebSocket subprotocols
|
||||
protocols = ["bearer", "totally-invalid-token"]
|
||||
with pytest.raises(Exception):
|
||||
async with aconnect_ws(base_ws, subprotocols=protocols) as ws: # type: ignore
|
||||
await ws.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_user_ws_accepts_valid_token_and_receives_events(appserver_ws_user):
|
||||
host, port = appserver_ws_user
|
||||
base_ws = f"http://{host}:{port}/v1/events"
|
||||
|
||||
token = _make_dummy_jwt("user-abc")
|
||||
subprotocols = ["bearer", token]
|
||||
|
||||
# Connect and then trigger an event via HTTP create
|
||||
async with aconnect_ws(base_ws, subprotocols=subprotocols) as ws:
|
||||
# Emit an event to the user's room via a standard HTTP action
|
||||
from httpx import AsyncClient
|
||||
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user, current_user_optional
|
||||
|
||||
# Override auth dependencies so HTTP request is performed as the same user
|
||||
app.dependency_overrides[current_user] = lambda: {
|
||||
"sub": "user-abc",
|
||||
"email": "user-abc@example.com",
|
||||
}
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "user-abc",
|
||||
"email": "user-abc@example.com",
|
||||
}
|
||||
|
||||
async with AsyncClient(app=app, base_url=f"http://{host}:{port}/v1") as ac:
|
||||
# Create a transcript as this user so that the server publishes TRANSCRIPT_CREATED to user room
|
||||
resp = await ac.post("/transcripts", json={"name": "WS Test"})
|
||||
assert resp.status_code == 200
|
||||
|
||||
# Receive the published event
|
||||
msg = await ws.receive_json()
|
||||
assert msg["event"] == "TRANSCRIPT_CREATED"
|
||||
assert "id" in msg["data"]
|
||||
|
||||
# Clean overrides
|
||||
del app.dependency_overrides[current_user]
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
14 www/.dockerignore Normal file
@@ -0,0 +1,14 @@
.env
.env.*
.env.local
.env.development
.env.production
node_modules
.next
.git
.gitignore
*.md
.DS_Store
coverage
.pnpm-store
*.log
@@ -1,9 +1,5 @@
# Environment
ENVIRONMENT=development
NEXT_PUBLIC_ENV=development

# Site Configuration
NEXT_PUBLIC_SITE_URL=http://localhost:3000
SITE_URL=http://localhost:3000

# Nextauth envs
# not used in app code but in lib code
@@ -18,16 +14,16 @@ AUTHENTIK_CLIENT_ID=your-client-id-here
AUTHENTIK_CLIENT_SECRET=your-client-secret-here

# Feature Flags
# NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN=true
# NEXT_PUBLIC_FEATURE_PRIVACY=false
# NEXT_PUBLIC_FEATURE_BROWSE=true
# NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP=true
# NEXT_PUBLIC_FEATURE_ROOMS=true
# FEATURE_REQUIRE_LOGIN=true
# FEATURE_PRIVACY=false
# FEATURE_BROWSE=true
# FEATURE_SEND_TO_ZULIP=true
# FEATURE_ROOMS=true

# API URLs
NEXT_PUBLIC_API_URL=http://127.0.0.1:1250
NEXT_PUBLIC_WEBSOCKET_URL=ws://127.0.0.1:1250
NEXT_PUBLIC_AUTH_CALLBACK_URL=http://localhost:3000/auth-callback
API_URL=http://127.0.0.1:1250
WEBSOCKET_URL=ws://127.0.0.1:1250
AUTH_CALLBACK_URL=http://localhost:3000/auth-callback

# Sentry
# SENTRY_DSN=https://your-dsn@sentry.io/project-id
1 www/.npmrc Normal file
@@ -0,0 +1 @@
minimum-release-age=1440 #24hr in minutes
81 www/DOCKER_README.md Normal file
@@ -0,0 +1,81 @@
# Docker Production Build Guide

## Overview

The Docker image builds without any environment variables and requires all configuration to be provided at runtime.

## Environment Variables (ALL Runtime)

### Required Runtime Variables

```bash
API_URL                     # Backend API URL (e.g., https://api.example.com)
WEBSOCKET_URL               # WebSocket URL (e.g., wss://api.example.com)
NEXTAUTH_URL                # NextAuth base URL (e.g., https://app.example.com)
NEXTAUTH_SECRET             # Random secret for NextAuth (generate with: openssl rand -base64 32)
KV_URL                      # Redis URL (e.g., redis://redis:6379)
```

### Optional Runtime Variables

```bash
SITE_URL                    # Frontend URL (defaults to NEXTAUTH_URL)

AUTHENTIK_ISSUER            # OAuth issuer URL
AUTHENTIK_CLIENT_ID         # OAuth client ID
AUTHENTIK_CLIENT_SECRET     # OAuth client secret
AUTHENTIK_REFRESH_TOKEN_URL # OAuth token refresh URL

FEATURE_REQUIRE_LOGIN=false # Require authentication
FEATURE_PRIVACY=true        # Enable privacy features
FEATURE_BROWSE=true         # Enable browsing features
FEATURE_SEND_TO_ZULIP=false # Enable Zulip integration
FEATURE_ROOMS=true          # Enable rooms feature

SENTRY_DSN                  # Sentry error tracking
AUTH_CALLBACK_URL           # OAuth callback URL
```

## Building the Image

### Option 1: Using Docker Compose

1. Build the image (no environment variables needed):

```bash
docker compose -f docker-compose.prod.yml build
```

2. Create a `.env` file with runtime variables (see the example after these steps)

3. Run with environment variables:

```bash
docker compose -f docker-compose.prod.yml --env-file .env up -d
```
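A minimal `.env` for step 2, assembled from the variables listed above; all values are placeholders.

```bash
# .env -- runtime only, nothing is baked into the image at build time
API_URL=https://api.example.com
WEBSOCKET_URL=wss://api.example.com
NEXTAUTH_URL=https://app.example.com
NEXTAUTH_SECRET=change-me            # openssl rand -base64 32
KV_URL=redis://redis:6379
FEATURE_REQUIRE_LOGIN=true
```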
### Option 2: Using Docker CLI

1. Build the image (no build args):

```bash
docker build -t reflector-frontend:latest ./www
```

2. Run with environment variables:

```bash
docker run -d \
  -p 3000:3000 \
  -e API_URL=https://api.example.com \
  -e WEBSOCKET_URL=wss://api.example.com \
  -e NEXTAUTH_URL=https://app.example.com \
  -e NEXTAUTH_SECRET=your-secret \
  -e KV_URL=redis://redis:6379 \
  -e AUTHENTIK_ISSUER=https://auth.example.com/application/o/reflector \
  -e AUTHENTIK_CLIENT_ID=your-client-id \
  -e AUTHENTIK_CLIENT_SECRET=your-client-secret \
  -e AUTHENTIK_REFRESH_TOKEN_URL=https://auth.example.com/application/o/token/ \
  -e FEATURE_REQUIRE_LOGIN=true \
  reflector-frontend:latest
```
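The compose file referenced in Option 1 is not included in this diff. A hypothetical minimal `docker-compose.prod.yml` consistent with the runtime-only configuration described above; service layout, image names, and the Redis image are assumptions.

```yaml
# Hypothetical docker-compose.prod.yml sketch -- not the actual file.
services:
  frontend:
    build: ./www
    image: reflector-frontend:latest
    ports:
      - "3000:3000"
    environment:
      API_URL: ${API_URL}
      WEBSOCKET_URL: ${WEBSOCKET_URL}
      NEXTAUTH_URL: ${NEXTAUTH_URL}
      NEXTAUTH_SECRET: ${NEXTAUTH_SECRET}
      KV_URL: ${KV_URL}
    depends_on:
      - redis
  redis:
    image: redis:7-alpine
```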
@@ -24,7 +24,8 @@ COPY --link . .
ENV NEXT_TELEMETRY_DISABLED 1

# If using npm comment out above and use below instead
RUN pnpm build
# next.js has the feature of excluding build step planned https://github.com/vercel/next.js/discussions/46544
RUN pnpm build-production
# RUN npm run build

# Production image, copy all the files and run next
@@ -51,6 +52,10 @@ USER nextjs
EXPOSE 3000

ENV PORT 3000
ENV HOSTNAME localhost
ENV HOSTNAME 0.0.0.0

HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
  CMD wget --no-verbose --tries=1 --spider http://127.0.0.1:3000/api/health \
  || exit 1

CMD ["node", "server.js"]
@@ -27,7 +27,7 @@ import {
|
||||
} from "../../../lib/utils";
|
||||
|
||||
interface ICSSettingsProps {
|
||||
roomName: NonEmptyString;
|
||||
roomName: NonEmptyString | null;
|
||||
icsUrl?: string;
|
||||
icsEnabled?: boolean;
|
||||
icsFetchInterval?: number;
|
||||
@@ -85,7 +85,7 @@ export default function ICSSettings({
|
||||
const handleCopyRoomUrl = async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(
|
||||
roomAbsoluteUrl(assertExistsAndNonEmptyString(roomName)),
|
||||
roomAbsoluteUrl(assertExists(roomName)),
|
||||
);
|
||||
setJustCopied(true);
|
||||
|
||||
@@ -123,7 +123,7 @@ export default function ICSSettings({
|
||||
const handleRoomUrlClick = () => {
|
||||
if (roomUrlInputRef.current) {
|
||||
roomUrlInputRef.current.select();
|
||||
handleCopyRoomUrl();
|
||||
handleCopyRoomUrl().then(() => {});
|
||||
}
|
||||
};
|
||||
|
||||
@@ -196,10 +196,17 @@ export default function ICSSettings({
|
||||
To enable Reflector to recognize your calendar events as meetings,
|
||||
add this URL as the location in your calendar events
|
||||
</Field.HelperText>
|
||||
{roomName ? (
|
||||
<HStack gap={0} position="relative" width="100%">
|
||||
<Input
|
||||
ref={roomUrlInputRef}
|
||||
value={roomAbsoluteUrl(parseNonEmptyString(roomName))}
|
||||
value={roomAbsoluteUrl(
|
||||
parseNonEmptyString(
|
||||
roomName,
|
||||
true,
|
||||
"panic! roomName is required",
|
||||
),
|
||||
)}
|
||||
readOnly
|
||||
onClick={handleRoomUrlClick}
|
||||
cursor="pointer"
|
||||
@@ -220,6 +227,7 @@ export default function ICSSettings({
|
||||
</IconButton>
|
||||
</HStack>
|
||||
</HStack>
|
||||
) : null}
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root>
|
||||
|
||||
@@ -274,15 +274,31 @@ export function RoomTable({
|
||||
<IconButton
|
||||
aria-label="Force sync calendar"
|
||||
onClick={() =>
|
||||
handleForceSync(parseNonEmptyString(room.name))
|
||||
handleForceSync(
|
||||
parseNonEmptyString(
|
||||
room.name,
|
||||
true,
|
||||
"panic! room.name is required",
|
||||
),
|
||||
)
|
||||
}
|
||||
size="sm"
|
||||
variant="ghost"
|
||||
disabled={syncingRooms.has(
|
||||
parseNonEmptyString(room.name),
|
||||
parseNonEmptyString(
|
||||
room.name,
|
||||
true,
|
||||
"panic! room.name is required",
|
||||
),
|
||||
)}
|
||||
>
|
||||
{syncingRooms.has(parseNonEmptyString(room.name)) ? (
|
||||
{syncingRooms.has(
|
||||
parseNonEmptyString(
|
||||
room.name,
|
||||
true,
|
||||
"panic! room.name is required",
|
||||
),
|
||||
) ? (
|
||||
<Spinner size="sm" />
|
||||
) : (
|
||||
<CalendarSyncIcon />
|
||||
@@ -297,7 +313,13 @@ export function RoomTable({
|
||||
<IconButton
|
||||
aria-label="Copy URL"
|
||||
onClick={() =>
|
||||
onCopyUrl(parseNonEmptyString(room.name))
|
||||
onCopyUrl(
|
||||
parseNonEmptyString(
|
||||
room.name,
|
||||
true,
|
||||
"panic! room.name is required",
|
||||
),
|
||||
)
|
||||
}
|
||||
size="sm"
|
||||
variant="ghost"
|
||||
|
||||
@@ -309,7 +309,7 @@ export default function RoomsList() {
|
||||
|
||||
setRoomInput(null);
|
||||
setIsEditing(false);
|
||||
setEditRoomId("");
|
||||
setEditRoomId(null);
|
||||
setNameError("");
|
||||
refetch();
|
||||
onClose();
|
||||
@@ -449,6 +449,13 @@ export default function RoomsList() {
|
||||
</Dialog.CloseTrigger>
|
||||
</Dialog.Header>
|
||||
<Dialog.Body>
|
||||
<form
|
||||
id="room-form"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
handleSaveRoom();
|
||||
}}
|
||||
>
|
||||
<Tabs.Root defaultValue="general">
|
||||
<Tabs.List>
|
||||
<Tabs.Trigger value="general">General</Tabs.Trigger>
|
||||
@@ -465,6 +472,7 @@ export default function RoomsList() {
|
||||
placeholder="room-name"
|
||||
value={room.name}
|
||||
onChange={handleRoomChange}
|
||||
enterKeyHint="next"
|
||||
/>
|
||||
<Field.HelperText>
|
||||
No spaces or special characters allowed
|
||||
@@ -496,7 +504,6 @@ export default function RoomsList() {
|
||||
<Checkbox.Label>Locked room</Checkbox.Label>
|
||||
</Checkbox.Root>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root mt={4}>
|
||||
<Field.Label>Room size</Field.Label>
|
||||
<Select.Root
|
||||
@@ -527,7 +534,6 @@ export default function RoomsList() {
|
||||
</Select.Positioner>
|
||||
</Select.Root>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root mt={4}>
|
||||
<Field.Label>Recording type</Field.Label>
|
||||
<Select.Root
|
||||
@@ -565,13 +571,15 @@ export default function RoomsList() {
|
||||
</Select.Positioner>
|
||||
</Select.Root>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root mt={4}>
|
||||
<Field.Label>Cloud recording start trigger</Field.Label>
|
||||
<Select.Root
|
||||
value={[room.recordingTrigger]}
|
||||
onValueChange={(e) =>
|
||||
setRoomInput({ ...room, recordingTrigger: e.value[0] })
|
||||
setRoomInput({
|
||||
...room,
|
||||
recordingTrigger: e.value[0],
|
||||
})
|
||||
}
|
||||
collection={recordingTriggerCollection}
|
||||
disabled={room.recordingType !== "cloud"}
|
||||
@@ -622,34 +630,6 @@ export default function RoomsList() {
|
||||
</Field.Root>
|
||||
</Tabs.Content>
|
||||
|
||||
<Tabs.Content value="calendar" pt={6}>
|
||||
<ICSSettings
|
||||
roomName={parseNonEmptyString(room.name)}
|
||||
icsUrl={room.icsUrl}
|
||||
icsEnabled={room.icsEnabled}
|
||||
icsFetchInterval={room.icsFetchInterval}
|
||||
onChange={(settings) => {
|
||||
setRoomInput({
|
||||
...room,
|
||||
icsUrl:
|
||||
settings.ics_url !== undefined
|
||||
? settings.ics_url
|
||||
: room.icsUrl,
|
||||
icsEnabled:
|
||||
settings.ics_enabled !== undefined
|
||||
? settings.ics_enabled
|
||||
: room.icsEnabled,
|
||||
icsFetchInterval:
|
||||
settings.ics_fetch_interval !== undefined
|
||||
? settings.ics_fetch_interval
|
||||
: room.icsFetchInterval,
|
||||
});
|
||||
}}
|
||||
isOwner={true}
|
||||
isEditing={isEditing}
|
||||
/>
|
||||
</Tabs.Content>
|
||||
|
||||
<Tabs.Content value="share" pt={6}>
|
||||
<Field.Root>
|
||||
<Checkbox.Root
|
||||
@@ -675,7 +655,6 @@ export default function RoomsList() {
|
||||
</Checkbox.Label>
|
||||
</Checkbox.Root>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root mt={4}>
|
||||
<Field.Label>Zulip stream</Field.Label>
|
||||
<Select.Root
|
||||
@@ -711,7 +690,6 @@ export default function RoomsList() {
|
||||
</Select.Positioner>
|
||||
</Select.Root>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root mt={4}>
|
||||
<Field.Label>Zulip topic</Field.Label>
|
||||
<Select.Root
|
||||
@@ -750,10 +728,10 @@ export default function RoomsList() {
|
||||
<Field.Label>Webhook URL</Field.Label>
|
||||
<Input
|
||||
name="webhookUrl"
|
||||
type="url"
|
||||
placeholder="https://example.com/webhook"
|
||||
value={room.webhookUrl}
|
||||
onChange={handleRoomChange}
|
||||
enterKeyHint="next"
|
||||
/>
|
||||
<Field.HelperText>
|
||||
Optional: URL to receive notifications when transcripts
|
||||
@@ -832,7 +810,8 @@ export default function RoomsList() {
|
||||
maxWidth: "100%",
|
||||
padding: "8px",
|
||||
borderRadius: "4px",
|
||||
backgroundColor: webhookTestResult.startsWith(
|
||||
backgroundColor:
|
||||
webhookTestResult.startsWith(
|
||||
SUCCESS_EMOJI,
|
||||
)
|
||||
? "#f0fdf4"
|
||||
@@ -849,15 +828,55 @@ export default function RoomsList() {
|
||||
</>
|
||||
)}
|
||||
</Tabs.Content>
|
||||
|
||||
<Tabs.Content value="calendar" pt={6}>
|
||||
<Field.Root>
|
||||
<ICSSettings
|
||||
roomName={
|
||||
room.name
|
||||
? parseNonEmptyString(
|
||||
room.name,
|
||||
true,
|
||||
"panic! room.name required",
|
||||
)
|
||||
: null
|
||||
}
|
||||
icsUrl={room.icsUrl}
|
||||
icsEnabled={room.icsEnabled}
|
||||
icsFetchInterval={room.icsFetchInterval}
|
||||
onChange={(settings) => {
|
||||
setRoomInput({
|
||||
...room,
|
||||
icsUrl:
|
||||
settings.ics_url !== undefined
|
||||
? settings.ics_url
|
||||
: room.icsUrl,
|
||||
icsEnabled:
|
||||
settings.ics_enabled !== undefined
|
||||
? settings.ics_enabled
|
||||
: room.icsEnabled,
|
||||
icsFetchInterval:
|
||||
settings.ics_fetch_interval !== undefined
|
||||
? settings.ics_fetch_interval
|
||||
: room.icsFetchInterval,
|
||||
});
|
||||
}}
|
||||
isOwner={true}
|
||||
isEditing={isEditing}
|
||||
/>
|
||||
</Field.Root>
|
||||
</Tabs.Content>
|
||||
</Tabs.Root>
|
||||
</form>
|
||||
</Dialog.Body>
|
||||
<Dialog.Footer>
|
||||
<Button variant="ghost" onClick={handleCloseDialog}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
type="submit"
|
||||
colorPalette="primary"
|
||||
onClick={handleSaveRoom}
|
||||
form="room-form"
|
||||
disabled={
|
||||
!room.name || (room.zulipAutoPost && !room.zulipTopic)
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
-import { useEffect, useRef, useState } from "react";
+import { useEffect, useState } from "react";
 import React from "react";
 import Markdown from "react-markdown";
 import "../../../styles/markdown.css";
@@ -16,17 +16,15 @@ import {
 } from "@chakra-ui/react";
 import { LuPen } from "react-icons/lu";
 import { useError } from "../../../(errors)/errorContext";
-import ShareAndPrivacy from "../shareAndPrivacy";

 type FinalSummaryProps = {
-  transcriptResponse: GetTranscript;
-  topicsResponse: GetTranscriptTopic[];
-  onUpdate?: (newSummary) => void;
+  transcript: GetTranscript;
+  topics: GetTranscriptTopic[];
+  onUpdate: (newSummary: string) => void;
+  finalSummaryRef: React.Dispatch<React.SetStateAction<HTMLDivElement | null>>;
 };

 export default function FinalSummary(props: FinalSummaryProps) {
-  const finalSummaryRef = useRef<HTMLParagraphElement>(null);
-
   const [isEditMode, setIsEditMode] = useState(false);
   const [preEditSummary, setPreEditSummary] = useState("");
   const [editedSummary, setEditedSummary] = useState("");
@@ -35,10 +33,10 @@ export default function FinalSummary(props: FinalSummaryProps) {
   const updateTranscriptMutation = useTranscriptUpdate();

   useEffect(() => {
-    setEditedSummary(props.transcriptResponse?.long_summary || "");
-  }, [props.transcriptResponse?.long_summary]);
+    setEditedSummary(props.transcript?.long_summary || "");
+  }, [props.transcript?.long_summary]);

-  if (!props.topicsResponse || !props.transcriptResponse) {
+  if (!props.topics || !props.transcript) {
     return null;
   }

@@ -54,9 +52,7 @@ export default function FinalSummary(props: FinalSummaryProps) {
           long_summary: newSummary,
         },
       });
-      if (props.onUpdate) {
       props.onUpdate(newSummary);
-      }
       console.log("Updated long summary:", updatedTranscript);
     } catch (err) {
       console.error("Failed to update long summary:", err);
@@ -75,7 +71,7 @@ export default function FinalSummary(props: FinalSummaryProps) {
   };

   const onSaveClick = () => {
-    updateSummary(editedSummary, props.transcriptResponse.id);
+    updateSummary(editedSummary, props.transcript.id);
     setIsEditMode(false);
   };

@@ -133,11 +129,6 @@ export default function FinalSummary(props: FinalSummaryProps) {
           >
             <LuPen />
           </IconButton>
-          <ShareAndPrivacy
-            finalSummaryRef={finalSummaryRef}
-            transcriptResponse={props.transcriptResponse}
-            topicsResponse={props.topicsResponse}
-          />
         </>
       )}
     </Flex>
@@ -153,7 +144,7 @@ export default function FinalSummary(props: FinalSummaryProps) {
           mt={2}
         />
       ) : (
-        <div ref={finalSummaryRef} className="markdown">
+        <div ref={props.finalSummaryRef} className="markdown">
           <Markdown>{editedSummary}</Markdown>
         </div>
       )}

@@ -41,6 +41,8 @@ export default function TranscriptDetails(details: TranscriptDetails) {
     waiting || mp3.audioDeleted === true,
   );
   const useActiveTopic = useState<Topic | null>(null);
+  const [finalSummaryElement, setFinalSummaryElement] =
+    useState<HTMLDivElement | null>(null);

   useEffect(() => {
     if (waiting) {
@@ -124,9 +126,12 @@ export default function TranscriptDetails(details: TranscriptDetails) {
         <TranscriptTitle
           title={transcript.data?.title || "Unnamed Transcript"}
           transcriptId={transcriptId}
-          onUpdate={(newTitle) => {
+          onUpdate={() => {
             transcript.refetch().then(() => {});
           }}
+          transcript={transcript.data || null}
+          topics={topics.topics}
+          finalSummaryElement={finalSummaryElement}
         />
       </Flex>
       {mp3.audioDeleted && (
@@ -148,11 +153,12 @@ export default function TranscriptDetails(details: TranscriptDetails) {
       {transcript.data && topics.topics ? (
         <>
           <FinalSummary
-            transcriptResponse={transcript.data}
-            topicsResponse={topics.topics}
+            transcript={transcript.data}
+            topics={topics.topics}
             onUpdate={() => {
-              transcript.refetch();
+              transcript.refetch().then(() => {});
             }}
+            finalSummaryRef={setFinalSummaryElement}
           />
         </>
       ) : (

@@ -26,9 +26,9 @@ import { useAuth } from "../../lib/AuthProvider";
 import { featureEnabled } from "../../lib/features";

 type ShareAndPrivacyProps = {
-  finalSummaryRef: any;
-  transcriptResponse: GetTranscript;
-  topicsResponse: GetTranscriptTopic[];
+  finalSummaryElement: HTMLDivElement | null;
+  transcript: GetTranscript;
+  topics: GetTranscriptTopic[];
 };

 type ShareOption = { value: ShareMode; label: string };
@@ -48,7 +48,7 @@ export default function ShareAndPrivacy(props: ShareAndPrivacyProps) {
   const [isOwner, setIsOwner] = useState(false);
   const [shareMode, setShareMode] = useState<ShareOption>(
     shareOptionsData.find(
-      (option) => option.value === props.transcriptResponse.share_mode,
+      (option) => option.value === props.transcript.share_mode,
     ) || shareOptionsData[0],
   );
   const [shareLoading, setShareLoading] = useState(false);
@@ -70,7 +70,7 @@ export default function ShareAndPrivacy(props: ShareAndPrivacyProps) {
     try {
       const updatedTranscript = await updateTranscriptMutation.mutateAsync({
         params: {
-          path: { transcript_id: props.transcriptResponse.id },
+          path: { transcript_id: props.transcript.id },
         },
         body: requestBody,
       });
@@ -90,8 +90,8 @@ export default function ShareAndPrivacy(props: ShareAndPrivacyProps) {
   const userId = auth.status === "authenticated" ? auth.user?.id : null;

   useEffect(() => {
-    setIsOwner(!!(requireLogin && userId === props.transcriptResponse.user_id));
-  }, [userId, props.transcriptResponse.user_id]);
+    setIsOwner(!!(requireLogin && userId === props.transcript.user_id));
+  }, [userId, props.transcript.user_id]);

   return (
     <>
@@ -171,19 +171,19 @@ export default function ShareAndPrivacy(props: ShareAndPrivacyProps) {
       <Flex gap={2} mb={2}>
         {requireLogin && (
           <ShareZulip
-            transcriptResponse={props.transcriptResponse}
-            topicsResponse={props.topicsResponse}
+            transcript={props.transcript}
+            topics={props.topics}
             disabled={toShareMode(shareMode.value) === "private"}
           />
         )}
         <ShareCopy
-          finalSummaryRef={props.finalSummaryRef}
-          transcriptResponse={props.transcriptResponse}
-          topicsResponse={props.topicsResponse}
+          finalSummaryElement={props.finalSummaryElement}
+          transcript={props.transcript}
+          topics={props.topics}
         />
       </Flex>

-      <ShareLink transcriptId={props.transcriptResponse.id} />
+      <ShareLink transcriptId={props.transcript.id} />
     </Dialog.Body>
   </Dialog.Content>
 </Dialog.Positioner>

@@ -5,34 +5,35 @@ type GetTranscriptTopic = components["schemas"]["GetTranscriptTopic"];
 import { Button, BoxProps, Box } from "@chakra-ui/react";

 type ShareCopyProps = {
-  finalSummaryRef: any;
-  transcriptResponse: GetTranscript;
-  topicsResponse: GetTranscriptTopic[];
+  finalSummaryElement: HTMLDivElement | null;
+  transcript: GetTranscript;
+  topics: GetTranscriptTopic[];
 };

 export default function ShareCopy({
-  finalSummaryRef,
-  transcriptResponse,
-  topicsResponse,
+  finalSummaryElement,
+  transcript,
+  topics,
   ...boxProps
 }: ShareCopyProps & BoxProps) {
   const [isCopiedSummary, setIsCopiedSummary] = useState(false);
   const [isCopiedTranscript, setIsCopiedTranscript] = useState(false);

   const onCopySummaryClick = () => {
-    let text_to_copy = finalSummaryRef.current?.innerText;
+    const text_to_copy = finalSummaryElement?.innerText;

-    text_to_copy &&
+    if (text_to_copy) {
       navigator.clipboard.writeText(text_to_copy).then(() => {
         setIsCopiedSummary(true);
         // Reset the copied state after 2 seconds
         setTimeout(() => setIsCopiedSummary(false), 2000);
       });
+    }
   };

   const onCopyTranscriptClick = () => {
     let text_to_copy =
-      topicsResponse
+      topics
         ?.map((topic) => topic.transcript)
         .join("\n\n")
         .replace(/ +/g, " ")

@@ -26,8 +26,8 @@ import {
 import { featureEnabled } from "../../lib/features";

 type ShareZulipProps = {
-  transcriptResponse: GetTranscript;
-  topicsResponse: GetTranscriptTopic[];
+  transcript: GetTranscript;
+  topics: GetTranscriptTopic[];
   disabled: boolean;
 };

@@ -88,14 +88,14 @@ export default function ShareZulip(props: ShareZulipProps & BoxProps) {
   }, [stream, streams]);

   const handleSendToZulip = async () => {
-    if (!props.transcriptResponse) return;
+    if (!props.transcript) return;

     if (stream && topic) {
       try {
         await postToZulipMutation.mutateAsync({
           params: {
             path: {
-              transcript_id: props.transcriptResponse.id,
+              transcript_id: props.transcript.id,
             },
             query: {
               stream,

@@ -2,14 +2,22 @@ import { useState } from "react";
 import type { components } from "../../reflector-api";

 type UpdateTranscript = components["schemas"]["UpdateTranscript"];
+type GetTranscript = components["schemas"]["GetTranscript"];
+type GetTranscriptTopic = components["schemas"]["GetTranscriptTopic"];
 import { useTranscriptUpdate } from "../../lib/apiHooks";
 import { Heading, IconButton, Input, Flex, Spacer } from "@chakra-ui/react";
 import { LuPen } from "react-icons/lu";
+import ShareAndPrivacy from "./shareAndPrivacy";

 type TranscriptTitle = {
   title: string;
   transcriptId: string;
-  onUpdate?: (newTitle: string) => void;
+  onUpdate: (newTitle: string) => void;
+
+  // share props
+  transcript: GetTranscript | null;
+  topics: GetTranscriptTopic[] | null;
+  finalSummaryElement: HTMLDivElement | null;
 };

 const TranscriptTitle = (props: TranscriptTitle) => {
@@ -29,9 +37,7 @@ const TranscriptTitle = (props: TranscriptTitle) => {
         },
         body: requestBody,
       });
-      if (props.onUpdate) {
       props.onUpdate(newTitle);
-      }
       console.log("Updated transcript title:", newTitle);
     } catch (err) {
       console.error("Failed to update transcript:", err);
@@ -62,11 +68,11 @@ const TranscriptTitle = (props: TranscriptTitle) => {
     }
     setIsEditing(false);
   };
-  const handleChange = (e) => {
+  const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
     setDisplayedTitle(e.target.value);
   };

-  const handleKeyDown = (e) => {
+  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
     if (e.key === "Enter") {
       updateTitle(displayedTitle, props.transcriptId);
       setIsEditing(false);
@@ -111,6 +117,13 @@ const TranscriptTitle = (props: TranscriptTitle) => {
           >
             <LuPen />
           </IconButton>
+          {props.transcript && props.topics && (
+            <ShareAndPrivacy
+              finalSummaryElement={props.finalSummaryElement}
+              transcript={props.transcript}
+              topics={props.topics}
+            />
+          )}
         </Flex>
       )}
     </>

@@ -62,7 +62,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {

   useEffect(() => {
     document.onkeyup = (e) => {
-      if (e.key === "a" && process.env.NEXT_PUBLIC_ENV === "development") {
+      if (e.key === "a" && process.env.NODE_ENV === "development") {
         const segments: GetTranscriptSegmentTopic[] = [
           {
             speaker: 1,
@@ -201,7 +201,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {

         setFinalSummary({ summary: "This is the final summary" });
       }
-      if (e.key === "z" && process.env.NEXT_PUBLIC_ENV === "development") {
+      if (e.key === "z" && process.env.NODE_ENV === "development") {
         setTranscriptTextLive(
           "This text is in English, and it is a pretty long sentence to test the limits",
         );

@@ -261,7 +261,11 @@ export default function Room(details: RoomDetails) {
   const params = use(details.params);
   const wherebyLoaded = useWhereby();
   const wherebyRef = useRef<HTMLElement>(null);
-  const roomName = parseNonEmptyString(params.roomName);
+  const roomName = parseNonEmptyString(
+    params.roomName,
+    true,
+    "panic! params.roomName is required",
+  );
   const router = useRouter();
   const auth = useAuth();
   const status = auth.status;
@@ -308,7 +312,14 @@ export default function Room(details: RoomDetails) {

   const handleMeetingSelect = (selectedMeeting: Meeting) => {
     router.push(
-      roomMeetingUrl(roomName, parseNonEmptyString(selectedMeeting.id)),
+      roomMeetingUrl(
+        roomName,
+        parseNonEmptyString(
+          selectedMeeting.id,
+          true,
+          "panic! selectedMeeting.id is required",
+        ),
+      ),
     );
   };

www/app/api/health/route.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { NextResponse } from "next/server";

export async function GET() {
  const health = {
    status: "healthy",
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    environment: process.env.NODE_ENV,
    checks: {
      redis: await checkRedis(),
    },
  };

  const allHealthy = Object.values(health.checks).every((check) => check);

  return NextResponse.json(health, {
    status: allHealthy ? 200 : 503,
  });
}

async function checkRedis(): Promise<boolean> {
  try {
    if (!process.env.KV_URL) {
      return false;
    }

    const { tokenCacheRedis } = await import("../../lib/redisClient");
    const testKey = `health:check:${Date.now()}`;
    await tokenCacheRedis.setex(testKey, 10, "OK");
    const value = await tokenCacheRedis.get(testKey);
    await tokenCacheRedis.del(testKey);

    return value === "OK";
  } catch (error) {
    console.error("Redis health check failed:", error);
    return false;
  }
}

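For context: a deployment probe only needs the status code, since the route returns 200 when every check passes and 503 otherwise. A minimal sketch of a caller, assuming the default app-router mapping of `www/app/api/health/route.ts` to `GET /api/health` (the `waitForHealthy` helper and its retry interval are illustrative, not part of the repo):

```ts
// Hypothetical readiness poller for deploy scripts; names and timings are
// illustrative only.
async function waitForHealthy(baseUrl: string, attempts = 10): Promise<void> {
  for (let i = 0; i < attempts; i++) {
    try {
      const res = await fetch(`${baseUrl}/api/health`);
      if (res.status === 200) return; // all checks passed
      const body = await res.json();
      console.warn("unhealthy:", body.checks); // e.g. { redis: false } -> 503
    } catch (err) {
      console.warn("health endpoint unreachable", err);
    }
    await new Promise((resolve) => setTimeout(resolve, 3000));
  }
  throw new Error("service did not become healthy");
}
```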
@@ -6,7 +6,10 @@ import ErrorMessage from "./(errors)/errorMessage";
 import { RecordingConsentProvider } from "./recordingConsentContext";
 import { ErrorBoundary } from "@sentry/nextjs";
 import { Providers } from "./providers";
-import { assertExistsAndNonEmptyString } from "./lib/utils";
+import { getNextEnvVar } from "./lib/nextBuild";
+import { getClientEnv } from "./lib/clientEnv";

+export const dynamic = "force-dynamic";
+
 const poppins = Poppins({
   subsets: ["latin"],
@@ -21,13 +24,11 @@ export const viewport: Viewport = {
   maximumScale: 1,
 };

-const NEXT_PUBLIC_SITE_URL = assertExistsAndNonEmptyString(
-  process.env.NEXT_PUBLIC_SITE_URL,
-  "NEXT_PUBLIC_SITE_URL required",
-);
+const SITE_URL = getNextEnvVar("SITE_URL");
+const env = getClientEnv();

 export const metadata: Metadata = {
-  metadataBase: new URL(NEXT_PUBLIC_SITE_URL),
+  metadataBase: new URL(SITE_URL),
   title: {
     template: "%s – Reflector",
     default: "Reflector - AI-Powered Meeting Transcriptions by Monadical",
@@ -74,15 +75,16 @@ export default async function RootLayout({
 }) {
   return (
     <html lang="en" className={poppins.className} suppressHydrationWarning>
-      <body className={"h-[100svh] w-[100svw] overflow-x-hidden relative"}>
-        <RecordingConsentProvider>
+      <body
+        className={"h-[100svh] w-[100svw] overflow-x-hidden relative"}
+        data-env={JSON.stringify(env)}
+      >
         <ErrorBoundary fallback={<p>"something went really wrong"</p>}>
           <ErrorProvider>
             <ErrorMessage />
             <Providers>{children}</Providers>
           </ErrorProvider>
         </ErrorBoundary>
-        </RecordingConsentProvider>
       </body>
     </html>
   );

www/app/lib/UserEventsProvider.tsx (new file, 180 lines)
@@ -0,0 +1,180 @@
"use client";

import React, { useEffect, useRef } from "react";
import { useQueryClient } from "@tanstack/react-query";
import { WEBSOCKET_URL } from "./apiClient";
import { useAuth } from "./AuthProvider";
import { z } from "zod";
import { invalidateTranscriptLists, TRANSCRIPT_SEARCH_URL } from "./apiHooks";

const UserEvent = z.object({
  event: z.string(),
});

type UserEvent = z.TypeOf<typeof UserEvent>;

class UserEventsStore {
  private socket: WebSocket | null = null;
  private listeners: Set<(event: MessageEvent) => void> = new Set();
  private closeTimeoutId: number | null = null;
  private isConnecting = false;

  ensureConnection(url: string, subprotocols?: string[]) {
    if (typeof window === "undefined") return;
    if (this.closeTimeoutId !== null) {
      clearTimeout(this.closeTimeoutId);
      this.closeTimeoutId = null;
    }
    if (this.isConnecting) return;
    if (
      this.socket &&
      (this.socket.readyState === WebSocket.OPEN ||
        this.socket.readyState === WebSocket.CONNECTING)
    ) {
      return;
    }
    this.isConnecting = true;
    const ws = new WebSocket(url, subprotocols || []);
    this.socket = ws;
    ws.onmessage = (event: MessageEvent) => {
      this.listeners.forEach((listener) => {
        try {
          listener(event);
        } catch (err) {
          console.error("UserEvents listener error", err);
        }
      });
    };
    ws.onopen = () => {
      if (this.socket === ws) this.isConnecting = false;
    };
    ws.onclose = () => {
      if (this.socket === ws) {
        this.socket = null;
        this.isConnecting = false;
      }
    };
    ws.onerror = () => {
      if (this.socket === ws) this.isConnecting = false;
    };
  }

  subscribe(listener: (event: MessageEvent) => void): () => void {
    this.listeners.add(listener);
    if (this.closeTimeoutId !== null) {
      clearTimeout(this.closeTimeoutId);
      this.closeTimeoutId = null;
    }
    return () => {
      this.listeners.delete(listener);
      if (this.listeners.size === 0) {
        this.closeTimeoutId = window.setTimeout(() => {
          if (this.socket) {
            try {
              this.socket.close();
            } catch (err) {
              console.warn("Error closing user events socket", err);
            }
          }
          this.socket = null;
          this.closeTimeoutId = null;
        }, 1000);
      }
    };
  }
}

const sharedStore = new UserEventsStore();

export function UserEventsProvider({
  children,
}: {
  children: React.ReactNode;
}) {
  const auth = useAuth();
  const queryClient = useQueryClient();
  const tokenRef = useRef<string | null>(null);
  const detachRef = useRef<(() => void) | null>(null);

  useEffect(() => {
    // Only tear down when the user is truly unauthenticated
    if (auth.status === "unauthenticated") {
      if (detachRef.current) {
        try {
          detachRef.current();
        } catch (err) {
          console.warn("Error detaching UserEvents listener", err);
        }
        detachRef.current = null;
      }
      tokenRef.current = null;
      return;
    }

    // During loading/refreshing, keep the existing connection intact
    if (auth.status !== "authenticated") {
      return;
    }

    // Authenticated: pin the initial token for the lifetime of this WS connection
    if (!tokenRef.current && auth.accessToken) {
      tokenRef.current = auth.accessToken;
    }
    const pinnedToken = tokenRef.current;
    const url = `${WEBSOCKET_URL}/v1/events`;

    // Ensure a single shared connection
    sharedStore.ensureConnection(
      url,
      pinnedToken ? ["bearer", pinnedToken] : undefined,
    );

    // Subscribe once; avoid re-subscribing during transient status changes
    if (!detachRef.current) {
      const onMessage = (event: MessageEvent) => {
        try {
          const msg = UserEvent.parse(JSON.parse(event.data));
          const eventName = msg.event;

          const invalidateList = () => invalidateTranscriptLists(queryClient);

          switch (eventName) {
            case "TRANSCRIPT_CREATED":
            case "TRANSCRIPT_DELETED":
            case "TRANSCRIPT_STATUS":
            case "TRANSCRIPT_FINAL_TITLE":
            case "TRANSCRIPT_DURATION":
              invalidateList().then(() => {});
              break;

            default:
              // Ignore other content events for list updates
              break;
          }
        } catch (err) {
          console.warn("Invalid user event message", event.data);
        }
      };

      const unsubscribe = sharedStore.subscribe(onMessage);
      detachRef.current = unsubscribe;
    }
  }, [auth.status, queryClient]);

  // On unmount, detach the listener and clear the pinned token
  useEffect(() => {
    return () => {
      if (detachRef.current) {
        try {
          detachRef.current();
        } catch (err) {
          console.warn("Error detaching UserEvents listener on unmount", err);
        }
        detachRef.current = null;
      }
      tokenRef.current = null;
    };
  }, []);

  return <>{children}</>;
}

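The provider funnels every server event through one shared WebSocket and currently reacts only to transcript-list events. A hedged sketch of how another event family could be handled with the same pattern; the "ROOM_UPDATED" event name and the /v1/rooms query key are hypothetical, not part of this diff:

```ts
import { QueryClient } from "@tanstack/react-query";

// Hypothetical: invalidate a rooms list when a room-related event arrives.
export const handleUserEvent = (eventName: string, queryClient: QueryClient) => {
  switch (eventName) {
    case "ROOM_UPDATED": // illustrative event name only
      return queryClient.invalidateQueries({ queryKey: ["get", "/v1/rooms"] });
    default:
      return Promise.resolve(); // other events are ignored here
  }
};
```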
@@ -3,21 +3,19 @@
 import createClient from "openapi-fetch";
 import type { paths } from "../reflector-api";
 import createFetchClient from "openapi-react-query";
-import { assertExistsAndNonEmptyString, parseNonEmptyString } from "./utils";
+import { parseNonEmptyString } from "./utils";
 import { isBuildPhase } from "./next";
 import { getSession } from "next-auth/react";
 import { assertExtendedToken } from "./types";
+import { getClientEnv } from "./clientEnv";

 export const API_URL = !isBuildPhase
-  ? assertExistsAndNonEmptyString(
-      process.env.NEXT_PUBLIC_API_URL,
-      "NEXT_PUBLIC_API_URL required",
-    )
+  ? getClientEnv().API_URL
   : "http://localhost";

 // TODO decide strict validation or not
-export const WEBSOCKET_URL =
-  process.env.NEXT_PUBLIC_WEBSOCKET_URL || "ws://127.0.0.1:1250";
+export const WEBSOCKET_URL = !isBuildPhase
+  ? getClientEnv().WEBSOCKET_URL || "ws://127.0.0.1:1250"
+  : "ws://localhost";

 export const client = createClient<paths>({
   baseUrl: API_URL,
@@ -44,7 +42,7 @@ client.use({
     if (token !== null) {
       request.headers.set(
         "Authorization",
-        `Bearer ${parseNonEmptyString(token)}`,
+        `Bearer ${parseNonEmptyString(token, true, "panic! token is required")}`,
       );
     }
     // XXX Only set Content-Type if not already set (FormData will set its own boundary)

@@ -2,7 +2,7 @@

 import { $api } from "./apiClient";
 import { useError } from "../(errors)/errorContext";
-import { useQueryClient } from "@tanstack/react-query";
+import { QueryClient, useQueryClient } from "@tanstack/react-query";
 import type { components } from "../reflector-api";
 import { useAuth } from "./AuthProvider";

@@ -40,6 +40,13 @@ export function useRoomsList(page: number = 1) {

 type SourceKind = components["schemas"]["SourceKind"];

+export const TRANSCRIPT_SEARCH_URL = "/v1/transcripts/search" as const;
+
+export const invalidateTranscriptLists = (queryClient: QueryClient) =>
+  queryClient.invalidateQueries({
+    queryKey: ["get", TRANSCRIPT_SEARCH_URL],
+  });
+
 export function useTranscriptsSearch(
   q: string = "",
   options: {
@@ -51,7 +58,7 @@
 ) {
   return $api.useQuery(
     "get",
-    "/v1/transcripts/search",
+    TRANSCRIPT_SEARCH_URL,
     {
       params: {
         query: {
@@ -76,7 +83,7 @@ export function useTranscriptDelete() {
   return $api.useMutation("delete", "/v1/transcripts/{transcript_id}", {
     onSuccess: () => {
       return queryClient.invalidateQueries({
-        queryKey: ["get", "/v1/transcripts/search"],
+        queryKey: ["get", TRANSCRIPT_SEARCH_URL],
       });
     },
     onError: (error) => {
@@ -613,7 +620,7 @@ export function useTranscriptCreate() {
   return $api.useMutation("post", "/v1/transcripts", {
     onSuccess: () => {
       return queryClient.invalidateQueries({
-        queryKey: ["get", "/v1/transcripts/search"],
+        queryKey: ["get", TRANSCRIPT_SEARCH_URL],
      });
     },
     onError: (error) => {

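The shared TRANSCRIPT_SEARCH_URL constant keeps the query key identical between the hooks and the WebSocket-driven invalidation in UserEventsProvider. A small sketch of reusing the same helper from any other code path that changes what the search endpoint would return (the wrapper name is illustrative):

```ts
import { QueryClient } from "@tanstack/react-query";
import { invalidateTranscriptLists, TRANSCRIPT_SEARCH_URL } from "./apiHooks";

// Sketch: reuse the helper instead of re-typing the query key by hand.
// The key it targets is ["get", TRANSCRIPT_SEARCH_URL],
// i.e. ["get", "/v1/transcripts/search"].
export const onTranscriptMutated = (queryClient: QueryClient) =>
  invalidateTranscriptLists(queryClient);
```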
@@ -18,26 +18,25 @@ import {
   deleteTokenCache,
 } from "./redisTokenCache";
 import { tokenCacheRedis, redlock } from "./redisClient";
 import { isBuildPhase } from "./next";
 import { sequenceThrows } from "./errorUtils";
 import { featureEnabled } from "./features";
+import { getNextEnvVar } from "./nextBuild";

 const TOKEN_CACHE_TTL = REFRESH_ACCESS_TOKEN_BEFORE;
-const getAuthentikClientId = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_CLIENT_ID,
-    "AUTHENTIK_CLIENT_ID required",
-  );
-const getAuthentikClientSecret = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_CLIENT_SECRET,
-    "AUTHENTIK_CLIENT_SECRET required",
-  );
+const getAuthentikClientId = () => getNextEnvVar("AUTHENTIK_CLIENT_ID");
+const getAuthentikClientSecret = () => getNextEnvVar("AUTHENTIK_CLIENT_SECRET");
 const getAuthentikRefreshTokenUrl = () =>
-  assertExistsAndNonEmptyString(
-    process.env.AUTHENTIK_REFRESH_TOKEN_URL,
-    "AUTHENTIK_REFRESH_TOKEN_URL required",
-  );
+  getNextEnvVar("AUTHENTIK_REFRESH_TOKEN_URL");
+
+const getAuthentikIssuer = () => {
+  const stringUrl = getNextEnvVar("AUTHENTIK_ISSUER");
+  try {
+    new URL(stringUrl);
+  } catch (e) {
+    throw new Error("AUTHENTIK_ISSUER is not a valid URL: " + stringUrl);
+  }
+  return stringUrl;
+};

 export const authOptions = (): AuthOptions =>
   featureEnabled("requireLogin")
@@ -45,16 +44,17 @@ export const authOptions = (): AuthOptions =>
         providers: [
           AuthentikProvider({
             ...(() => {
-              const [clientId, clientSecret] = sequenceThrows(
+              const [clientId, clientSecret, issuer] = sequenceThrows(
                 getAuthentikClientId,
                 getAuthentikClientSecret,
+                getAuthentikIssuer,
               );
               return {
                 clientId,
                 clientSecret,
+                issuer,
               };
             })(),
-            issuer: process.env.AUTHENTIK_ISSUER,
             authorization: {
               params: {
                 scope: "openid email profile offline_access",

www/app/lib/clientEnv.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
import {
  assertExists,
  assertExistsAndNonEmptyString,
  NonEmptyString,
  parseNonEmptyString,
} from "./utils";
import { isBuildPhase } from "./next";
import { getNextEnvVar } from "./nextBuild";

export const FEATURE_REQUIRE_LOGIN_ENV_NAME = "FEATURE_REQUIRE_LOGIN" as const;
export const FEATURE_PRIVACY_ENV_NAME = "FEATURE_PRIVACY" as const;
export const FEATURE_BROWSE_ENV_NAME = "FEATURE_BROWSE" as const;
export const FEATURE_SEND_TO_ZULIP_ENV_NAME = "FEATURE_SEND_TO_ZULIP" as const;
export const FEATURE_ROOMS_ENV_NAME = "FEATURE_ROOMS" as const;

const FEATURE_ENV_NAMES = [
  FEATURE_REQUIRE_LOGIN_ENV_NAME,
  FEATURE_PRIVACY_ENV_NAME,
  FEATURE_BROWSE_ENV_NAME,
  FEATURE_SEND_TO_ZULIP_ENV_NAME,
  FEATURE_ROOMS_ENV_NAME,
] as const;

export type FeatureEnvName = (typeof FEATURE_ENV_NAMES)[number];

export type EnvFeaturePartial = {
  [key in FeatureEnvName]: boolean | null;
};

// CONTRACT: isomorphic with JSON.stringify
export type ClientEnvCommon = EnvFeaturePartial & {
  API_URL: NonEmptyString;
  WEBSOCKET_URL: NonEmptyString | null;
};

let clientEnv: ClientEnvCommon | null = null;
export const getClientEnvClient = (): ClientEnvCommon => {
  if (typeof window === "undefined") {
    throw new Error(
      "getClientEnv() called during SSR - this should only be called in browser environment",
    );
  }
  if (clientEnv) return clientEnv;
  clientEnv = assertExists(
    JSON.parse(
      assertExistsAndNonEmptyString(
        document.body.dataset.env,
        "document.body.dataset.env is missing",
      ),
    ),
    "document.body.dataset.env is parsed to nullish",
  );
  return clientEnv!;
};

const parseBooleanString = (str: string | undefined): boolean | null => {
  if (str === undefined) return null;
  return str === "true";
};

export const getClientEnvServer = (): ClientEnvCommon => {
  if (typeof window !== "undefined") {
    throw new Error(
      "getClientEnv() not called during SSR - this should only be called in server environment",
    );
  }
  if (clientEnv) return clientEnv;

  const features = FEATURE_ENV_NAMES.reduce((acc, x) => {
    acc[x] = parseBooleanString(process.env[x]);
    return acc;
  }, {} as EnvFeaturePartial);

  if (isBuildPhase) {
    return {
      API_URL: getNextEnvVar("API_URL"),
      WEBSOCKET_URL: getNextEnvVar("WEBSOCKET_URL"),
      ...features,
    };
  }

  clientEnv = {
    API_URL: getNextEnvVar("API_URL"),
    WEBSOCKET_URL: getNextEnvVar("WEBSOCKET_URL"),
    ...features,
  };
  return clientEnv;
};

export const getClientEnv =
  typeof window === "undefined" ? getClientEnvServer : getClientEnvClient;

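Because the server serializes the same ClientEnvCommon object into document.body.dataset.env (see the data-env attribute added in layout.tsx), browser code can read runtime configuration without NEXT_PUBLIC_ build-time inlining. A minimal sketch of a consumer, assuming nothing beyond the exports above (the component name is illustrative):

```tsx
"use client";

import { getClientEnv } from "./clientEnv";

// Illustrative consumer: in the browser, getClientEnv resolves to
// getClientEnvClient and reads the blob that layout.tsx put on <body data-env>.
export function ApiUrlBadge() {
  const env = getClientEnv();
  return <span>{env.API_URL}</span>;
}
```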
@@ -1,3 +1,13 @@
+import {
+  FEATURE_BROWSE_ENV_NAME,
+  FEATURE_PRIVACY_ENV_NAME,
+  FEATURE_REQUIRE_LOGIN_ENV_NAME,
+  FEATURE_ROOMS_ENV_NAME,
+  FEATURE_SEND_TO_ZULIP_ENV_NAME,
+  FeatureEnvName,
+  getClientEnv,
+} from "./clientEnv";
+
 export const FEATURES = [
   "requireLogin",
   "privacy",
@@ -18,38 +28,30 @@ export const DEFAULT_FEATURES: Features = {
   rooms: true,
 } as const;

-function parseBooleanEnv(
-  value: string | undefined,
-  defaultValue: boolean = false,
-): boolean {
-  if (!value) return defaultValue;
-  return value.toLowerCase() === "true";
-}
+export const ENV_TO_FEATURE: {
+  [k in FeatureEnvName]: FeatureName;
+} = {
+  FEATURE_REQUIRE_LOGIN: "requireLogin",
+  FEATURE_PRIVACY: "privacy",
+  FEATURE_BROWSE: "browse",
+  FEATURE_SEND_TO_ZULIP: "sendToZulip",
+  FEATURE_ROOMS: "rooms",
+} as const;

-// WARNING: keep process.env.* as-is, next.js won't see them if you generate dynamically
-const features: Features = {
-  requireLogin: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_REQUIRE_LOGIN,
-    DEFAULT_FEATURES.requireLogin,
-  ),
-  privacy: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_PRIVACY,
-    DEFAULT_FEATURES.privacy,
-  ),
-  browse: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_BROWSE,
-    DEFAULT_FEATURES.browse,
-  ),
-  sendToZulip: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_SEND_TO_ZULIP,
-    DEFAULT_FEATURES.sendToZulip,
-  ),
-  rooms: parseBooleanEnv(
-    process.env.NEXT_PUBLIC_FEATURE_ROOMS,
-    DEFAULT_FEATURES.rooms,
-  ),
+export const FEATURE_TO_ENV: {
+  [k in FeatureName]: FeatureEnvName;
+} = {
+  requireLogin: "FEATURE_REQUIRE_LOGIN",
+  privacy: "FEATURE_PRIVACY",
+  browse: "FEATURE_BROWSE",
+  sendToZulip: "FEATURE_SEND_TO_ZULIP",
+  rooms: "FEATURE_ROOMS",
+};

+const features = getClientEnv();

 export const featureEnabled = (featureName: FeatureName): boolean => {
-  return features[featureName];
+  const isSet = features[FEATURE_TO_ENV[featureName]];
+  if (isSet === null) return DEFAULT_FEATURES[featureName];
+  return isSet;
 };

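featureEnabled now resolves flags through the runtime client env instead of NEXT_PUBLIC_ variables, falling back to DEFAULT_FEATURES when a flag is unset (null). A small usage sketch; the values are only there to illustrate the fallback:

```ts
import { featureEnabled } from "./features";

// With FEATURE_ROOMS unset, featureEnabled("rooms") falls back to
// DEFAULT_FEATURES.rooms; with FEATURE_ROOMS="false" it returns false.
if (featureEnabled("rooms")) {
  // render room-related navigation
}
```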
www/app/lib/nextBuild.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { isBuildPhase } from "./next";
import { assertExistsAndNonEmptyString, NonEmptyString } from "./utils";

const _getNextEnvVar = (name: string, e?: string): NonEmptyString =>
  isBuildPhase
    ? (() => {
        throw new Error(
          "panic! getNextEnvVar called during build phase; we don't support build envs",
        );
      })()
    : assertExistsAndNonEmptyString(
        process.env[name],
        `${name} is required; ${e}`,
      );

export const getNextEnvVar = (name: string, e?: string): NonEmptyString =>
  _getNextEnvVar(name, e);

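getNextEnvVar deliberately throws if it is evaluated during the build phase, which pushes all environment reads to runtime (hence `export const dynamic = "force-dynamic"` in layout.tsx and the compile-only `build-production` script). A hedged usage sketch; the variable name here is an example, not one the repo defines:

```ts
import { getNextEnvVar } from "./nextBuild";

// Illustrative only: throws at boot with "MY_SERVICE_URL is required; ..."
// if the variable is unset, and panics if evaluated during `next build`.
const MY_SERVICE_URL = getNextEnvVar(
  "MY_SERVICE_URL",
  "set it in the runtime environment, not at build time",
);
```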
@@ -1,7 +1,3 @@
-export function isDevelopment() {
-  return process.env.NEXT_PUBLIC_ENV === "development";
-}
-
 // Function to calculate WCAG contrast ratio
 export const getContrastRatio = (
   foreground: [number, number, number],
@@ -145,8 +141,15 @@ export const parseMaybeNonEmptyString = (
   s = trim ? s.trim() : s;
   return s.length > 0 ? (s as NonEmptyString) : null;
 };
-export const parseNonEmptyString = (s: string, trim = true): NonEmptyString =>
-  assertExists(parseMaybeNonEmptyString(s, trim), "Expected non-empty string");
+export const parseNonEmptyString = (
+  s: string,
+  trim = true,
+  e?: string,
+): NonEmptyString =>
+  assertExists(
+    parseMaybeNonEmptyString(s, trim),
+    "Expected non-empty string" + (e ? `: ${e}` : ""),
+  );

 export const assertExists = <T>(
   value: T | null | undefined,
@@ -173,4 +176,8 @@ export const assertExistsAndNonEmptyString = (
   value: string | null | undefined,
   err?: string,
 ): NonEmptyString =>
-  parseNonEmptyString(assertExists(value, err || "Expected non-empty string"));
+  parseNonEmptyString(
+    assertExists(value, err || "Expected non-empty string"),
+    true,
+    err,
+  );

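The new third argument only augments the thrown message, so existing two-argument call sites keep working. A quick sketch of the expected behaviour under that reading of the code:

```ts
import { parseNonEmptyString } from "./utils";

parseNonEmptyString("  hello  ");           // -> "hello" (trimmed)
parseNonEmptyString("", true, "room name"); // should throw
// "Expected non-empty string: room name"
```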
@@ -10,6 +10,8 @@ import { QueryClientProvider } from "@tanstack/react-query";
 import { queryClient } from "./lib/queryClient";
 import { AuthProvider } from "./lib/AuthProvider";
 import { SessionProvider as SessionProviderNextAuth } from "next-auth/react";
+import { RecordingConsentProvider } from "./recordingConsentContext";
+import { UserEventsProvider } from "./lib/UserEventsProvider";

 const WherebyProvider = dynamic(
   () =>
@@ -26,10 +28,14 @@ export function Providers({ children }: { children: React.ReactNode }) {
     <SessionProviderNextAuth>
       <AuthProvider>
         <ChakraProvider value={system}>
+          <RecordingConsentProvider>
+            <UserEventsProvider>
               <WherebyProvider>
                 {children}
                 <Toaster />
               </WherebyProvider>
+            </UserEventsProvider>
+          </RecordingConsentProvider>
         </ChakraProvider>
       </AuthProvider>
     </SessionProviderNextAuth>

www/app/reflector-api.d.ts (vendored, 212 lines changed)
@@ -4,6 +4,23 @@
|
||||
*/
|
||||
|
||||
export interface paths {
|
||||
"/health": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
/** Health */
|
||||
get: operations["health"];
|
||||
put?: never;
|
||||
post?: never;
|
||||
delete?: never;
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/metrics": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -587,6 +604,41 @@ export interface paths {
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/user/api-keys": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
/** List Api Keys */
|
||||
get: operations["v1_list_api_keys"];
|
||||
put?: never;
|
||||
/** Create Api Key */
|
||||
post: operations["v1_create_api_key"];
|
||||
delete?: never;
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/user/api-keys/{key_id}": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
get?: never;
|
||||
put?: never;
|
||||
post?: never;
|
||||
/** Delete Api Key */
|
||||
delete: operations["v1_delete_api_key"];
|
||||
options?: never;
|
||||
head?: never;
|
||||
patch?: never;
|
||||
trace?: never;
|
||||
};
|
||||
"/v1/zulip/streams": {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -648,6 +700,26 @@ export interface paths {
|
||||
export type webhooks = Record<string, never>;
|
||||
export interface components {
|
||||
schemas: {
|
||||
/** ApiKeyResponse */
|
||||
ApiKeyResponse: {
|
||||
/**
|
||||
* Id
|
||||
* @description A non-empty string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* User Id
|
||||
* @description A non-empty string
|
||||
*/
|
||||
user_id: string;
|
||||
/** Name */
|
||||
name: string | null;
|
||||
/**
|
||||
* Created At
|
||||
* Format: date-time
|
||||
*/
|
||||
created_at: string;
|
||||
};
|
||||
/** AudioWaveform */
|
||||
AudioWaveform: {
|
||||
/** Data */
|
||||
@@ -707,6 +779,36 @@ export interface components {
|
||||
*/
|
||||
updated_at: string;
|
||||
};
|
||||
/** CreateApiKeyRequest */
|
||||
CreateApiKeyRequest: {
|
||||
/** Name */
|
||||
name?: string | null;
|
||||
};
|
||||
/** CreateApiKeyResponse */
|
||||
CreateApiKeyResponse: {
|
||||
/**
|
||||
* Id
|
||||
* @description A non-empty string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* User Id
|
||||
* @description A non-empty string
|
||||
*/
|
||||
user_id: string;
|
||||
/** Name */
|
||||
name: string | null;
|
||||
/**
|
||||
* Created At
|
||||
* Format: date-time
|
||||
*/
|
||||
created_at: string;
|
||||
/**
|
||||
* Key
|
||||
* @description A non-empty string
|
||||
*/
|
||||
key: string;
|
||||
};
|
||||
/** CreateParticipant */
|
||||
CreateParticipant: {
|
||||
/** Speaker */
|
||||
@@ -1431,8 +1533,6 @@ export interface components {
|
||||
sub: string;
|
||||
/** Email */
|
||||
email: string | null;
|
||||
/** Email Verified */
|
||||
email_verified: boolean | null;
|
||||
};
|
||||
/** ValidationError */
|
||||
ValidationError: {
|
||||
@@ -1509,6 +1609,26 @@ export interface components {
|
||||
}
|
||||
export type $defs = Record<string, never>;
|
||||
export interface operations {
|
||||
health: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": unknown;
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
metrics: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
@@ -2158,6 +2278,10 @@ export interface operations {
|
||||
offset?: number;
|
||||
room_id?: string | null;
|
||||
source_kind?: components["schemas"]["SourceKind"] | null;
|
||||
/** @description Filter transcripts created on or after this datetime (ISO 8601 with timezone) */
|
||||
from?: string | null;
|
||||
/** @description Filter transcripts created on or before this datetime (ISO 8601 with timezone) */
|
||||
to?: string | null;
|
||||
};
|
||||
header?: never;
|
||||
path?: never;
|
||||
@@ -2899,6 +3023,90 @@ export interface operations {
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_list_api_keys: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["ApiKeyResponse"][];
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_create_api_key: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path?: never;
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody: {
|
||||
content: {
|
||||
"application/json": components["schemas"]["CreateApiKeyRequest"];
|
||||
};
|
||||
};
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["CreateApiKeyResponse"];
|
||||
};
|
||||
};
|
||||
/** @description Validation Error */
|
||||
422: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["HTTPValidationError"];
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_delete_api_key: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
header?: never;
|
||||
path: {
|
||||
key_id: string;
|
||||
};
|
||||
cookie?: never;
|
||||
};
|
||||
requestBody?: never;
|
||||
responses: {
|
||||
/** @description Successful Response */
|
||||
200: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": unknown;
|
||||
};
|
||||
};
|
||||
/** @description Validation Error */
|
||||
422: {
|
||||
headers: {
|
||||
[name: string]: unknown;
|
||||
};
|
||||
content: {
|
||||
"application/json": components["schemas"]["HTTPValidationError"];
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
v1_zulip_get_streams: {
|
||||
parameters: {
|
||||
query?: never;
|
||||
|
||||
@@ -5,6 +5,7 @@
   "scripts": {
     "dev": "next dev",
     "build": "next build",
+    "build-production": "next build --experimental-build-mode compile",
     "start": "next start",
     "lint": "next lint",
     "format": "prettier --write .",