diff --git a/CLAUDE.md b/CLAUDE.md
index 83ae5f17..691142c3 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -192,3 +192,8 @@ Modal.com integration for scalable ML processing:
## Pipeline/worker related info
If you need to do any worker/pipeline related work, search for "Pipeline" classes and their "create" or "build" methods to find the main processor sequence. Look for task orchestration patterns (like "chord", "group", or "chain") to identify the post-processing flow with parallel execution chains. This will give you abstract vision on how processing pipeling is organized.
+
+## Code Style
+
+- Always put imports at the top of the file. Let ruff/pre-commit handle sorting and formatting of imports.
+- Exception: In Hatchet pipeline task functions, DB controller imports (e.g., `transcripts_controller`, `meetings_controller`) stay as deferred/inline imports inside `fresh_db_connection()` blocks — this is intentional to avoid sharing DB connections across forked processes. Non-DB imports (utilities, services) should still go at the top of the file.
diff --git a/docker-compose.selfhosted.yml b/docker-compose.selfhosted.yml
index 5e1ee4c0..9c554fe7 100644
--- a/docker-compose.selfhosted.yml
+++ b/docker-compose.selfhosted.yml
@@ -308,6 +308,24 @@ services:
- web
- server
+ # ===========================================================
+ # Mailpit — local SMTP sink for testing email transcript notifications
+ # Start with: --profile mailpit
+ # Web UI at http://localhost:8025
+ # ===========================================================
+
+ mailpit:
+ image: axllent/mailpit:latest
+ profiles: [mailpit]
+ restart: unless-stopped
+ ports:
+ - "127.0.0.1:8025:8025" # Web UI
+ healthcheck:
+ test: ["CMD", "wget", "-q", "--spider", "http://localhost:8025/api/v1/messages"]
+ interval: 10s
+ timeout: 3s
+ retries: 5
+
# ===========================================================
# Hatchet workflow engine + workers
# Required for all processing pipelines (file, live, Daily.co multitrack).
diff --git a/scripts/run-integration-tests.sh b/scripts/run-integration-tests.sh
index f95d3ac4..cc39876a 100755
--- a/scripts/run-integration-tests.sh
+++ b/scripts/run-integration-tests.sh
@@ -13,14 +13,25 @@
# Optional:
# LLM_MODEL — Model name (default: qwen2.5:14b)
#
+# Flags:
+# --build — Rebuild backend Docker images (server, workers, test-runner)
+#
# Usage:
# export LLM_URL="https://api.openai.com/v1"
# export LLM_API_KEY="sk-..."
# export HF_TOKEN="hf_..."
# ./scripts/run-integration-tests.sh
+# ./scripts/run-integration-tests.sh --build # rebuild backend images
#
set -euo pipefail
+BUILD_FLAG=""
+for arg in "$@"; do
+ case "$arg" in
+ --build) BUILD_FLAG="--build" ;;
+ esac
+done
+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
COMPOSE_DIR="$REPO_ROOT/server/tests"
@@ -66,7 +77,7 @@ trap cleanup EXIT
# ── Step 1: Build and start infrastructure ──────────────────────────────────
info "Building and starting infrastructure services..."
-$COMPOSE up -d --build postgres redis garage hatchet mock-daily
+$COMPOSE up -d --build postgres redis garage hatchet mock-daily mailpit
# ── Step 2: Set up Garage (S3 bucket + keys) ───────────────────────────────
wait_for "Garage" "$COMPOSE exec -T garage /garage stats" 60
@@ -116,7 +127,7 @@ ok "Hatchet token generated"
# ── Step 4: Start backend services ──────────────────────────────────────────
info "Starting backend services..."
-$COMPOSE up -d server worker hatchet-worker-cpu hatchet-worker-llm test-runner
+$COMPOSE up -d $BUILD_FLAG server worker hatchet-worker-cpu hatchet-worker-llm test-runner
# ── Step 5: Wait for server + run migrations ────────────────────────────────
wait_for "Server" "$COMPOSE exec -T test-runner curl -sf http://server:1250/health" 60
diff --git a/server/migrations/versions/a2b3c4d5e6f7_add_email_recipients_to_meeting.py b/server/migrations/versions/a2b3c4d5e6f7_add_email_recipients_to_meeting.py
new file mode 100644
index 00000000..2c524fad
--- /dev/null
+++ b/server/migrations/versions/a2b3c4d5e6f7_add_email_recipients_to_meeting.py
@@ -0,0 +1,29 @@
+"""add email_recipients to meeting
+
+Revision ID: a2b3c4d5e6f7
+Revises: 501c73a6b0d5
+Create Date: 2026-03-20 00:00:00.000000
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects.postgresql import JSONB
+
+revision: str = "a2b3c4d5e6f7"
+down_revision: Union[str, None] = "501c73a6b0d5"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ op.add_column(
+ "meeting",
+ sa.Column("email_recipients", JSONB, nullable=True),
+ )
+
+
+def downgrade() -> None:
+ op.drop_column("meeting", "email_recipients")
diff --git a/server/pyproject.toml b/server/pyproject.toml
index c424326c..f01e1f8f 100644
--- a/server/pyproject.toml
+++ b/server/pyproject.toml
@@ -40,6 +40,8 @@ dependencies = [
"icalendar>=6.0.0",
"hatchet-sdk==1.22.16",
"pydantic>=2.12.5",
+ "aiosmtplib>=3.0.0",
+ "email-validator>=2.0.0",
]
[dependency-groups]
diff --git a/server/reflector/db/meetings.py b/server/reflector/db/meetings.py
index 02f407b2..5096eeda 100644
--- a/server/reflector/db/meetings.py
+++ b/server/reflector/db/meetings.py
@@ -1,3 +1,4 @@
+from contextlib import asynccontextmanager
from datetime import datetime, timedelta
from typing import Any, Literal
@@ -66,6 +67,8 @@ meetings = sa.Table(
# Daily.co composed video (Brady Bunch grid layout) - Daily.co only, not Whereby
sa.Column("daily_composed_video_s3_key", sa.String, nullable=True),
sa.Column("daily_composed_video_duration", sa.Integer, nullable=True),
+ # Email recipients for transcript notification
+ sa.Column("email_recipients", JSONB, nullable=True),
sa.Index("idx_meeting_room_id", "room_id"),
sa.Index("idx_meeting_calendar_event", "calendar_event_id"),
)
@@ -116,6 +119,8 @@ class Meeting(BaseModel):
# Daily.co composed video (Brady Bunch grid) - Daily.co only
daily_composed_video_s3_key: str | None = None
daily_composed_video_duration: int | None = None
+ # Email recipients for transcript notification
+ email_recipients: list[str] | None = None
class MeetingController:
@@ -388,6 +393,24 @@ class MeetingController:
# If was_null=False, the WHERE clause prevented the update
return was_null
+ @asynccontextmanager
+ async def transaction(self):
+ """A context manager for database transaction."""
+ async with get_database().transaction(isolation="serializable"):
+ yield
+
+ async def add_email_recipient(self, meeting_id: str, email: str) -> list[str]:
+ """Add an email to the meeting's email_recipients list (no duplicates)."""
+ async with self.transaction():
+ meeting = await self.get_by_id(meeting_id)
+ if not meeting:
+ raise ValueError(f"Meeting {meeting_id} not found")
+ current = meeting.email_recipients or []
+ if email not in current:
+ current.append(email)
+ await self.update_meeting(meeting_id, email_recipients=current)
+ return current
+
async def increment_num_clients(self, meeting_id: str) -> None:
"""Atomically increment participant count."""
query = (
diff --git a/server/reflector/email.py b/server/reflector/email.py
new file mode 100644
index 00000000..d10471e2
--- /dev/null
+++ b/server/reflector/email.py
@@ -0,0 +1,84 @@
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+import aiosmtplib
+import structlog
+
+from reflector.db.transcripts import Transcript
+from reflector.settings import settings
+
+logger = structlog.get_logger(__name__)
+
+
+def is_email_configured() -> bool:
+ return bool(settings.SMTP_HOST and settings.SMTP_FROM_EMAIL)
+
+
+def get_transcript_url(transcript: Transcript) -> str:
+ return f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
+
+
+def _build_plain_text(transcript: Transcript, url: str) -> str:
+ title = transcript.title or "Unnamed recording"
+ lines = [
+ f"Your transcript is ready: {title}",
+ "",
+ f"View it here: {url}",
+ ]
+ if transcript.short_summary:
+ lines.extend(["", "Summary:", transcript.short_summary])
+ return "\n".join(lines)
+
+
+def _build_html(transcript: Transcript, url: str) -> str:
+    title = transcript.title or "Unnamed recording"
+    summary_html = ""
+    if transcript.short_summary:
+        summary_html = f"<p>{transcript.short_summary}</p>"
+
+    return f"""\
+<html>
+  <body>
+    <h2>Your transcript is ready</h2>
+    <p><strong>{title}</strong></p>
+    {summary_html}
+    <p><a href="{url}">View Transcript</a></p>
+    <p>This email was sent because you requested to receive the transcript from a meeting.</p>
+  </body>
+</html>
+"""
+
+
+async def send_transcript_email(to_emails: list[str], transcript: Transcript) -> int:
+ """Send transcript notification to all emails. Returns count sent."""
+ if not is_email_configured() or not to_emails:
+ return 0
+
+ url = get_transcript_url(transcript)
+ title = transcript.title or "Unnamed recording"
+ sent = 0
+
+ for email_addr in to_emails:
+ msg = MIMEMultipart("alternative")
+ msg["Subject"] = f"Transcript Ready: {title}"
+ msg["From"] = settings.SMTP_FROM_EMAIL
+ msg["To"] = email_addr
+
+ msg.attach(MIMEText(_build_plain_text(transcript, url), "plain"))
+ msg.attach(MIMEText(_build_html(transcript, url), "html"))
+
+ try:
+ await aiosmtplib.send(
+ msg,
+ hostname=settings.SMTP_HOST,
+ port=settings.SMTP_PORT,
+ username=settings.SMTP_USERNAME,
+ password=settings.SMTP_PASSWORD,
+ start_tls=settings.SMTP_USE_TLS,
+ )
+ sent += 1
+ except Exception:
+ logger.exception(
+ "Failed to send transcript email",
+ to=email_addr,
+ transcript_id=transcript.id,
+ )
+
+ return sent
diff --git a/server/reflector/hatchet/constants.py b/server/reflector/hatchet/constants.py
index 7eb3ea43..bfb57bf2 100644
--- a/server/reflector/hatchet/constants.py
+++ b/server/reflector/hatchet/constants.py
@@ -21,6 +21,7 @@ class TaskName(StrEnum):
CLEANUP_CONSENT = "cleanup_consent"
POST_ZULIP = "post_zulip"
SEND_WEBHOOK = "send_webhook"
+ SEND_EMAIL = "send_email"
PAD_TRACK = "pad_track"
TRANSCRIBE_TRACK = "transcribe_track"
DETECT_CHUNK_TOPIC = "detect_chunk_topic"
@@ -59,7 +60,7 @@ TIMEOUT_AUDIO = 720 # Audio processing: padding, mixdown (Hatchet execution_tim
TIMEOUT_AUDIO_HTTP = (
660 # httpx timeout for pad_track — below 720 so Hatchet doesn't race
)
-TIMEOUT_HEAVY = 600 # Transcription, fan-out LLM tasks (Hatchet execution_timeout)
+TIMEOUT_HEAVY = 1200 # Transcription, fan-out LLM tasks (Hatchet execution_timeout)
TIMEOUT_HEAVY_HTTP = (
- 540 # httpx timeout for transcribe_track — below 600 so Hatchet doesn't race
+ 1150 # httpx timeout for transcribe_track — below 1200 so Hatchet doesn't race
)
diff --git a/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py b/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py
index 34a66516..5fe36b96 100644
--- a/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py
+++ b/server/reflector/hatchet/workflows/daily_multitrack_pipeline.py
@@ -33,6 +33,7 @@ from hatchet_sdk.labels import DesiredWorkerLabel
from pydantic import BaseModel
from reflector.dailyco_api.client import DailyApiClient
+from reflector.email import is_email_configured, send_transcript_email
from reflector.hatchet.broadcast import (
append_event_and_broadcast,
set_status_and_broadcast,
@@ -51,6 +52,7 @@ from reflector.hatchet.error_classification import is_non_retryable
from reflector.hatchet.workflows.models import (
ActionItemsResult,
ConsentResult,
+ EmailResult,
FinalizeResult,
MixdownResult,
PaddedTrackInfo,
@@ -1465,6 +1467,52 @@ async def send_webhook(input: PipelineInput, ctx: Context) -> WebhookResult:
return WebhookResult(webhook_sent=False)
+@daily_multitrack_pipeline.task(
+ parents=[cleanup_consent],
+ execution_timeout=timedelta(seconds=TIMEOUT_SHORT),
+ retries=5,
+ backoff_factor=2.0,
+ backoff_max_seconds=15,
+)
+@with_error_handling(TaskName.SEND_EMAIL, set_error_status=False)
+async def send_email(input: PipelineInput, ctx: Context) -> EmailResult:
+ """Send transcript email to collected recipients."""
+ ctx.log(f"send_email: transcript_id={input.transcript_id}")
+
+ if not is_email_configured():
+ ctx.log("send_email skipped (SMTP not configured)")
+ return EmailResult(skipped=True)
+
+ async with fresh_db_connection():
+ from reflector.db.meetings import meetings_controller # noqa: PLC0415
+ from reflector.db.recordings import recordings_controller # noqa: PLC0415
+ from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
+
+ transcript = await transcripts_controller.get_by_id(input.transcript_id)
+ if not transcript:
+ ctx.log("send_email skipped (transcript not found)")
+ return EmailResult(skipped=True)
+
+ meeting = None
+ if transcript.meeting_id:
+ meeting = await meetings_controller.get_by_id(transcript.meeting_id)
+ if not meeting and transcript.recording_id:
+ recording = await recordings_controller.get_by_id(transcript.recording_id)
+ if recording and recording.meeting_id:
+ meeting = await meetings_controller.get_by_id(recording.meeting_id)
+
+ if not meeting or not meeting.email_recipients:
+ ctx.log("send_email skipped (no email recipients)")
+ return EmailResult(skipped=True)
+
+ await transcripts_controller.update(transcript, {"share_mode": "public"})
+
+ count = await send_transcript_email(meeting.email_recipients, transcript)
+ ctx.log(f"send_email complete: sent {count} emails")
+
+ return EmailResult(emails_sent=count)
+
+
async def on_workflow_failure(input: PipelineInput, ctx: Context) -> None:
"""Run when the workflow is truly dead (all retries exhausted).
diff --git a/server/reflector/hatchet/workflows/file_pipeline.py b/server/reflector/hatchet/workflows/file_pipeline.py
index 7a1f2d76..4b0b528e 100644
--- a/server/reflector/hatchet/workflows/file_pipeline.py
+++ b/server/reflector/hatchet/workflows/file_pipeline.py
@@ -18,6 +18,7 @@ from pathlib import Path
from hatchet_sdk import Context
from pydantic import BaseModel
+from reflector.email import is_email_configured, send_transcript_email
from reflector.hatchet.broadcast import (
append_event_and_broadcast,
set_status_and_broadcast,
@@ -37,6 +38,7 @@ from reflector.hatchet.workflows.daily_multitrack_pipeline import (
)
from reflector.hatchet.workflows.models import (
ConsentResult,
+ EmailResult,
TitleResult,
TopicsResult,
WaveformResult,
@@ -859,6 +861,54 @@ async def send_webhook(input: FilePipelineInput, ctx: Context) -> WebhookResult:
return WebhookResult(webhook_sent=False)
+@file_pipeline.task(
+ parents=[cleanup_consent],
+ execution_timeout=timedelta(seconds=TIMEOUT_SHORT),
+ retries=5,
+ backoff_factor=2.0,
+ backoff_max_seconds=15,
+)
+@with_error_handling(TaskName.SEND_EMAIL, set_error_status=False)
+async def send_email(input: FilePipelineInput, ctx: Context) -> EmailResult:
+ """Send transcript email to collected recipients."""
+ ctx.log(f"send_email: transcript_id={input.transcript_id}")
+
+ if not is_email_configured():
+ ctx.log("send_email skipped (SMTP not configured)")
+ return EmailResult(skipped=True)
+
+ async with fresh_db_connection():
+ from reflector.db.meetings import meetings_controller # noqa: PLC0415
+ from reflector.db.recordings import recordings_controller # noqa: PLC0415
+ from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
+
+ transcript = await transcripts_controller.get_by_id(input.transcript_id)
+ if not transcript:
+ ctx.log("send_email skipped (transcript not found)")
+ return EmailResult(skipped=True)
+
+ # Try transcript.meeting_id first, then fall back to recording.meeting_id
+ meeting = None
+ if transcript.meeting_id:
+ meeting = await meetings_controller.get_by_id(transcript.meeting_id)
+ if not meeting and transcript.recording_id:
+ recording = await recordings_controller.get_by_id(transcript.recording_id)
+ if recording and recording.meeting_id:
+ meeting = await meetings_controller.get_by_id(recording.meeting_id)
+
+ if not meeting or not meeting.email_recipients:
+ ctx.log("send_email skipped (no email recipients)")
+ return EmailResult(skipped=True)
+
+ # Set transcript to public so the link works for anyone
+ await transcripts_controller.update(transcript, {"share_mode": "public"})
+
+ count = await send_transcript_email(meeting.email_recipients, transcript)
+ ctx.log(f"send_email complete: sent {count} emails")
+
+ return EmailResult(emails_sent=count)
+
+
# --- On failure handler ---
diff --git a/server/reflector/hatchet/workflows/live_post_pipeline.py b/server/reflector/hatchet/workflows/live_post_pipeline.py
index 561bee5e..2de144df 100644
--- a/server/reflector/hatchet/workflows/live_post_pipeline.py
+++ b/server/reflector/hatchet/workflows/live_post_pipeline.py
@@ -17,6 +17,7 @@ from datetime import timedelta
from hatchet_sdk import Context
from pydantic import BaseModel
+from reflector.email import is_email_configured, send_transcript_email
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.constants import (
TIMEOUT_HEAVY,
@@ -32,6 +33,7 @@ from reflector.hatchet.workflows.daily_multitrack_pipeline import (
)
from reflector.hatchet.workflows.models import (
ConsentResult,
+ EmailResult,
TitleResult,
WaveformResult,
WebhookResult,
@@ -361,6 +363,52 @@ async def send_webhook(input: LivePostPipelineInput, ctx: Context) -> WebhookRes
return WebhookResult(webhook_sent=False)
+@live_post_pipeline.task(
+ parents=[final_summaries],
+ execution_timeout=timedelta(seconds=TIMEOUT_SHORT),
+ retries=5,
+ backoff_factor=2.0,
+ backoff_max_seconds=15,
+)
+@with_error_handling(TaskName.SEND_EMAIL, set_error_status=False)
+async def send_email(input: LivePostPipelineInput, ctx: Context) -> EmailResult:
+ """Send transcript email to collected recipients."""
+ ctx.log(f"send_email: transcript_id={input.transcript_id}")
+
+ if not is_email_configured():
+ ctx.log("send_email skipped (SMTP not configured)")
+ return EmailResult(skipped=True)
+
+ async with fresh_db_connection():
+ from reflector.db.meetings import meetings_controller # noqa: PLC0415
+ from reflector.db.recordings import recordings_controller # noqa: PLC0415
+ from reflector.db.transcripts import transcripts_controller # noqa: PLC0415
+
+ transcript = await transcripts_controller.get_by_id(input.transcript_id)
+ if not transcript:
+ ctx.log("send_email skipped (transcript not found)")
+ return EmailResult(skipped=True)
+
+ meeting = None
+ if transcript.meeting_id:
+ meeting = await meetings_controller.get_by_id(transcript.meeting_id)
+ if not meeting and transcript.recording_id:
+ recording = await recordings_controller.get_by_id(transcript.recording_id)
+ if recording and recording.meeting_id:
+ meeting = await meetings_controller.get_by_id(recording.meeting_id)
+
+ if not meeting or not meeting.email_recipients:
+ ctx.log("send_email skipped (no email recipients)")
+ return EmailResult(skipped=True)
+
+ await transcripts_controller.update(transcript, {"share_mode": "public"})
+
+ count = await send_transcript_email(meeting.email_recipients, transcript)
+ ctx.log(f"send_email complete: sent {count} emails")
+
+ return EmailResult(emails_sent=count)
+
+
# --- On failure handler ---
diff --git a/server/reflector/hatchet/workflows/models.py b/server/reflector/hatchet/workflows/models.py
index 1bad1f4a..e8a0b6ad 100644
--- a/server/reflector/hatchet/workflows/models.py
+++ b/server/reflector/hatchet/workflows/models.py
@@ -170,3 +170,10 @@ class WebhookResult(BaseModel):
webhook_sent: bool
skipped: bool = False
response_code: int | None = None
+
+
+class EmailResult(BaseModel):
+ """Result from send_email task."""
+
+ emails_sent: int = 0
+ skipped: bool = False
diff --git a/server/reflector/settings.py b/server/reflector/settings.py
index bef9a479..2e402b04 100644
--- a/server/reflector/settings.py
+++ b/server/reflector/settings.py
@@ -195,6 +195,14 @@ class Settings(BaseSettings):
ZULIP_API_KEY: str | None = None
ZULIP_BOT_EMAIL: str | None = None
+ # Email / SMTP integration (for transcript email notifications)
+ SMTP_HOST: str | None = None
+ SMTP_PORT: int = 587
+ SMTP_USERNAME: str | None = None
+ SMTP_PASSWORD: str | None = None
+ SMTP_FROM_EMAIL: str | None = None
+ SMTP_USE_TLS: bool = True
+
# Hatchet workflow orchestration (always enabled for multitrack processing)
HATCHET_CLIENT_TOKEN: str | None = None
HATCHET_CLIENT_TLS_STRATEGY: str = "none" # none, tls, mtls
diff --git a/server/reflector/views/meetings.py b/server/reflector/views/meetings.py
index 1ac76e73..7f256c41 100644
--- a/server/reflector/views/meetings.py
+++ b/server/reflector/views/meetings.py
@@ -4,7 +4,7 @@ from typing import Annotated, Any, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
-from pydantic import BaseModel
+from pydantic import BaseModel, EmailStr
import reflector.auth as auth
from reflector.dailyco_api import RecordingType
@@ -151,3 +151,25 @@ async def start_recording(
raise HTTPException(
status_code=500, detail=f"Failed to start recording: {str(e)}"
)
+
+
+class AddEmailRecipientRequest(BaseModel):
+ email: EmailStr
+
+
+@router.post("/meetings/{meeting_id}/email-recipient")
+async def add_email_recipient(
+ meeting_id: str,
+ request: AddEmailRecipientRequest,
+ user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+ """Add an email address to receive the transcript link when processing completes."""
+ meeting = await meetings_controller.get_by_id(meeting_id)
+ if not meeting:
+ raise HTTPException(status_code=404, detail="Meeting not found")
+
+ recipients = await meetings_controller.add_email_recipient(
+ meeting_id, request.email
+ )
+
+ return {"status": "success", "email_recipients": recipients}
diff --git a/server/tests/docker-compose.integration.yml b/server/tests/docker-compose.integration.yml
index 752d4e47..4632aca9 100644
--- a/server/tests/docker-compose.integration.yml
+++ b/server/tests/docker-compose.integration.yml
@@ -40,6 +40,11 @@ x-backend-env: &backend-env
# Garage S3 credentials — hardcoded test keys, containers are ephemeral
TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID: GK0123456789abcdef01234567 # gitleaks:allow
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef" # gitleaks:allow
+ # Email / SMTP — Mailpit captures emails without sending
+ SMTP_HOST: mailpit
+ SMTP_PORT: "1025"
+ SMTP_FROM_EMAIL: test@reflector.local
+ SMTP_USE_TLS: "false"
# NOTE: DAILYCO_STORAGE_AWS_* intentionally NOT set — forces fallback to
# get_transcripts_storage() which has ENDPOINT_URL pointing at Garage.
# Setting them would bypass the endpoint and generate presigned URLs for AWS.
@@ -101,6 +106,14 @@ services:
retries: 10
start_period: 5s
+ mailpit:
+ image: axllent/mailpit:latest
+ healthcheck:
+ test: ["CMD", "wget", "-q", "--spider", "http://localhost:8025/api/v1/messages"]
+ interval: 5s
+ timeout: 3s
+ retries: 5
+
mock-daily:
build:
context: .
@@ -131,6 +144,8 @@ services:
condition: service_healthy
mock-daily:
condition: service_healthy
+ mailpit:
+ condition: service_healthy
volumes:
- server_data:/app/data
@@ -194,6 +209,7 @@ services:
DATABASE_URL: postgresql+asyncpg://reflector:reflector@postgres:5432/reflector
SERVER_URL: http://server:1250
GARAGE_ENDPOINT: http://garage:3900
+ MAILPIT_URL: http://mailpit:8025
depends_on:
server:
condition: service_started
diff --git a/server/tests/integration/conftest.py b/server/tests/integration/conftest.py
index a1d6994e..225e6369 100644
--- a/server/tests/integration/conftest.py
+++ b/server/tests/integration/conftest.py
@@ -17,6 +17,7 @@ from sqlalchemy.ext.asyncio import create_async_engine
SERVER_URL = os.environ.get("SERVER_URL", "http://server:1250")
GARAGE_ENDPOINT = os.environ.get("GARAGE_ENDPOINT", "http://garage:3900")
+MAILPIT_URL = os.environ.get("MAILPIT_URL", "http://mailpit:8025")
DATABASE_URL = os.environ.get(
"DATABASE_URL_ASYNC",
os.environ.get(
@@ -114,3 +115,44 @@ async def _poll_transcript_status(
def poll_transcript_status():
"""Returns the poll_transcript_status async helper function."""
return _poll_transcript_status
+
+
+@pytest_asyncio.fixture
+async def mailpit_client():
+ """HTTP client for Mailpit API — query captured emails."""
+ async with httpx.AsyncClient(
+ base_url=MAILPIT_URL,
+ timeout=httpx.Timeout(10.0),
+ ) as client:
+ # Clear inbox before each test
+ await client.delete("/api/v1/messages")
+ yield client
+
+
+async def _poll_mailpit_messages(
+ mailpit: httpx.AsyncClient,
+ to_email: str,
+ max_wait: int = 30,
+ interval: int = 2,
+) -> list[dict]:
+ """
+ Poll Mailpit API until at least one message is delivered to the given address.
+ Returns the list of matching messages.
+ """
+ elapsed = 0
+ while elapsed < max_wait:
+ resp = await mailpit.get("/api/v1/messages", params={"query": f"to:{to_email}"})
+ resp.raise_for_status()
+ data = resp.json()
+ messages = data.get("messages", [])
+ if messages:
+ return messages
+ await asyncio.sleep(interval)
+ elapsed += interval
+ raise TimeoutError(f"No email delivered to {to_email} within {max_wait}s")
+
+
+@pytest_asyncio.fixture
+def poll_mailpit_messages():
+ """Returns the poll_mailpit_messages async helper function."""
+ return _poll_mailpit_messages
diff --git a/server/tests/integration/test_multitrack_pipeline.py b/server/tests/integration/test_multitrack_pipeline.py
index e8b8e546..34e799b3 100644
--- a/server/tests/integration/test_multitrack_pipeline.py
+++ b/server/tests/integration/test_multitrack_pipeline.py
@@ -4,10 +4,12 @@ Integration test: Multitrack → DailyMultitrackPipeline → full processing.
Exercises: S3 upload → DB recording setup → process endpoint →
Hatchet DiarizationPipeline → mock Daily API → whisper per-track transcription →
diarization → mixdown → LLM summarization/topics → status "ended".
+Also tests email transcript notification via Mailpit SMTP sink.
"""
import json
-from datetime import datetime, timezone
+import uuid
+from datetime import datetime, timedelta, timezone
import pytest
from sqlalchemy import text
@@ -22,6 +24,9 @@ TRACK_KEYS = [
]
+TEST_EMAIL = "integration-test@reflector.local"
+
+
@pytest.mark.asyncio
async def test_multitrack_pipeline_end_to_end(
api_client,
@@ -30,6 +35,8 @@ async def test_multitrack_pipeline_end_to_end(
test_records_dir,
bucket_name,
poll_transcript_status,
+ mailpit_client,
+ poll_mailpit_messages,
):
"""Set up multitrack recording in S3/DB and verify the full pipeline completes."""
# 1. Upload test audio as two separate tracks to Garage S3
@@ -52,16 +59,41 @@ async def test_multitrack_pipeline_end_to_end(
transcript = resp.json()
transcript_id = transcript["id"]
- # 3. Insert Recording row and link to transcript via direct DB access
+ # 3. Insert Meeting, Recording, and link to transcript via direct DB access
recording_id = f"rec-integration-{transcript_id[:8]}"
+ meeting_id = str(uuid.uuid4())
now = datetime.now(timezone.utc)
async with db_engine.begin() as conn:
- # Insert recording with track_keys
+ # Insert meeting with email_recipients for email notification test
await conn.execute(
text("""
- INSERT INTO recording (id, bucket_name, object_key, recorded_at, status, track_keys)
- VALUES (:id, :bucket_name, :object_key, :recorded_at, :status, CAST(:track_keys AS json))
+ INSERT INTO meeting (
+ id, room_name, room_url, host_room_url,
+ start_date, end_date, platform, email_recipients
+ )
+ VALUES (
+ :id, :room_name, :room_url, :host_room_url,
+ :start_date, :end_date, :platform, CAST(:email_recipients AS json)
+ )
+ """),
+ {
+ "id": meeting_id,
+ "room_name": "integration-test-room",
+ "room_url": "https://test.daily.co/integration-test-room",
+ "host_room_url": "https://test.daily.co/integration-test-room",
+ "start_date": now,
+ "end_date": now + timedelta(hours=1),
+ "platform": "daily",
+ "email_recipients": json.dumps([TEST_EMAIL]),
+ },
+ )
+
+ # Insert recording with track_keys, linked to meeting
+ await conn.execute(
+ text("""
+ INSERT INTO recording (id, bucket_name, object_key, recorded_at, status, track_keys, meeting_id)
+ VALUES (:id, :bucket_name, :object_key, :recorded_at, :status, CAST(:track_keys AS json), :meeting_id)
"""),
{
"id": recording_id,
@@ -70,6 +102,7 @@ async def test_multitrack_pipeline_end_to_end(
"recorded_at": now,
"status": "completed",
"track_keys": json.dumps(TRACK_KEYS),
+ "meeting_id": meeting_id,
},
)
@@ -127,3 +160,22 @@ async def test_multitrack_pipeline_end_to_end(
assert (
len(participants) >= 2
), f"Expected at least 2 speakers for multitrack, got {len(participants)}"
+
+ # 7. Verify email transcript notification
+ # The send_email pipeline task should have:
+ # a) Set the transcript to public share_mode
+ # b) Sent an email to TEST_EMAIL via Mailpit
+ transcript_resp = await api_client.get(f"/transcripts/{transcript_id}")
+ transcript_resp.raise_for_status()
+ transcript_data = transcript_resp.json()
+ assert (
+ transcript_data.get("share_mode") == "public"
+ ), "Transcript should be set to public when email recipients exist"
+
+ # Poll Mailpit for the delivered email (send_email task runs async after finalize)
+ messages = await poll_mailpit_messages(mailpit_client, TEST_EMAIL, max_wait=30)
+ assert len(messages) >= 1, "Should have received at least 1 email"
+ email_msg = messages[0]
+ assert (
+ "Transcript Ready" in email_msg.get("Subject", "")
+ ), f"Email subject should contain 'Transcript Ready', got: {email_msg.get('Subject')}"
diff --git a/server/uv.lock b/server/uv.lock
index d405b457..b1f3b964 100644
--- a/server/uv.lock
+++ b/server/uv.lock
@@ -188,6 +188,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
]
+[[package]]
+name = "aiosmtplib"
+version = "5.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/ad/240a7ce4e50713b111dff8b781a898d8d4770e5d6ad4899103f84c86005c/aiosmtplib-5.1.0.tar.gz", hash = "sha256:2504a23b2b63c9de6bc4ea719559a38996dba68f73f6af4eb97be20ee4c5e6c4", size = 66176, upload-time = "2026-01-25T01:51:11.408Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/37/82/70f2c452acd7ed18c558c8ace9a8cf4fdcc70eae9a41749b5bdc53eb6f45/aiosmtplib-5.1.0-py3-none-any.whl", hash = "sha256:368029440645b486b69db7029208a7a78c6691b90d24a5332ddba35d9109d55b", size = 27778, upload-time = "2026-01-25T01:51:10.026Z" },
+]
+
[[package]]
name = "aiosqlite"
version = "0.21.0"
@@ -3343,10 +3352,12 @@ dependencies = [
{ name = "aiohttp" },
{ name = "aiohttp-cors" },
{ name = "aiortc" },
+ { name = "aiosmtplib" },
{ name = "alembic" },
{ name = "av" },
{ name = "celery" },
{ name = "databases", extra = ["aiosqlite", "asyncpg"] },
+ { name = "email-validator" },
{ name = "fastapi", extra = ["standard"] },
{ name = "fastapi-pagination" },
{ name = "hatchet-sdk" },
@@ -3422,10 +3433,12 @@ requires-dist = [
{ name = "aiohttp", specifier = ">=3.9.0" },
{ name = "aiohttp-cors", specifier = ">=0.7.0" },
{ name = "aiortc", specifier = ">=1.5.0" },
+ { name = "aiosmtplib", specifier = ">=3.0.0" },
{ name = "alembic", specifier = ">=1.11.3" },
{ name = "av", specifier = ">=15.0.0" },
{ name = "celery", specifier = ">=5.3.4" },
{ name = "databases", extras = ["aiosqlite", "asyncpg"], specifier = ">=0.7.0" },
+ { name = "email-validator", specifier = ">=2.0.0" },
{ name = "fastapi", extras = ["standard"], specifier = ">=0.100.1" },
{ name = "fastapi-pagination", specifier = ">=0.14.2" },
{ name = "hatchet-sdk", specifier = "==1.22.16" },
diff --git a/www/app/[roomName]/components/DailyRoom.tsx b/www/app/[roomName]/components/DailyRoom.tsx
index d1c00254..2266cd3c 100644
--- a/www/app/[roomName]/components/DailyRoom.tsx
+++ b/www/app/[roomName]/components/DailyRoom.tsx
@@ -22,6 +22,8 @@ import DailyIframe, {
import type { components } from "../../reflector-api";
import { useAuth } from "../../lib/AuthProvider";
import { useConsentDialog } from "../../lib/consent";
+import { useEmailTranscriptDialog } from "../../lib/emailTranscript";
+import { featureEnabled } from "../../lib/features";
import {
useRoomJoinMeeting,
useMeetingStartRecording,
@@ -37,6 +39,7 @@ import { useUuidV5 } from "react-uuid-hook";
const CONSENT_BUTTON_ID = "recording-consent";
const RECORDING_INDICATOR_ID = "recording-indicator";
+const EMAIL_TRANSCRIPT_BUTTON_ID = "email-transcript";
// Namespace UUID for UUIDv5 generation of raw-tracks instanceIds
// DO NOT CHANGE: Breaks instanceId determinism across deployments
@@ -209,6 +212,12 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
const showConsentModalRef = useRef(showConsentModal);
showConsentModalRef.current = showConsentModal;
+ const { showEmailModal } = useEmailTranscriptDialog({
+ meetingId: assertMeetingId(meeting.id),
+ });
+ const showEmailModalRef = useRef(showEmailModal);
+ showEmailModalRef.current = showEmailModal;
+
useEffect(() => {
if (authLastUserId === undefined || !meeting?.id || !roomName) return;
@@ -242,6 +251,9 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
if (ev.button_id === CONSENT_BUTTON_ID) {
showConsentModalRef.current();
}
+ if (ev.button_id === EMAIL_TRANSCRIPT_BUTTON_ID) {
+ showEmailModalRef.current();
+ }
},
[
/*keep static; iframe recreation depends on it*/
@@ -319,6 +331,10 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
() => new URL("/recording-icon.svg", window.location.origin),
[],
);
+ const emailIconUrl = useMemo(
+ () => new URL("/email-icon.svg", window.location.origin),
+ [],
+ );
const [frame, { setCustomTrayButton }] = useFrame(container, {
onLeftMeeting: handleLeave,
@@ -371,6 +387,20 @@ export default function DailyRoom({ meeting, room }: DailyRoomProps) {
);
}, [showConsentButton, recordingIconUrl, setCustomTrayButton]);
+ useEffect(() => {
+ const show = featureEnabled("emailTranscript");
+ setCustomTrayButton(
+ EMAIL_TRANSCRIPT_BUTTON_ID,
+ show
+ ? {
+ iconPath: emailIconUrl.href,
+ label: "Email Transcript",
+ tooltip: "Get transcript emailed to you",
+ }
+ : null,
+ );
+ }, [emailIconUrl, setCustomTrayButton]);
+
if (authLastUserId === undefined) {
return (
diff --git a/www/app/lib/apiHooks.ts b/www/app/lib/apiHooks.ts
index 96c2b053..00ed56f4 100644
--- a/www/app/lib/apiHooks.ts
+++ b/www/app/lib/apiHooks.ts
@@ -643,6 +643,16 @@ export function useMeetingAudioConsent() {
});
}
+export function useMeetingAddEmailRecipient() {
+ const { setError } = useError();
+
+ return $api.useMutation("post", "/v1/meetings/{meeting_id}/email-recipient", {
+ onError: (error) => {
+ setError(error as Error, "There was an error adding the email");
+ },
+ });
+}
+
export function useMeetingDeactivate() {
const { setError } = useError();
const queryClient = useQueryClient();
diff --git a/www/app/lib/clientEnv.ts b/www/app/lib/clientEnv.ts
index 81214a8f..619d8124 100644
--- a/www/app/lib/clientEnv.ts
+++ b/www/app/lib/clientEnv.ts
@@ -13,6 +13,8 @@ export const FEATURE_PRIVACY_ENV_NAME = "FEATURE_PRIVACY" as const;
export const FEATURE_BROWSE_ENV_NAME = "FEATURE_BROWSE" as const;
export const FEATURE_SEND_TO_ZULIP_ENV_NAME = "FEATURE_SEND_TO_ZULIP" as const;
export const FEATURE_ROOMS_ENV_NAME = "FEATURE_ROOMS" as const;
+export const FEATURE_EMAIL_TRANSCRIPT_ENV_NAME =
+ "FEATURE_EMAIL_TRANSCRIPT" as const;
const FEATURE_ENV_NAMES = [
FEATURE_REQUIRE_LOGIN_ENV_NAME,
@@ -20,6 +22,7 @@ const FEATURE_ENV_NAMES = [
FEATURE_BROWSE_ENV_NAME,
FEATURE_SEND_TO_ZULIP_ENV_NAME,
FEATURE_ROOMS_ENV_NAME,
+ FEATURE_EMAIL_TRANSCRIPT_ENV_NAME,
] as const;
export type FeatureEnvName = (typeof FEATURE_ENV_NAMES)[number];
diff --git a/www/app/lib/emailTranscript/EmailTranscriptDialog.tsx b/www/app/lib/emailTranscript/EmailTranscriptDialog.tsx
new file mode 100644
index 00000000..f11f282d
--- /dev/null
+++ b/www/app/lib/emailTranscript/EmailTranscriptDialog.tsx
@@ -0,0 +1,70 @@
+"use client";
+
+import { useState, useEffect } from "react";
+import { Box, Button, Input, Text, VStack, HStack } from "@chakra-ui/react";
+
+interface EmailTranscriptDialogProps {
+ onSubmit: (email: string) => void;
+ onDismiss: () => void;
+}
+
+export function EmailTranscriptDialog({
+ onSubmit,
+ onDismiss,
+}: EmailTranscriptDialogProps) {
+ const [email, setEmail] = useState("");
+  const [inputEl, setInputEl] = useState<HTMLInputElement | null>(null);
+
+ useEffect(() => {
+ inputEl?.focus();
+ }, [inputEl]);
+
+ const handleSubmit = () => {
+ const trimmed = email.trim();
+ if (trimmed) {
+ onSubmit(trimmed);
+ }
+ };
+
+ return (
+
+
+
+ Enter your email to receive the transcript when it's ready
+
+ setEmail(e.target.value)}
+ onKeyDown={(e) => {
+ if (e.key === "Enter") handleSubmit();
+ }}
+ size="sm"
+ bg="white"
+ />
+
+
+
+
+
+
+ );
+}
diff --git a/www/app/lib/emailTranscript/index.ts b/www/app/lib/emailTranscript/index.ts
new file mode 100644
index 00000000..33ca3f16
--- /dev/null
+++ b/www/app/lib/emailTranscript/index.ts
@@ -0,0 +1 @@
+export { useEmailTranscriptDialog } from "./useEmailTranscriptDialog";
diff --git a/www/app/lib/emailTranscript/useEmailTranscriptDialog.tsx b/www/app/lib/emailTranscript/useEmailTranscriptDialog.tsx
new file mode 100644
index 00000000..e5a62866
--- /dev/null
+++ b/www/app/lib/emailTranscript/useEmailTranscriptDialog.tsx
@@ -0,0 +1,128 @@
+"use client";
+
+import { useCallback, useState, useEffect, useRef } from "react";
+import { Box, Text } from "@chakra-ui/react";
+import { toaster } from "../../components/ui/toaster";
+import { useMeetingAddEmailRecipient } from "../apiHooks";
+import { EmailTranscriptDialog } from "./EmailTranscriptDialog";
+import type { MeetingId } from "../types";
+
+const TOAST_CHECK_INTERVAL_MS = 100;
+
+type UseEmailTranscriptDialogParams = {
+ meetingId: MeetingId;
+};
+
+export function useEmailTranscriptDialog({
+ meetingId,
+}: UseEmailTranscriptDialogParams) {
+ const [modalOpen, setModalOpen] = useState(false);
+ const addEmailMutation = useMeetingAddEmailRecipient();
+  const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
+ const keydownHandlerRef = useRef<((event: KeyboardEvent) => void) | null>(
+ null,
+ );
+
+ useEffect(() => {
+ return () => {
+ if (intervalRef.current) {
+ clearInterval(intervalRef.current);
+ intervalRef.current = null;
+ }
+ if (keydownHandlerRef.current) {
+ document.removeEventListener("keydown", keydownHandlerRef.current);
+ keydownHandlerRef.current = null;
+ }
+ };
+ }, []);
+
+ const handleSubmitEmail = useCallback(
+ async (email: string) => {
+ try {
+ await addEmailMutation.mutateAsync({
+ params: {
+ path: { meeting_id: meetingId },
+ },
+ body: {
+ email,
+ },
+ });
+
+ toaster.create({
+ duration: 4000,
+ render: () => (
+
+ Email registered
+
+ You will receive the transcript link when processing is
+ complete.
+
+
+ ),
+ });
+ } catch (error) {
+ console.error("Error adding email recipient:", error);
+ }
+ },
+ [addEmailMutation, meetingId],
+ );
+
+ const showEmailModal = useCallback(() => {
+ if (modalOpen) return;
+
+ setModalOpen(true);
+
+ const toastId = toaster.create({
+ placement: "top",
+ duration: null,
+ render: ({ dismiss }) => (
+ {
+ handleSubmitEmail(email);
+ dismiss();
+ }}
+ onDismiss={() => {
+ dismiss();
+ }}
+ />
+ ),
+ });
+
+ const handleKeyDown = (event: KeyboardEvent) => {
+ if (event.key === "Escape") {
+ toastId.then((id) => toaster.dismiss(id));
+ }
+ };
+
+ keydownHandlerRef.current = handleKeyDown;
+ document.addEventListener("keydown", handleKeyDown);
+
+ toastId.then((id) => {
+ intervalRef.current = setInterval(() => {
+ if (!toaster.isActive(id)) {
+ setModalOpen(false);
+
+ if (intervalRef.current) {
+ clearInterval(intervalRef.current);
+ intervalRef.current = null;
+ }
+
+ if (keydownHandlerRef.current) {
+ document.removeEventListener("keydown", keydownHandlerRef.current);
+ keydownHandlerRef.current = null;
+ }
+ }
+ }, TOAST_CHECK_INTERVAL_MS);
+ });
+ }, [handleSubmitEmail, modalOpen]);
+
+ return {
+ showEmailModal,
+ };
+}
diff --git a/www/app/lib/features.ts b/www/app/lib/features.ts
index eebfc816..b3b14bde 100644
--- a/www/app/lib/features.ts
+++ b/www/app/lib/features.ts
@@ -1,5 +1,6 @@
import {
FEATURE_BROWSE_ENV_NAME,
+ FEATURE_EMAIL_TRANSCRIPT_ENV_NAME,
FEATURE_PRIVACY_ENV_NAME,
FEATURE_REQUIRE_LOGIN_ENV_NAME,
FEATURE_ROOMS_ENV_NAME,
@@ -14,6 +15,7 @@ export const FEATURES = [
"browse",
"sendToZulip",
"rooms",
+ "emailTranscript",
] as const;
export type FeatureName = (typeof FEATURES)[number];
@@ -26,6 +28,7 @@ export const DEFAULT_FEATURES: Features = {
browse: true,
sendToZulip: true,
rooms: true,
+ emailTranscript: false,
} as const;
export const ENV_TO_FEATURE: {
@@ -36,6 +39,7 @@ export const ENV_TO_FEATURE: {
FEATURE_BROWSE: "browse",
FEATURE_SEND_TO_ZULIP: "sendToZulip",
FEATURE_ROOMS: "rooms",
+ FEATURE_EMAIL_TRANSCRIPT: "emailTranscript",
} as const;
export const FEATURE_TO_ENV: {
@@ -46,6 +50,7 @@ export const FEATURE_TO_ENV: {
browse: "FEATURE_BROWSE",
sendToZulip: "FEATURE_SEND_TO_ZULIP",
rooms: "FEATURE_ROOMS",
+ emailTranscript: "FEATURE_EMAIL_TRANSCRIPT",
};
const features = getClientEnv();
diff --git a/www/app/reflector-api.d.ts b/www/app/reflector-api.d.ts
index 53f754d5..7b1868bd 100644
--- a/www/app/reflector-api.d.ts
+++ b/www/app/reflector-api.d.ts
@@ -98,6 +98,26 @@ export interface paths {
patch?: never;
trace?: never;
};
+ "/v1/meetings/{meeting_id}/email-recipient": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ get?: never;
+ put?: never;
+ /**
+ * Add Email Recipient
+ * @description Add an email address to receive the transcript link when processing completes.
+ */
+ post: operations["v1_add_email_recipient"];
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
"/v1/rooms": {
parameters: {
query?: never;
@@ -838,6 +858,14 @@ export interface paths {
export type webhooks = Record;
export interface components {
schemas: {
+ /** AddEmailRecipientRequest */
+ AddEmailRecipientRequest: {
+ /**
+ * Email
+ * Format: email
+ */
+ email: string;
+ };
/** ApiKeyResponse */
ApiKeyResponse: {
/**
@@ -2602,6 +2630,41 @@ export interface operations {
};
};
};
+ v1_add_email_recipient: {
+ parameters: {
+ query?: never;
+ header?: never;
+ path: {
+ meeting_id: string;
+ };
+ cookie?: never;
+ };
+ requestBody: {
+ content: {
+ "application/json": components["schemas"]["AddEmailRecipientRequest"];
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": unknown;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
v1_rooms_list: {
parameters: {
query?: {
diff --git a/www/public/email-icon.svg b/www/public/email-icon.svg
new file mode 100644
index 00000000..90bee7bf
--- /dev/null
+++ b/www/public/email-icon.svg
@@ -0,0 +1,4 @@
+