Compare commits

..

4 Commits

Author SHA1 Message Date
Igor Loskutov
23b6f5f5a1 transcript gutter 2026-01-19 19:18:54 -05:00
Igor Loskutov
95e58b943f transcript gutter 2026-01-16 12:28:38 -05:00
Igor Loskutov
13a3ec6148 transcription UI 2026-01-13 18:36:28 -05:00
Igor Loskutov
807b954340 transcription UI 2026-01-13 17:29:38 -05:00
22 changed files with 622 additions and 507 deletions

View File

@@ -1,26 +1,5 @@
# Changelog # Changelog
## [0.29.0](https://github.com/Monadical-SAS/reflector/compare/v0.28.1...v0.29.0) (2026-01-21)
### Features
* set hatchet as default for multitracks ([#822](https://github.com/Monadical-SAS/reflector/issues/822)) ([c723752](https://github.com/Monadical-SAS/reflector/commit/c723752b7e15aa48a41ad22856f147a5517d3f46))
## [0.28.1](https://github.com/Monadical-SAS/reflector/compare/v0.28.0...v0.28.1) (2026-01-21)
### Bug Fixes
* ics non-sync bugfix ([#823](https://github.com/Monadical-SAS/reflector/issues/823)) ([23d2bc2](https://github.com/Monadical-SAS/reflector/commit/23d2bc283d4d02187b250d2055103e0374ee93d6))
## [0.28.0](https://github.com/Monadical-SAS/reflector/compare/v0.27.0...v0.28.0) (2026-01-20)
### Features
* worker affinity ([#819](https://github.com/Monadical-SAS/reflector/issues/819)) ([3b6540e](https://github.com/Monadical-SAS/reflector/commit/3b6540eae5b597449f98661bdf15483b77be3268))
## [0.27.0](https://github.com/Monadical-SAS/reflector/compare/v0.26.0...v0.27.0) (2025-12-26) ## [0.27.0](https://github.com/Monadical-SAS/reflector/compare/v0.26.0...v0.27.0) (2025-12-26)

View File

@@ -34,7 +34,7 @@ services:
environment: environment:
ENTRYPOINT: beat ENTRYPOINT: beat
hatchet-worker-cpu: hatchet-worker:
build: build:
context: server context: server
volumes: volumes:
@@ -43,20 +43,7 @@ services:
env_file: env_file:
- ./server/.env - ./server/.env
environment: environment:
ENTRYPOINT: hatchet-worker-cpu ENTRYPOINT: hatchet-worker
depends_on:
hatchet:
condition: service_healthy
hatchet-worker-llm:
build:
context: server
volumes:
- ./server/:/app/
- /app/.venv
env_file:
- ./server/.env
environment:
ENTRYPOINT: hatchet-worker-llm
depends_on: depends_on:
hatchet: hatchet:
condition: service_healthy condition: service_healthy

View File

@@ -1,44 +0,0 @@
"""replace_use_hatchet_with_use_celery
Revision ID: 80beb1ea3269
Revises: bd3a729bb379
Create Date: 2026-01-20 16:26:25.555869
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "80beb1ea3269"
down_revision: Union[str, None] = "bd3a729bb379"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add room.use_celery (server default false) and drop room.use_hatchet."""
    celery_flag = sa.Column(
        "use_celery",
        sa.Boolean(),
        server_default=sa.text("false"),
        nullable=False,
    )
    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.add_column(celery_flag)
        batch_op.drop_column("use_hatchet")
def downgrade() -> None:
    """Restore room.use_hatchet (server default false) and drop room.use_celery."""
    hatchet_flag = sa.Column(
        "use_hatchet",
        sa.Boolean(),
        server_default=sa.text("false"),
        nullable=False,
    )
    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.add_column(hatchet_flag)
        batch_op.drop_column("use_celery")

View File

@@ -58,7 +58,7 @@ rooms = sqlalchemy.Table(
nullable=False, nullable=False,
), ),
sqlalchemy.Column( sqlalchemy.Column(
"use_celery", "use_hatchet",
sqlalchemy.Boolean, sqlalchemy.Boolean,
nullable=False, nullable=False,
server_default=false(), server_default=false(),
@@ -97,7 +97,7 @@ class Room(BaseModel):
ics_last_sync: datetime | None = None ics_last_sync: datetime | None = None
ics_last_etag: str | None = None ics_last_etag: str | None = None
platform: Platform = Field(default_factory=lambda: settings.DEFAULT_VIDEO_PLATFORM) platform: Platform = Field(default_factory=lambda: settings.DEFAULT_VIDEO_PLATFORM)
use_celery: bool = False use_hatchet: bool = False
skip_consent: bool = False skip_consent: bool = False

View File

@@ -0,0 +1,77 @@
"""
Run Hatchet workers for the multitrack pipeline.
Runs as a separate process, just like Celery workers.
Usage:
uv run -m reflector.hatchet.run_workers
# Or via docker:
docker compose exec server uv run -m reflector.hatchet.run_workers
"""
import signal
import sys
from hatchet_sdk.rate_limit import RateLimitDuration
from reflector.hatchet.constants import LLM_RATE_LIMIT_KEY, LLM_RATE_LIMIT_PER_SECOND
from reflector.logger import logger
from reflector.settings import settings
def main() -> None:
    """Start Hatchet worker polling."""
    if not settings.HATCHET_ENABLED:
        logger.error("HATCHET_ENABLED is False, not starting workers")
        sys.exit(1)
    if not settings.HATCHET_CLIENT_TOKEN:
        logger.error("HATCHET_CLIENT_TOKEN is not set")
        sys.exit(1)

    logger.info("Starting Hatchet workers", debug=settings.HATCHET_DEBUG)

    # Deferred imports: workflow modules call HatchetClientManager.get_client()
    # at module import time because Hatchet SDK decorators (@workflow.task)
    # bind when the decorated function is defined, so lazy init is not possible.
    from reflector.hatchet.client import HatchetClientManager  # noqa: PLC0415
    from reflector.hatchet.workflows import (  # noqa: PLC0415
        daily_multitrack_pipeline,
        subject_workflow,
        topic_chunk_workflow,
        track_workflow,
    )

    client = HatchetClientManager.get_client()
    client.rate_limits.put(
        LLM_RATE_LIMIT_KEY, LLM_RATE_LIMIT_PER_SECOND, RateLimitDuration.SECOND
    )

    registered = [
        daily_multitrack_pipeline,
        subject_workflow,
        topic_chunk_workflow,
        track_workflow,
    ]
    worker = client.worker("reflector-pipeline-worker", workflows=registered)

    def _on_signal(signum: int, frame) -> None:
        # Worker teardown happens automatically on process exit.
        logger.info("Received shutdown signal, stopping workers...")
        sys.exit(0)

    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, _on_signal)

    logger.info("Starting Hatchet worker polling...")
    worker.start()


if __name__ == "__main__":
    main()

View File

@@ -1,43 +0,0 @@
"""
CPU-heavy worker pool for audio processing tasks.
Handles ONLY: mixdown_tracks
Configuration:
- slots=1: Only mixdown (already serialized globally with max_runs=1)
- Worker affinity: pool=cpu-heavy
"""
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
daily_multitrack_pipeline,
)
from reflector.logger import logger
def main():
    """Run the CPU-heavy Hatchet worker pool (mixdown only)."""
    client = HatchetClientManager.get_client()

    logger.info(
        "Starting Hatchet CPU worker pool (mixdown only)",
        worker_name="cpu-worker-pool",
        slots=1,
        labels={"pool": "cpu-heavy"},
    )

    # slots=1: mixdown is already serialized globally (max_runs=1), so this
    # pool never needs to execute more than one task at a time.
    worker = client.worker(
        "cpu-worker-pool",
        slots=1,
        labels={"pool": "cpu-heavy"},
        workflows=[daily_multitrack_pipeline],
    )
    try:
        worker.start()
    except KeyboardInterrupt:
        logger.info("Received shutdown signal, stopping CPU workers...")


if __name__ == "__main__":
    main()

View File

@@ -1,51 +0,0 @@
"""
LLM/I/O worker pool for all non-CPU tasks.
Handles: all tasks except mixdown_tracks (transcription, LLM inference, orchestration)
"""
from reflector.hatchet.client import HatchetClientManager
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
daily_multitrack_pipeline,
)
from reflector.hatchet.workflows.subject_processing import subject_workflow
from reflector.hatchet.workflows.topic_chunk_processing import topic_chunk_workflow
from reflector.hatchet.workflows.track_processing import track_workflow
from reflector.logger import logger
SLOTS = 10
WORKER_NAME = "llm-worker-pool"
POOL = "llm-io"
def main():
    """Run the LLM/I-O Hatchet worker pool (every task except mixdown)."""
    client = HatchetClientManager.get_client()

    logger.info(
        "Starting Hatchet LLM worker pool (all tasks except mixdown)",
        worker_name=WORKER_NAME,
        slots=SLOTS,
        labels={"pool": POOL},
    )

    handled = [
        daily_multitrack_pipeline,
        topic_chunk_workflow,
        subject_workflow,
        track_workflow,
    ]
    # SLOTS is an upper bound on concurrency; not every slot is expected
    # to be occupied at once.
    worker = client.worker(
        WORKER_NAME,
        slots=SLOTS,
        labels={"pool": POOL},
        workflows=handled,
    )
    try:
        worker.start()
    except KeyboardInterrupt:
        logger.info("Received shutdown signal, stopping LLM workers...")


if __name__ == "__main__":
    main()

View File

@@ -23,12 +23,7 @@ from pathlib import Path
from typing import Any, Callable, Coroutine, Protocol, TypeVar from typing import Any, Callable, Coroutine, Protocol, TypeVar
import httpx import httpx
from hatchet_sdk import ( from hatchet_sdk import Context
ConcurrencyExpression,
ConcurrencyLimitStrategy,
Context,
)
from hatchet_sdk.labels import DesiredWorkerLabel
from pydantic import BaseModel from pydantic import BaseModel
from reflector.dailyco_api.client import DailyApiClient from reflector.dailyco_api.client import DailyApiClient
@@ -472,20 +467,6 @@ async def process_tracks(input: PipelineInput, ctx: Context) -> ProcessTracksRes
parents=[process_tracks], parents=[process_tracks],
execution_timeout=timedelta(seconds=TIMEOUT_AUDIO), execution_timeout=timedelta(seconds=TIMEOUT_AUDIO),
retries=3, retries=3,
desired_worker_labels={
"pool": DesiredWorkerLabel(
value="cpu-heavy",
required=True,
weight=100,
),
},
concurrency=[
ConcurrencyExpression(
expression="'mixdown-global'",
max_runs=1, # serialize mixdown to prevent resource contention
limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN, # Queue
)
],
) )
@with_error_handling(TaskName.MIXDOWN_TRACKS) @with_error_handling(TaskName.MIXDOWN_TRACKS)
async def mixdown_tracks(input: PipelineInput, ctx: Context) -> MixdownResult: async def mixdown_tracks(input: PipelineInput, ctx: Context) -> MixdownResult:

View File

@@ -7,11 +7,7 @@ Spawned dynamically by detect_topics via aio_run_many() for parallel processing.
from datetime import timedelta from datetime import timedelta
from hatchet_sdk import ( from hatchet_sdk import ConcurrencyExpression, ConcurrencyLimitStrategy, Context
ConcurrencyExpression,
ConcurrencyLimitStrategy,
Context,
)
from hatchet_sdk.rate_limit import RateLimit from hatchet_sdk.rate_limit import RateLimit
from pydantic import BaseModel from pydantic import BaseModel
@@ -38,13 +34,11 @@ hatchet = HatchetClientManager.get_client()
topic_chunk_workflow = hatchet.workflow( topic_chunk_workflow = hatchet.workflow(
name="TopicChunkProcessing", name="TopicChunkProcessing",
input_validator=TopicChunkInput, input_validator=TopicChunkInput,
concurrency=[ concurrency=ConcurrencyExpression(
ConcurrencyExpression( expression="'global'", # constant string = global limit across all runs
expression="'global'", # constant string = global limit across all runs max_runs=20,
max_runs=20, limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN, ),
)
],
) )

View File

@@ -319,6 +319,21 @@ class ICSSyncService:
calendar = self.fetch_service.parse_ics(ics_content) calendar = self.fetch_service.parse_ics(ics_content)
content_hash = hashlib.md5(ics_content.encode()).hexdigest() content_hash = hashlib.md5(ics_content.encode()).hexdigest()
if room.ics_last_etag == content_hash:
logger.info("No changes in ICS for room", room_id=room.id)
room_url = f"{settings.UI_BASE_URL}/{room.name}"
events, total_events = self.fetch_service.extract_room_events(
calendar, room.name, room_url
)
return {
"status": SyncStatus.UNCHANGED,
"hash": content_hash,
"events_found": len(events),
"total_events": total_events,
"events_created": 0,
"events_updated": 0,
"events_deleted": 0,
}
# Extract matching events # Extract matching events
room_url = f"{settings.UI_BASE_URL}/{room.name}" room_url = f"{settings.UI_BASE_URL}/{room.name}"
@@ -356,44 +371,6 @@ class ICSSyncService:
time_since_sync = datetime.now(timezone.utc) - room.ics_last_sync time_since_sync = datetime.now(timezone.utc) - room.ics_last_sync
return time_since_sync.total_seconds() >= room.ics_fetch_interval return time_since_sync.total_seconds() >= room.ics_fetch_interval
def _event_data_changed(self, existing: CalendarEvent, new_data: EventData) -> bool:
    """Return True when any ICS-sourced field differs between the stored event and the fresh feed data.

    IMPORTANT: When adding fields to CalendarEvent/EventData, update this method
    and the _COMPARED_FIELDS set below for runtime validation.
    """
    # Fields that come from ICS and should trigger updates when changed
    _COMPARED_FIELDS = {
        "title",
        "description",
        "start_time",
        "end_time",
        "location",
        "attendees",
        "ics_raw_data",
    }

    # Runtime exhaustiveness check: every EventData field except the key
    # (ics_uid) must participate in the comparison above.
    event_data_fields = set(EventData.__annotations__.keys()) - {"ics_uid"}
    if event_data_fields != _COMPARED_FIELDS:
        missing = event_data_fields - _COMPARED_FIELDS
        extra = _COMPARED_FIELDS - event_data_fields
        raise RuntimeError(
            f"_event_data_changed() field mismatch: "
            f"missing={missing}, extra={extra}. "
            f"Update the comparison logic when adding/removing fields."
        )

    # Attribute on the DB model vs. key in the incoming dict; any single
    # differing field counts as a change.
    return any(
        getattr(existing, field) != new_data[field] for field in _COMPARED_FIELDS
    )
async def _sync_events_to_database( async def _sync_events_to_database(
self, room_id: str, events: list[EventData] self, room_id: str, events: list[EventData]
) -> SyncStats: ) -> SyncStats:
@@ -409,14 +386,11 @@ class ICSSyncService:
) )
if existing: if existing:
# Only count as updated if data actually changed updated += 1
if self._event_data_changed(existing, event_data):
updated += 1
await calendar_events_controller.upsert(calendar_event)
else: else:
created += 1 created += 1
await calendar_events_controller.upsert(calendar_event)
await calendar_events_controller.upsert(calendar_event)
current_ics_uids.append(event_data["ics_uid"]) current_ics_uids.append(event_data["ics_uid"])
# Soft delete events that are no longer in calendar # Soft delete events that are no longer in calendar

View File

@@ -23,6 +23,7 @@ from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
from reflector.pipelines.main_multitrack_pipeline import ( from reflector.pipelines.main_multitrack_pipeline import (
task_pipeline_multitrack_process, task_pipeline_multitrack_process,
) )
from reflector.settings import settings
from reflector.utils.string import NonEmptyString from reflector.utils.string import NonEmptyString
@@ -101,8 +102,8 @@ async def validate_transcript_for_processing(
if transcript.locked: if transcript.locked:
return ValidationLocked(detail="Recording is locked") return ValidationLocked(detail="Recording is locked")
# Check if recording is ready for processing # hatchet is idempotent anyways + if it wasn't dispatched successfully
if transcript.status == "idle" and not transcript.workflow_run_id: if transcript.status == "idle" and not settings.HATCHET_ENABLED:
return ValidationNotReady(detail="Recording is not ready for processing") return ValidationNotReady(detail="Recording is not ready for processing")
# Check Celery tasks # Check Celery tasks
@@ -115,8 +116,7 @@ async def validate_transcript_for_processing(
): ):
return ValidationAlreadyScheduled(detail="already running") return ValidationAlreadyScheduled(detail="already running")
# Check Hatchet workflow status if workflow_run_id exists if settings.HATCHET_ENABLED and transcript.workflow_run_id:
if transcript.workflow_run_id:
try: try:
status = await HatchetClientManager.get_workflow_run_status( status = await HatchetClientManager.get_workflow_run_status(
transcript.workflow_run_id transcript.workflow_run_id
@@ -181,16 +181,19 @@ async def dispatch_transcript_processing(
Returns AsyncResult for Celery tasks, None for Hatchet workflows. Returns AsyncResult for Celery tasks, None for Hatchet workflows.
""" """
if isinstance(config, MultitrackProcessingConfig): if isinstance(config, MultitrackProcessingConfig):
use_celery = False # Check if room has use_hatchet=True (overrides env vars)
room_forces_hatchet = False
if config.room_id: if config.room_id:
room = await rooms_controller.get_by_id(config.room_id) room = await rooms_controller.get_by_id(config.room_id)
use_celery = room.use_celery if room else False room_forces_hatchet = room.use_hatchet if room else False
use_hatchet = not use_celery # Start durable workflow if enabled (Hatchet)
# and if room has use_hatchet=True
use_hatchet = settings.HATCHET_ENABLED and room_forces_hatchet
if use_celery: if room_forces_hatchet:
logger.info( logger.info(
"Room uses legacy Celery processing", "Room forces Hatchet workflow",
room_id=config.room_id, room_id=config.room_id,
transcript_id=config.transcript_id, transcript_id=config.transcript_id,
) )

View File

@@ -158,10 +158,19 @@ class Settings(BaseSettings):
ZULIP_API_KEY: str | None = None ZULIP_API_KEY: str | None = None
ZULIP_BOT_EMAIL: str | None = None ZULIP_BOT_EMAIL: str | None = None
# Hatchet workflow orchestration (always enabled for multitrack processing) # Durable workflow orchestration
# Provider: "hatchet" (or "none" to disable)
DURABLE_WORKFLOW_PROVIDER: str = "none"
# Hatchet workflow orchestration
HATCHET_CLIENT_TOKEN: str | None = None HATCHET_CLIENT_TOKEN: str | None = None
HATCHET_CLIENT_TLS_STRATEGY: str = "none" # none, tls, mtls HATCHET_CLIENT_TLS_STRATEGY: str = "none" # none, tls, mtls
HATCHET_DEBUG: bool = False HATCHET_DEBUG: bool = False
@property
def HATCHET_ENABLED(self) -> bool:
"""True if Hatchet is the active provider."""
return self.DURABLE_WORKFLOW_PROVIDER == "hatchet"
settings = Settings() settings = Settings()

View File

@@ -287,12 +287,11 @@ async def _process_multitrack_recording_inner(
room_id=room.id, room_id=room.id,
) )
use_celery = room and room.use_celery use_hatchet = settings.HATCHET_ENABLED and room and room.use_hatchet
use_hatchet = not use_celery
if use_celery: if room and room.use_hatchet and not settings.HATCHET_ENABLED:
logger.info( logger.info(
"Room uses legacy Celery processing", "Room forces Hatchet workflow",
room_id=room.id, room_id=room.id,
transcript_id=transcript.id, transcript_id=transcript.id,
) )
@@ -811,6 +810,7 @@ async def reprocess_failed_daily_recordings():
) )
continue continue
# Fetch room to check use_hatchet flag
room = None room = None
if meeting.room_id: if meeting.room_id:
room = await rooms_controller.get_by_id(meeting.room_id) room = await rooms_controller.get_by_id(meeting.room_id)
@@ -834,10 +834,10 @@ async def reprocess_failed_daily_recordings():
) )
continue continue
use_celery = room and room.use_celery use_hatchet = settings.HATCHET_ENABLED and room and room.use_hatchet
use_hatchet = not use_celery
if use_hatchet: if use_hatchet:
# Hatchet requires a transcript for workflow_run_id tracking
if not transcript: if not transcript:
logger.warning( logger.warning(
"No transcript for Hatchet reprocessing, skipping", "No transcript for Hatchet reprocessing, skipping",

View File

@@ -7,10 +7,8 @@ elif [ "${ENTRYPOINT}" = "worker" ]; then
uv run celery -A reflector.worker.app worker --loglevel=info uv run celery -A reflector.worker.app worker --loglevel=info
elif [ "${ENTRYPOINT}" = "beat" ]; then elif [ "${ENTRYPOINT}" = "beat" ]; then
uv run celery -A reflector.worker.app beat --loglevel=info uv run celery -A reflector.worker.app beat --loglevel=info
elif [ "${ENTRYPOINT}" = "hatchet-worker-cpu" ]; then elif [ "${ENTRYPOINT}" = "hatchet-worker" ]; then
uv run python -m reflector.hatchet.run_workers_cpu uv run python -m reflector.hatchet.run_workers
elif [ "${ENTRYPOINT}" = "hatchet-worker-llm" ]; then
uv run python -m reflector.hatchet.run_workers_llm
else else
echo "Unknown command" echo "Unknown command"
fi fi

View File

@@ -2,9 +2,10 @@
Tests for Hatchet workflow dispatch and routing logic. Tests for Hatchet workflow dispatch and routing logic.
These tests verify: These tests verify:
1. Hatchet workflow validation and replay logic 1. Routing to Hatchet when HATCHET_ENABLED=True
2. Force flag to cancel and restart workflows 2. Replay logic for failed workflows
3. Validation prevents concurrent workflows 3. Force flag to cancel and restart
4. Validation prevents concurrent workflows
""" """
from unittest.mock import AsyncMock, patch from unittest.mock import AsyncMock, patch
@@ -33,22 +34,25 @@ async def test_hatchet_validation_blocks_running_workflow():
workflow_run_id="running-workflow-123", workflow_run_id="running-workflow-123",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.RUNNING
)
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.HatchetClientManager"
) as mock_celery_check: ) as mock_hatchet:
mock_celery_check.return_value = False mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.RUNNING
)
result = await validate_transcript_for_processing(mock_transcript) with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
assert isinstance(result, ValidationAlreadyScheduled) result = await validate_transcript_for_processing(mock_transcript)
assert "running" in result.detail.lower()
assert isinstance(result, ValidationAlreadyScheduled)
assert "running" in result.detail.lower()
@pytest.mark.usefixtures("setup_database") @pytest.mark.usefixtures("setup_database")
@@ -68,21 +72,24 @@ async def test_hatchet_validation_blocks_queued_workflow():
workflow_run_id="queued-workflow-123", workflow_run_id="queued-workflow-123",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.QUEUED
)
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.HatchetClientManager"
) as mock_celery_check: ) as mock_hatchet:
mock_celery_check.return_value = False mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.QUEUED
)
result = await validate_transcript_for_processing(mock_transcript) with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
assert isinstance(result, ValidationAlreadyScheduled) result = await validate_transcript_for_processing(mock_transcript)
assert isinstance(result, ValidationAlreadyScheduled)
@pytest.mark.usefixtures("setup_database") @pytest.mark.usefixtures("setup_database")
@@ -103,22 +110,25 @@ async def test_hatchet_validation_allows_failed_workflow():
recording_id="test-recording-id", recording_id="test-recording-id",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.FAILED
)
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.HatchetClientManager"
) as mock_celery_check: ) as mock_hatchet:
mock_celery_check.return_value = False mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.FAILED
)
result = await validate_transcript_for_processing(mock_transcript) with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
assert isinstance(result, ValidationOk) result = await validate_transcript_for_processing(mock_transcript)
assert result.transcript_id == "test-transcript-id"
assert isinstance(result, ValidationOk)
assert result.transcript_id == "test-transcript-id"
@pytest.mark.usefixtures("setup_database") @pytest.mark.usefixtures("setup_database")
@@ -139,21 +149,24 @@ async def test_hatchet_validation_allows_completed_workflow():
recording_id="test-recording-id", recording_id="test-recording-id",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.COMPLETED
)
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.HatchetClientManager"
) as mock_celery_check: ) as mock_hatchet:
mock_celery_check.return_value = False mock_hatchet.get_workflow_run_status = AsyncMock(
return_value=V1TaskStatus.COMPLETED
)
result = await validate_transcript_for_processing(mock_transcript) with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
assert isinstance(result, ValidationOk) result = await validate_transcript_for_processing(mock_transcript)
assert isinstance(result, ValidationOk)
@pytest.mark.usefixtures("setup_database") @pytest.mark.usefixtures("setup_database")
@@ -174,23 +187,26 @@ async def test_hatchet_validation_allows_when_status_check_fails():
recording_id="test-recording-id", recording_id="test-recording-id",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
# Status check fails (workflow might be deleted)
mock_hatchet.get_workflow_run_status = AsyncMock(
side_effect=ApiException("Workflow not found")
)
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.HatchetClientManager"
) as mock_celery_check: ) as mock_hatchet:
mock_celery_check.return_value = False # Status check fails (workflow might be deleted)
mock_hatchet.get_workflow_run_status = AsyncMock(
side_effect=ApiException("Workflow not found")
)
result = await validate_transcript_for_processing(mock_transcript) with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
# Should allow processing when we can't get status result = await validate_transcript_for_processing(mock_transcript)
assert isinstance(result, ValidationOk)
# Should allow processing when we can't get status
assert isinstance(result, ValidationOk)
@pytest.mark.usefixtures("setup_database") @pytest.mark.usefixtures("setup_database")
@@ -211,11 +227,47 @@ async def test_hatchet_validation_skipped_when_no_workflow_id():
recording_id="test-recording-id", recording_id="test-recording-id",
) )
with patch( with patch("reflector.services.transcript_process.settings") as mock_settings:
"reflector.services.transcript_process.HatchetClientManager" mock_settings.HATCHET_ENABLED = True
) as mock_hatchet:
# Should not be called with patch(
mock_hatchet.get_workflow_run_status = AsyncMock() "reflector.services.transcript_process.HatchetClientManager"
) as mock_hatchet:
# Should not be called
mock_hatchet.get_workflow_run_status = AsyncMock()
with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active"
) as mock_celery_check:
mock_celery_check.return_value = False
result = await validate_transcript_for_processing(mock_transcript)
# Should not check Hatchet status
mock_hatchet.get_workflow_run_status.assert_not_called()
assert isinstance(result, ValidationOk)
@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
async def test_hatchet_validation_skipped_when_disabled():
"""Test that Hatchet validation is skipped when HATCHET_ENABLED is False."""
from reflector.services.transcript_process import (
ValidationOk,
validate_transcript_for_processing,
)
mock_transcript = Transcript(
id="test-transcript-id",
name="Test",
status="uploaded",
source_kind="room",
workflow_run_id="some-workflow-123",
recording_id="test-recording-id",
)
with patch("reflector.services.transcript_process.settings") as mock_settings:
mock_settings.HATCHET_ENABLED = False # Hatchet disabled
with patch( with patch(
"reflector.services.transcript_process.task_is_scheduled_or_active" "reflector.services.transcript_process.task_is_scheduled_or_active"
@@ -224,8 +276,7 @@ async def test_hatchet_validation_skipped_when_no_workflow_id():
result = await validate_transcript_for_processing(mock_transcript) result = await validate_transcript_for_processing(mock_transcript)
# Should not check Hatchet status # Should not check Hatchet at all
mock_hatchet.get_workflow_run_status.assert_not_called()
assert isinstance(result, ValidationOk) assert isinstance(result, ValidationOk)

View File

@@ -189,17 +189,14 @@ async def test_ics_sync_service_sync_room_calendar():
assert events[0].ics_uid == "sync-event-1" assert events[0].ics_uid == "sync-event-1"
assert events[0].title == "Sync Test Meeting" assert events[0].title == "Sync Test Meeting"
# Second sync with same content (calendar unchanged, but sync always runs) # Second sync with same content (should be unchanged)
# Refresh room to get updated etag and force sync by setting old sync time # Refresh room to get updated etag and force sync by setting old sync time
room = await rooms_controller.get_by_id(room.id) room = await rooms_controller.get_by_id(room.id)
await rooms_controller.update( await rooms_controller.update(
room, {"ics_last_sync": datetime.now(timezone.utc) - timedelta(minutes=10)} room, {"ics_last_sync": datetime.now(timezone.utc) - timedelta(minutes=10)}
) )
result = await sync_service.sync_room_calendar(room) result = await sync_service.sync_room_calendar(room)
assert result["status"] == "success" assert result["status"] == "unchanged"
assert result["events_created"] == 0
assert result["events_updated"] == 0
assert result["events_deleted"] == 0
# Third sync with updated event # Third sync with updated event
event["summary"] = "Updated Meeting Title" event["summary"] = "Updated Meeting Title"
@@ -291,43 +288,3 @@ async def test_ics_sync_service_error_handling():
result = await sync_service.sync_room_calendar(room) result = await sync_service.sync_room_calendar(room)
assert result["status"] == "error" assert result["status"] == "error"
assert "Network error" in result["error"] assert "Network error" in result["error"]
@pytest.mark.asyncio
async def test_event_data_changed_exhaustiveness():
    """Test that _event_data_changed compares all EventData fields (except ics_uid).

    This test ensures programmers don't forget to update the comparison logic
    when adding new fields to EventData/CalendarEvent.
    """
    from reflector.db.calendar_events import CalendarEvent
    from reflector.services.ics_sync import EventData

    service = ICSSyncService()
    start = datetime.now(timezone.utc)

    base: EventData = {
        "ics_uid": "test-123",
        "title": "Test",
        "description": "Desc",
        "location": "Loc",
        "start_time": start,
        "end_time": start + timedelta(hours=1),
        "attendees": [],
        "ics_raw_data": "raw",
    }
    stored = CalendarEvent(
        room_id="room1",
        **base,
    )

    # Identical payload: the exhaustiveness check must pass (no RuntimeError)
    # and no change must be reported.
    assert service._event_data_changed(stored, base) is False

    # A single altered field must be detected as a change.
    modified = base.copy()
    modified["title"] = "Changed Title"
    assert service._event_data_changed(stored, modified) is True

View File

@@ -162,24 +162,9 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
from datetime import datetime, timezone from datetime import datetime, timezone
from reflector.db.recordings import Recording, recordings_controller from reflector.db.recordings import Recording, recordings_controller
from reflector.db.rooms import rooms_controller
from reflector.db.transcripts import transcripts_controller from reflector.db.transcripts import transcripts_controller
room = await rooms_controller.add( # Create transcript with Daily.co multitrack recording
name="test-room",
user_id="test-user",
zulip_auto_post=False,
zulip_stream="",
zulip_topic="",
is_locked=False,
room_mode="normal",
recording_type="cloud",
recording_trigger="automatic-2nd-participant",
is_shared=False,
)
# Force Celery backend for test
await rooms_controller.update(room, {"use_celery": True})
transcript = await transcripts_controller.add( transcript = await transcripts_controller.add(
"", "",
source_kind="room", source_kind="room",
@@ -187,7 +172,6 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
target_language="en", target_language="en",
user_id="test-user", user_id="test-user",
share_mode="public", share_mode="public",
room_id=room.id,
) )
track_keys = [ track_keys = [

View File

@@ -1,12 +1,12 @@
import React, { useState, useEffect } from "react"; import React, { useState, useEffect } from "react";
import ScrollToBottom from "../../scrollToBottom"; import ScrollToBottom from "../../scrollToBottom";
import { Topic } from "../../webSocketTypes"; import { Topic } from "../../webSocketTypes";
import useParticipants from "../../useParticipants"; import { Box, Flex, Text } from "@chakra-ui/react";
import { Box, Flex, Text, Accordion } from "@chakra-ui/react"; import { formatTime } from "../../../../lib/time";
import { TopicItem } from "./TopicItem"; import { getTopicColor } from "../../../../lib/topicColors";
import { TranscriptStatus } from "../../../../lib/transcript"; import { TranscriptStatus } from "../../../../lib/transcript";
import { featureEnabled } from "../../../../lib/features"; import { featureEnabled } from "../../../../lib/features";
import { TOPICS_SCROLL_DIV_ID } from "./constants";
type TopicListProps = { type TopicListProps = {
topics: Topic[]; topics: Topic[];
@@ -18,6 +18,7 @@ type TopicListProps = {
transcriptId: string; transcriptId: string;
status: TranscriptStatus | null; status: TranscriptStatus | null;
currentTranscriptText: any; currentTranscriptText: any;
onTopicClick?: (topicId: string) => void;
}; };
export function TopicList({ export function TopicList({
@@ -27,30 +28,13 @@ export function TopicList({
transcriptId, transcriptId,
status, status,
currentTranscriptText, currentTranscriptText,
onTopicClick,
}: TopicListProps) { }: TopicListProps) {
const [activeTopic, setActiveTopic] = useActiveTopic; const [activeTopic, setActiveTopic] = useActiveTopic;
const [hoveredTopicId, setHoveredTopicId] = useState<string | null>(null);
const [autoscrollEnabled, setAutoscrollEnabled] = useState<boolean>(true); const [autoscrollEnabled, setAutoscrollEnabled] = useState<boolean>(true);
const participants = useParticipants(transcriptId);
const scrollToTopic = () => { const toggleScroll = (element: HTMLElement) => {
const topicDiv = document.getElementById(`topic-${activeTopic?.id}`);
setTimeout(() => {
topicDiv?.scrollIntoView({
behavior: "smooth",
block: "start",
inline: "nearest",
});
}, 200);
};
useEffect(() => {
if (activeTopic && autoscroll) scrollToTopic();
}, [activeTopic, autoscroll]);
// scroll top is not rounded, heights are, so exact match won't work.
// https://developer.mozilla.org/en-US/docs/Web/API/Element/scrollHeight#determine_if_an_element_has_been_totally_scrolled
const toggleScroll = (element) => {
const bottom = const bottom =
Math.abs( Math.abs(
element.scrollHeight - element.clientHeight - element.scrollTop, element.scrollHeight - element.clientHeight - element.scrollTop,
@@ -61,14 +45,19 @@ export function TopicList({
setAutoscrollEnabled(true); setAutoscrollEnabled(true);
} }
}; };
const handleScroll = (e) => {
toggleScroll(e.target); const handleScroll = (e: React.UIEvent<HTMLDivElement>) => {
toggleScroll(e.target as HTMLElement);
};
const scrollToBottom = () => {
const topicsDiv = document.getElementById(TOPICS_SCROLL_DIV_ID);
if (topicsDiv) topicsDiv.scrollTop = topicsDiv.scrollHeight;
}; };
useEffect(() => { useEffect(() => {
if (autoscroll) { if (autoscroll) {
const topicsDiv = document.getElementById("scroll-div"); const topicsDiv = document.getElementById(TOPICS_SCROLL_DIV_ID);
topicsDiv && toggleScroll(topicsDiv); topicsDiv && toggleScroll(topicsDiv);
} }
}, [activeTopic, autoscroll]); }, [activeTopic, autoscroll]);
@@ -77,37 +66,41 @@ export function TopicList({
if (autoscroll && autoscrollEnabled) scrollToBottom(); if (autoscroll && autoscrollEnabled) scrollToBottom();
}, [topics.length, currentTranscriptText]); }, [topics.length, currentTranscriptText]);
const scrollToBottom = () => {
const topicsDiv = document.getElementById("scroll-div");
if (topicsDiv) topicsDiv.scrollTop = topicsDiv.scrollHeight;
};
const getSpeakerName = (speakerNumber: number) => {
if (!participants.response) return;
return (
participants.response.find(
(participant) => participant.speaker == speakerNumber,
)?.name || `Speaker ${speakerNumber}`
);
};
const requireLogin = featureEnabled("requireLogin");
useEffect(() => { useEffect(() => {
if (autoscroll) { if (autoscroll) {
setActiveTopic(topics[topics.length - 1]); setActiveTopic(topics[topics.length - 1]);
} }
}, [topics, autoscroll]); }, [topics, autoscroll]);
const handleTopicClick = (topic: Topic) => {
setActiveTopic(topic);
if (onTopicClick) {
onTopicClick(topic.id);
}
};
const handleTopicMouseEnter = (topic: Topic) => {
setHoveredTopicId(topic.id);
// If already active, toggle off when mousing over
if (activeTopic?.id === topic.id) {
setActiveTopic(null);
} else {
setActiveTopic(topic);
}
};
const handleTopicMouseLeave = () => {
setHoveredTopicId(null);
};
const requireLogin = featureEnabled("requireLogin");
return ( return (
<Flex <Flex
position={"relative"} position="relative"
w={"100%"} w="full"
h={"95%"} h="200px"
flexDirection={"column"} flexDirection="column"
justify={"center"}
align={"center"}
flexShrink={0} flexShrink={0}
> >
{autoscroll && ( {autoscroll && (
@@ -118,45 +111,71 @@ export function TopicList({
)} )}
<Box <Box
id="scroll-div" id={TOPICS_SCROLL_DIV_ID}
overflowY={"auto"} overflowY="auto"
h={"100%"} h="full"
onScroll={handleScroll} onScroll={handleScroll}
width="full" width="full"
> >
{topics.length > 0 && ( {topics.length > 0 && (
<Accordion.Root <Flex direction="column" gap={1} p={2}>
multiple={false} {topics.map((topic, index) => {
collapsible={true} const color = getTopicColor(index);
value={activeTopic ? [activeTopic.id] : []} const isActive = activeTopic?.id === topic.id;
onValueChange={(details) => { const isHovered = hoveredTopicId === topic.id;
const selectedTopicId = details.value[0];
const selectedTopic = selectedTopicId return (
? topics.find((t) => t.id === selectedTopicId) <Flex
: null; key={topic.id}
setActiveTopic(selectedTopic || null); id={`topic-${topic.id}`}
}} gap={2}
> align="center"
{topics.map((topic) => ( py={1}
<TopicItem px={2}
key={topic.id} cursor="pointer"
topic={topic} bg={isActive || isHovered ? "gray.100" : "transparent"}
isActive={activeTopic?.id === topic.id} _hover={{ bg: "gray.50" }}
getSpeakerName={getSpeakerName} onClick={() => handleTopicClick(topic)}
/> onMouseEnter={() => handleTopicMouseEnter(topic)}
))} onMouseLeave={handleTopicMouseLeave}
</Accordion.Root> >
{/* Color indicator */}
<Box
w="12px"
h="12px"
borderRadius="full"
bg={color}
flexShrink={0}
/>
{/* Topic title */}
<Text
flex={1}
fontSize="sm"
fontWeight={isActive ? "semibold" : "normal"}
>
{topic.title}
</Text>
{/* Timestamp */}
<Text as="span" color="gray.500" fontSize="xs" flexShrink={0}>
{formatTime(topic.timestamp)}
</Text>
</Flex>
);
})}
</Flex>
)} )}
{status == "recording" && ( {status == "recording" && (
<Box textAlign={"center"}> <Box textAlign="center">
<Text>{currentTranscriptText}</Text> <Text>{currentTranscriptText}</Text>
</Box> </Box>
)} )}
{(status == "recording" || status == "idle") && {(status == "recording" || status == "idle") &&
currentTranscriptText.length == 0 && currentTranscriptText.length == 0 &&
topics.length == 0 && ( topics.length == 0 && (
<Box textAlign={"center"} color="gray"> <Box textAlign="center" color="gray">
<Text> <Text>
Full discussion transcript will appear here after you start Full discussion transcript will appear here after you start
recording. recording.
@@ -167,7 +186,7 @@ export function TopicList({
</Box> </Box>
)} )}
{status == "processing" && ( {status == "processing" && (
<Box textAlign={"center"} color="gray"> <Box textAlign="center" color="gray">
<Text>We are processing the recording, please wait.</Text> <Text>We are processing the recording, please wait.</Text>
{!requireLogin && ( {!requireLogin && (
<span> <span>
@@ -177,12 +196,12 @@ export function TopicList({
</Box> </Box>
)} )}
{status == "ended" && topics.length == 0 && ( {status == "ended" && topics.length == 0 && (
<Box textAlign={"center"} color="gray"> <Box textAlign="center" color="gray">
<Text>Recording has ended without topics being found.</Text> <Text>Recording has ended without topics being found.</Text>
</Box> </Box>
)} )}
{status == "error" && ( {status == "error" && (
<Box textAlign={"center"} color="gray"> <Box textAlign="center" color="gray">
<Text>There was an error processing your recording</Text> <Text>There was an error processing your recording</Text>
</Box> </Box>
)} )}

View File

@@ -0,0 +1,106 @@
import { Box, Text, IconButton } from "@chakra-ui/react";
import { ChevronUp } from "lucide-react";
import { Topic } from "../../webSocketTypes";
import { getTopicColor } from "../../../../lib/topicColors";
import { TOPICS_SCROLL_DIV_ID } from "./constants";
// Props for TranscriptWithGutter.
interface TranscriptWithGutterProps {
  // Topics to render, in order; each topic carries its own segments.
  topics: Topic[];
  // Resolves a speaker number to a display name; may return undefined when
  // no participant matches, in which case a "Speaker N" fallback is shown.
  getSpeakerName: (speakerNumber: number) => string | undefined;
  // Invoked with the topic id when a topic's colored gutter is clicked,
  // so the parent can scroll the topic list to that topic.
  onGutterClick: (topicId: string) => void;
}

/**
 * Renders the full transcript grouped by topic, with a continuous colored
 * gutter down the left side of each topic's segments. The gutter color is
 * keyed by topic index via getTopicColor, matching the topic list's colors.
 * Each topic header has a chevron button that scrolls back to the topic list.
 */
export function TranscriptWithGutter({
  topics,
  getSpeakerName,
  onGutterClick,
}: TranscriptWithGutterProps) {
  const scrollToTopics = () => {
    // Scroll to the topic list at the top
    const topicList = document.getElementById(TOPICS_SCROLL_DIV_ID);
    if (topicList) {
      topicList.scrollIntoView({
        behavior: "smooth",
        block: "start",
      });
    }
  };

  return (
    <Box>
      {topics.map((topic, topicIndex) => {
        const color = getTopicColor(topicIndex);
        return (
          <Box key={topic.id} position="relative">
            {/* Topic Header with Up Button */}
            <Box
              py={3}
              px={4}
              fontWeight="semibold"
              fontSize="lg"
              display="flex"
              alignItems="center"
              justifyContent="space-between"
            >
              <Text>{topic.title}</Text>
              <IconButton
                aria-label="Scroll to topics"
                size="sm"
                variant="ghost"
                onClick={scrollToTopics}
              >
                <ChevronUp size={16} />
              </IconButton>
            </Box>
            {/* Segments container with single gutter */}
            <Box position="relative">
              {/* Single continuous gutter for entire topic */}
              <Box
                className="topic-gutter"
                position="absolute"
                left={0}
                top={0}
                bottom={0}
                width="4px"
                bg={color}
                cursor="pointer"
                transition="all 0.2s"
                _hover={{
                  filter: "brightness(1.2)",
                  width: "6px",
                }}
                onClick={() => onGutterClick(topic.id)}
              />
              {/* Segments */}
              {/* NOTE(review): these ids are parsed elsewhere with
                  /^segment-([^-]+)-/, which truncates topic ids containing
                  "-" (e.g. UUIDs) — verify topic id format or the regex. */}
              {topic.segments?.map((segment, segmentIndex) => (
                <Box
                  key={segmentIndex}
                  id={`segment-${topic.id}-${segmentIndex}`}
                  py={2}
                  px={4}
                  pl={12}
                  _hover={{
                    bg: "gray.50",
                  }}
                >
                  {/* Segment Content */}
                  <Text fontSize="sm">
                    <Text as="span" fontWeight="semibold" color="gray.700">
                      {getSpeakerName(segment.speaker) ||
                        `Speaker ${segment.speaker}`}
                      :
                    </Text>{" "}
                    {segment.text}
                  </Text>
                </Box>
              ))}
            </Box>
          </Box>
        );
      })}
    </Box>
  );
}

View File

@@ -0,0 +1 @@
// DOM id of the scrollable topic-list container. Shared so sibling
// components (e.g. the transcript gutter view) can locate and scroll it.
export const TOPICS_SCROLL_DIV_ID = "topics-scroll-div";

View File

@@ -3,7 +3,9 @@ import Modal from "../modal";
import useTopics from "../useTopics"; import useTopics from "../useTopics";
import useWaveform from "../useWaveform"; import useWaveform from "../useWaveform";
import useMp3 from "../useMp3"; import useMp3 from "../useMp3";
import useParticipants from "../useParticipants";
import { TopicList } from "./_components/TopicList"; import { TopicList } from "./_components/TopicList";
import { TranscriptWithGutter } from "./_components/TranscriptWithGutter";
import { Topic } from "../webSocketTypes"; import { Topic } from "../webSocketTypes";
import React, { useEffect, useState, use } from "react"; import React, { useEffect, useState, use } from "react";
import FinalSummary from "./finalSummary"; import FinalSummary from "./finalSummary";
@@ -45,14 +47,91 @@ export default function TranscriptDetails(details: TranscriptDetails) {
const mp3 = useMp3(transcriptId, waiting); const mp3 = useMp3(transcriptId, waiting);
const topics = useTopics(transcriptId); const topics = useTopics(transcriptId);
const participants = useParticipants(transcriptId);
const waveform = useWaveform( const waveform = useWaveform(
transcriptId, transcriptId,
waiting || mp3.audioDeleted === true, waiting || mp3.audioDeleted === true,
); );
const useActiveTopic = useState<Topic | null>(null); const useActiveTopic = useState<Topic | null>(null);
const [activeTopic, setActiveTopic] = useActiveTopic;
const [finalSummaryElement, setFinalSummaryElement] = const [finalSummaryElement, setFinalSummaryElement] =
useState<HTMLDivElement | null>(null); useState<HTMLDivElement | null>(null);
// IntersectionObserver for active topic detection based on scroll position
useEffect(() => {
if (!topics.topics || topics.topics.length === 0) return;
const observer = new IntersectionObserver(
(entries) => {
// Find the most visible segment
let mostVisibleEntry: IntersectionObserverEntry | null = null;
let maxRatio = 0;
entries.forEach((entry) => {
if (entry.isIntersecting && entry.intersectionRatio > maxRatio) {
maxRatio = entry.intersectionRatio;
mostVisibleEntry = entry;
}
});
if (mostVisibleEntry) {
// Extract topicId from segment id (format: "segment-{topicId}-{idx}")
const segmentId = mostVisibleEntry.target.id;
const match = segmentId.match(/^segment-([^-]+)-/);
if (match) {
const topicId = match[1];
const topic = topics.topics?.find((t) => t.id === topicId);
if (topic && activeTopic?.id !== topic.id) {
setActiveTopic(topic);
}
}
}
},
{
threshold: [0, 0.25, 0.5, 0.75, 1],
rootMargin: "-20% 0px -20% 0px",
},
);
// Observe all segment elements
const segments = document.querySelectorAll('[id^="segment-"]');
segments.forEach((segment) => observer.observe(segment));
return () => observer.disconnect();
}, [topics.topics, activeTopic?.id, setActiveTopic]);
// Scroll handlers for bidirectional navigation
const handleTopicClick = (topicId: string) => {
// Scroll to first segment of this topic in transcript
const firstSegment = document.querySelector(`[id^="segment-${topicId}-"]`);
if (firstSegment) {
firstSegment.scrollIntoView({
behavior: "smooth",
block: "center",
});
}
};
const handleGutterClick = (topicId: string) => {
// Scroll to topic in list
const topicChip = document.getElementById(`topic-${topicId}`);
if (topicChip) {
topicChip.scrollIntoView({
behavior: "smooth",
block: "center",
});
}
};
const getSpeakerName = (speakerNumber: number) => {
if (!participants.response) return `Speaker ${speakerNumber}`;
return (
participants.response.find(
(participant) => participant.speaker == speakerNumber,
)?.name || `Speaker ${speakerNumber}`
);
};
useEffect(() => { useEffect(() => {
if (!waiting || !transcript.data) return; if (!waiting || !transcript.data) return;
@@ -121,7 +200,7 @@ export default function TranscriptDetails(details: TranscriptDetails) {
<> <>
<Grid <Grid
templateColumns="1fr" templateColumns="1fr"
templateRows="auto minmax(0, 1fr)" templateRows="auto auto"
gap={4} gap={4}
mt={4} mt={4}
mb={4} mb={4}
@@ -153,18 +232,18 @@ export default function TranscriptDetails(details: TranscriptDetails) {
<Grid <Grid
templateColumns={{ base: "minmax(0, 1fr)", md: "repeat(2, 1fr)" }} templateColumns={{ base: "minmax(0, 1fr)", md: "repeat(2, 1fr)" }}
templateRows={{ templateRows={{
base: "auto minmax(0, 1fr) minmax(0, 1fr)", base: "auto auto auto",
md: "auto minmax(0, 1fr)", md: "auto auto",
}} }}
gap={4} gap={4}
gridRowGap={2} gridRowGap={2}
padding={4} padding={4}
paddingBottom={0}
background="gray.bg" background="gray.bg"
border={"2px solid"} border={"2px solid"}
borderColor={"gray.bg"} borderColor={"gray.bg"}
borderRadius={8} borderRadius={8}
> >
{/* Title */}
<GridItem colSpan={{ base: 1, md: 2 }}> <GridItem colSpan={{ base: 1, md: 2 }}>
<Flex direction="column" gap={0}> <Flex direction="column" gap={0}>
<Flex alignItems="center" gap={2}> <Flex alignItems="center" gap={2}>
@@ -187,28 +266,64 @@ export default function TranscriptDetails(details: TranscriptDetails) {
)} )}
</Flex> </Flex>
</GridItem> </GridItem>
<TopicList
topics={topics.topics || []} {/* Left column: Topics List */}
useActiveTopic={useActiveTopic} <GridItem display="flex" flexDirection="column" gap={4} h="100%">
autoscroll={false} <TopicList
transcriptId={transcriptId} topics={topics.topics || []}
status={transcript.data?.status || null} useActiveTopic={useActiveTopic}
currentTranscriptText="" autoscroll={false}
/> transcriptId={transcriptId}
{transcript.data && topics.topics ? ( status={transcript.data?.status || null}
<> currentTranscriptText=""
<FinalSummary onTopicClick={handleTopicClick}
transcript={transcript.data} />
topics={topics.topics}
onUpdate={() => { {/* Transcript with colored gutter (scrollable) */}
transcript.refetch().then(() => {}); {topics.topics && topics.topics.length > 0 && (
<Box
overflowY="auto"
flex={1}
minH="0"
pr={2}
css={{
"&::-webkit-scrollbar": {
width: "8px",
},
"&::-webkit-scrollbar-track": {
background: "transparent",
},
"&::-webkit-scrollbar-thumb": {
background: "#CBD5E0",
borderRadius: "4px",
},
"&::-webkit-scrollbar-thumb:hover": {
background: "#A0AEC0",
},
}} }}
finalSummaryRef={setFinalSummaryElement} >
/> <TranscriptWithGutter
</> topics={topics.topics}
getSpeakerName={getSpeakerName}
onGutterClick={handleGutterClick}
/>
</Box>
)}
</GridItem>
{/* Right column: Final Summary */}
{transcript.data && topics.topics ? (
<FinalSummary
transcript={transcript.data}
topics={topics.topics}
onUpdate={() => {
transcript.refetch().then(() => {});
}}
finalSummaryRef={setFinalSummaryElement}
/>
) : ( ) : (
<Flex justify={"center"} alignItems={"center"} h={"100%"}> <Flex justify="center" alignItems="center" h="100%">
<div className="flex flex-col h-full justify-center content-center"> <Flex direction="column" h="full" justify="center" align="center">
{transcript?.data?.status == "processing" ? ( {transcript?.data?.status == "processing" ? (
<Text>Loading Transcript</Text> <Text>Loading Transcript</Text>
) : ( ) : (
@@ -217,7 +332,7 @@ export default function TranscriptDetails(details: TranscriptDetails) {
back later back later
</Text> </Text>
)} )}
</div> </Flex>
</Flex> </Flex>
)} )}
</Grid> </Grid>

View File

@@ -0,0 +1,18 @@
// Predefined color palette for topics
// Colors chosen for good contrast and visual distinction
export const TOPIC_COLORS = [
  "#3B82F6", // blue
  "#10B981", // green
  "#F59E0B", // amber
  "#EF4444", // red
  "#8B5CF6", // violet
  "#EC4899", // pink
  "#14B8A6", // teal
  "#F97316", // orange
  "#6366F1", // indigo
  "#84CC16", // lime
] as const;

/**
 * Return a stable color for a topic based on its position in the list,
 * cycling through TOPIC_COLORS.
 *
 * Safe for any integer index: the JS `%` operator yields a negative
 * remainder for negative operands, which would index out of range and
 * return undefined (violating the declared `string` return type), so the
 * remainder is normalized into [0, TOPIC_COLORS.length).
 */
export function getTopicColor(topicIndex: number): string {
  const n = TOPIC_COLORS.length;
  return TOPIC_COLORS[((topicIndex % n) + n) % n];
}