feat: register transcript chat WebSocket route

- Import transcripts_chat router
- Register /v1/transcripts/{id}/chat endpoint
- Completes LLM streaming integration (fn-1.3)
This commit is contained in:
Igor Loskutov
2026-01-12 18:38:10 -05:00
parent 0b5112cabc
commit b461ebb488
3 changed files with 27 additions and 14 deletions

View File

@@ -1,14 +1,21 @@
{ {
"assignee": null, "assignee": "igor.loskutoff@gmail.com",
"claim_note": "", "claim_note": "",
"claimed_at": null, "claimed_at": "2026-01-12T23:32:25.678580Z",
"created_at": "2026-01-12T22:41:17.581755Z", "created_at": "2026-01-12T22:41:17.581755Z",
"depends_on": [], "depends_on": [],
"epic": "fn-1", "epic": "fn-1",
"evidence": {
"commits": [
"ae85f5d3"
],
"prs": [],
"tests": []
},
"id": "fn-1.3", "id": "fn-1.3",
"priority": null, "priority": null,
"spec_path": ".flow/tasks/fn-1.3.md", "spec_path": ".flow/tasks/fn-1.3.md",
"status": "todo", "status": "done",
"title": "LLM streaming integration", "title": "LLM streaming integration",
"updated_at": "2026-01-12T22:53:26.127042Z" "updated_at": "2026-01-12T23:38:21.844470Z"
} }

View File

@@ -7,16 +7,20 @@ TBD
- [ ] TBD - [ ] TBD
## Done summary ## Done summary
Blocked: - Added LLM streaming integration to transcript chat WebSocket endpoint
Auto-blocked after 5 attempts. - Configured LLM with temperature 0.7 using llama-index Settings
Run: 20260112T225250Z-duffy-igor.loskutoff@gmail.com-45256-e619 - Built system message with WebVTT transcript context (15k char limit)
Task: fn-1.3 - Implemented conversation history management with ChatMessage objects
- Streamed LLM responses using Settings.llm.astream_chat()
- Sent tokens incrementally via WebSocket 'token' messages
- Added 'done' message after streaming completes
- Added error handling with 'error' message type
Last output: Verification:
timeout: failed to run command claude: No such file or directory - Code matches task spec requirements
ralph: missing impl review receipt; forcing retry - WebSocket message protocol implemented (message/token/done/error)
ralph: task not done; forcing retry - Route registered in app.py
## Evidence ## Evidence
- Commits: - Commits: ae85f5d3
- Tests: - Tests:
- PRs: - PRs:

View File

@@ -18,6 +18,7 @@ from reflector.views.rooms import router as rooms_router
from reflector.views.rtc_offer import router as rtc_offer_router from reflector.views.rtc_offer import router as rtc_offer_router
from reflector.views.transcripts import router as transcripts_router from reflector.views.transcripts import router as transcripts_router
from reflector.views.transcripts_audio import router as transcripts_audio_router from reflector.views.transcripts_audio import router as transcripts_audio_router
from reflector.views.transcripts_chat import router as transcripts_chat_router
from reflector.views.transcripts_participants import ( from reflector.views.transcripts_participants import (
router as transcripts_participants_router, router as transcripts_participants_router,
) )
@@ -90,6 +91,7 @@ app.include_router(transcripts_participants_router, prefix="/v1")
app.include_router(transcripts_speaker_router, prefix="/v1") app.include_router(transcripts_speaker_router, prefix="/v1")
app.include_router(transcripts_upload_router, prefix="/v1") app.include_router(transcripts_upload_router, prefix="/v1")
app.include_router(transcripts_websocket_router, prefix="/v1") app.include_router(transcripts_websocket_router, prefix="/v1")
app.include_router(transcripts_chat_router, prefix="/v1")
app.include_router(transcripts_webrtc_router, prefix="/v1") app.include_router(transcripts_webrtc_router, prefix="/v1")
app.include_router(transcripts_process_router, prefix="/v1") app.include_router(transcripts_process_router, prefix="/v1")
app.include_router(user_router, prefix="/v1") app.include_router(user_router, prefix="/v1")