feat: use llamaindex everywhere (#525)

* feat: use llamaindex for transcript final title too

* refactor: remove llm backend, replace with a single class + llamaindex

* refactor: self-review

* fix: typing

* fix: tests

* refactor: extract clean_title and add tests (see the sketch below)

* test: fix

* test: remove ensure_casing/nltk

* fix: tiny mistake
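
As context for the clean_title extraction mentioned above, here is a minimal sketch of what such a helper plus a test could look like. It is not the actual code from this commit: the function name comes from the commit message, while the specific cleanup rules (quote stripping, markdown heading removal, trailing punctuation) are assumptions for illustration. Per the "remove ensure_casing/nltk" message, no NLTK-based casing pass is applied.

```python
import re


def clean_title(title: str) -> str:
    """Normalize an LLM-generated title (illustrative rules, not the commit's exact logic)."""
    title = title.strip()
    # Drop wrapping quotes the model sometimes adds around the title.
    title = title.strip("\"'")
    # Remove a leading markdown heading marker such as "# " or "## ".
    title = re.sub(r"^#+\s*", "", title)
    # Titles should not end with a period.
    return title.rstrip(".").strip()


def test_clean_title_strips_quotes_heading_and_trailing_dot():
    assert clean_title('"# Weekly Sync Notes."') == "Weekly Sync Notes"
```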
Committed by GitHub on 2025-08-01 12:13:00 -06:00
parent 1878834ce6
commit 28ac031ff6
25 changed files with 284 additions and 1539 deletions


@@ -1,4 +1,4 @@
-from reflector.llm.openai_llm import OpenAILLM
+from reflector.llm import LLM
 from reflector.processors.base import Processor
 from reflector.processors.summary.summary_builder import SummaryBuilder
 from reflector.processors.types import FinalLongSummary, FinalShortSummary, TitleSummary
@@ -17,7 +17,7 @@ class TranscriptFinalSummaryProcessor(Processor):
         super().__init__(**kwargs)
         self.transcript = transcript
         self.chunks: list[TitleSummary] = []
-        self.llm = OpenAILLM(config_prefix="SUMMARY", settings=settings)
+        self.llm = LLM(settings=settings)
         self.builder = None

     async def _push(self, data: TitleSummary):
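
The hunk above replaces the SUMMARY-prefixed OpenAILLM with a single LLM class constructed from settings alone; the class body itself is not shown in this diff. The following is a hedged sketch of one way such a thin llamaindex-backed wrapper could be written. It assumes llama-index >= 0.10 with the OpenAI integration installed; the settings attribute names (llm_model, llm_api_key) and the complete() method are hypothetical, not the repository's real API.

```python
# Illustrative only: a thin, single-class wrapper over llama_index.
# Settings attribute names and the complete() method are assumptions.
from llama_index.llms.openai import OpenAI


class LLM:
    """One entry point for every LLM call, backed by llama_index."""

    def __init__(self, settings):
        self._llm = OpenAI(
            model=settings.llm_model,      # hypothetical setting name
            api_key=settings.llm_api_key,  # hypothetical setting name
        )

    async def complete(self, prompt: str) -> str:
        # llama_index LLMs expose async completion via acomplete();
        # the generated text is carried on the response's .text attribute.
        response = await self._llm.acomplete(prompt)
        return response.text.strip()
```

With a wrapper along these lines, the processor only needs `self.llm = LLM(settings=settings)` and a single awaitable call (for example `await self.llm.complete(prompt)`, a hypothetical method name) to produce the final title or summary text.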