make schema optional for all LLMs

This commit is contained in:
Gokul Mohanarangan
2023-08-16 22:37:20 +05:30
parent 976c0ab9a8
commit 5f79e04642
8 changed files with 79 additions and 37 deletions

View File

@@ -1,7 +1,7 @@
from reflector.processors.base import Processor
from reflector.processors.types import Transcript, TitleSummary
from reflector.utils.retry import retry
from reflector.llm import LLM
from reflector.processors.base import Processor
from reflector.processors.types import TitleSummary, Transcript
from reflector.utils.retry import retry
class TranscriptTopicDetectorProcessor(Processor):
@@ -31,6 +31,14 @@ class TranscriptTopicDetectorProcessor(Processor):
self.transcript = None
self.min_transcript_length = min_transcript_length
self.llm = LLM.get_instance()
self.topic_detector_schema = {
"type": "object",
"properties": {
"title": {"type": "string"},
"summary": {"type": "string"},
},
}
self.kwargs = {"schema": self.topic_detector_schema}
async def _warmup(self):
    """Warm up the underlying LLM backend, forwarding this processor's logger."""
    await self.llm.warmup(logger=self.logger)
@@ -53,7 +61,9 @@ class TranscriptTopicDetectorProcessor(Processor):
text = self.transcript.text
self.logger.info(f"Topic detector got {len(text)} length transcript")
prompt = self.PROMPT.format(input_text=text)
result = await retry(self.llm.generate)(prompt=prompt, logger=self.logger)
result = await retry(self.llm.generate)(
prompt=prompt, kwargs=self.kwargs, logger=self.logger
)
summary = TitleSummary(
title=result["title"],
summary=result["summary"],