This commit is contained in:
projects-g
2023-09-27 19:20:43 +05:30
committed by GitHub
parent 8f6313fb45
commit 24aa9a74bd
3 changed files with 8 additions and 1 deletion

View File

@@ -7,6 +7,7 @@ services:
- 1250:1250
environment:
LLM_URL: "${LLM_URL}"
MIN_TRANSCRIPT_LENGTH: "${MIN_TRANSCRIPT_LENGTH}"
volumes:
- model-cache:/root/.cache

View File

@@ -1,6 +1,7 @@
from reflector.llm import LLM, LLMTaskParams
from reflector.processors.base import Processor
from reflector.processors.types import TitleSummary, Transcript
from reflector.settings import settings
class TranscriptTopicDetectorProcessor(Processor):
@@ -12,7 +13,9 @@ class TranscriptTopicDetectorProcessor(Processor):
OUTPUT_TYPE = TitleSummary
TASK = "topic"
def __init__(
    self,
    min_transcript_length: int = int(settings.MIN_TRANSCRIPT_LENGTH),
    **kwargs,
):
    """Initialize the topic detector.

    Args:
        min_transcript_length: Minimum number of characters a transcript
            must reach before topic/summary generation is attempted.
            Defaults to ``settings.MIN_TRANSCRIPT_LENGTH`` (env-configurable);
            the ``int(...)`` coercion guards against the setting arriving as
            a string from the environment. NOTE: the default is evaluated
            once at import time, not per call.
        **kwargs: Forwarded unchanged to the base ``Processor``.
    """
    super().__init__(**kwargs)
    # Accumulated transcript; populated later by the processing pipeline.
    self.transcript = None
    self.min_transcript_length = min_transcript_length

View File

@@ -97,5 +97,8 @@ class Settings(BaseSettings):
# Cache directory for all model storage
CACHE_DIR: str = "./data"
# Min transcript length to generate topic + summary
MIN_TRANSCRIPT_LENGTH: int = 1800
settings = Settings()