@@ -14,7 +14,7 @@ from gpt4all import GPT4All
 from loguru import logger
 from whisper_jax import FlaxWhisperPipline
 
-from utils.run_utils import run_in_executor
+from utils.run_utils import run_in_executor, config
 
 pcs = set()
 relay = MediaRelay()
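For reference, the `config` object imported above is presumably a `configparser.ConfigParser` built by `utils/run_utils.py` from the new `utils/config.ini`. A minimal sketch of what that module could contain, assuming the ini file sits next to it inside the `utils/` package (the module itself is not shown in this diff):

```python
# utils/run_utils.py (hypothetical sketch, not part of this diff)
import configparser
from pathlib import Path

# Assumption: config.ini lives alongside run_utils.py in the utils/ package.
_CONFIG_PATH = Path(__file__).parent / "config.ini"

config = configparser.ConfigParser()
config.read(_CONFIG_PATH)
```

With something like this in place, `config["DEFAULT"]["LLM_PATH"]` in the hunk below resolves to the `LLM_PATH` entry of `utils/config.ini`.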
@@ -28,7 +28,8 @@ RATE = 48000
 audio_buffer = AudioFifo()
 executor = ThreadPoolExecutor()
 transcription_text = ""
-llm = GPT4All("/Users/gokulmohanarangan/Library/Application Support/nomic.ai/GPT4All/ggml-vicuna-13b-1.1-q4_2.bin")
+# Load your locally downloaded Vicuna model here. Set its path in the config.ini file
+llm = GPT4All(config["DEFAULT"]["LLM_PATH"])
 
 
 def get_title_and_summary():
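This hunk swaps the hardcoded per-user model path for the `LLM_PATH` value in `utils/config.ini`. Since that key ships empty, callers may want to validate it before constructing the model; a hedged sketch (the guard is a suggestion, not part of the diff):

```python
# Sketch: read the model path from the shared config and fail early if unset.
llm_path = config["DEFAULT"].get("LLM_PATH", "").strip()
if not llm_path:
    raise RuntimeError(
        "LLM_PATH is empty in utils/config.ini; "
        "point it at your downloaded GPT4All-compatible .bin model"
    )
llm = GPT4All(llm_path)
```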
utils/config.ini (new file, 24 lines)
@@ -0,0 +1,24 @@
+[DEFAULT]
+# Set exception rule for OpenMP error to allow duplicate lib initialization
+KMP_DUPLICATE_LIB_OK=TRUE
+# Export OpenAI API key
+OPENAI_APIKEY=
+# Export Whisper model size
+WHISPER_MODEL_SIZE=tiny
+WHISPER_REAL_TIME_MODEL_SIZE=tiny
+# AWS config
+AWS_ACCESS_KEY=***REMOVED***
+AWS_SECRET_KEY=***REMOVED***
+BUCKET_NAME=reflector-bucket
+# Summarizer config
+SUMMARY_MODEL=facebook/bart-large-cnn
+INPUT_ENCODING_MAX_LENGTH=1024
+MAX_LENGTH=2048
+BEAM_SIZE=6
+MAX_CHUNK_LENGTH=1024
+SUMMARIZE_USING_CHUNKS=YES
+# Audio device
+BLACKHOLE_INPUT_AGGREGATOR_DEVICE_NAME=aggregator
+AV_FOUNDATION_DEVICE_ID=1
+# LLM PATH
+LLM_PATH=
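Everything in this file is stored as a string by `configparser`, so numeric and boolean entries need explicit conversion on the consumer side. A small sketch of how the summarizer settings above could be read (the consuming code is not part of this diff):

```python
import configparser

# Sketch: parse the new utils/config.ini and coerce typed values.
config = configparser.ConfigParser()
config.read("utils/config.ini")
defaults = config["DEFAULT"]

summary_model = defaults["SUMMARY_MODEL"]                   # "facebook/bart-large-cnn"
max_length = defaults.getint("MAX_LENGTH")                  # 2048
beam_size = defaults.getint("BEAM_SIZE")                    # 6
use_chunks = defaults.getboolean("SUMMARIZE_USING_CHUNKS")  # "YES" -> True
```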