#
# This file serves as an example of possible configuration
# All the settings are described here: reflector/settings.py
#

## =======================================================
## User authentication
## =======================================================

## Using jwt/authentik
AUTH_BACKEND=jwt
AUTH_JWT_AUDIENCE=

## =======================================================
## Transcription backend
##
## Check reflector/processors/audio_transcript_* for the
## full list of available transcription backends
## =======================================================

## Using local whisper
#TRANSCRIPT_BACKEND=whisper
#WHISPER_MODEL_SIZE=tiny

## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
#TRANSCRIPT_BACKEND=modal
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
#TRANSLATE_URL=https://xxxxx--reflector-translator-web.modal.run
#TRANSCRIPT_MODAL_API_KEY=xxxxx

TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=***REMOVED***

## =======================================================
## Translation backend
##
## Only available in modal atm
## =======================================================

TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run

## =======================================================
## LLM backend
##
## Responsible for titles and short summary
## Check reflector/llm/* for the full list of available
## LLM backend implementations
## =======================================================

## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
LLM_BACKEND=modal
LLM_URL=https://monadical-sas--reflector-llm-web.modal.run
LLM_MODAL_API_KEY=***REMOVED***
ZEPHYR_LLM_URL=https://monadical-sas--reflector-llm-zephyr-web.modal.run

## Using OpenAI
#LLM_BACKEND=openai
#LLM_OPENAI_KEY=xxx
#LLM_OPENAI_MODEL=gpt-3.5-turbo

## Using GPT4ALL
#LLM_BACKEND=openai
#LLM_URL=http://localhost:4891/v1/completions
#LLM_OPENAI_MODEL="GPT4All Falcon"

## Default LLM model name
#DEFAULT_LLM=lmsys/vicuna-13b-v1.5

## Cache directory to store models
CACHE_DIR=data

## =======================================================
## Diarization
##
## Only available on modal
## To allow diarization, you need to expose the files to be
## downloaded by the pipeline
## =======================================================

DIARIZATION_ENABLED=false
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run

## =======================================================
## Sentry
## =======================================================

## Sentry DSN configuration
#SENTRY_DSN=