mirror of
https://github.com/Monadical-SAS/reflector.git
synced 2025-12-20 20:29:06 +00:00
committed by
Mathieu Virbel
parent
01806ce037
commit
93acea4ad9
72
server/env.example
Normal file
72
server/env.example
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
#
|
||||||
|
# This file serves as an example of possible configuration
|
||||||
|
# All the settings are described here: reflector/settings.py
|
||||||
|
#
|
||||||
|
|
||||||
|
## =======================================================
|
||||||
|
## Sentry
|
||||||
|
## =======================================================
|
||||||
|
|
||||||
|
## Sentry DSN configuration
|
||||||
|
#SENTRY_DSN=
|
||||||
|
|
||||||
|
## =======================================================
|
||||||
|
## Transcription backend
|
||||||
|
##
|
||||||
|
## Check reflector/processors/audio_transcript_* for the
|
||||||
|
## full list of available transcription backends
|
||||||
|
## =======================================================
|
||||||
|
|
||||||
|
## Using local whisper (default)
|
||||||
|
#TRANSCRIPT_BACKEND=whisper
|
||||||
|
#WHISPER_MODEL_SIZE=tiny
|
||||||
|
|
||||||
|
## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
|
||||||
|
#TRANSCRIPT_BACKEND=modal
|
||||||
|
#TRANSCRIPT_URL=https://xxxxx--reflector-transcriber-web.modal.run
|
||||||
|
#TRANSCRIPT_MODAL_API_KEY=xxxxx
|
||||||
|
|
||||||
|
## Using serverless banana.dev (requires reflector-gpu-banana to be deployed)
|
||||||
|
## XXX this service is buggy; do not use it at the moment
|
||||||
|
## XXX it also requires the audio to be saved to S3
|
||||||
|
#TRANSCRIPT_BACKEND=banana
|
||||||
|
#TRANSCRIPT_URL=https://reflector-gpu-banana-xxxxx.run.banana.dev
|
||||||
|
#TRANSCRIPT_BANANA_API_KEY=xxx
|
||||||
|
#TRANSCRIPT_BANANA_MODEL_KEY=xxx
|
||||||
|
#TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=xxx
|
||||||
|
#TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=xxx
|
||||||
|
#TRANSCRIPT_STORAGE_AWS_BUCKET_NAME="reflector-bucket/chunks"
|
||||||
|
|
||||||
|
## =======================================================
|
||||||
|
## LLM backend
|
||||||
|
##
|
||||||
|
## Check reflector/llm/* for the full list of available
|
||||||
|
## LLM backend implementations
|
||||||
|
## =======================================================
|
||||||
|
|
||||||
|
## Using oobagooda (default)
|
||||||
|
#LLM_BACKEND=oobagooda
|
||||||
|
#LLM_URL=http://xxx:7860/api/generate/v1
|
||||||
|
|
||||||
|
## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
|
||||||
|
#LLM_BACKEND=modal
|
||||||
|
#LLM_URL=https://xxxxxx--reflector-llm-web.modal.run
|
||||||
|
#LLM_MODAL_API_KEY=xxx
|
||||||
|
|
||||||
|
## Using serverless banana.dev (requires reflector-gpu-banana to be deployed)
|
||||||
|
## XXX this service is buggy; do not use it at the moment
|
||||||
|
#LLM_BACKEND=banana
|
||||||
|
#LLM_URL=https://reflector-gpu-banana-xxxxx.run.banana.dev
|
||||||
|
#LLM_BANANA_API_KEY=xxxxx
|
||||||
|
#LLM_BANANA_MODEL_KEY=xxxxx
|
||||||
|
|
||||||
|
## Using OpenAI
|
||||||
|
#LLM_BACKEND=openai
|
||||||
|
#LLM_OPENAI_KEY=xxx
|
||||||
|
#LLM_OPENAI_MODEL=gpt-3.5-turbo
|
||||||
|
|
||||||
|
## Using GPT4ALL
|
||||||
|
#LLM_BACKEND=openai
|
||||||
|
#LLM_URL=http://localhost:4891/v1/completions
|
||||||
|
#LLM_OPENAI_MODEL="GPT4All Falcon"
|
||||||
|
|
||||||
@@ -27,7 +27,7 @@ class Settings(BaseSettings):
|
|||||||
AUDIO_BUFFER_SIZE: int = 256 * 960
|
AUDIO_BUFFER_SIZE: int = 256 * 960
|
||||||
|
|
||||||
# Audio Transcription
|
# Audio Transcription
|
||||||
# backends: whisper, banana
|
# backends: whisper, banana, modal
|
||||||
TRANSCRIPT_BACKEND: str = "whisper"
|
TRANSCRIPT_BACKEND: str = "whisper"
|
||||||
TRANSCRIPT_URL: str | None = None
|
TRANSCRIPT_URL: str | None = None
|
||||||
TRANSCRIPT_TIMEOUT: int = 90
|
TRANSCRIPT_TIMEOUT: int = 90
|
||||||
@@ -49,6 +49,7 @@ class Settings(BaseSettings):
|
|||||||
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None
|
TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None
|
||||||
|
|
||||||
# LLM
|
# LLM
|
||||||
|
# available backend: openai, banana, modal, oobagooda
|
||||||
LLM_BACKEND: str = "oobagooda"
|
LLM_BACKEND: str = "oobagooda"
|
||||||
|
|
||||||
# LLM common configuration
|
# LLM common configuration
|
||||||
|
|||||||
Reference in New Issue
Block a user