Setup instructions

Sara
2024-08-12 12:22:21 +02:00
parent c0aa615d12
commit a4077005b2
4 changed files with 54 additions and 45 deletions

@@ -4,7 +4,7 @@ TRANSCRIPT_MODAL_API_KEY=***REMOVED***
LLM_BACKEND=modal
LLM_URL=https://monadical-sas--reflector-llm-web.modal.run
LLM_MODAL_API_KEY=<ask in zulip>
LLM_MODAL_API_KEY=***REMOVED***
AUTH_BACKEND=fief
AUTH_FIEF_URL=https://auth.reflector.media/reflector-local
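The variables above are read by the backend at startup; a minimal sketch of how they might be consumed (an assumption based on plain environment lookups, the real reflector/settings.py may differ):

import os

# Sketch only: names mirror the .env keys above; the defaults are assumptions.
LLM_BACKEND = os.getenv("LLM_BACKEND", "oobabooga")
LLM_URL = os.getenv("LLM_URL", "")
LLM_MODAL_API_KEY = os.getenv("LLM_MODAL_API_KEY", "")
AUTH_BACKEND = os.getenv("AUTH_BACKEND", "none")
AUTH_FIEF_URL = os.getenv("AUTH_FIEF_URL", "")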

@@ -3,36 +3,15 @@
# All the settings are described here: reflector/settings.py
#
## =======================================================
## Database
## =======================================================
#DATABASE_URL=sqlite://./reflector.db
#DATABASE_URL=postgresql://reflector:reflector@localhost:5432/reflector
## =======================================================
## User authentication
## =======================================================
## No authentication
#AUTH_BACKEND=none
## Using fief (fief.dev)
#AUTH_BACKEND=fief
#AUTH_FIEF_URL=https://your-fief-instance....
#AUTH_FIEF_CLIENT_ID=xxx
#AUTH_FIEF_CLIENT_SECRET=xxx
## =======================================================
## Public mode
## =======================================================
## If set to true, anonymous transcripts will be
## accessible to anybody.
#PUBLIC_MODE=false
AUTH_BACKEND=fief
AUTH_FIEF_URL=https://auth.reflector.media/reflector-local
AUTH_FIEF_CLIENT_ID=***REMOVED***
AUTH_FIEF_CLIENT_SECRET=<ask in zulip>
## =======================================================
## Transcription backend
@@ -41,7 +20,7 @@
## full list of available transcription backends
## =======================================================
## Using local whisper (default)
## Using local whisper
#TRANSCRIPT_BACKEND=whisper
#WHISPER_MODEL_SIZE=tiny
@@ -51,21 +30,31 @@
#TRANSLATE_URL=https://xxxxx--reflector-translator-web.modal.run
#TRANSCRIPT_MODAL_API_KEY=xxxxx
TRANSCRIPT_BACKEND=modal
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
TRANSCRIPT_MODAL_API_KEY=***REMOVED***
## =======================================================
## Translation backend
##
## Only available on modal at the moment
## =======================================================
TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
## =======================================================
## LLM backend
##
## Responsible for titles and short summaries
## Check reflector/llm/* for the full list of available
## llm backend implementations
## =======================================================
## Use oobabooga (default)
#LLM_BACKEND=oobabooga
#LLM_URL=http://xxx:7860/api/generate/v1
## Using serverless modal.com (requires reflector-gpu-modal to be deployed)
#LLM_BACKEND=modal
#LLM_URL=https://xxxxxx--reflector-llm-web.modal.run
#LLM_MODAL_API_KEY=xxx
LLM_BACKEND=modal
LLM_URL=https://monadical-sas--reflector-llm-web.modal.run
LLM_MODAL_API_KEY=***REMOVED***
ZEPHYR_LLM_URL=https://monadical-sas--reflector-llm-zephyr-web.modal.run
## Using OpenAI
#LLM_BACKEND=openai
@@ -78,11 +67,21 @@
#LLM_OPENAI_MODEL="GPT4All Falcon"
## Default LLM MODEL NAME
DEFAULT_LLM=lmsys/vicuna-13b-v1.5
#DEFAULT_LLM=lmsys/vicuna-13b-v1.5
## Cache directory to store models
CACHE_DIR=data
## =======================================================
## Diarization
##
## Only available on modal
## To allow diarization, you need to expose the files to be downloaded by the pipeline
## =======================================================
DIARIZATION_ENABLED=false
DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
## =======================================================
## Sentry
## =======================================================
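Note that dotenv values arrive as strings, so boolean flags such as PUBLIC_MODE and DIARIZATION_ENABLED above need explicit parsing; a minimal sketch of one way to do that (an assumption, not the actual reflector/settings.py logic):

import os

def env_bool(name: str, default: bool = False) -> bool:
    # "false", "0", "no" and an unset variable all disable the flag
    raw = os.getenv(name)
    if raw is None:
        return default
    return raw.strip().lower() in ("1", "true", "yes", "on")

PUBLIC_MODE = env_bool("PUBLIC_MODE")
DIARIZATION_ENABLED = env_bool("DIARIZATION_ENABLED")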

@@ -71,7 +71,7 @@ async def rtc_offer_base(
async def flush_pipeline_and_quit(close=True):
# may be called twice
# 1. either the client ask to sotp the meeting
# 1. either the client asked to stop the meeting
# - we flush and close
# - when we receive the close event, we do nothing.
# 2. or the client closes the connection
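The comment describes a flush that must be safe to run twice; a minimal, self-contained sketch of that guard (the class and method bodies here are assumptions for illustration, not the actual rtc_offer_base implementation):

import asyncio

class MeetingSession:
    def __init__(self):
        self._flushed = False

    async def flush_pipeline_and_quit(self, close=True):
        # Case 1: the client asked to stop -> flush and close; the later
        # close event finds _flushed already set and returns immediately.
        # Case 2: the client closed the connection -> same path, run once.
        if self._flushed:
            return
        self._flushed = True
        await self._flush_pipeline()
        if close:
            await self._close_connection()

    async def _flush_pipeline(self):
        await asyncio.sleep(0)  # placeholder for draining the real pipeline

    async def _close_connection(self):
        await asyncio.sleep(0)  # placeholder for tearing down the peer connection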