diff --git a/server/.gitignore b/server/.gitignore
index c6d9edb2..639e7cf4 100644
--- a/server/.gitignore
+++ b/server/.gitignore
@@ -165,7 +165,7 @@ cython_debug/
 transcript_*.txt
 test_*.txt
 wordcloud*.png
-utils/config.ini
+utils/secrets.ini
 test_samples/
 *.wav
 *.mp3
diff --git a/server/server.py b/server/server.py
index f5ac945f..b8154fbd 100644
--- a/server/server.py
+++ b/server/server.py
@@ -20,7 +20,7 @@ from sortedcontainers import SortedDict
 from reflector_dataclasses import BlackListedMessages, FinalSummaryResult, ParseLLMResult, TitleSummaryInput, \
     TitleSummaryOutput, TranscriptionInput, TranscriptionOutput
 from utils.log_utils import LOGGER
-from utils.run_utils import CONFIG, run_in_executor
+from utils.run_utils import CONFIG, run_in_executor, SECRETS
 
 # WebRTC components
 pcs = set()
@@ -43,8 +43,8 @@ transcription_text = ""
 last_transcribed_time = 0.0
 
 # LLM
-LLM_MACHINE_IP = CONFIG["LLM"]["LLM_MACHINE_IP"]
-LLM_MACHINE_PORT = CONFIG["LLM"]["LLM_MACHINE_PORT"]
+LLM_MACHINE_IP = SECRETS["LLM"]["LLM_MACHINE_IP"]
+LLM_MACHINE_PORT = SECRETS["LLM"]["LLM_MACHINE_PORT"]
 LLM_URL = f"http://{LLM_MACHINE_IP}:{LLM_MACHINE_PORT}/api/v1/generate"
 
 # Topic and summary responses
diff --git a/server/utils/config.ini b/server/utils/config.ini
new file mode 100644
index 00000000..9f8c1bfc
--- /dev/null
+++ b/server/utils/config.ini
@@ -0,0 +1,25 @@
+[DEFAULT]
+# Set exception rule for OpenMP error
+# to allow duplicate lib initialization
+KMP_DUPLICATE_LIB_OK = TRUE
+
+[WHISPER]
+# Whisper model sizes
+WHISPER_MODEL_SIZE = tiny
+WHISPER_REAL_TIME_MODEL_SIZE = tiny
+
+[SUMMARIZER]
+# Summarizer config
+SUMMARY_MODEL = facebook/bart-large-cnn
+INPUT_ENCODING_MAX_LENGTH = 1024
+MAX_LENGTH = 2048
+BEAM_SIZE = 6
+MAX_CHUNK_LENGTH = 1024
+SUMMARIZE_USING_CHUNKS = YES
+
+[AUDIO]
+# Audio device
+BLACKHOLE_INPUT_AGGREGATOR_DEVICE_NAME = aggregator
+AV_FOUNDATION_DEVICE_ID = 1
+CHANNELS = 2
+SAMPLING_RATE = 48000
\ No newline at end of file
diff --git a/server/utils/file_utils.py b/server/utils/file_utils.py
index 8b2f612b..9c85ebdc 100644
--- a/server/utils/file_utils.py
+++ b/server/utils/file_utils.py
@@ -10,13 +10,13 @@ import boto3
 import botocore
 
 from .log_utils import LOGGER
-from .run_utils import CONFIG
+from .run_utils import SECRETS
 
 
-BUCKET_NAME = CONFIG["AWS"]["BUCKET_NAME"]
+BUCKET_NAME = SECRETS["AWS-S3"]["BUCKET_NAME"]
 s3 = boto3.client('s3',
-                  aws_access_key_id=CONFIG["AWS"]["AWS_ACCESS_KEY"],
-                  aws_secret_access_key=CONFIG["AWS"]["AWS_SECRET_KEY"])
+                  aws_access_key_id=SECRETS["AWS-S3"]["AWS_ACCESS_KEY"],
+                  aws_secret_access_key=SECRETS["AWS-S3"]["AWS_SECRET_KEY"])
 
 
 def upload_files(files_to_upload: List[str]) -> NoReturn:
diff --git a/server/utils/run_utils.py b/server/utils/run_utils.py
index 6ea03103..4a3dba30 100644
--- a/server/utils/run_utils.py
+++ b/server/utils/run_utils.py
@@ -15,6 +15,7 @@ class ReflectorConfig:
     Create a single config object to share across the project
     """
     __config = None
+    __secrets = None
 
     @staticmethod
     def get_config():
@@ -27,8 +28,20 @@ class ReflectorConfig:
             ReflectorConfig.__config.read('utils/config.ini')
         return ReflectorConfig.__config
 
+    @staticmethod
+    def get_secrets():
+        """
+        Load the secrets from the local secrets.ini file
+        :return:
+        """
+        if ReflectorConfig.__secrets is None:
+            ReflectorConfig.__secrets = configparser.ConfigParser()
+            ReflectorConfig.__secrets.read('utils/secrets.ini')
+        return ReflectorConfig.__secrets
+
 
 CONFIG = ReflectorConfig.get_config()
+SECRETS = ReflectorConfig.get_secrets()
 
 
 def run_in_executor(func, *args, executor=None, **kwargs):
diff --git a/server/utils/secrets.ini.example b/server/utils/secrets.ini.example
new file mode 100644
index 00000000..939227fd
--- /dev/null
+++ b/server/utils/secrets.ini.example
@@ -0,0 +1,14 @@
+[LLM]
+# LLM configs
+LLM_MACHINE_IP=
+LLM_MACHINE_PORT=
+
+[AWS-S3]
+# AWS S3 config
+AWS_ACCESS_KEY=
+AWS_SECRET_KEY=
+BUCKET_NAME=reflector-bucket
+
+[OPENAI]
+# OpenAI API key
+OPENAI_APIKEY=
\ No newline at end of file
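
For reference, a minimal usage sketch of the config/secrets split introduced by this patch (not part of the diff itself): it assumes utils/secrets.ini has been created by copying utils/secrets.ini.example and filling in real values, and it inlines a plain configparser loader instead of importing ReflectorConfig. Section and key names are taken from the files above.

# Sketch only: non-secret settings stay in utils/config.ini (tracked),
# while credentials and endpoints live in utils/secrets.ini (gitignored).
import configparser

config = configparser.ConfigParser()
config.read("utils/config.ini")

secrets = configparser.ConfigParser()
secrets.read("utils/secrets.ini")

# Tuning values keep coming from the tracked config file...
whisper_model = config["WHISPER"]["WHISPER_MODEL_SIZE"]

# ...while the LLM endpoint and S3 credentials come from the secrets file,
# mirroring the lookups in server.py and file_utils.py above.
llm_url = (
    f'http://{secrets["LLM"]["LLM_MACHINE_IP"]}:'
    f'{secrets["LLM"]["LLM_MACHINE_PORT"]}/api/v1/generate'
)
bucket_name = secrets["AWS-S3"]["BUCKET_NAME"]
print(whisper_model, llm_url, bucket_name)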