diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index ecac11b4..1ab6a031 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -3,7 +3,7 @@ name: Deploy to Amazon ECS on: [workflow_dispatch] env: - # 384658522150.dkr.ecr.us-east-1.amazonaws.com/reflector + # 950402358378.dkr.ecr.us-east-1.amazonaws.com/reflector AWS_REGION: us-east-1 ECR_REPOSITORY: reflector diff --git a/.github/workflows/test_server.yml b/.github/workflows/test_server.yml index 1191fe92..9f3b9a6a 100644 --- a/.github/workflows/test_server.yml +++ b/.github/workflows/test_server.yml @@ -2,15 +2,20 @@ name: Unittests on: pull_request: - paths-ignore: - - 'www/**' + paths: + - 'server/**' push: - paths-ignore: - - 'www/**' + paths: + - 'server/**' jobs: pytest: runs-on: ubuntu-latest + services: + redis: + image: redis:6 + ports: + - 6379:6379 steps: - uses: actions/checkout@v3 - name: Install poetry diff --git a/.gitignore b/.gitignore index a43e88f7..e705c6b7 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,6 @@ server/.env .env server/exportdanswer +.vercel +.env*.local +dump.rdb diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..375f5cab --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11.6 diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..c272fa42 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,39 @@ +{ + "configurations": [ + { + "type": "aws-sam", + "request": "direct-invoke", + "name": "lambda-nodejs18.x:HelloWorldFunction (nodejs18.x)", + "invokeTarget": { + "target": "template", + "templatePath": "${workspaceFolder}/aws/lambda-nodejs18.x/template.yaml", + "logicalId": "HelloWorldFunction" + }, + "lambda": { + "payload": {}, + "environmentVariables": {}, + "runtime": "nodejs18.x" + } + }, + { + "type": "aws-sam", + "request": "direct-invoke", + "name": "API lambda-nodejs18.x:HelloWorldFunction (nodejs18.x)", + "invokeTarget": { + "target": "api", + "templatePath": "${workspaceFolder}/aws/lambda-nodejs18.x/template.yaml", + "logicalId": "HelloWorldFunction" + }, + "api": { + "path": "/hello", + "httpMethod": "get", + "payload": { + "json": {} + } + }, + "lambda": { + "runtime": "nodejs18.x" + } + } + ] +} \ No newline at end of file diff --git a/README.md b/README.md index 3e8bd5e1..2dc76798 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,14 @@ # Reflector -Reflector is a cutting-edge web application under development by Monadical. It utilizes AI to record meetings, providing a permanent record with transcripts, translations, and automated summaries. +Reflector Audio Management and Analysis is a cutting-edge web application under development by Monadical. It utilizes AI to record meetings, providing a permanent record with transcripts, translations, and automated summaries. The project architecture consists of three primary components: * **Front-End**: NextJS React project hosted on Vercel, located in `www/`. * **Back-End**: Python server that offers an API and data persistence, found in `server/`. -* **AI Models**: Providing services such as speech-to-text transcription, topic generation, automated summaries, and translations. +* **GPU implementation**: Providing services such as speech-to-text transcription, topic generation, automated summaries, and translations. + +It also uses https://github.com/fief-dev for authentication, and Vercel for deployment and configuration of the front-end. 
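One aside on the `test_server.yml` change above: the unit-test job now provisions a Redis 6 service container published on `localhost:6379`, so the pytest suite can assume a reachable Redis. A minimal sketch for checking that such a container is up (assuming `redis-cli` is installed) is:

```bash
# Should print PONG once the redis:6 service container is accepting connections
redis-cli -h 127.0.0.1 -p 6379 ping
```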
## Table of Contents
@@ -21,7 +23,12 @@ The project architecture consists of three primary components:
   - [OpenAPI Code Generation](#openapi-code-generation)
 - [Back-End](#back-end)
   - [Installation](#installation-1)
-  - [Start the project](#start-the-project)
+  - [Start the API/Backend](#start-the-apibackend)
+  - [Redis (Mac)](#redis-mac)
+  - [Redis (Windows)](#redis-windows)
+  - [Update the database schema (run on first install, and after each pull containing a migration)](#update-the-database-schema-run-on-first-install-and-after-each-pull-containing-a-migration)
+  - [Main Server](#main-server)
+  - [Crontab (optional)](#crontab-optional)
   - [Using docker](#using-docker)
   - [Using local GPT4All](#using-local-gpt4all)
   - [Using local files](#using-local-files)
@@ -31,12 +38,18 @@ The project architecture consists of three primary components:
 ### Contribution Guidelines
 
-All new contributions should be made in a separate branch. Before any code is merged into `master`, it requires a code review.
+All new contributions should be made in a separate branch. Before any code is merged into `main`, it requires a code review.
 
 ### How to Install Blackhole (Mac Only)
 
+To record both your voice and the meeting you're taking part in:
+- For an in-person meeting, make sure your microphone is in range of all participants.
+- If using several microphones, make sure to merge the audio feeds into one with an external tool.
+- For an online meeting, if you do not use headphones, your microphone should be able to pick up both your voice and the audio feed of the meeting.
+- For an online meeting where you do use headphones, you need to merge the audio feeds with an external tool.
+
+Blackhole is an external tool for merging the audio feeds, as described in the scenarios above.
 Note: We currently do not have instructions for Windows users.
-
 * Install [Blackhole](https://github.com/ExistentialAudio/BlackHole)-2ch (2 ch is enough) by 1 of 2 options listed.
 * Setup ["Aggregate device"](https://github.com/ExistentialAudio/BlackHole/wiki/Aggregate-Device) to route web audio and local microphone input.
 * Setup [Multi-Output device](https://github.com/ExistentialAudio/BlackHole/wiki/Multi-Output-Device)
@@ -59,8 +72,12 @@ To install the application, run:
 
 ```bash
 yarn install
+cp .env_template .env
+cp config-template.ts config.ts
 ```
 
+Then, fill in the environment variables in `.env` and the configuration in `config.ts` as needed. If you are unsure how to proceed, ask in Zulip.
+
 ### Run the Application
 
 To run the application in development mode, run:
 
@@ -69,7 +86,7 @@ yarn dev
 ```
 
-Then open [http://localhost:3000](http://localhost:3000) to view it in the browser.
+Then, once the back-end is set up and running, open [http://localhost:3000](http://localhost:3000) to view it in the browser.
 
 ### OpenAPI Code Generation
 
@@ -87,35 +104,76 @@ Start with `cd server`.
 
 ### Installation
 
+Download [Python 3.11 from the official website](https://www.python.org/downloads/) and ensure you have version 3.11 by running `python --version`.
+
 Run:
 
 ```bash
-poetry install
+python --version # It should say 3.11
+pip install poetry
+poetry install --no-root
+cp .env_template .env
 ```
 
-Then create an `.env` with:
+Then fill `.env` with the omitted values (ask in Zulip). At the time of writing, the only omitted value is `AUTH_FIEF_CLIENT_SECRET`.
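The back-end now pins its interpreter through the new `.python-version` file (`3.11.6`). If you manage Python versions with pyenv (optional; any Python 3.11 install works), a minimal sketch for matching the pinned version:

```bash
# pyenv reads the repo's .python-version automatically once the version is installed
pyenv install 3.11.6
python --version  # inside the repo, should print Python 3.11.6
```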
-```
-TRANSCRIPT_BACKEND=modal
-TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
-TRANSCRIPT_MODAL_API_KEY=
+### Start the API/Backend
 
-LLM_BACKEND=modal
-LLM_URL=https://monadical-sas--reflector-llm-web.modal.run
-LLM_MODAL_API_KEY=
-
-AUTH_BACKEND=fief
-AUTH_FIEF_URL=https://auth.reflector.media/reflector-local
-AUTH_FIEF_CLIENT_ID=KQzRsNgoY
-AUTH_FIEF_CLIENT_SECRET=
-```
-
-### Start the project
-
-Use:
+Start the background worker:
 
 ```bash
-poetry run python3 -m reflector.app
+poetry run celery -A reflector.worker.app worker --loglevel=info
+```
+
+### Redis (Mac)
+
+```bash
+brew install redis
+redis-server
+```
+
+### Redis (Windows)
+
+**Option 1**
+
+```bash
+docker compose up -d redis
+```
+
+**Option 2**
+
+Install:
+- [Git for Windows](https://gitforwindows.org/)
+- [Windows Subsystem for Linux (WSL)](https://docs.microsoft.com/en-us/windows/wsl/install)
+- Your preferred Linux distribution from the Microsoft Store (e.g., Ubuntu).
+
+Open your Linux distribution, then install and start Redis:
+```bash
+sudo apt update
+sudo apt install redis-server
+redis-server
+```
+
+### Update the database schema (run on first install, and after each pull containing a migration)
+
+```bash
+poetry run alembic upgrade head
+```
+
+### Main Server
+
+Start the server:
+
+```bash
+poetry run python -m reflector.app
+```
+
+### Crontab (optional)
+
+For scheduled tasks (currently only a healthcheck), start celery beat (you don't need it in a local dev environment):
+
+```bash
+poetry run celery -A reflector.worker.app beat
 ```
 
 #### Using docker
@@ -141,4 +199,5 @@ poetry run python -m reflector.tools.process path/to/audio.wav
 
 ## AI Models
 
-*(Documentation for this section is pending.)*
\ No newline at end of file
+*(Documentation for this section is pending.)*
+
diff --git a/aws/lambda-nodejs18.x/.gitignore b/aws/lambda-nodejs18.x/.gitignore
new file mode 100644
index 00000000..aaf31ead
--- /dev/null
+++ b/aws/lambda-nodejs18.x/.gitignore
@@ -0,0 +1,211 @@
+
+# Created by https://www.toptal.com/developers/gitignore/api/osx,node,linux,windows,sam
+# Edit at https://www.toptal.com/developers/gitignore?templates=osx,node,linux,windows,sam
+
+### Linux ###
+*~
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
+
+### Node ###
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env +.env.test +.env*.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next + +# Nuxt.js build / generate output +.nuxt +dist + +# Storybook build outputs +.out +.storybook-out +storybook-static + +# rollup.js default build output +dist/ + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# Temporary folders +tmp/ +temp/ + +### OSX ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### SAM ### +# Ignore build directories for the AWS Serverless Application Model (SAM) +# Info: https://aws.amazon.com/serverless/sam/ +# Docs: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-reference.html + +**/.aws-sam + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +.aws-sam + +UpdateZulipStreams/node_modules + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/osx,node,linux,windows,sam diff --git a/aws/lambda-nodejs18.x/README.TOOLKIT.md b/aws/lambda-nodejs18.x/README.TOOLKIT.md new file mode 100644 index 00000000..c3fd7a62 --- /dev/null +++ b/aws/lambda-nodejs18.x/README.TOOLKIT.md @@ -0,0 +1,38 @@ +# Developing AWS SAM Applications with the AWS Toolkit For Visual Studio Code + +This project contains source code and supporting files for a serverless application that you can locally run, debug, and deploy to AWS with the AWS Toolkit For Visual Studio Code. + +A "SAM" (serverless application model) project is a project that contains a template.yaml file which is understood by AWS tooling (such as SAM CLI, and the AWS Toolkit For Visual Studio Code). + +## Writing and Debugging Serverless Applications + +The code for this application will differ based on the runtime, but the path to a handler can be found in the [`template.yaml`](./template.yaml) file through a resource's `CodeUri` and `Handler` fields. + +AWS Toolkit For Visual Studio Code supports local debugging for serverless applications through VS Code's debugger. 
Since this application was created by the AWS Toolkit, launch configurations for all included handlers have been generated and can be found in the menu next to the Run button:
+
+* lambda-nodejs18.x:HelloWorldFunction (nodejs18.x)
+* API lambda-nodejs18.x:HelloWorldFunction (nodejs18.x)
+
+You can debug the Lambda handlers locally by adding a breakpoint to the source file, then running the launch configuration. This works by using Docker on your local machine.
+
+Invocation parameters, including payloads and request parameters, can be edited either by the `Edit SAM Debug Configuration` command (through the Command Palette or CodeLens) or by editing the `launch.json` file.
+
+AWS Lambda functions not defined in the [`template.yaml`](./template.yaml) file can be invoked and debugged by creating a launch configuration through the CodeLens over the function declaration, or with the `Add SAM Debug Configuration` command.
+
+## Deploying Serverless Applications
+
+You can deploy a serverless application by invoking the `AWS: Deploy SAM application` command through the Command Palette, or by right-clicking the Lambda node in the AWS Explorer, then entering the deployment region, a valid S3 bucket from that region, and the name of a CloudFormation stack to deploy to. You can monitor your deployment's progress through the `AWS Toolkit` Output Channel.
+
+## Interacting With Deployed Serverless Applications
+
+A successfully deployed serverless application can be found in the AWS Explorer under the region and CloudFormation nodes that the serverless application was deployed to.
+
+In the AWS Explorer, you can invoke _remote_ AWS Lambda Functions by right-clicking the Lambda node and selecting "Invoke on AWS".
+
+Similarly, if the Function declaration contained an API Gateway event, the API Gateway API can be found in the API Gateway node under the region node the serverless application was deployed to, and can be invoked by right-clicking the API node and selecting "Invoke on AWS".
+
+## Resources
+
+General information about this SAM project can be found in the [`README.md`](./README.md) file in this folder.
+
+More information about using the AWS Toolkit For Visual Studio Code with serverless applications can be found [in the AWS documentation](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/serverless-apps.html).
diff --git a/aws/lambda-nodejs18.x/README.md b/aws/lambda-nodejs18.x/README.md
new file mode 100644
index 00000000..9822ec2c
--- /dev/null
+++ b/aws/lambda-nodejs18.x/README.md
@@ -0,0 +1,127 @@
+# lambda-nodejs18.x
+
+This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders.
+
+- hello-world - Code for the application's Lambda function.
+- events - Invocation events that you can use to invoke the function.
+- hello-world/tests - Unit tests for the application code.
+- template.yaml - A template that defines the application's AWS resources.
+
+The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
+
+If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit.
+The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started. + +* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html) +* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html) + +## Deploy the sample application + +The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API. + +To use the SAM CLI, you need the following tools. + +* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) +* Node.js - [Install Node.js 18](https://nodejs.org/en/), including the NPM package management tool. +* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) + +To build and deploy your application for the first time, run the following in your shell: + +```bash +sam build +sam deploy --guided +``` + +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: + +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. 
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
+
+You can find your API Gateway Endpoint URL in the output values displayed after deployment.
+
+## Use the SAM CLI to build and test locally
+
+Build your application with the `sam build` command.
+
+```bash
+lambda-nodejs18.x$ sam build
+```
+
+The SAM CLI installs dependencies defined in `hello-world/package.json`, creates a deployment package, and saves it in the `.aws-sam/build` folder.
+
+Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project.
+
+Run functions locally and invoke them with the `sam local invoke` command.
+
+```bash
+lambda-nodejs18.x$ sam local invoke HelloWorldFunction --event events/event.json
+```
+
+The SAM CLI can also emulate your application's API. Use the `sam local start-api` command to run the API locally on port 3000.
+
+```bash
+lambda-nodejs18.x$ sam local start-api
+lambda-nodejs18.x$ curl http://localhost:3000/
+```
+
+The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path.
+
+```yaml
+      Events:
+        HelloWorld:
+          Type: Api
+          Properties:
+            Path: /hello
+            Method: get
+```
+
+## Add a resource to your application
+
+The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types.
+
+## Fetch, tail, and filter Lambda function logs
+
+To simplify troubleshooting, the SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug.
+
+`NOTE`: This command works for all AWS Lambda functions, not just the ones you deploy using SAM.
+
+```bash
+lambda-nodejs18.x$ sam logs -n HelloWorldFunction --stack-name lambda-nodejs18.x --tail
+```
+
+You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html).
+
+## Unit tests
+
+Tests are defined in the `hello-world/tests` folder in this project. Use NPM to install the [Mocha test framework](https://mochajs.org/) and run unit tests.
+
+```bash
+lambda-nodejs18.x$ cd hello-world
+hello-world$ npm install
+hello-world$ npm run test
+```
+
+## Cleanup
+
+To delete the sample application that you created, use the AWS SAM CLI.
Assuming you used your project name for the stack name, you can run the following: + +```bash +sam delete --stack-name lambda-nodejs18.x +``` + +## Resources + +See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. + +Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) diff --git a/aws/lambda-nodejs18.x/UpdateZulipStreams/index.js b/aws/lambda-nodejs18.x/UpdateZulipStreams/index.js new file mode 100644 index 00000000..271af143 --- /dev/null +++ b/aws/lambda-nodejs18.x/UpdateZulipStreams/index.js @@ -0,0 +1,71 @@ + + +const AWS = require('aws-sdk'); +const s3 = new AWS.S3(); +const axios = require('axios'); + +async function getTopics(stream_id) { + const response = await axios.get(`https://${process.env.ZULIP_REALM}/api/v1/users/me/${stream_id}/topics`, { + auth: { + username: process.env.ZULIP_BOT_EMAIL || "?", + password: process.env.ZULIP_API_KEY || "?" + } + }); + return response.data.topics.map(topic => topic.name); +} + +async function getStreams() { + + const response = await axios.get(`https://${process.env.ZULIP_REALM}/api/v1/streams`, { + auth: { + username: process.env.ZULIP_BOT_EMAIL || "?", + password: process.env.ZULIP_API_KEY || "?" + } + }); + + const streams = []; + for (const stream of response.data.streams) { + console.log("Loading topics for " + stream.name); + const topics = await getTopics(stream.stream_id); + streams.push({ id: stream.stream_id, name: stream.name, topics }); + } + + return streams; + +} + + +const handler = async (event) => { + const streams = await getStreams(); + + // Convert the streams to JSON + const json_data = JSON.stringify(streams); + + const bucketName = process.env.S3BUCKET_NAME; + const fileName = process.env.S3BUCKET_FILE_NAME; + + // Parameters for S3 upload + const params = { + Bucket: bucketName, + Key: fileName, + Body: json_data, + ContentType: 'application/json' + }; + + try { + // Write the JSON data to S3 + await s3.putObject(params).promise(); + return { + statusCode: 200, + body: JSON.stringify('File written to S3 successfully') + }; + } catch (error) { + console.error('Error writing to S3:', error); + return { + statusCode: 500, + body: JSON.stringify('Error writing file to S3') + }; + } +}; + +module.exports = { handler }; diff --git a/aws/lambda-nodejs18.x/UpdateZulipStreams/package-lock.json b/aws/lambda-nodejs18.x/UpdateZulipStreams/package-lock.json new file mode 100644 index 00000000..77f9b960 --- /dev/null +++ b/aws/lambda-nodejs18.x/UpdateZulipStreams/package-lock.json @@ -0,0 +1,1462 @@ +{ + "name": "updatezulipstreams", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "updatezulipstreams", + "version": "1.0.0", + "license": "All rights reserved", + "dependencies": { + "aws-sdk": "^2.1498.0", + "axios": "^1.6.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^10.1.0" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, 
+ "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/available-typed-arrays": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/aws-sdk": { + "version": "2.1498.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1498.0.tgz", + "integrity": "sha512-m3yxYWrXsn9FJRcWBZZ4ySOEKxodP8KMhwoGJMALd0MLtfPUl/Yf0YuwVyTct2ZzRGMU8i7XoZYPY1fr76LD1w==", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/axios": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", + "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "dependencies": { + "follow-redirects": "^1.15.0", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": 
"https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "dependencies": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "dependencies": { + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chai": { + "version": "4.3.10", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.10.tgz", + "integrity": "sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.0.8" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + 
"dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + 
"optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/define-data-property": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "dependencies": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + 
"to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", + "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + 
"engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", + "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "dependencies": { + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "dependencies": { + "get-intrinsic": "^1.2.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": 
"1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + 
"dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", + "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "dependencies": { + "which-typed-array": "^1.1.11" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + 
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "dependencies": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": 
"sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" + }, + "node_modules/querystring": { + "version": "0.2.0", + "resolved": 
"https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/set-function-length": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", + "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "dependencies": { + "define-data-property": "^1.1.1", + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + 
"dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.4", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/aws/lambda-nodejs18.x/UpdateZulipStreams/package.json b/aws/lambda-nodejs18.x/UpdateZulipStreams/package.json new file mode 100644 index 00000000..867ea5d9 --- /dev/null +++ b/aws/lambda-nodejs18.x/UpdateZulipStreams/package.json @@ -0,0 +1,16 @@ +{ + "name": "updatezulipstreams", + "version": "1.0.0", + "description": "Updates the JSON with the zulip streams and topics on S3", + "main": "index.js", + 
"author": "Andreas Bonini", + "license": "All rights reserved", + "dependencies": { + "aws-sdk": "^2.1498.0", + "axios": "^1.6.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^10.1.0" + } +} diff --git a/aws/lambda-nodejs18.x/events/event.json b/aws/lambda-nodejs18.x/events/event.json new file mode 100644 index 00000000..070ad8e0 --- /dev/null +++ b/aws/lambda-nodejs18.x/events/event.json @@ -0,0 +1,62 @@ +{ + "body": "{\"message\": \"hello world\"}", + "resource": "/{proxy+}", + "path": "/path/to/resource", + "httpMethod": "POST", + "isBase64Encoded": false, + "queryStringParameters": { + "foo": "bar" + }, + "pathParameters": { + "proxy": "/path/to/resource" + }, + "stageVariables": { + "baz": "qux" + }, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https" + }, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "09/Apr/2015:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "identity": { + "cognitoIdentityPoolId": null, + "accountId": null, + "cognitoIdentityId": null, + "caller": null, + "accessKey": null, + "sourceIp": "127.0.0.1", + "cognitoAuthenticationType": null, + "cognitoAuthenticationProvider": null, + "userArn": null, + "userAgent": "Custom User Agent String", + "user": null + }, + "path": "/prod/path/to/resource", + "resourcePath": "/{proxy+}", + "httpMethod": "POST", + "apiId": "1234567890", + "protocol": "HTTP/1.1" + } +} diff --git a/aws/lambda-nodejs18.x/samconfig.toml b/aws/lambda-nodejs18.x/samconfig.toml new file mode 100644 index 00000000..fe973fd8 --- /dev/null +++ b/aws/lambda-nodejs18.x/samconfig.toml @@ -0,0 +1,31 @@ +# More information about the configuration file can be found here: +# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html +version = 0.1 + +[default] +[default.global.parameters] +stack_name = "lambda-nodejs18.x" + +[default.build.parameters] +cached = true +parallel = true + +[default.validate.parameters] +lint = true + +[default.deploy.parameters] +capabilities = "CAPABILITY_IAM" +confirm_changeset = true +resolve_s3 = true + +[default.package.parameters] +resolve_s3 = true + +[default.sync.parameters] +watch = true + +[default.local_start_api.parameters] +warm_containers = "EAGER" + +[default.local_start_lambda.parameters] +warm_containers = "EAGER" diff --git a/aws/lambda-nodejs18.x/template.yaml b/aws/lambda-nodejs18.x/template.yaml new file mode 100644 index 00000000..e1a9f6a7 --- /dev/null +++ b/aws/lambda-nodejs18.x/template.yaml @@ -0,0 +1,41 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + 
lambda-nodejs18.x
+
+  Sample SAM Template for lambda-nodejs18.x
+
+# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
+Globals:
+  Function:
+    Timeout: 3
+
+Resources:
+  HelloWorldFunction:
+    Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
+    Properties:
+      CodeUri: hello-world/
+      Handler: app.lambdaHandler
+      Runtime: nodejs18.x
+      Architectures:
+        - arm64
+      Events:
+        HelloWorld:
+          Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api
+          Properties:
+            Path: /hello
+            Method: get
+
+Outputs:
+  # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function
+  # Find out more about other implicit resources you can reference within SAM
+  # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api
+  HelloWorldApi:
+    Description: "API Gateway endpoint URL for Prod stage for Hello World function"
+    Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/"
+  HelloWorldFunction:
+    Description: "Hello World Lambda Function ARN"
+    Value: !GetAtt HelloWorldFunction.Arn
+  HelloWorldFunctionIamRole:
+    Description: "Implicit IAM Role created for Hello World function"
+    Value: !GetAtt HelloWorldFunctionRole.Arn
diff --git a/docker-compose.yml b/docker-compose.yml
index 934baaac..9e6519af 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,10 +5,21 @@ services:
       context: server
     ports:
       - 1250:1250
-    environment:
-      LLM_URL: "${LLM_URL}"
     volumes:
      - model-cache:/root/.cache
+    environment:
+      ENTRYPOINT: server
+  worker:
+    build:
+      context: server
+    volumes:
+      - model-cache:/root/.cache
+    environment:
+      ENTRYPOINT: worker
+  redis:
+    image: redis:7.2
+    ports:
+      - 6379:6379
   web:
     build:
       context: www
@@ -17,4 +28,3 @@
 
 volumes:
   model-cache:
-
diff --git a/server/.env_template b/server/.env_template
new file mode 100644
index 00000000..d25797d3
--- /dev/null
+++ b/server/.env_template
@@ -0,0 +1,20 @@
+TRANSCRIPT_BACKEND=modal
+TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-web.modal.run
+TRANSCRIPT_MODAL_API_KEY=***REMOVED***
+
+LLM_BACKEND=modal
+LLM_URL=https://monadical-sas--reflector-llm-web.modal.run
+LLM_MODAL_API_KEY=
+
+AUTH_BACKEND=fief
+AUTH_FIEF_URL=https://auth.reflector.media/reflector-local
+AUTH_FIEF_CLIENT_ID=***REMOVED***
+AUTH_FIEF_CLIENT_SECRET=
+
+TRANSLATE_URL=https://monadical-sas--reflector-translator-web.modal.run
+ZEPHYR_LLM_URL=https://monadical-sas--reflector-llm-zephyr-web.modal.run
+DIARIZATION_URL=https://monadical-sas--reflector-diarizer-web.modal.run
+
+BASE_URL=https://xxxxx.ngrok.app
+DIARIZATION_ENABLED=false
+
diff --git a/server/.gitignore b/server/.gitignore
index 937f2ddd..7adb7fc0 100644
--- a/server/.gitignore
+++ b/server/.gitignore
@@ -178,3 +178,5 @@ audio_*.wav
 # ignore local database
 reflector.sqlite3
 data/
+
+dump.rdb
diff --git a/server/.python-version b/server/.python-version
index 2c073331..375f5cab 100644
--- a/server/.python-version
+++ b/server/.python-version
@@ -1 +1 @@
-3.11
+3.11.6
diff --git a/server/docker-compose.yml b/server/docker-compose.yml
index 374130fa..c8432816 100644
--- a/server/docker-compose.yml
+++ b/server/docker-compose.yml
@@ -5,11 +5,23
@@ services: context: . ports: - 1250:1250 - environment: - LLM_URL: "${LLM_URL}" - MIN_TRANSCRIPT_LENGTH: "${MIN_TRANSCRIPT_LENGTH}" volumes: - model-cache:/root/.cache + environment: + ENTRYPOINT: server + REDIS_HOST: redis + worker: + build: + context: . + volumes: + - model-cache:/root/.cache + environment: + ENTRYPOINT: worker + REDIS_HOST: redis + redis: + image: redis:7.2 + ports: + - 6379:6379 volumes: model-cache: diff --git a/server/env.example b/server/env.example index 8c4dcdab..c5a38bf5 100644 --- a/server/env.example +++ b/server/env.example @@ -51,17 +51,6 @@ #TRANSLATE_URL=https://xxxxx--reflector-translator-web.modal.run #TRANSCRIPT_MODAL_API_KEY=xxxxx -## Using serverless banana.dev (require reflector-gpu-banana deployed) -## XXX this service is buggy do not use at the moment -## XXX it also require the audio to be saved to S3 -#TRANSCRIPT_BACKEND=banana -#TRANSCRIPT_URL=https://reflector-gpu-banana-xxxxx.run.banana.dev -#TRANSCRIPT_BANANA_API_KEY=xxx -#TRANSCRIPT_BANANA_MODEL_KEY=xxx -#TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID=xxx -#TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY=xxx -#TRANSCRIPT_STORAGE_AWS_BUCKET_NAME="reflector-bucket/chunks" - ## ======================================================= ## LLM backend ## @@ -78,13 +67,6 @@ #LLM_URL=https://xxxxxx--reflector-llm-web.modal.run #LLM_MODAL_API_KEY=xxx -## Using serverless banana.dev (require reflector-gpu-banana deployed) -## XXX this service is buggy do not use at the moment -#LLM_BACKEND=banana -#LLM_URL=https://reflector-gpu-banana-xxxxx.run.banana.dev -#LLM_BANANA_API_KEY=xxxxx -#LLM_BANANA_MODEL_KEY=xxxxx - ## Using OpenAI #LLM_BACKEND=openai #LLM_OPENAI_KEY=xxx diff --git a/server/gpu/modal/reflector_diarizer.py b/server/gpu/modal/reflector_diarizer.py new file mode 100644 index 00000000..b1989a11 --- /dev/null +++ b/server/gpu/modal/reflector_diarizer.py @@ -0,0 +1,188 @@ +""" +Reflector GPU backend - diarizer +=================================== +""" + +import os + +import modal.gpu +from modal import Image, Secret, Stub, asgi_app, method +from pydantic import BaseModel + +PYANNOTE_MODEL_NAME: str = "pyannote/speaker-diarization-3.0" +MODEL_DIR = "/root/diarization_models" + +stub = Stub(name="reflector-diarizer") + + +def migrate_cache_llm(): + """ + XXX The cache for model files in Transformers v4.22.0 has been updated. + Migrating your old cache. This is a one-time only operation. You can + interrupt this and resume the migration later on by calling + `transformers.utils.move_cache()`. 
+ """ + from transformers.utils.hub import move_cache + + print("Moving LLM cache") + move_cache(cache_dir=MODEL_DIR, new_cache_dir=MODEL_DIR) + print("LLM cache moved") + + +def download_pyannote_audio(): + from pyannote.audio import Pipeline + Pipeline.from_pretrained( + "pyannote/speaker-diarization-3.0", + cache_dir=MODEL_DIR, + use_auth_token="***REMOVED***" + ) + + +diarizer_image = ( + Image.debian_slim(python_version="3.10.8") + .pip_install( + "pyannote.audio", + "requests", + "onnx", + "torchaudio", + "onnxruntime-gpu", + "torch==2.0.0", + "transformers==4.34.0", + "sentencepiece", + "protobuf", + "numpy", + "huggingface_hub", + "hf-transfer" + ) + .run_function(migrate_cache_llm) + .run_function(download_pyannote_audio) + .env( + { + "LD_LIBRARY_PATH": ( + "/usr/local/lib/python3.10/site-packages/nvidia/cudnn/lib/:" + "/opt/conda/lib/python3.10/site-packages/nvidia/cublas/lib/" + ) + } + ) +) + + +@stub.cls( + gpu=modal.gpu.A100(memory=40), + timeout=60 * 30, + container_idle_timeout=60, + allow_concurrent_inputs=1, + image=diarizer_image, +) +class Diarizer: + def __enter__(self): + import torch + from pyannote.audio import Pipeline + + self.use_gpu = torch.cuda.is_available() + self.device = "cuda" if self.use_gpu else "cpu" + self.diarization_pipeline = Pipeline.from_pretrained( + "pyannote/speaker-diarization-3.0", + cache_dir=MODEL_DIR + ) + self.diarization_pipeline.to(torch.device(self.device)) + + @method() + def diarize( + self, + audio_data: str, + audio_suffix: str, + timestamp: float + ): + import tempfile + + import torchaudio + + with tempfile.NamedTemporaryFile("wb+", suffix=f".{audio_suffix}") as fp: + fp.write(audio_data) + + print("Diarizing audio") + waveform, sample_rate = torchaudio.load(fp.name) + diarization = self.diarization_pipeline({"waveform": waveform, "sample_rate": sample_rate}) + + words = [] + for diarization_segment, _, speaker in diarization.itertracks(yield_label=True): + words.append( + { + "start": round(timestamp + diarization_segment.start, 3), + "end": round(timestamp + diarization_segment.end, 3), + "speaker": int(speaker[-2:]) + } + ) + print("Diarization complete") + return { + "diarization": words + } + +# ------------------------------------------------------------------- +# Web API +# ------------------------------------------------------------------- + + +@stub.function( + timeout=60 * 10, + container_idle_timeout=60 * 3, + allow_concurrent_inputs=40, + secrets=[ + Secret.from_name("reflector-gpu"), + ], + image=diarizer_image +) +@asgi_app() +def web(): + import requests + from fastapi import Depends, FastAPI, HTTPException, status + from fastapi.security import OAuth2PasswordBearer + + diarizerstub = Diarizer() + + app = FastAPI() + + oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + def apikey_auth(apikey: str = Depends(oauth2_scheme)): + if apikey != os.environ["REFLECTOR_GPU_APIKEY"]: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid API key", + headers={"WWW-Authenticate": "Bearer"}, + ) + + def validate_audio_file(audio_file_url: str): + # Check if the audio file exists + response = requests.head(audio_file_url, allow_redirects=True) + if response.status_code == 404: + raise HTTPException( + status_code=response.status_code, + detail="The audio file does not exist." 
+ ) + + class DiarizationResponse(BaseModel): + result: dict + + @app.post("/diarize", dependencies=[Depends(apikey_auth), Depends(validate_audio_file)]) + def diarize( + audio_file_url: str, + timestamp: float = 0.0 + ) -> HTTPException | DiarizationResponse: + # Currently the uploaded files are in mp3 format + audio_suffix = "mp3" + + print("Downloading audio file") + response = requests.get(audio_file_url, allow_redirects=True) + print("Audio file downloaded successfully") + + func = diarizerstub.diarize.spawn( + audio_data=response.content, + audio_suffix=audio_suffix, + timestamp=timestamp + ) + result = func.get() + return result + + return app diff --git a/server/gpu/modal/reflector_llm.py b/server/gpu/modal/reflector_llm.py index 02feedb7..f1e9d166 100644 --- a/server/gpu/modal/reflector_llm.py +++ b/server/gpu/modal/reflector_llm.py @@ -81,7 +81,8 @@ class LLM: LLM_MODEL, torch_dtype=getattr(torch, LLM_TORCH_DTYPE), low_cpu_mem_usage=LLM_LOW_CPU_MEM_USAGE, - cache_dir=IMAGE_MODEL_DIR + cache_dir=IMAGE_MODEL_DIR, + local_files_only=True ) # JSONFormer doesn't yet support generation configs @@ -96,7 +97,8 @@ class LLM: print("Instance llm tokenizer") tokenizer = AutoTokenizer.from_pretrained( LLM_MODEL, - cache_dir=IMAGE_MODEL_DIR + cache_dir=IMAGE_MODEL_DIR, + local_files_only=True ) # move model to gpu diff --git a/server/gpu/modal/reflector_llm_zephyr.py b/server/gpu/modal/reflector_llm_zephyr.py index cbb436b0..b101f5f2 100644 --- a/server/gpu/modal/reflector_llm_zephyr.py +++ b/server/gpu/modal/reflector_llm_zephyr.py @@ -17,7 +17,7 @@ LLM_LOW_CPU_MEM_USAGE: bool = True LLM_TORCH_DTYPE: str = "bfloat16" LLM_MAX_NEW_TOKENS: int = 300 -IMAGE_MODEL_DIR = "/root/llm_models" +IMAGE_MODEL_DIR = "/root/llm_models/zephyr" stub = Stub(name="reflector-llm-zephyr") @@ -81,7 +81,8 @@ class LLM: LLM_MODEL, torch_dtype=getattr(torch, LLM_TORCH_DTYPE), low_cpu_mem_usage=LLM_LOW_CPU_MEM_USAGE, - cache_dir=IMAGE_MODEL_DIR + cache_dir=IMAGE_MODEL_DIR, + local_files_only=True ) # JSONFormer doesn't yet support generation configs @@ -96,7 +97,8 @@ class LLM: print("Instance llm tokenizer") tokenizer = AutoTokenizer.from_pretrained( LLM_MODEL, - cache_dir=IMAGE_MODEL_DIR + cache_dir=IMAGE_MODEL_DIR, + local_files_only=True ) gen_cfg.pad_token_id = tokenizer.eos_token_id gen_cfg.eos_token_id = tokenizer.eos_token_id diff --git a/server/gpu/modal/reflector_transcriber.py b/server/gpu/modal/reflector_transcriber.py index bee9ccd1..4f746ded 100644 --- a/server/gpu/modal/reflector_transcriber.py +++ b/server/gpu/modal/reflector_transcriber.py @@ -95,7 +95,8 @@ class Transcriber: device=self.device, compute_type=WHISPER_COMPUTE_TYPE, num_workers=WHISPER_NUM_WORKERS, - download_root=WHISPER_MODEL_DIR + download_root=WHISPER_MODEL_DIR, + local_files_only=True ) @method() diff --git a/server/migrations/versions/0fea6d96b096_add_share_mode.py b/server/migrations/versions/0fea6d96b096_add_share_mode.py new file mode 100644 index 00000000..48746c3b --- /dev/null +++ b/server/migrations/versions/0fea6d96b096_add_share_mode.py @@ -0,0 +1,33 @@ +"""add share_mode + +Revision ID: 0fea6d96b096 +Revises: f819277e5169 +Create Date: 2023-11-07 11:12:21.614198 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
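+# (these chain the migration directly onto f819277e5169)
+#
+# `server_default="private"` below is what makes adding a NOT NULL column
+# safe on a populated table: existing rows are backfilled in SQL. Purely
+# illustrative post-upgrade check, assuming the local sqlite database named
+# in server/.gitignore:
+#
+#   import sqlalchemy as sa
+#   engine = sa.create_engine("sqlite:///reflector.sqlite3")
+#   with engine.connect() as conn:
+#       q = sa.text("SELECT COUNT(*) FROM transcript WHERE share_mode = 'private'")
+#       print(conn.execute(q).scalar())  # every pre-existing row counts here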
+revision: str = "0fea6d96b096" +down_revision: Union[str, None] = "f819277e5169" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "transcript", + sa.Column("share_mode", sa.String(), server_default="private", nullable=False), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("transcript", "share_mode") + # ### end Alembic commands ### diff --git a/server/migrations/versions/125031f7cb78_participants.py b/server/migrations/versions/125031f7cb78_participants.py new file mode 100644 index 00000000..c345b083 --- /dev/null +++ b/server/migrations/versions/125031f7cb78_participants.py @@ -0,0 +1,30 @@ +"""participants + +Revision ID: 125031f7cb78 +Revises: 0fea6d96b096 +Create Date: 2023-11-30 15:56:03.341466 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '125031f7cb78' +down_revision: Union[str, None] = '0fea6d96b096' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('transcript', sa.Column('participants', sa.JSON(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('transcript', 'participants') + # ### end Alembic commands ### diff --git a/server/migrations/versions/38a927dcb099_rename_back_text_to_transcript.py b/server/migrations/versions/38a927dcb099_rename_back_text_to_transcript.py new file mode 100644 index 00000000..dffe6fa1 --- /dev/null +++ b/server/migrations/versions/38a927dcb099_rename_back_text_to_transcript.py @@ -0,0 +1,80 @@ +"""rename back text to transcript + +Revision ID: 38a927dcb099 +Revises: 9920ecfe2735 +Create Date: 2023-11-02 19:53:09.116240 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import table, column +from sqlalchemy import select + + +# revision identifiers, used by Alembic. 
+revision: str = '38a927dcb099' +down_revision: Union[str, None] = '9920ecfe2735' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # bind the engine + bind = op.get_bind() + + # Reflect the table + transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON)) + + # Select all rows from the transcript table + results = bind.execute(select([transcript.c.id, transcript.c.topics])) + + for row in results: + transcript_id = row["id"] + topics_json = row["topics"] + + # Process each topic in the topics JSON array + updated_topics = [] + for topic in topics_json: + if "text" in topic: + # Rename key 'text' back to 'transcript' + topic["transcript"] = topic.pop("text") + updated_topics.append(topic) + + # Update the transcript table + bind.execute( + transcript.update() + .where(transcript.c.id == transcript_id) + .values(topics=updated_topics) + ) + + +def downgrade() -> None: + # bind the engine + bind = op.get_bind() + + # Reflect the table + transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON)) + + # Select all rows from the transcript table + results = bind.execute(select([transcript.c.id, transcript.c.topics])) + + for row in results: + transcript_id = row["id"] + topics_json = row["topics"] + + # Process each topic in the topics JSON array + updated_topics = [] + for topic in topics_json: + if "transcript" in topic: + # Rename key 'transcript' to 'text' + topic["text"] = topic.pop("transcript") + updated_topics.append(topic) + + # Update the transcript table + bind.execute( + transcript.update() + .where(transcript.c.id == transcript_id) + .values(topics=updated_topics) + ) diff --git a/server/migrations/versions/4814901632bc_fix_duration.py b/server/migrations/versions/4814901632bc_fix_duration.py new file mode 100644 index 00000000..66628bb5 --- /dev/null +++ b/server/migrations/versions/4814901632bc_fix_duration.py @@ -0,0 +1,64 @@ +"""fix duration + +Revision ID: 4814901632bc +Revises: 38a927dcb099 +Create Date: 2023-11-10 18:12:17.886522 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import table, column +from sqlalchemy import select + + +# revision identifiers, used by Alembic. 
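+# (these chain the migration onto 38a927dcb099)
+#
+# PyAV reports `container.duration` in `av.time_base` (microsecond) units, so
+# the `container.duration / av.time_base` below converts it to seconds, e.g.
+# for a ~90-second mp3 (illustrative numbers):
+#
+#   container.duration                  # 90000000
+#   container.duration / av.time_base   # 90.0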
+revision: str = "4814901632bc" +down_revision: Union[str, None] = "38a927dcb099" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # for all the transcripts, calculate the duration from the mp3 + # and update the duration column + from pathlib import Path + from reflector.settings import settings + import av + + bind = op.get_bind() + transcript = table( + "transcript", column("id", sa.String), column("duration", sa.Float) + ) + + # select only the one with duration = 0 + results = bind.execute( + select([transcript.c.id, transcript.c.duration]).where( + transcript.c.duration == 0 + ) + ) + + data_dir = Path(settings.DATA_DIR) + for row in results: + audio_path = data_dir / row["id"] / "audio.mp3" + if not audio_path.exists(): + continue + + try: + print(f"Processing {audio_path}") + container = av.open(audio_path.as_posix()) + print(container.duration) + duration = round(float(container.duration / av.time_base), 2) + print(f"Duration: {duration}") + bind.execute( + transcript.update() + .where(transcript.c.id == row["id"]) + .values(duration=duration) + ) + except Exception as e: + print(f"Failed to process {audio_path}: {e}") + + +def downgrade() -> None: + pass diff --git a/server/migrations/versions/9920ecfe2735_rename_transcript_to_text.py b/server/migrations/versions/9920ecfe2735_rename_transcript_to_text.py new file mode 100644 index 00000000..caecaefd --- /dev/null +++ b/server/migrations/versions/9920ecfe2735_rename_transcript_to_text.py @@ -0,0 +1,80 @@ +"""Migration transcript to text field in transcripts table + +Revision ID: 9920ecfe2735 +Revises: 99365b0cd87b +Create Date: 2023-11-02 18:55:17.019498 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import table, column +from sqlalchemy import select + + +# revision identifiers, used by Alembic. 
+revision: str = "9920ecfe2735" +down_revision: Union[str, None] = "99365b0cd87b" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # bind the engine + bind = op.get_bind() + + # Reflect the table + transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON)) + + # Select all rows from the transcript table + results = bind.execute(select([transcript.c.id, transcript.c.topics])) + + for row in results: + transcript_id = row["id"] + topics_json = row["topics"] + + # Process each topic in the topics JSON array + updated_topics = [] + for topic in topics_json: + if "transcript" in topic: + # Rename key 'transcript' to 'text' + topic["text"] = topic.pop("transcript") + updated_topics.append(topic) + + # Update the transcript table + bind.execute( + transcript.update() + .where(transcript.c.id == transcript_id) + .values(topics=updated_topics) + ) + + +def downgrade() -> None: + # bind the engine + bind = op.get_bind() + + # Reflect the table + transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON)) + + # Select all rows from the transcript table + results = bind.execute(select([transcript.c.id, transcript.c.topics])) + + for row in results: + transcript_id = row["id"] + topics_json = row["topics"] + + # Process each topic in the topics JSON array + updated_topics = [] + for topic in topics_json: + if "text" in topic: + # Rename key 'text' back to 'transcript' + topic["transcript"] = topic.pop("text") + updated_topics.append(topic) + + # Update the transcript table + bind.execute( + transcript.update() + .where(transcript.c.id == transcript_id) + .values(topics=updated_topics) + ) diff --git a/server/migrations/versions/b9348748bbbc_reviewed.py b/server/migrations/versions/b9348748bbbc_reviewed.py new file mode 100644 index 00000000..f9f23c3e --- /dev/null +++ b/server/migrations/versions/b9348748bbbc_reviewed.py @@ -0,0 +1,29 @@ +"""reviewed + +Revision ID: b9348748bbbc +Revises: 125031f7cb78 +Create Date: 2023-12-13 15:37:51.303970 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = 'b9348748bbbc' +down_revision: Union[str, None] = '125031f7cb78' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('transcript', sa.Column('reviewed', sa.Boolean(), server_default=sa.text('0'), nullable=False)) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('transcript', 'reviewed') + # ### end Alembic commands ### diff --git a/server/migrations/versions/f819277e5169_audio_location.py b/server/migrations/versions/f819277e5169_audio_location.py new file mode 100644 index 00000000..061abec4 --- /dev/null +++ b/server/migrations/versions/f819277e5169_audio_location.py @@ -0,0 +1,35 @@ +"""audio_location + +Revision ID: f819277e5169 +Revises: 4814901632bc +Create Date: 2023-11-16 10:29:09.351664 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "f819277e5169" +down_revision: Union[str, None] = "4814901632bc" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "transcript", + sa.Column( + "audio_location", sa.String(), server_default="local", nullable=False + ), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("transcript", "audio_location") + # ### end Alembic commands ### diff --git a/server/poetry.lock b/server/poetry.lock index 330c23e3..5bf722b9 100644 --- a/server/poetry.lock +++ b/server/poetry.lock @@ -42,111 +42,98 @@ boto3 = ["boto3 (>=1.28.17,<1.28.18)"] [[package]] name = "aiohttp" -version = "3.8.6" +version = "3.9.0" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, - {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, - {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, - {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, - {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, - {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, - {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, - {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, - {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, - {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, - {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, - {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, - {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, - {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, - {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, - {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = 
"sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, - {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, - {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6896b8416be9ada4d22cd359d7cb98955576ce863eadad5596b7cdfbf3e17c6c"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1736d87dad8ef46a8ec9cddd349fa9f7bd3a064c47dd6469c0d6763d3d49a4fc"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c9e5f4d7208cda1a2bb600e29069eecf857e6980d0ccc922ccf9d1372c16f4b"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8488519aa05e636c5997719fe543c8daf19f538f4fa044f3ce94bee608817cff"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ab16c254e2312efeb799bc3c06897f65a133b38b69682bf75d1f1ee1a9c43a9"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a94bde005a8f926d0fa38b88092a03dea4b4875a61fbcd9ac6f4351df1b57cd"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b777c9286b6c6a94f50ddb3a6e730deec327e9e2256cb08b5530db0f7d40fd8"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571760ad7736b34d05597a1fd38cbc7d47f7b65deb722cb8e86fd827404d1f6b"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:deac0a32aec29608eb25d730f4bc5a261a65b6c48ded1ed861d2a1852577c932"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ee1b4152bc3190cc40ddd6a14715e3004944263ea208229ab4c297712aa3075"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3607375053df58ed6f23903aa10cf3112b1240e8c799d243bbad0f7be0666986"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:65b0a70a25456d329a5e1426702dde67be0fb7a4ead718005ba2ca582d023a94"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a2eb5311a37fe105aa35f62f75a078537e1a9e4e1d78c86ec9893a3c97d7a30"}, + {file = "aiohttp-3.9.0-cp310-cp310-win32.whl", hash = "sha256:2cbc14a13fb6b42d344e4f27746a4b03a2cb0c1c3c5b932b0d6ad8881aa390e3"}, + {file = "aiohttp-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ac9669990e2016d644ba8ae4758688534aabde8dbbc81f9af129c3f5f01ca9cd"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8e05f5163528962ce1d1806fce763ab893b1c5b7ace0a3538cd81a90622f844"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4afa8f71dba3a5a2e1e1282a51cba7341ae76585345c43d8f0e624882b622218"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f929f4c9b9a00f3e6cc0587abb95ab9c05681f8b14e0fe1daecfa83ea90f8318"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28185e36a78d247c55e9fbea2332d16aefa14c5276a582ce7a896231c6b1c208"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a486ddf57ab98b6d19ad36458b9f09e6022de0381674fe00228ca7b741aacb2f"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:70e851f596c00f40a2f00a46126c95c2e04e146015af05a9da3e4867cfc55911"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b7bf8fe4d39886adc34311a233a2e01bc10eb4e842220235ed1de57541a896"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c67a51ea415192c2e53e4e048c78bab82d21955b4281d297f517707dc836bf3d"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:694df243f394629bcae2d8ed94c589a181e8ba8604159e6e45e7b22e58291113"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3dd8119752dd30dd7bca7d4bc2a92a59be6a003e4e5c2cf7e248b89751b8f4b7"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:eb6dfd52063186ac97b4caa25764cdbcdb4b10d97f5c5f66b0fa95052e744eb7"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d97c3e286d0ac9af6223bc132dc4bad6540b37c8d6c0a15fe1e70fb34f9ec411"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:816f4db40555026e4cdda604a1088577c1fb957d02f3f1292e0221353403f192"}, + {file = "aiohttp-3.9.0-cp311-cp311-win32.whl", hash = "sha256:3abf0551874fecf95f93b58f25ef4fc9a250669a2257753f38f8f592db85ddea"}, + {file = "aiohttp-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:e18d92c3e9e22553a73e33784fcb0ed484c9874e9a3e96c16a8d6a1e74a0217b"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:99ae01fb13a618b9942376df77a1f50c20a281390dad3c56a6ec2942e266220d"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:05857848da443c8c12110d99285d499b4e84d59918a21132e45c3f0804876994"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317719d7f824eba55857fe0729363af58e27c066c731bc62cd97bc9c3d9c7ea4"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e3b3c107ccb0e537f309f719994a55621acd2c8fdf6d5ce5152aed788fb940"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45820ddbb276113ead8d4907a7802adb77548087ff5465d5c554f9aa3928ae7d"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a183f1978802588711aed0dea31e697d760ce9055292db9dc1604daa9a8ded"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a4cd44788ea0b5e6bb8fa704597af3a30be75503a7ed1098bc5b8ffdf6c982"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673343fbc0c1ac44d0d2640addc56e97a052504beacd7ade0dc5e76d3a4c16e8"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e8a3b79b6d186a9c99761fd4a5e8dd575a48d96021f220ac5b5fa856e5dd029"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6777a390e41e78e7c45dab43a4a0196c55c3b8c30eebe017b152939372a83253"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7ae5f99a32c53731c93ac3075abd3e1e5cfbe72fc3eaac4c27c9dd64ba3b19fe"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f1e4f254e9c35d8965d377e065c4a8a55d396fe87c8e7e8429bcfdeeb229bfb3"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11ca808f9a6b63485059f5f6e164ef7ec826483c1212a44f268b3653c91237d8"}, + {file = "aiohttp-3.9.0-cp312-cp312-win32.whl", hash = 
"sha256:de3cc86f4ea8b4c34a6e43a7306c40c1275e52bfa9748d869c6b7d54aa6dad80"}, + {file = "aiohttp-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca4fddf84ac7d8a7d0866664936f93318ff01ee33e32381a115b19fb5a4d1202"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f09960b5bb1017d16c0f9e9f7fc42160a5a49fa1e87a175fd4a2b1a1833ea0af"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8303531e2c17b1a494ffaeba48f2da655fe932c4e9a2626c8718403c83e5dd2b"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4790e44f46a4aa07b64504089def5744d3b6780468c4ec3a1a36eb7f2cae9814"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1d7edf74a36de0e5ca50787e83a77cf352f5504eb0ffa3f07000a911ba353fb"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94697c7293199c2a2551e3e3e18438b4cba293e79c6bc2319f5fd652fccb7456"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1b66dbb8a7d5f50e9e2ea3804b01e766308331d0cac76eb30c563ac89c95985"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9623cfd9e85b76b83ef88519d98326d4731f8d71869867e47a0b979ffec61c73"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f32c86dc967ab8c719fd229ce71917caad13cc1e8356ee997bf02c5b368799bf"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f50b4663c3e0262c3a361faf440761fbef60ccdde5fe8545689a4b3a3c149fb4"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dcf71c55ec853826cd70eadb2b6ac62ec577416442ca1e0a97ad875a1b3a0305"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:42fe4fd9f0dfcc7be4248c162d8056f1d51a04c60e53366b0098d1267c4c9da8"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76a86a9989ebf82ee61e06e2bab408aec4ea367dc6da35145c3352b60a112d11"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f9e09a1c83521d770d170b3801eea19b89f41ccaa61d53026ed111cb6f088887"}, + {file = "aiohttp-3.9.0-cp38-cp38-win32.whl", hash = "sha256:a00ce44c21612d185c5275c5cba4bab8d7c1590f248638b667ed8a782fa8cd6f"}, + {file = "aiohttp-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5b9345ab92ebe6003ae11d8092ce822a0242146e6fa270889b9ba965457ca40"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98d21092bf2637c5fa724a428a69e8f5955f2182bff61f8036827cf6ce1157bf"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35a68cd63ca6aaef5707888f17a70c36efe62b099a4e853d33dc2e9872125be8"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7f6235c7475658acfc1769d968e07ab585c79f6ca438ddfecaa9a08006aee2"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db04d1de548f7a62d1dd7e7cdf7c22893ee168e22701895067a28a8ed51b3735"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:536b01513d67d10baf6f71c72decdf492fb7433c5f2f133e9a9087379d4b6f31"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c8b0a6487e8109427ccf638580865b54e2e3db4a6e0e11c02639231b41fc0f"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7276fe0017664414fdc3618fca411630405f1aaf0cc3be69def650eb50441787"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23170247ef89ffa842a02bbfdc425028574d9e010611659abeb24d890bc53bb8"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1a2ea8252cacc7fd51df5a56d7a2bb1986ed39be9397b51a08015727dfb69bd"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d71abc15ff7047412ef26bf812dfc8d0d1020d664617f4913df2df469f26b76"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d820162c8c2bdbe97d328cd4f417c955ca370027dce593345e437b2e9ffdc4d"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2779f5e7c70f7b421915fd47db332c81de365678180a9f3ab404088f87ba5ff9"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:366bc870d7ac61726f32a489fbe3d1d8876e87506870be66b01aeb84389e967e"}, + {file = "aiohttp-3.9.0-cp39-cp39-win32.whl", hash = "sha256:1df43596b826022b14998f0460926ce261544fedefe0d2f653e1b20f49e96454"}, + {file = "aiohttp-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c196b30f1b1aa3363a69dd69079ae9bec96c2965c4707eaa6914ba099fb7d4f"}, + {file = "aiohttp-3.9.0.tar.gz", hash = "sha256:09f23292d29135025e19e8ff4f0a68df078fe4ee013bca0105b2e803989de92d"}, ] [package.dependencies] aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiohttp-cors" @@ -308,6 +295,20 @@ typing-extensions = ">=4" [package.extras] tz = ["python-dateutil"] +[[package]] +name = "amqp" +version = "5.1.1" +description = "Low-level AMQP client for Python (fork of amqplib)." +optional = false +python-versions = ">=3.6" +files = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] + +[package.dependencies] +vine = ">=5.0.0" + [[package]] name = "annotated-types" version = "0.6.0" @@ -474,6 +475,17 @@ files = [ {file = "av-10.0.0.tar.gz", hash = "sha256:8afd3d5610e1086f3b2d8389d66672ea78624516912c93612de64dcaa4c67e05"}, ] +[[package]] +name = "billiard" +version = "4.1.0" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "billiard-4.1.0-py3-none-any.whl", hash = "sha256:0f50d6be051c6b2b75bfbc8bfd85af195c5739c281d3f5b86a5640c65563614a"}, + {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, +] + [[package]] name = "black" version = "23.9.1" @@ -556,6 +568,61 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "celery" +version = "5.3.4" +description = "Distributed Task Queue." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "celery-5.3.4-py3-none-any.whl", hash = "sha256:1e6ed40af72695464ce98ca2c201ad0ef8fd192246f6c9eac8bba343b980ad34"}, + {file = "celery-5.3.4.tar.gz", hash = "sha256:9023df6a8962da79eb30c0c84d5f4863d9793a466354cc931d7f72423996de28"}, +] + +[package.dependencies] +billiard = ">=4.1.0,<5.0" +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.3.2,<6.0" +python-dateutil = ">=2.8.2" +tzdata = ">=2022.7" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==41.0.3)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elasticsearch (<8.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +memcache = ["pylibmc (==1.6.3)"] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.5)"] +pymemcache = ["python-memcached (==1.59)"] +pyro = ["pyro4 (==4.82)"] +pytest = ["pytest-celery (==0.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<5.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.4)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard (==0.21.0)"] + [[package]] name = "certifi" version = "2023.7.22" @@ -744,6 +811,55 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.3.0" +description = "REPL plugin for Click" +optional = false +python-versions = ">=3.6" +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[package.dependencies] +click = ">=7.0" +prompt-toolkit = ">=3.0.36" + +[package.extras] +testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] + [[package]] name = "colorama" version = "0.4.6" @@ -883,41 +999,42 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "ctranslate2" -version = "3.20.0" +version = "3.23.0" description = "Fast inference engine for Transformer models" optional = false python-versions = ">=3.8" files = [ - {file = "ctranslate2-3.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:01754fdb1a5f0d9bcb194cdb4c6e91e3ea23c5be4a1bbd0fca448a1b41e222d0"}, - {file = "ctranslate2-3.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:825774f301f47dd26d51f98cda2b546fee1ad619f0c901446e6bd177ac5f6976"}, - {file = "ctranslate2-3.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47c051208cb9337030ca571f1561dc59e6d9ec7b6061d6ed8e6081214031edac"}, - {file = "ctranslate2-3.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d914f707263da9c2c0ef6b8242b9cb58bdb0d0ccc23eba175f8e7719b510c22"}, - {file = "ctranslate2-3.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:1a71a4faf437c0d832d23be704b4d2dc0406be16c0f35f46ad461902c5fae259"}, - {file = "ctranslate2-3.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d94c70905cd8a5d665e8cbd78a5eaa23c53db5b5feea2a3b54b79332f6475c6"}, - {file = "ctranslate2-3.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc05762f07c1a2aad578e1608addf987992e41b8b912dff9e73dc97c9611c630"}, - {file = "ctranslate2-3.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9144a15566b24ffd7310f9b3b116e4b70b7fab52c96bd18bbc63142172446ea0"}, - {file = "ctranslate2-3.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b173316e567d4117a53793235064ee00972b08a9e30065028d3373e1db8ccf"}, - {file = "ctranslate2-3.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:dea6434e53948a550b538fd22b6b9118f8f4fb84cc2baa52d3849369fdaba07f"}, - {file = "ctranslate2-3.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bab22a01a4a3419a9eb520f0e8e0a822947f0b948d8df77c6388151cb4eaafc0"}, - {file = "ctranslate2-3.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:112265df36fc83904bba52ba5ec9353f402bb7f96df66382f5faacc7c565867f"}, - {file = "ctranslate2-3.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3129c3065dbf554143527c5f48284624cd119ad7f071d5e09624ebab51aef6b7"}, - {file = "ctranslate2-3.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660f01d99462fb5c448ca801f77c5d5e52e7ebdca41075cdedb0a8134284c918"}, - {file = 
"ctranslate2-3.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:5fd6ff0afb1fc1ca68d3b2bd3513942634ed2773fdcd6083ce8d1c9be3290bbf"}, - {file = "ctranslate2-3.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f5d79d52ad1b2c7eb8aad2a14fa13afaca71a5865a2a59f46cf6b9280a25c2b"}, - {file = "ctranslate2-3.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a0be5d8dd42f8916d2363a174723e9ef3ca98038e7f26f786425db7f316f1955"}, - {file = "ctranslate2-3.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4043eeb9bb2275452b40884e04424b66f0eb8a6bfee356a8e9fc9d53eb6d40e3"}, - {file = "ctranslate2-3.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b311a47b603e8d22bb1dd2c0f733a3b0b0cb6b7f2eeb8fe67fccc1593415f6"}, - {file = "ctranslate2-3.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:18c6fbf1e7de576fef2a192884483065eaa761ec714f4d03aef5343f08621991"}, - {file = "ctranslate2-3.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b9f65982890e70838583d4ffd9766595339e51066c983ecdfada3b02da6957ae"}, - {file = "ctranslate2-3.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:74eacb7a86069df63ce45cef412f36880fe8edf745e565917d347cc84391a814"}, - {file = "ctranslate2-3.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8465c238f8f6ec74401fddf80bf00cdc27d3eec453881ba6566f819cf7939382"}, - {file = "ctranslate2-3.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7b90e08c8a7a483fb8c074e7d0f78e8a794a2d86d47dafd58985f1fbec702f3"}, - {file = "ctranslate2-3.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:cd78d95228235413f896ea0912546f9f8fc86ce5f110efa9282c792693a0d8d2"}, + {file = "ctranslate2-3.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f42013c51599efce30303286022095b084143d5bd45537e1acb36c032c4f683c"}, + {file = "ctranslate2-3.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e055981ba85a7a27a125749eee20e1dfa0740af9dd1f771eab5ef8e7af3d6ed7"}, + {file = "ctranslate2-3.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b211cd1cc22356c94ae260a11d9274eaed2f331bae91bbe502542edabc9c0bea"}, + {file = "ctranslate2-3.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87648abc1026ec9e1657ed88b8407db057c570ecc2ae5dbd321ddec1a21272c6"}, + {file = "ctranslate2-3.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:8d55b4689e866f62095cfed7c79f3ece75a4cba9a193df96129f83f33d049ccc"}, + {file = "ctranslate2-3.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc8e7a8900a9911854d4c820badbdff9ffde94e6d853f27f82a282f01f6c61a1"}, + {file = "ctranslate2-3.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b974d63c769e2246eb8384613a3a38417c8a47c00db78a972d3435d3fb92d54b"}, + {file = "ctranslate2-3.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f9f2554810157ca8a7dcb6da76aa326e0ea7424c8c62cec743d601dae45f051"}, + {file = "ctranslate2-3.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8a0d3085297373208f6e703e096bfd3b19ec80bf133e006fdcd7e88996b11b"}, + {file = "ctranslate2-3.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:9999956b88cdff4d102868b90c5526af3910010db0acd1bdeb3a1b4c1380800d"}, + {file = "ctranslate2-3.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4bea2124f8ad6e3b218612bc40323922001720d2939653ca69a497583f566eb"}, + {file = "ctranslate2-3.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:517c40e1d19efd08572549249146af493812e76e05c08d5b5bab5baff09d6403"}, + {file = "ctranslate2-3.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d77e3249ea3e93f72941fb2da118e9b07aab3da3dc339ed17b97a3a10116b9"}, + {file = "ctranslate2-3.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46ec9a53ebc1f286d6407694311e9c83b48c42c0a6dff4975610d7d708cbab37"}, + {file = "ctranslate2-3.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:14db7daeee240c71c41e8021d431a865bb25a1f7ec0d1b37a0dd8cc0ecd64cc0"}, + {file = "ctranslate2-3.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aecc6e3944619a72c873d76ef658a44b1b998ecfe4678b1f7fb4190c2fdb3a11"}, + {file = "ctranslate2-3.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8603950fb1816e0943b6713dabe0b4e651fbc65addc88fb185dce2bd6d29522f"}, + {file = "ctranslate2-3.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a4c465b65697499bb992a2fd00d9957dc5e6baa479c76dc2a5fb2912c2eb28d"}, + {file = "ctranslate2-3.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f16d6e283c4b490fc8b847c49d813c541b780c2738243e34b12db96879f3385"}, + {file = "ctranslate2-3.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:bca8e286b0156208656eafedec73896b40318a0be9b925fca7d6bc8dd665827c"}, + {file = "ctranslate2-3.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f37424b57f6dc0ebeca513632d10d15608631b2f9ee7410537b8ca96a73e2c30"}, + {file = "ctranslate2-3.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:deaa43c1b47e20f2a79f1df699f41de62a881cc860889e027040d14b9b140db6"}, + {file = "ctranslate2-3.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38bdb154e149e954f63aff4d3291d4c8abd7799b2729c5aed533069f8c299b1"}, + {file = "ctranslate2-3.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b82e0dfa2ab179f185d0052ee5b1390cc2f7f905f8d9bfbaf736eefc8e5ed182"}, + {file = "ctranslate2-3.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:720d324f4fe2a56148f7a81248d6a703720685462b96d09c6f8c3ae1615f2f24"}, ] [package.dependencies] numpy = "*" pyyaml = ">=5.3,<7" +setuptools = "*" [[package]] name = "databases" @@ -981,6 +1098,24 @@ idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.23)"] wmi = ["wmi (>=1.5.1,<2.0.0)"] +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, + {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + [[package]] name = "fastapi" version = "0.100.1" @@ -1034,21 +1169,20 @@ tortoise = ["tortoise-orm (>=0.16.18,<0.21.0)"] [[package]] name = "faster-whisper" -version = "0.7.1" +version = "0.10.0" description = "Faster Whisper transcription with CTranslate2" optional = false python-versions = ">=3.8" files = [ - {file = "faster-whisper-0.7.1.tar.gz", hash = "sha256:0cb4f47b1ba638c1ff93cb7ceaa1532bcb9f95ae1a9fc2faa6b6dfaf2c4309a2"}, - {file = "faster_whisper-0.7.1-py3-none-any.whl", hash = "sha256:454d8fcccb453033a3cd6473c11e5e166ae980e3cf1ff1eaf9280d48d2f17bc3"}, + {file = "faster-whisper-0.10.0.tar.gz", hash = 
"sha256:591809328b93c8e4594d52097ec6352a270a81fbb7b956254967f28700f7e4da"}, ] [package.dependencies] av = "==10.*" -ctranslate2 = ">=3.17,<4" -huggingface-hub = ">=0.13" +ctranslate2 = ">=3.22,<4" +huggingface_hub = ">=0.13" onnxruntime = ">=1.14,<2" -tokenizers = "==0.13.*" +tokenizers = ">=0.13,<0.16" [package.extras] conversion = ["transformers[torch] (>=4.23)"] @@ -1078,19 +1212,19 @@ flask = ["flask"] [[package]] name = "filelock" -version = "3.12.4" +version = "3.13.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "flatbuffers" @@ -1175,13 +1309,13 @@ files = [ [[package]] name = "fsspec" -version = "2023.9.2" +version = "2023.12.2" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, - {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, ] [package.extras] @@ -1484,13 +1618,13 @@ wsproto = "*" [[package]] name = "huggingface-hub" -version = "0.18.0" +version = "0.20.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.18.0-py3-none-any.whl", hash = "sha256:ee0b6b68acbf6aeb6d083ea081e981c277a1104b82ab67fdf6780ff5396830af"}, - {file = "huggingface_hub-0.18.0.tar.gz", hash = "sha256:10eda12b9c1cfa800b4b7c096b3ace8843734c3f28d69d1c243743fb7d7a2e81"}, + {file = "huggingface_hub-0.20.1-py3-none-any.whl", hash = "sha256:ecfdea395a8bc68cd160106c5bd857f7e010768d95f9e1862a779010cc304831"}, + {file = "huggingface_hub-0.20.1.tar.gz", hash = "sha256:8c88c4c3c8853e22f2dfb4d84c3d493f4e1af52fb3856a90e1eeddcf191ddbb1"}, ] [package.dependencies] @@ -1503,17 +1637,16 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (==23.7)", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (<2.0)", "pytest", "pytest-asyncio", 
"pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (==23.7)", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (<2.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] -docs = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (==23.7)", "gradio", "hf-doc-builder", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (<2.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)", "watchdog"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -inference = ["aiohttp", "pydantic (<2.0)"] -quality = ["black (==23.7)", "mypy (==1.5.1)", "ruff (>=0.0.241)"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (<2.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["torch"] -typing = ["pydantic (<2.0)", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] name = "humanfriendly" @@ -1624,6 +1757,38 @@ files = [ cryptography = ">=3.4" deprecated = "*" +[[package]] +name = "kombu" +version = "5.3.2" +description = "Messaging library for Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "kombu-5.3.2-py3-none-any.whl", hash = "sha256:b753c9cfc9b1e976e637a7cbc1a65d446a22e45546cd996ea28f932082b7dc9e"}, + {file = "kombu-5.3.2.tar.gz", hash = "sha256:0ba213f630a2cb2772728aef56ac6883dc3a2f13435e10048f6e97d48506dbbd"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (==2.1.1)"] +consul = ["python-consul2"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=4.1.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=4.5.2)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + [[package]] name = "levenshtein" version = "0.21.1" @@ -1983,69 +2148,80 @@ twitter = ["twython"] [[package]] name = "numpy" -version = "1.25.2" +version = "1.26.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = 
"numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + 
{file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, ] [[package]] name = "onnxruntime" -version = "1.16.1" +version = "1.16.3" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.16.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:28b2c7f444b4119950b69370801cd66067f403d19cbaf2a444735d7c269cce4a"}, - 
{file = "onnxruntime-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c24e04f33e7899f6aebb03ed51e51d346c1f906b05c5569d58ac9a12d38a2f58"}, - {file = "onnxruntime-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fa93b166f2d97063dc9f33c5118c5729a4a5dd5617296b6dbef42f9047b3e81"}, - {file = "onnxruntime-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042dd9201b3016ee18f8f8bc4609baf11ff34ca1ff489c0a46bcd30919bf883d"}, - {file = "onnxruntime-1.16.1-cp310-cp310-win32.whl", hash = "sha256:c20aa0591f305012f1b21aad607ed96917c86ae7aede4a4dd95824b3d124ceb7"}, - {file = "onnxruntime-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:5581873e578917bea76d6434ee7337e28195d03488dcf72d161d08e9398c6249"}, - {file = "onnxruntime-1.16.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:ef8c0c8abf5f309aa1caf35941380839dc5f7a2fa53da533be4a3f254993f120"}, - {file = "onnxruntime-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e680380bea35a137cbc3efd67a17486e96972901192ad3026ee79c8d8fe264f7"}, - {file = "onnxruntime-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e62cc38ce1a669013d0a596d984762dc9c67c56f60ecfeee0d5ad36da5863f6"}, - {file = "onnxruntime-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:025c7a4d57bd2e63b8a0f84ad3df53e419e3df1cc72d63184f2aae807b17c13c"}, - {file = "onnxruntime-1.16.1-cp311-cp311-win32.whl", hash = "sha256:9ad074057fa8d028df248b5668514088cb0937b6ac5954073b7fb9b2891ffc8c"}, - {file = "onnxruntime-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:d5e43a3478bffc01f817ecf826de7b25a2ca1bca8547d70888594ab80a77ad24"}, - {file = "onnxruntime-1.16.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3aef4d70b0930e29a8943eab248cd1565664458d3a62b2276bd11181f28fd0a3"}, - {file = "onnxruntime-1.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:55a7b843a57c8ca0c8ff169428137958146081d5d76f1a6dd444c4ffcd37c3c2"}, - {file = "onnxruntime-1.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c631af1941bf3b5f7d063d24c04aacce8cff0794e157c497e315e89ac5ad7b"}, - {file = "onnxruntime-1.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671f296c3d5c233f601e97a10ab5a1dd8e65ba35c7b7b0c253332aba9dff330"}, - {file = "onnxruntime-1.16.1-cp38-cp38-win32.whl", hash = "sha256:eb3802305023dd05e16848d4e22b41f8147247894309c0c27122aaa08793b3d2"}, - {file = "onnxruntime-1.16.1-cp38-cp38-win_amd64.whl", hash = "sha256:fecfb07443d09d271b1487f401fbdf1ba0c829af6fd4fe8f6af25f71190e7eb9"}, - {file = "onnxruntime-1.16.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:de3e12094234db6545c67adbf801874b4eb91e9f299bda34c62967ef0050960f"}, - {file = "onnxruntime-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff723c2a5621b5e7103f3be84d5aae1e03a20621e72219dddceae81f65f240af"}, - {file = "onnxruntime-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14a7fb3073aaf6b462e3d7fb433320f7700558a8892e5021780522dc4574292a"}, - {file = "onnxruntime-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:963159f1f699b0454cd72fcef3276c8a1aab9389a7b301bcd8e320fb9d9e8597"}, - {file = "onnxruntime-1.16.1-cp39-cp39-win32.whl", hash = "sha256:85771adb75190db9364b25ddec353ebf07635b83eb94b64ed014f1f6d57a3857"}, - {file = "onnxruntime-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:d32d2b30799c1f950123c60ae8390818381fd5f88bdf3627eeca10071c155dc5"}, + {file = 
"onnxruntime-1.16.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:3bc41f323ac77acfed190be8ffdc47a6a75e4beeb3473fbf55eeb075ccca8df2"}, + {file = "onnxruntime-1.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:212741b519ee61a4822c79c47147d63a8b0ffde25cd33988d3d7be9fbd51005d"}, + {file = "onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f91f5497fe3df4ceee2f9e66c6148d9bfeb320cd6a71df361c66c5b8bac985a"}, + {file = "onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b1fc269cabd27f129fb9058917d6fdc89b188c49ed8700f300b945c81f889"}, + {file = "onnxruntime-1.16.3-cp310-cp310-win32.whl", hash = "sha256:f36b56a593b49a3c430be008c2aea6658d91a3030115729609ec1d5ffbaab1b6"}, + {file = "onnxruntime-1.16.3-cp310-cp310-win_amd64.whl", hash = "sha256:3c467eaa3d2429c026b10c3d17b78b7f311f718ef9d2a0d6938e5c3c2611b0cf"}, + {file = "onnxruntime-1.16.3-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a225bb683991001d111f75323d355b3590e75e16b5e0f07a0401e741a0143ea1"}, + {file = "onnxruntime-1.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9aded21fe3d898edd86be8aa2eb995aa375e800ad3dfe4be9f618a20b8ee3630"}, + {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00cccc37a5195c8fca5011b9690b349db435986bd508eb44c9fce432da9228a4"}, + {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e253e572021563226a86f1c024f8f70cdae28f2fb1cc8c3a9221e8b1ce37db5"}, + {file = "onnxruntime-1.16.3-cp311-cp311-win32.whl", hash = "sha256:a82a8f0b4c978d08f9f5c7a6019ae51151bced9fd91e5aaa0c20a9e4ac7a60b6"}, + {file = "onnxruntime-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:78d81d9af457a1dc90db9a7da0d09f3ccb1288ea1236c6ab19f0ca61f3eee2d3"}, + {file = "onnxruntime-1.16.3-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:04ebcd29c20473596a1412e471524b2fb88d55e6301c40b98dd2407b5911595f"}, + {file = "onnxruntime-1.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9996bab0f202a6435ab867bc55598f15210d0b72794d5de83712b53d564084ae"}, + {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b8f5083f903408238883821dd8c775f8120cb4a604166dbdabe97f4715256d5"}, + {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c2dcf1b70f8434abb1116fe0975c00e740722aaf321997195ea3618cc00558e"}, + {file = "onnxruntime-1.16.3-cp38-cp38-win32.whl", hash = "sha256:d4a0151e1accd04da6711f6fd89024509602f82c65a754498e960b032359b02d"}, + {file = "onnxruntime-1.16.3-cp38-cp38-win_amd64.whl", hash = "sha256:e8aa5bba78afbd4d8a2654b14ec7462ff3ce4a6aad312a3c2d2c2b65009f2541"}, + {file = "onnxruntime-1.16.3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6829dc2a79d48c911fedaf4c0f01e03c86297d32718a3fdee7a282766dfd282a"}, + {file = "onnxruntime-1.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:76f876c53bfa912c6c242fc38213a6f13f47612d4360bc9d599bd23753e53161"}, + {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4137e5d443e2dccebe5e156a47f1d6d66f8077b03587c35f11ee0c7eda98b533"}, + {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56695c1a343c7c008b647fff3df44da63741fbe7b6003ef576758640719be7b"}, + {file = "onnxruntime-1.16.3-cp39-cp39-win32.whl", hash = "sha256:985a029798744ce4743fcf8442240fed35c8e4d4d30ec7d0c2cdf1388cd44408"}, + {file = 
"onnxruntime-1.16.3-cp39-cp39-win_amd64.whl", hash = "sha256:28ff758b17ce3ca6bcad3d936ec53bd7f5482e7630a13f6dcae518eba8f71d85"}, ] [package.dependencies] @@ -2151,6 +2327,20 @@ files = [ fastapi = ">=0.38.1,<1.0.0" prometheus-client = ">=0.8.0,<1.0.0" +[[package]] +name = "prompt-toolkit" +version = "3.0.39" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "protobuf" version = "4.24.4" @@ -2173,6 +2363,17 @@ files = [ {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] + [[package]] name = "pycparser" version = "2.21" @@ -2353,6 +2554,82 @@ typing-extensions = "*" [package.extras] dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"] +[[package]] +name = "pyinstrument" +version = "4.6.1" +description = "Call stack profiler for Python. Shows you why your code is slow!" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73476e4bc6e467ac1b2c3c0dd1f0b71c9061d4de14626676adfdfbb14aa342b4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4d1da8efd974cf9df52ee03edaee2d3875105ddd00de35aa542760f7c612bdf7"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507be1ee2f2b0c9fba74d622a272640dd6d1b0c9ec3388b2cdeb97ad1e77125f"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cee6de08eb45754ef4f602ce52b640d1c535d934a6a8733a974daa095def37"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7873e8cec92321251fdf894a72b3c78f4c5c20afdd1fef0baf9042ec843bb04"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a242f6cac40bc83e1f3002b6b53681846dfba007f366971db0bf21e02dbb1903"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97c9660cdb4bd2a43cf4f3ab52cffd22f3ac9a748d913b750178fb34e5e39e64"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e304cd0723e2b18ada5e63c187abf6d777949454c734f5974d64a0865859f0f4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win32.whl", hash = "sha256:cee21a2d78187dd8a80f72f5d0f1ddb767b2d9800f8bb4d94b6d11f217c22cdb"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:2000712f71d693fed2f8a1c1638d37b7919124f367b37976d07128d49f1445eb"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a366c6f3dfb11f1739bdc1dee75a01c1563ad0bf4047071e5e77598087df457f"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6be327be65d934796558aa9cb0f75ce62ebd207d49ad1854610c97b0579ad47"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e160d9c5d20d3e4ef82269e4e8b246ff09bdf37af5fb8cb8ccca97936d95ad6"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffbf56605ef21c2fcb60de2fa74ff81f417d8be0c5002a407e414d6ef6dee43"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92cc4924596d6e8f30a16182bbe90893b1572d847ae12652f72b34a9a17c24a"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f4b48a94d938cae981f6948d9ec603bab2087b178d2095d042d5a48aabaecaab"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7a386392275bdef4a1849712dc5b74f0023483fca14ef93d0ca27d453548982"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79ae152f8c6a680a188fb3be5e0f360ac05db5bbf410169a6c40851dfaebcce9"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cad2745964c174c65aa75f1bf68a4394d1b4d28f33894837cfd315d1e836f0"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb81f66f7f94045d723069cf317453d42375de9ff3c69089cf6466b078ac1db4"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab30ae75969da99e9a529e21ff497c18fdf958e822753db4ae7ed1e67094040"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f36cb5b644762fb3c86289324bbef17e95f91cd710603ac19444a47f638e8e96"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8b45075d9dbbc977dbc7007fb22bb0054c6990fbe91bf48dd80c0b96c6307ba7"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:475ac31477f6302e092463896d6a2055f3e6abcd293bad16ff94fc9185308a88"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:29172ab3d8609fdf821c3f2562dc61e14f1a8ff5306607c32ca743582d3a760e"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bd176f297c99035127b264369d2bb97a65255f65f8d4e843836baf55ebb3cee4"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:23e9b4526978432e9999021da9a545992cf2ac3df5ee82db7beb6908fc4c978c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2dbcaccc9f456ef95557ec501caeb292119c24446d768cb4fb43578b0f3d572c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2097f63c66c2bc9678c826b9ff0c25acde3ed455590d9dcac21220673fe74fbf"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:205ac2e76bd65d61b9611a9ce03d5f6393e34ec5b41dd38808f25d54e6b3e067"}, + 
{file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f414ddf1161976a40fc0a333000e6a4ad612719eac0b8c9bb73f47153187148"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65e62ebfa2cd8fb57eda90006f4505ac4c70da00fc2f05b6d8337d776ea76d41"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d96309df4df10be7b4885797c5f69bb3a89414680ebaec0722d8156fde5268c3"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f3d1ad3bc8ebb4db925afa706aa865c4bfb40d52509f143491ac0df2440ee5d2"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win32.whl", hash = "sha256:dc37cb988c8854eb42bda2e438aaf553536566657d157c4473cc8aad5692a779"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:2cd4ce750c34a0318fc2d6c727cc255e9658d12a5cf3f2d0473f1c27157bdaeb"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ca95b21f022e995e062b371d1f42d901452bcbedd2c02f036de677119503355"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac1e1d7e1f1b64054c4eb04eb4869a7a5eef2261440e73943cc1b1bc3c828c18"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0711845e953fce6ab781221aacffa2a66dbc3289f8343e5babd7b2ea34da6c90"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7d28582017de35cb64eb4e4fa603e753095108ca03745f5d17295970ee631f"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7be57db08bd366a37db3aa3a6187941ee21196e8b14975db337ddc7d1490649d"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a0ac0f56860398d2628ce389826ce83fb3a557d0c9a2351e8a2eac6eb869983"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a9045186ff13bc826fef16be53736a85029aae3c6adfe52e666cad00d7ca623b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c4c56b6eab9004e92ad8a48bb54913fdd71fc8a748ae42a27b9e26041646f8b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win32.whl", hash = "sha256:37e989c44b51839d0c97466fa2b623638b9470d56d79e329f359f0e8fa6d83db"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:5494c5a84fee4309d7d973366ca6b8b9f8ba1d6b254e93b7c506264ef74f2cef"}, + {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, +] + +[package.extras] +bin = ["click", "nox"] +docs = ["furo (==2021.6.18b36)", "myst-parser (==0.15.1)", "sphinx (==4.2.0)", "sphinxcontrib-programoutput (==0.17)"] +examples = ["django", "numpy"] +test = ["flaky", "greenlet (>=3.0.0a1)", "ipython", "pytest", "pytest-asyncio (==0.12.0)", "sphinx-autobuild (==2021.3.14)", "trio"] +types = ["typing-extensions"] + [[package]] name = "pylibsrtp" version = "0.8.0" @@ -2501,6 +2778,20 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-celery" +version = "0.0.0" +description = "pytest-celery a shim pytest plugin to enable celery.contrib.pytest" +optional = false +python-versions = "*" +files = [ + {file = "pytest-celery-0.0.0.tar.gz", hash = 
"sha256:cfd060fc32676afa1e4f51b2938f903f7f75d952186b8c6cf631628c4088f406"}, + {file = "pytest_celery-0.0.0-py2.py3-none-any.whl", hash = "sha256:63dec132df3a839226ecb003ffdbb0c2cb88dd328550957e979c942766578060"}, +] + +[package.dependencies] +celery = ">=4.4.0" + [[package]] name = "pytest-cov" version = "4.1.0" @@ -2565,6 +2856,42 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.6" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, + {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, +] + +[package.extras] +dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2577,7 +2904,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2585,15 +2911,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2610,7 +2929,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2618,7 +2936,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2744,6 +3061,24 @@ files = [ [package.extras] full = ["numpy"] +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "regex" version = "2023.10.3" @@ -2862,6 +3197,20 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "s3transfer" version = "0.6.2" @@ -2881,109 +3230,109 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "safetensors" -version = "0.4.0" +version = "0.4.1" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "safetensors-0.4.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:2289ae6dbe6d027ecee016b28ced13a2e21a0b3a3a757a23033a2d1c0b1bad55"}, - {file = "safetensors-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bf6458959f310f551cbbeef2255527ade5f783f952738e73e4d0136198cc3bfe"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6b60a58a8f7cc7aed3b5b73dce1f5259a53c83d9ba43a76a874e6ad868c1b4d"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:491b3477e4d0d4599bb75d79da4b75af2e6ed9b1f6ec2b715991f0bc927bf09a"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d2e10b7e0cd18bb73ed7c17c624a5957b003b81345e18159591771c26ee428"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f667a4c12fb593f5f66ce966cb1b14a7148898b2b1a7f79e0761040ae1e3c51"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9909512bcb6f712bdd04c296cdfb0d8ff73d258ffc5af884bb62ea02d221e0"}, - {file = "safetensors-0.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33d29e846821f0e4f92614022949b09ccf063cb36fe2f9fe099cde1efbfbb87"}, - {file = "safetensors-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4d512525a8e05a045ce6698066ba0c5378c174a83e0b3720a8c7799dc1bb06f3"}, - {file = "safetensors-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0219cea445177f6ad1f9acd3a8d025440c8ff436d70a4a7c7ba9c36066aa9474"}, - {file = "safetensors-0.4.0-cp310-none-win32.whl", hash = "sha256:67ab171eeaad6972d3971c53d29d53353c67f6743284c6d637b59fa3e54c8a94"}, - {file = "safetensors-0.4.0-cp310-none-win_amd64.whl", hash = "sha256:7ffc736039f08a9ca1f09816a7481b8e4469c06e8f8a5ffa8cb67ddd79e6d77f"}, - {file = "safetensors-0.4.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4fe9e3737b30de458225a23926219ca30b902ee779b6a3df96eaab2b6d625ec2"}, - {file = "safetensors-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7916e814a90008de767b1c164a1d83803693c661ffe9af5a697b22e2752edb0"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbc4a4da01143472323c145f3c289e5f6fabde0ac0a3414dabf912a21692fff4"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a54c21654a47669b38e359e8f852af754b786c9da884bb61ad5e9af12bd71ccb"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25cd407955bad5340ba17f9f8ac789a0d751601a311e2f7b2733f9384478c95e"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82e8fc4e3503cd738fd40718a430fe0e5ce6e7ff91a73d6ce628bbb89c41e8ce"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b92059b1a4ad163024d4f526e0e73ebe2bb3ae70537e15e347820b4de5dc27"}, - {file = "safetensors-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5daa05058f7dce85b5f9f60c4eab483ed7859d63978f08a76e52e78859ff20ca"}, - {file = "safetensors-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a86565a5c112dd855909e20144947b4f53abb78c4de207f36ca71ee63ba5b90d"}, - {file = "safetensors-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38032078ed9fea52d06584e441bccc73fb475c4581600c6d6166de2fe2deb3d1"}, - {file = "safetensors-0.4.0-cp311-none-win32.whl", hash = "sha256:2f99d90c91b7c76b40a862acd9085bc77f7974a27dee7cfcebe46149af5a99a1"}, - {file = "safetensors-0.4.0-cp311-none-win_amd64.whl", hash = "sha256:74e2a448ffe19be188b457b130168190ee73b5a75e45ba96796320c1f5ae35d2"}, - {file = "safetensors-0.4.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:1e2f9c69b41d03b4826ffb96b29e07444bb6b34a78a7bafd0b88d59e8ec75b8a"}, - {file = "safetensors-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3910fb5bf747413b59f1a34e6d2a993b589fa7d919709518823c70efaaa350bd"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8fdca709b2470a35a59b1e6dffea75cbe1214b22612b5dd4c93947697aea8b"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f27b8ef814c5fb43456caeb7f3cbb889b76115180aad1f42402839c14a47c5b"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b2d6101eccc43c7be0cb052f13ceda64288b3d8b344b988ed08d7133cbce2f3"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdc34027b545a69be3d4220c140b276129523e4e46db06ad1a0b60d6a4cf9214"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db7bb48ca9e90bb9526c71b388d38d8de160c0354f4c5126df23e8701a870dcb"}, - {file = "safetensors-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a78ffc0795d3595cd9e4d453502e35f764276c49e434b25556a15a337db4dafc"}, - {file = "safetensors-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e735b0f79090f6855b55e205e820b7b595502ffca0009a5c13eef3661ce465b"}, - {file = "safetensors-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f8d2416734e850d5392afffbcb2b8985ea29fb171f1cb197e2ae51b8e35d6438"}, - {file = "safetensors-0.4.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:e853e189ba7d47eaf561094586692ba2bbdd258c096f1755805cac098de0e6ab"}, - {file = "safetensors-0.4.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:4b2aa57b5a4d576f3d1dd6e56980026340f156f8a13c13016bfac4e25295b53f"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6c1316ffde6cb4bf22c7445bc9fd224b4d1b9dd7320695f5611c89e802e4b6"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:003077ec85261d00061058fa12e3c1d2055366b02ce8f2938929359ffbaff2b8"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd63d83a92f1437a8b0431779320376030ae43ace980bea5686d515de0784100"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2077801800b4b13301d8d6290c7fb5bd60737320001717153ebc4371776643b5"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abe0e157a49a75aeeccfbc4f3dac38d8f98512d3cdb35c200f8e628dc5773cf"}, - {file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bfed574f6b1e7e7fe1f17213278875ef6c6e8b1582ab6eda93947db1178cae6"}, - {file = "safetensors-0.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:964ef166a286ce3b023d0d0bd0e21d440a1c8028981c8abdb136bc7872ba9b3d"}, - {file = "safetensors-0.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:44f84373e42183bd56a13a1f2d8acb1db7fedaeffbd83e79cec861477eee1af4"}, - {file = "safetensors-0.4.0-cp37-none-win32.whl", hash = "sha256:c68132727dd86fb641102e494d445f705efe402f4d5e24b278183a15499ab400"}, - {file = "safetensors-0.4.0-cp37-none-win_amd64.whl", hash = "sha256:1db87155454c168aef118d5657a403aee48a4cb08d8851a981157f07351ea317"}, - {file = "safetensors-0.4.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:9e583fa68e5a07cc859c4e13c1ebff12029904aa2e27185cf04a1f57fe9a81c4"}, - {file = "safetensors-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73e7696dcf3f72f99545eb1abe6106ad65ff1f62381d6ce4b34be3272552897a"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4936096a57c62e84e200f92620a536be067fc5effe46ecc7f230ebb496ecd579"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87b328ee1591adac332543e1f5fc2c2d7f149b745ebb0d58d7850818ff9cee27"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b69554c143336256260eceff1d3c0969172a641b54d4668489a711b05f92a2c0"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ebf6bcece5d5d1bd6416472f94604d2c834ca752ac60ed42dba7157e595a990"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6686ce01b8602d55a7d9903c90d4a6e6f90aeb6ddced7cf4605892d0ba94bcb8"}, - {file = "safetensors-0.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:9b8fd6cc2f3bda444a048b541c843c7b7fefc89c4120d7898ea7d5b026e93891"}, - {file = "safetensors-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6abfe67692f81b8bdb99c837f28351c17e624ebf136970c850ee989c720446"}, - {file = "safetensors-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:27a24ca8822c469ee452db4c13418ba983315a0d863c018a9af15f2305eac38c"}, - {file = "safetensors-0.4.0-cp38-none-win32.whl", hash = "sha256:c4a0a47c8640167792d8261ee21b26430bbc39130a7edaad7f4c0bc05669d00e"}, - {file = "safetensors-0.4.0-cp38-none-win_amd64.whl", hash = "sha256:a738970a367f39249e2abb900d9441a8a86d7ff50083e5eaa6e7760a9f216014"}, - {file = "safetensors-0.4.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:806379f37e1abd5d302288c4b2f4186dd7ea7143d4c7811f90a8077f0ae8967b"}, - {file = "safetensors-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b9b94133ed2ae9dda0e95dcace7b7556eba023ffa4c4ae6df8f99377f571d6a"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b563a14c43614815a6b524d2e4edeaace50b717f7e7487bb227dd5b68350f5a"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00a9b157be660fb7ba88fa2eedd05ec93793a5b61e43e783e10cb0b995372802"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8f194f45ab6aa767993c24f0aeb950af169dbc5d611b94c9021a1d13b8a1a34"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:469360b9451db10bfed3881378d5a71b347ecb1ab4f42367d77b8164a13af70b"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5f75fa97ccf32a3c7af476c6a0e851023197d3c078f6de3612008fff94735f9"}, - {file = "safetensors-0.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acf0180283c2efae72f1d8c0a4a7974662091df01be3aa43b5237b1e52ed0a01"}, - {file = "safetensors-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd02b495ba0814619f40bda46771bb06dbbf1d42524b66fa03b2a736c77e4515"}, - {file = "safetensors-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c42bdea183dbaa99e2f0e6120dc524df79cf4289a6f90f30a534444ef20f49fa"}, - {file = "safetensors-0.4.0-cp39-none-win32.whl", hash = "sha256:cef7bb5d9feae7146c3c3c7b3aef7d2c8b39ba7f5ff4252d368eb69462a47076"}, - {file = "safetensors-0.4.0-cp39-none-win_amd64.whl", hash = "sha256:79dd46fb1f19282fd12f544471efb97823ede927cedbf9cf35550d92b349fdd2"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:002301c1afa32909f83745b0c124d002e7ae07e15671f3b43cbebd0ffc5e6037"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67762d36ae088c73d4a3c96bfc4ea8d31233554f35b6cace3a18533238d462ea"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f45230f20a206e5e4c7f7bbf9342178410c6f8b0af889843aa99045a76f7691"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f2ca939bbd8fb2f4dfa28e39a146dad03bc9325e9fc831b68f7b98f69a5a2f1"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:61a00f281391fae5ce91df70918bb61c12d2d514a493fd8056e12114be729911"}, - {file = "safetensors-0.4.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:435fd136a42492b280cb55126f9ce9535b35dd49df2c5d572a5945455a439448"}, - {file = 
"safetensors-0.4.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f0daa788273d683258fb1e4a5e16bef4486b2fca536451a2591bc0f4a6488895"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0620ab0d41e390ccb1c4ea8f63dc00cb5f0b96a5cdd3cd0d64c21765720c074a"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1fa8d067733cb67f22926689ee808f08afacf7700d2ffb44efae90a0693eb1"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaa40bc363edda145db75cd030f3b1822e5478d550c3500a42502ecef32c959"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b561fbc044db7beff2ece0ec219a291809d45a38d30c6b38e7cc46482582f4ba"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:79a983b09782dacf9a1adb19bb98f4a8f6c3144108939f572c047b5797e43cf5"}, - {file = "safetensors-0.4.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:10b65cd3ad79f5d0daf281523b4146bc271a34bb7430d4e03212e0de8622dab8"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:114decacc475a6a9e2f9102a00c171d113ddb5d35cb0bda0db2c0c82b2eaa9ce"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:72ddb741dd5fe42521db76a70e012f76995516a12e7e0ef26be03ea9be77802a"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c5556c2ec75f5a6134866eddd7341cb36062e6edaea343478a279591b63ddba"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed50f239b0ce7ae85b078395593b4a351ede7e6f73af25f4873e3392336f64c9"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495dcaea8fbab70b927d2274e2547824462737acbf98ccd851a71124f779a5c6"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3f4d90c79a65ba2fe2ff0876f6140748f0a3ce6a21e27a35190f4f96321803f8"}, - {file = "safetensors-0.4.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7a524382b5c55b5fbb168e0e9d3f502450c8cf3fb81b93e880018437c206a482"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:9849ea60c7e840bfdd6030ad454d4a6ba837b3398c902f15a30460dd6961c28c"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:6c42623ae7045615d9eaa6877b9df1db4e9cc71ecc14bcc721ea1e475dddd595"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80cb8342f00f3c41b3b93b1a599b84723280d3ac90829bc62262efc03ab28793"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c4f5ed4ede384dea8c99bae76b0718a828dbf7b2c8ced1f44e3b9b1a124475"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40d7cf03493bfe75ef62e2c716314474b28d9ba5bf4909763e4b8dd14330c01a"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:232029f0a9fa6fa1f737324eda98a700409811186888536a2333cbbf64e41741"}, - {file = "safetensors-0.4.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9ed55f4a20c78ff3e8477efb63c8303c2152cdfb3bfea4d025a80f54d38fd628"}, - {file = "safetensors-0.4.0.tar.gz", hash = 
"sha256:b985953c3cf11e942eac4317ef3db3da713e274109cf7cfb6076d877054f013e"}, + {file = "safetensors-0.4.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cba01c6b76e01ec453933b3b3c0157c59b52881c83eaa0f7666244e71aa75fd1"}, + {file = "safetensors-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a8f6f679d97ea0135c7935c202feefbd042c149aa70ee759855e890c01c7814"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc2ce1f5ae5143a7fb72b71fa71db6a42b4f6cf912aa3acdc6b914084778e68"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d87d993eaefe6611a9c241a8bd364a5f1ffed5771c74840363a6c4ed8d868f6"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:097e9af2efa8778cd2f0cba451784253e62fa7cc9fc73c0744d27212f7294e25"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d10a9f7bae608ccfdc009351f01dc3d8535ff57f9488a58a4c38e45bf954fe93"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270b99885ec14abfd56c1d7f28ada81740a9220b4bae960c3de1c6fe84af9e4d"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:285b52a481e7ba93e29ad4ec5841ef2c4479ef0a6c633c4e2629e0508453577b"}, + {file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c3c9f0ca510e0de95abd6424789dcbc879942a3a4e29b0dfa99d9427bf1da75c"}, + {file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:88b4653059c903015284a9722f9a46838c654257173b279c8f6f46dbe80b612d"}, + {file = "safetensors-0.4.1-cp310-none-win32.whl", hash = "sha256:2fe6926110e3d425c4b684a4379b7796fdc26ad7d16922ea1696c8e6ea7e920f"}, + {file = "safetensors-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:a79e16222106b2f5edbca1b8185661477d8971b659a3c814cc6f15181a9b34c8"}, + {file = "safetensors-0.4.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:d93321eea0dd7e81b283e47a1d20dee6069165cc158286316d0d06d340de8fe8"}, + {file = "safetensors-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ff8e41c8037db17de0ea2a23bc684f43eaf623be7d34906fe1ac10985b8365e"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39d36f1d88468a87c437a1bc27c502e71b6ca44c385a9117a9f9ba03a75cc9c6"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ef010e9afcb4057fb6be3d0a0cfa07aac04fe97ef73fe4a23138d8522ba7c17"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b287304f2b2220d51ccb51fd857761e78bcffbeabe7b0238f8dc36f2edfd9542"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e09000b2599e1836314430f81a3884c66a5cbabdff5d9f175b5d560d4de38d78"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c80ce0001efa16066358d2dd77993adc25f5a6c61850e4ad096a2232930bce"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:413e1f6ac248f7d1b755199a06635e70c3515493d3b41ba46063dec33aa2ebb7"}, + {file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3ac139377cfe71ba04573f1cda66e663b7c3e95be850e9e6c2dd4b5984bd513"}, + {file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:04157d008385bea66d12fe90844a80d4a76dc25ec5230b5bd9a630496d1b7c03"}, + {file = "safetensors-0.4.1-cp311-none-win32.whl", hash = "sha256:5f25297148ec665f0deb8bd67e9564634d8d6841041ab5393ccfe203379ea88b"}, + {file = "safetensors-0.4.1-cp311-none-win_amd64.whl", hash = "sha256:b2f8877990a72ff595507b80f4b69036a9a1986a641f8681adf3425d97d3d2a5"}, + {file = "safetensors-0.4.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:eb2c1da1cc39509d1a55620a5f4d14f8911c47a89c926a96e6f4876e864375a3"}, + {file = "safetensors-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:303d2c0415cf15a28f8d7f17379ea3c34c2b466119118a34edd9965983a1a8a6"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb4cb3e37a9b961ddd68e873b29fe9ab4a081e3703412e34aedd2b7a8e9cafd9"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae5497adc68669db2fed7cb2dad81e6a6106e79c9a132da3efdb6af1db1014fa"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b30abd0cddfe959d1daedf92edcd1b445521ebf7ddefc20860ed01486b33c90"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d784a98c492c751f228a4a894c3b8a092ff08b24e73b5568938c28b8c0e8f8df"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57a5ab08b0ec7a7caf30d2ac79bb30c89168431aca4f8854464bb9461686925"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:edcf3121890b5f0616aa5a54683b1a5d2332037b970e507d6bb7841a3a596556"}, + {file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fdb58dee173ef33634c3016c459d671ca12d11e6acf9db008261cbe58107e579"}, + {file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:780dc21eb3fd32ddd0e8c904bdb0290f2454f4ac21ae71e94f9ce72db1900a5a"}, + {file = "safetensors-0.4.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:48901bd540f8a3c1791314bc5c8a170927bf7f6acddb75bf0a263d081a3637d4"}, + {file = "safetensors-0.4.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3b0b7b2d5976fbed8a05e2bbdce5816a59e6902e9e7c7e07dc723637ed539787"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f69903ff49cb30b9227fb5d029bea276ea20d04b06803877a420c5b1b74c689"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0ddd050e01f3e843aa8c1c27bf68675b8a08e385d0045487af4d70418c3cb356"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a82bc2bd7a9a0e08239bdd6d7774d64121f136add93dfa344a2f1a6d7ef35fa"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ace9e66a40f98a216ad661245782483cf79cf56eb2b112650bb904b0baa9db5"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82cbb8f4d022f2e94498cbefca900698b8ded3d4f85212f47da614001ff06652"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:791edc10a3c359a2f5f52d5cddab0df8a45107d91027d86c3d44e57162e5d934"}, + {file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:83c2cfbe8c6304f0891e7bb378d56f66d2148972eeb5f747cd8a2246886f0d8c"}, + {file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:04dd14f53f5500eb4c4149674216ba1000670efbcf4b1b5c2643eb244e7882ea"}, + {file = "safetensors-0.4.1-cp37-none-win32.whl", hash = "sha256:d5b3defa74f3723a388bfde2f5d488742bc4879682bd93267c09a3bcdf8f869b"}, + {file = "safetensors-0.4.1-cp37-none-win_amd64.whl", hash = "sha256:25a043cbb59d4f75e9dd87fdf5c009dd8830105a2c57ace49b72167dd9808111"}, + {file = "safetensors-0.4.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:3f6a520af7f2717c5ecba112041f2c8af1ca6480b97bf957aba81ed9642e654c"}, + {file = "safetensors-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3807ac3b16288dffebb3474b555b56fe466baa677dfc16290dcd02dca1ab228"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b58ba13a9e82b4bc3fc221914f6ef237fe6c2adb13cede3ace64d1aacf49610"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dac4bb42f8679aadc59bd91a4c5a1784a758ad49d0912995945cd674089f628e"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911b48dc09e321a194def3a7431662ff4f03646832f3a8915bbf0f449b8a5fcb"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82571d20288c975c1b30b08deb9b1c3550f36b31191e1e81fae87669a92217d0"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da52ee0dc8ba03348ffceab767bd8230842fdf78f8a996e2a16445747143a778"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2536b11ce665834201072e9397404170f93f3be10cca9995b909f023a04501ee"}, + {file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:998fbac99ca956c3a09fe07cc0b35fac26a521fa8865a690686d889f0ff4e4a6"}, + {file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:845be0aafabf2a60c2d482d4e93023fecffe5e5443d801d7a7741bae9de41233"}, + {file = "safetensors-0.4.1-cp38-none-win32.whl", hash = "sha256:ce7a28bc8af685a69d7e869d09d3e180a275e3281e29cf5f1c7319e231932cc7"}, + {file = "safetensors-0.4.1-cp38-none-win_amd64.whl", hash = "sha256:e056fb9e22d118cc546107f97dc28b449d88274207dd28872bd668c86216e4f6"}, + {file = "safetensors-0.4.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:bdc0d039e44a727824639824090bd8869535f729878fa248addd3dc01db30eae"}, + {file = "safetensors-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c1b1d510c7aba71504ece87bf393ea82638df56303e371e5e2cf09d18977dd7"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd0afd95c1e497f520e680ea01e0397c0868a3a3030e128438cf6e9e3fcd671"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f603bdd8deac6726d39f41688ed353c532dd53935234405d79e9eb53f152fbfb"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8a85e3e47e0d4eebfaf9a58b40aa94f977a56050cb5598ad5396a9ee7c087c6"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0ccb5aa0f3be2727117e5631200fbb3a5b3a2b3757545a92647d6dd8be6658f"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d784938534e255473155e4d9f276ee69eb85455b6af1292172c731409bf9adee"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a257de175c254d39ccd6a21341cd62eb7373b05c1e618a78096a56a857e0c316"}, + {file = 
"safetensors-0.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6fd80f7794554091836d4d613d33a7d006e2b8d6ba014d06f97cebdfda744f64"}, + {file = "safetensors-0.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:35803201d980efcf964b75a0a2aee97fe5e9ecc5f3ad676b38fafdfe98e0620d"}, + {file = "safetensors-0.4.1-cp39-none-win32.whl", hash = "sha256:7ff8a36e0396776d3ed9a106fc9a9d7c55d4439ca9a056a24bf66d343041d3e6"}, + {file = "safetensors-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:bfa2e20342b81921b98edba52f8deb68843fa9c95250739a56b52ceda5ea5c61"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ae2d5a31cfb8a973a318f7c4d2cffe0bd1fe753cdf7bb41a1939d45a0a06f964"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a45dbf03e8334d3a5dc93687d98b6dc422f5d04c7d519dac09b84a3c87dd7c6"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297b359d91126c0f9d4fd17bae3cfa2fe3a048a6971b8db07db746ad92f850c"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda3d98e2bcece388232cfc551ebf063b55bdb98f65ab54df397da30efc7dcc5"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8934bdfd202ebd0697040a3dff40dd77bc4c5bbf3527ede0532f5e7fb4d970f"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:42c3710cec7e5c764c7999697516370bee39067de0aa089b7e2cfb97ac8c6b20"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53134226053e56bd56e73f7db42596e7908ed79f3c9a1016e4c1dade593ac8e5"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:257d59e40a1b367cb544122e7451243d65b33c3f34d822a347f4eea6fdf97fdf"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d54c2f1826e790d1eb2d2512bfd0ee443f0206b423d6f27095057c7f18a0687"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645b3f1138fce6e818e79d4128afa28f0657430764cc045419c1d069ff93f732"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9a7ffb1e551c6df51d267f5a751f042b183df22690f6feceac8d27364fd51d7"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:44e230fbbe120de564b64f63ef3a8e6ff02840fa02849d9c443d56252a1646d4"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9d16b3b2fcc6fca012c74bd01b5619c655194d3e3c13e4d4d0e446eefa39a463"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5d95ea4d8b32233910734a904123bdd3979c137c461b905a5ed32511defc075f"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:dab431699b5d45e0ca043bc580651ce9583dda594e62e245b7497adb32e99809"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d8bbb7344e39cb9d4762e85c21df94ebeb03edac923dd94bb9ed8c10eac070"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1faf5111c66a6ba91f85dff2e36edaaf36e6966172703159daeef330de4ddc7b"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:660ca1d8bff6c7bc7c6b30b9b32df74ef3ab668f5df42cefd7588f0d40feadcb"}, + 
{file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ae2f67f04ed0bb2e56fd380a8bd3eef03f609df53f88b6f5c7e89c08e52aae00"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8ed5d2c04cdc1afc6b3c28d59580448ac07732c50d94c15e14670f9c473a2ce"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2b6a2814278b6660261aa9a9aae524616de9f1ec364e3716d219b6ed8f91801f"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3cfd1ca35eacc635f0eaa894e5c5ed83ffebd0f95cac298fd430014fa7323631"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4177b456c6b0c722d82429127b5beebdaf07149d265748e97e0a34ff0b3694c8"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313e8472197bde54e3ec54a62df184c414582979da8f3916981b6a7954910a1b"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fdb4adb76e21bad318210310590de61c9f4adcef77ee49b4a234f9dc48867869"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1d568628e9c43ca15eb96c217da73737c9ccb07520fafd8a1eba3f2750614105"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:573b6023a55a2f28085fc0a84e196c779b6cbef4d9e73acea14c8094fee7686f"}, + {file = "safetensors-0.4.1.tar.gz", hash = "sha256:2304658e6ada81a5223225b4efe84748e760c46079bffedf7e321763cafb36c9"}, ] [package.extras] @@ -3098,6 +3447,22 @@ starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] tornado = ["tornado (>=5)"] +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -3286,56 +3651,117 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tokenizers" -version = "0.13.3" -description = "Fast and Customizable Tokenizers" +version = "0.15.0" +description = "" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = 
"tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"}, - {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"}, - {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"}, - {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"}, - {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"}, - {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"}, - {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"}, - {file = 
"tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"}, - {file = "tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"}, - {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"}, - {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"}, - {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"}, - {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"}, - {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"}, - {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"}, + {file = "tokenizers-0.15.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cd3cd0299aaa312cd2988957598f80becd04d5a07338741eca076057a2b37d6e"}, + {file = "tokenizers-0.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a922c492c721744ee175f15b91704be2d305569d25f0547c77cd6c9f210f9dc"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:331dd786d02fc38698f835fff61c99480f98b73ce75a4c65bd110c9af5e4609a"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88dd0961c437d413ab027f8b115350c121d49902cfbadf08bb8f634b15fa1814"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:6fdcc55339df7761cd52e1fbe8185d3b3963bc9e3f3545faa6c84f9e8818259a"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1480b0051d8ab5408e8e4db2dc832f7082ea24aa0722c427bde2418c6f3bd07"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9855e6c258918f9cf62792d4f6ddfa6c56dccd8c8118640f867f6393ecaf8bd7"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9529fe75efcd54ba8d516aa725e1851df9199f0669b665c55e90df08f5af86"}, + {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8edcc90a36eab0705fe9121d6c77c6e42eeef25c7399864fd57dfb27173060bf"}, + {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae17884aafb3e94f34fb7cfedc29054f5f54e142475ebf8a265a4e388fee3f8b"}, + {file = "tokenizers-0.15.0-cp310-none-win32.whl", hash = "sha256:9a3241acdc9b44cff6e95c4a55b9be943ef3658f8edb3686034d353734adba05"}, + {file = "tokenizers-0.15.0-cp310-none-win_amd64.whl", hash = "sha256:4b31807cb393d6ea31926b307911c89a1209d5e27629aa79553d1599c8ffdefe"}, + {file = "tokenizers-0.15.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:af7e9be8c05d30bb137b9fd20f9d99354816599e5fd3d58a4b1e28ba3b36171f"}, + {file = "tokenizers-0.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3d7343fa562ea29661783344a2d83662db0d3d17a6fa6a403cac8e512d2d9fd"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:32371008788aeeb0309a9244809a23e4c0259625e6b74a103700f6421373f395"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9db64c7c9954fbae698884c5bb089764edc549731e5f9b7fa1dd4e4d78d77f"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbed5944c31195514669cf6381a0d8d47f164943000d10f93d6d02f0d45c25e0"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aab16c4a26d351d63e965b0c792f5da7227a37b69a6dc6d922ff70aa595b1b0c"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c2b60b12fdd310bf85ce5d7d3f823456b9b65eed30f5438dd7761879c495983"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0344d6602740e44054a9e5bbe9775a5e149c4dddaff15959bb07dcce95a5a859"}, + {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4525f6997d81d9b6d9140088f4f5131f6627e4c960c2c87d0695ae7304233fc3"}, + {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65975094fef8cc68919644936764efd2ce98cf1bacbe8db2687155d2b0625bee"}, + {file = "tokenizers-0.15.0-cp311-none-win32.whl", hash = "sha256:ff5d2159c5d93015f5a4542aac6c315506df31853123aa39042672031768c301"}, + {file = "tokenizers-0.15.0-cp311-none-win_amd64.whl", hash = "sha256:2dd681b53cf615e60a31a115a3fda3980e543d25ca183797f797a6c3600788a3"}, + {file = "tokenizers-0.15.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:c9cce6ee149a3d703f86877bc2a6d997e34874b2d5a2d7839e36b2273f31d3d9"}, + {file = "tokenizers-0.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a0a94bc3370e6f1cc8a07a8ae867ce13b7c1b4291432a773931a61f256d44ea"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:309cfcccfc7e502cb1f1de2c9c1c94680082a65bfd3a912d5a5b2c90c677eb60"}, + 
{file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8413e994dd7d875ab13009127fc85633916c71213917daf64962bafd488f15dc"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0ebf9430f901dbdc3dcb06b493ff24a3644c9f88c08e6a1d6d0ae2228b9b818"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10361e9c7864b22dd791ec5126327f6c9292fb1d23481d4895780688d5e298ac"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:babe42635b8a604c594bdc56d205755f73414fce17ba8479d142a963a6c25cbc"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3768829861e964c7a4556f5f23307fce6a23872c2ebf030eb9822dbbbf7e9b2a"}, + {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9c91588a630adc88065e1c03ac6831e3e2112558869b9ebcb2b8afd8a14c944d"}, + {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:77606994e793ca54ecf3a3619adc8a906a28ca223d9354b38df41cb8766a0ed6"}, + {file = "tokenizers-0.15.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:6fe143939f3b596681922b2df12a591a5b010e7dcfbee2202482cd0c1c2f2459"}, + {file = "tokenizers-0.15.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:b7bee0f1795e3e3561e9a557061b1539e5255b8221e3f928f58100282407e090"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5d37e7f4439b4c46192ab4f2ff38ab815e4420f153caa13dec9272ef14403d34"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caadf255cf7f951b38d10097836d1f3bcff4aeaaffadfdf748bab780bf5bff95"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05accb9162bf711a941b1460b743d62fec61c160daf25e53c5eea52c74d77814"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26a2ef890740127cb115ee5260878f4a677e36a12831795fd7e85887c53b430b"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e54c5f26df14913620046b33e822cb3bcd091a332a55230c0e63cc77135e2169"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669b8ed653a578bcff919566631156f5da3aab84c66f3c0b11a6281e8b4731c7"}, + {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0ea480d943297df26f06f508dab6e012b07f42bf3dffdd36e70799368a5f5229"}, + {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc80a0a565ebfc7cd89de7dd581da8c2b3238addfca6280572d27d763f135f2f"}, + {file = "tokenizers-0.15.0-cp37-none-win32.whl", hash = "sha256:cdd945e678bbdf4517d5d8de66578a5030aeefecdb46f5320b034de9cad8d4dd"}, + {file = "tokenizers-0.15.0-cp37-none-win_amd64.whl", hash = "sha256:1ab96ab7dc706e002c32b2ea211a94c1c04b4f4de48354728c3a6e22401af322"}, + {file = "tokenizers-0.15.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f21c9eb71c9a671e2a42f18b456a3d118e50c7f0fc4dd9fa8f4eb727fea529bf"}, + {file = "tokenizers-0.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a5f4543a35889679fc3052086e69e81880b2a5a28ff2a52c5a604be94b77a3f"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f8aa81afec893e952bd39692b2d9ef60575ed8c86fce1fd876a06d2e73e82dca"}, + {file = 
"tokenizers-0.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1574a5a4af22c3def93fe8fe4adcc90a39bf5797ed01686a4c46d1c3bc677d2f"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c7982fd0ec9e9122d03b209dac48cebfea3de0479335100ef379a9a959b9a5a"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d16b647032df2ce2c1f9097236e046ea9fedd969b25637b9d5d734d78aa53b"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3cdf29e6f9653da330515dc8fa414be5a93aae79e57f8acc50d4028dd843edf"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7286f3df10de840867372e3e64b99ef58c677210e3ceb653cd0e740a5c53fe78"}, + {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aabc83028baa5a36ce7a94e7659250f0309c47fa4a639e5c2c38e6d5ea0de564"}, + {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72f78b0e0e276b1fc14a672fa73f3acca034ba8db4e782124a2996734a9ba9cf"}, + {file = "tokenizers-0.15.0-cp38-none-win32.whl", hash = "sha256:9680b0ecc26e7e42f16680c1aa62e924d58d1c2dd992707081cc10a374896ea2"}, + {file = "tokenizers-0.15.0-cp38-none-win_amd64.whl", hash = "sha256:f17cbd88dab695911cbdd385a5a7e3709cc61dff982351f5d1b5939f074a2466"}, + {file = "tokenizers-0.15.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3661862df7382c5eb23ac4fbf7c75e69b02dc4f5784e4c5a734db406b5b24596"}, + {file = "tokenizers-0.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3045d191dad49647f5a5039738ecf1c77087945c7a295f7bcf051c37067e883"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fcaad9ab0801f14457d7c820d9f246b5ab590c407fc6b073819b1573097aa7"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79f17027f24fe9485701c8dbb269b9c713954ec3bdc1e7075a66086c0c0cd3c"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:01a3aa332abc4bee7640563949fcfedca4de8f52691b3b70f2fc6ca71bfc0f4e"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05b83896a893cdfedad8785250daa3ba9f0504848323471524d4783d7291661e"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbbf2489fcf25d809731ba2744ff278dd07d9eb3f8b7482726bd6cae607073a4"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab806ad521a5e9de38078b7add97589c313915f6f5fec6b2f9f289d14d607bd6"}, + {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a522612d5c88a41563e3463226af64e2fa00629f65cdcc501d1995dd25d23f5"}, + {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e58a38c4e6075810bdfb861d9c005236a72a152ebc7005941cc90d1bbf16aca9"}, + {file = "tokenizers-0.15.0-cp39-none-win32.whl", hash = "sha256:b8034f1041fd2bd2b84ff9f4dc4ae2e1c3b71606820a9cd5c562ebd291a396d1"}, + {file = "tokenizers-0.15.0-cp39-none-win_amd64.whl", hash = "sha256:edde9aa964145d528d0e0dbf14f244b8a85ebf276fb76869bc02e2530fa37a96"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:309445d10d442b7521b98083dc9f0b5df14eca69dbbfebeb98d781ee2cef5d30"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:d3125a6499226d4d48efc54f7498886b94c418e93a205b673bc59364eecf0804"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ed56ddf0d54877bb9c6d885177db79b41576e61b5ef6defeb579dcb803c04ad5"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b22cd714706cc5b18992a232b023f736e539495f5cc61d2d28d176e55046f6c"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2719b1e9bc8e8e7f6599b99d0a8e24f33d023eb8ef644c0366a596f0aa926"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85ddae17570ec7e5bfaf51ffa78d044f444a8693e1316e1087ee6150596897ee"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76f1bed992e396bf6f83e3df97b64ff47885e45e8365f8983afed8556a0bc51f"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3bb0f4df6dce41a1c7482087b60d18c372ef4463cb99aa8195100fcd41e0fd64"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:22c27672c27a059a5f39ff4e49feed8c7f2e1525577c8a7e3978bd428eb5869d"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78104f5d035c9991f92831fc0efe9e64a05d4032194f2a69f67aaa05a4d75bbb"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40b73dc19d82c3e3ffb40abdaacca8fbc95eeb26c66b7f9f860aebc07a73998"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d801d1368188c74552cd779b1286e67cb9fd96f4c57a9f9a2a09b6def9e1ab37"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82641ffb13a4da1293fcc9f437d457647e60ed0385a9216cd135953778b3f0a1"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:160f9d1810f2c18fffa94aa98bf17632f6bd2dabc67fcb01a698ca80c37d52ee"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d7d6eea831ed435fdeeb9bcd26476226401d7309d115a710c65da4088841948"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f6456bec6c557d63d8ec0023758c32f589e1889ed03c055702e84ce275488bed"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eef39a502fad3bf104b9e1906b4fb0cee20e44e755e51df9a98f8922c3bf6d4"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1e4664c5b797e093c19b794bbecc19d2367e782b4a577d8b7c1821db5dc150d"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca003fb5f3995ff5cf676db6681b8ea5d54d3b30bea36af1120e78ee1a4a4cdf"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7f17363141eb0c53752c89e10650b85ef059a52765d0802ba9613dbd2d21d425"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8a765db05581c7d7e1280170f2888cda351760d196cc059c37ea96f121125799"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2a0dd641a72604486cd7302dd8f87a12c8a9b45e1755e47d2682733f097c1af5"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0a1a3c973e4dc97797fc19e9f11546c95278ffc55c4492acb742f69e035490bc"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4fab75642aae4e604e729d6f78e0addb9d7e7d49e28c8f4d16b24da278e5263"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65f80be77f6327a86d8fd35a4467adcfe6174c159b4ab52a1a8dd4c6f2d7d9e1"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8da7533dbe66b88afd430c56a2f2ce1fd82e2681868f857da38eeb3191d7498"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa8eb4584fc6cbe6a84d7a7864be3ed28e23e9fd2146aa8ef1814d579df91958"}, + {file = "tokenizers-0.15.0.tar.gz", hash = "sha256:10c7e6e7b4cabd757da59e93f5f8d1126291d16f8b54f28510825ef56a3e5d0e"}, ] +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + [package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] [[package]] @@ -3360,53 +3786,52 @@ telegram = ["requests"] [[package]] name = "transformers" -version = "4.33.3" +version = "4.36.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.8.0" files = [ - {file = "transformers-4.33.3-py3-none-any.whl", hash = "sha256:7150bbf6781ddb3338ce7d74f4d6f557e6c236a0a1dd3de57412214caae7fd71"}, - {file = "transformers-4.33.3.tar.gz", hash = "sha256:8ea7c92310dee7c63b14766ce928218f7a9177960b2487ac018c91ae621af03e"}, + {file = "transformers-4.36.2-py3-none-any.whl", hash = "sha256:462066c4f74ee52516f12890dcc9ec71d1a5e97998db621668455117a54330f6"}, + {file = "transformers-4.36.2.tar.gz", hash = "sha256:d8068e897e47793281501e547d2bbdfc5b8556409c2cb6c3d9e2ca77d4c0b4ec"}, ] [package.dependencies] filelock = "*" -huggingface-hub = ">=0.15.1,<1.0" +huggingface-hub = ">=0.19.3,<1.0" numpy = ">=1.17" packaging = ">=20.0" pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" safetensors = ">=0.3.1" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" +tokenizers = ">=0.14,<0.19" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.20.3)"] -agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"] -all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib 
(>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic 
(>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", 
"onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] modelcreation = ["cookiecutter (==1.7.3)"] natten = ["natten (>=0.14.6)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] -ray = ["ray[tune]"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] @@ 
-3414,18 +3839,18 @@ serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.20.3)", "torch (>=1.10,!=1.12.0)"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch (>=1.10,!=1.12.0)"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (<10.0.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.15.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"] video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (<10.0.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] [[package]] name = "typing-extensions" @@ -3438,6 +3863,17 @@ files = [ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + [[package]] name = "urllib3" version = "1.26.17" @@ -3523,6 +3959,17 @@ dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flak docs 
= ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"] +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +optional = false +python-versions = ">=3.6" +files = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, +] + [[package]] name = "watchfiles" version = "0.20.0" @@ -3557,6 +4004,17 @@ files = [ [package.dependencies] anyio = ">=3.0.0" +[[package]] +name = "wcwidth" +version = "0.2.8" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, +] + [[package]] name = "websockets" version = "11.0.3" @@ -3838,4 +4296,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "61578467a70980ff9c2dc0cd787b6410b91d7c5fd2bb4c46b6951ec82690ef67" +content-hash = "a0d9362ca5fc6e5c310bba39ce9bac720880d4dbb884a9f6625e51c952c54ffa" diff --git a/server/pyproject.toml b/server/pyproject.toml index e3b44774..a28ba42f 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -8,12 +8,11 @@ packages = [] [tool.poetry.dependencies] python = "^3.11" -aiohttp = "^3.8.5" +aiohttp = "^3.9.0" aiohttp-cors = "^0.7.0" av = "^10.0.0" requests = "^2.31.0" aiortc = "^1.5.0" -faster-whisper = "^0.7.1" sortedcontainers = "^2.4.0" loguru = "^0.7.0" pydantic-settings = "^2.0.2" @@ -28,16 +27,22 @@ sqlalchemy = "<1.5" fief-client = {extras = ["fastapi"], version = "^0.17.0"} alembic = "^1.11.3" nltk = "^3.8.1" -transformers = "^4.32.1" prometheus-fastapi-instrumentator = "^6.1.0" sentencepiece = "^0.1.99" protobuf = "^4.24.3" profanityfilter = "^2.0.6" +celery = "^5.3.4" +redis = "^5.0.1" +python-jose = {extras = ["cryptography"], version = "^3.3.0"} +python-multipart = "^0.0.6" +faster-whisper = "^0.10.0" +transformers = "^4.36.2" [tool.poetry.group.dev.dependencies] black = "^23.7.0" stamina = "^23.1.0" +pyinstrument = "^4.6.1" [tool.poetry.group.tests.dependencies] @@ -47,6 +52,7 @@ pytest-asyncio = "^0.21.1" pytest = "^7.4.0" httpx-ws = "^0.4.1" pytest-httpx = "^0.23.1" +pytest-celery = "^0.0.0" [tool.poetry.group.aws.dependencies] diff --git a/server/reflector/app.py b/server/reflector/app.py index 758faf69..2a72ff44 100644 --- a/server/reflector/app.py +++ b/server/reflector/app.py @@ -13,6 +13,14 @@ from reflector.metrics import metrics_init from reflector.settings import settings from reflector.views.rtc_offer import router as rtc_offer_router from reflector.views.transcripts import router as transcripts_router +from reflector.views.transcripts_audio import router as transcripts_audio_router +from reflector.views.transcripts_participants import ( + router as transcripts_participants_router, +) +from reflector.views.transcripts_speaker import router as transcripts_speaker_router +from reflector.views.transcripts_upload import router as transcripts_upload_router +from reflector.views.transcripts_webrtc 
import router as transcripts_webrtc_router +from reflector.views.transcripts_websocket import router as transcripts_websocket_router from reflector.views.user import router as user_router try: @@ -41,7 +49,6 @@ if settings.SENTRY_DSN: else: logger.info("Sentry disabled") - # build app app = FastAPI(lifespan=lifespan) app.add_middleware( @@ -61,9 +68,18 @@ metrics_init(app, instrumentator) # register views app.include_router(rtc_offer_router) app.include_router(transcripts_router, prefix="/v1") +app.include_router(transcripts_audio_router, prefix="/v1") +app.include_router(transcripts_participants_router, prefix="/v1") +app.include_router(transcripts_speaker_router, prefix="/v1") +app.include_router(transcripts_upload_router, prefix="/v1") +app.include_router(transcripts_websocket_router, prefix="/v1") +app.include_router(transcripts_webrtc_router, prefix="/v1") app.include_router(user_router, prefix="/v1") add_pagination(app) +# prepare celery +from reflector.worker import app as celery_app # noqa + # simpler openapi id def use_route_names_as_operation_ids(app: FastAPI) -> None: @@ -84,7 +100,10 @@ def use_route_names_as_operation_ids(app: FastAPI) -> None: version = None if route.path.startswith("/v"): version = route.path.split("/")[1] - opid = f"{version}_{route.name}" + if route.operation_id is not None: + opid = f"{version}_{route.operation_id}" + else: + opid = f"{version}_{route.name}" else: opid = route.name @@ -94,11 +113,28 @@ def use_route_names_as_operation_ids(app: FastAPI) -> None: "Please rename the route or the view function." ) route.operation_id = opid - ensure_uniq_operation_ids.add(route.name) + ensure_uniq_operation_ids.add(opid) use_route_names_as_operation_ids(app) +if settings.PROFILING: + from fastapi import Request + from fastapi.responses import HTMLResponse + from pyinstrument import Profiler + + @app.middleware("http") + async def profile_request(request: Request, call_next): + profiling = request.query_params.get("profile", False) + if profiling: + profiler = Profiler(async_mode="enabled") + profiler.start() + await call_next(request) + profiler.stop() + return HTMLResponse(profiler.output_html()) + else: + return await call_next(request) + if __name__ == "__main__": import uvicorn diff --git a/server/reflector/db/__init__.py b/server/reflector/db/__init__.py index b68dfe20..9871c633 100644 --- a/server/reflector/db/__init__.py +++ b/server/reflector/db/__init__.py @@ -1,32 +1,13 @@ import databases import sqlalchemy - from reflector.events import subscribers_shutdown, subscribers_startup from reflector.settings import settings database = databases.Database(settings.DATABASE_URL) metadata = sqlalchemy.MetaData() - -transcripts = sqlalchemy.Table( - "transcript", - metadata, - sqlalchemy.Column("id", sqlalchemy.String, primary_key=True), - sqlalchemy.Column("name", sqlalchemy.String), - sqlalchemy.Column("status", sqlalchemy.String), - sqlalchemy.Column("locked", sqlalchemy.Boolean), - sqlalchemy.Column("duration", sqlalchemy.Integer), - sqlalchemy.Column("created_at", sqlalchemy.DateTime), - sqlalchemy.Column("title", sqlalchemy.String, nullable=True), - sqlalchemy.Column("short_summary", sqlalchemy.String, nullable=True), - sqlalchemy.Column("long_summary", sqlalchemy.String, nullable=True), - sqlalchemy.Column("topics", sqlalchemy.JSON), - sqlalchemy.Column("events", sqlalchemy.JSON), - sqlalchemy.Column("source_language", sqlalchemy.String, nullable=True), - sqlalchemy.Column("target_language", sqlalchemy.String, nullable=True), - # with user attached, 
optional - sqlalchemy.Column("user_id", sqlalchemy.String), -) +# import models +import reflector.db.transcripts # noqa engine = sqlalchemy.create_engine( settings.DATABASE_URL, connect_args={"check_same_thread": False} diff --git a/server/reflector/db/transcripts.py b/server/reflector/db/transcripts.py new file mode 100644 index 00000000..423f8af6 --- /dev/null +++ b/server/reflector/db/transcripts.py @@ -0,0 +1,507 @@ +import json +from contextlib import asynccontextmanager +from datetime import datetime +from pathlib import Path +from typing import Any, Literal +from uuid import uuid4 + +import sqlalchemy +from fastapi import HTTPException +from pydantic import BaseModel, ConfigDict, Field +from reflector.db import database, metadata +from reflector.processors.types import Word as ProcessorWord +from reflector.settings import settings +from reflector.storage import Storage +from sqlalchemy.sql import false + +transcripts = sqlalchemy.Table( + "transcript", + metadata, + sqlalchemy.Column("id", sqlalchemy.String, primary_key=True), + sqlalchemy.Column("name", sqlalchemy.String), + sqlalchemy.Column("status", sqlalchemy.String), + sqlalchemy.Column("locked", sqlalchemy.Boolean), + sqlalchemy.Column("duration", sqlalchemy.Integer), + sqlalchemy.Column("created_at", sqlalchemy.DateTime), + sqlalchemy.Column("title", sqlalchemy.String, nullable=True), + sqlalchemy.Column("short_summary", sqlalchemy.String, nullable=True), + sqlalchemy.Column("long_summary", sqlalchemy.String, nullable=True), + sqlalchemy.Column("topics", sqlalchemy.JSON), + sqlalchemy.Column("events", sqlalchemy.JSON), + sqlalchemy.Column("participants", sqlalchemy.JSON), + sqlalchemy.Column("source_language", sqlalchemy.String, nullable=True), + sqlalchemy.Column("target_language", sqlalchemy.String, nullable=True), + sqlalchemy.Column( + "reviewed", sqlalchemy.Boolean, nullable=False, server_default=false() + ), + sqlalchemy.Column( + "audio_location", + sqlalchemy.String, + nullable=False, + server_default="local", + ), + # with user attached, optional + sqlalchemy.Column("user_id", sqlalchemy.String), + sqlalchemy.Column( + "share_mode", + sqlalchemy.String, + nullable=False, + server_default="private", + ), +) + + +def generate_uuid4() -> str: + return str(uuid4()) + + +def generate_transcript_name() -> str: + now = datetime.utcnow() + return f"Transcript {now.strftime('%Y-%m-%d %H:%M:%S')}" + + +def get_storage() -> Storage: + return Storage.get_instance( + name=settings.TRANSCRIPT_STORAGE_BACKEND, + settings_prefix="TRANSCRIPT_STORAGE_", + ) + + +class AudioWaveform(BaseModel): + data: list[float] + + +class TranscriptText(BaseModel): + text: str + translation: str | None + + +class TranscriptSegmentTopic(BaseModel): + speaker: int + text: str + timestamp: float + + +class TranscriptTopic(BaseModel): + id: str = Field(default_factory=generate_uuid4) + title: str + summary: str + timestamp: float + duration: float | None = 0 + transcript: str | None = None + words: list[ProcessorWord] = [] + + +class TranscriptFinalShortSummary(BaseModel): + short_summary: str + + +class TranscriptFinalLongSummary(BaseModel): + long_summary: str + + +class TranscriptFinalTitle(BaseModel): + title: str + + +class TranscriptDuration(BaseModel): + duration: float + + +class TranscriptWaveform(BaseModel): + waveform: list[float] + + +class TranscriptEvent(BaseModel): + event: str + data: dict + + +class TranscriptParticipant(BaseModel): + model_config = ConfigDict(from_attributes=True) + id: str = Field(default_factory=generate_uuid4) + 
speaker: int | None + name: str + + +class Transcript(BaseModel): + id: str = Field(default_factory=generate_uuid4) + user_id: str | None = None + name: str = Field(default_factory=generate_transcript_name) + status: str = "idle" + locked: bool = False + duration: float = 0 + created_at: datetime = Field(default_factory=datetime.utcnow) + title: str | None = None + short_summary: str | None = None + long_summary: str | None = None + topics: list[TranscriptTopic] = [] + events: list[TranscriptEvent] = [] + participants: list[TranscriptParticipant] | None = [] + source_language: str = "en" + target_language: str = "en" + share_mode: Literal["private", "semi-private", "public"] = "private" + audio_location: str = "local" + reviewed: bool = False + + def add_event(self, event: str, data: BaseModel) -> TranscriptEvent: + ev = TranscriptEvent(event=event, data=data.model_dump()) + self.events.append(ev) + return ev + + def upsert_topic(self, topic: TranscriptTopic): + index = next((i for i, t in enumerate(self.topics) if t.id == topic.id), None) + if index is not None: + self.topics[index] = topic + else: + self.topics.append(topic) + + def upsert_participant(self, participant: TranscriptParticipant): + index = next( + (i for i, p in enumerate(self.participants) if p.id == participant.id), + None, + ) + if index is not None: + self.participants[index] = participant + else: + self.participants.append(participant) + return participant + + def delete_participant(self, participant_id: str): + index = next( + (i for i, p in enumerate(self.participants) if p.id == participant_id), + None, + ) + if index is not None: + del self.participants[index] + + def events_dump(self, mode="json"): + return [event.model_dump(mode=mode) for event in self.events] + + def topics_dump(self, mode="json"): + return [topic.model_dump(mode=mode) for topic in self.topics] + + def participants_dump(self, mode="json"): + return [participant.model_dump(mode=mode) for participant in self.participants] + + def unlink(self): + self.data_path.unlink(missing_ok=True) + + @property + def data_path(self): + return Path(settings.DATA_DIR) / self.id + + @property + def audio_wav_filename(self): + return self.data_path / "audio.wav" + + @property + def audio_mp3_filename(self): + return self.data_path / "audio.mp3" + + @property + def audio_waveform_filename(self): + return self.data_path / "audio.json" + + @property + def storage_audio_path(self): + return f"{self.id}/audio.mp3" + + @property + def audio_waveform(self): + try: + with open(self.audio_waveform_filename) as fd: + data = json.load(fd) + except json.JSONDecodeError: + # unlink file if it's corrupted + self.audio_waveform_filename.unlink(missing_ok=True) + return None + + return AudioWaveform(data=data) + + async def get_audio_url(self) -> str: + if self.audio_location == "local": + return self._generate_local_audio_link() + elif self.audio_location == "storage": + return await self._generate_storage_audio_link() + raise Exception(f"Unknown audio location {self.audio_location}") + + async def _generate_storage_audio_link(self) -> str: + return await get_storage().get_file_url(self.storage_audio_path) + + def _generate_local_audio_link(self) -> str: + # we need to create a URL to be used for diarization + # we can't use the audio_mp3_filename because it's not accessible + # from the diarization processor + from datetime import timedelta + + from reflector.app import app + from reflector.views.transcripts import create_access_token + + path = app.url_path_for( + 
"transcript_get_audio_mp3", + transcript_id=self.id, + ) + url = f"{settings.BASE_URL}{path}" + if self.user_id: + # we pass token only if the user_id is set + # otherwise, the audio is public + token = create_access_token( + {"sub": self.user_id}, + expires_delta=timedelta(minutes=15), + ) + url += f"?token={token}" + return url + + def find_empty_speaker(self) -> int: + """ + Find an empty speaker seat + """ + speakers = set( + word.speaker + for topic in self.topics + for word in topic.words + if word.speaker is not None + ) + i = 0 + while True: + if i not in speakers: + return i + i += 1 + raise Exception("No empty speaker found") + + +class TranscriptController: + async def get_all( + self, + user_id: str | None = None, + order_by: str | None = None, + filter_empty: bool | None = False, + filter_recording: bool | None = False, + return_query: bool = False, + ) -> list[Transcript]: + """ + Get all transcripts + + If `user_id` is specified, only return transcripts that belong to the user. + Otherwise, return all anonymous transcripts. + + Parameters: + - `order_by`: field to order by, e.g. "-created_at" + - `filter_empty`: filter out empty transcripts + - `filter_recording`: filter out transcripts that are currently recording + """ + query = transcripts.select().where(transcripts.c.user_id == user_id) + + if order_by is not None: + field = getattr(transcripts.c, order_by[1:]) + if order_by.startswith("-"): + field = field.desc() + query = query.order_by(field) + + if filter_empty: + query = query.filter(transcripts.c.status != "idle") + + if filter_recording: + query = query.filter(transcripts.c.status != "recording") + + if return_query: + return query + + results = await database.fetch_all(query) + return results + + async def get_by_id(self, transcript_id: str, **kwargs) -> Transcript | None: + """ + Get a transcript by id + """ + query = transcripts.select().where(transcripts.c.id == transcript_id) + if "user_id" in kwargs: + query = query.where(transcripts.c.user_id == kwargs["user_id"]) + result = await database.fetch_one(query) + if not result: + return None + return Transcript(**result) + + async def get_by_id_for_http( + self, + transcript_id: str, + user_id: str | None, + ) -> Transcript: + """ + Get a transcript by ID for HTTP request. + + If not found, it will raise a 404 error. + If the user is not allowed to access the transcript, it will raise a 403 error. + + This method checks the share mode of the transcript and the user_id + to determine if the user can access the transcript. 
+ """ + query = transcripts.select().where(transcripts.c.id == transcript_id) + result = await database.fetch_one(query) + if not result: + raise HTTPException(status_code=404, detail="Transcript not found") + + # if the transcript is anonymous, share mode is not checked + transcript = Transcript(**result) + if transcript.user_id is None: + return transcript + + if transcript.share_mode == "private": + # in private mode, only the owner can access the transcript + if transcript.user_id == user_id: + return transcript + + elif transcript.share_mode == "semi-private": + # in semi-private mode, only the owner and the users with the link + # can access the transcript + if user_id is not None: + return transcript + + elif transcript.share_mode == "public": + # in public mode, everyone can access the transcript + return transcript + + raise HTTPException(status_code=403, detail="Transcript access denied") + + async def add( + self, + name: str, + source_language: str = "en", + target_language: str = "en", + user_id: str | None = None, + ): + """ + Add a new transcript + """ + transcript = Transcript( + name=name, + source_language=source_language, + target_language=target_language, + user_id=user_id, + ) + query = transcripts.insert().values(**transcript.model_dump()) + await database.execute(query) + return transcript + + async def update(self, transcript: Transcript, values: dict, mutate=True): + """ + Update a transcript fields with key/values in values + """ + query = ( + transcripts.update() + .where(transcripts.c.id == transcript.id) + .values(**values) + ) + await database.execute(query) + if mutate: + for key, value in values.items(): + setattr(transcript, key, value) + + async def remove_by_id( + self, + transcript_id: str, + user_id: str | None = None, + ) -> None: + """ + Remove a transcript by id + """ + transcript = await self.get_by_id(transcript_id, user_id=user_id) + if not transcript: + return + if user_id is not None and transcript.user_id != user_id: + return + transcript.unlink() + query = transcripts.delete().where(transcripts.c.id == transcript_id) + await database.execute(query) + + @asynccontextmanager + async def transaction(self): + """ + A context manager for database transaction + """ + async with database.transaction(isolation="serializable"): + yield + + async def append_event( + self, + transcript: Transcript, + event: str, + data: Any, + ) -> TranscriptEvent: + """ + Append an event to a transcript + """ + resp = transcript.add_event(event=event, data=data) + await self.update( + transcript, + {"events": transcript.events_dump()}, + mutate=False, + ) + return resp + + async def upsert_topic( + self, + transcript: Transcript, + topic: TranscriptTopic, + ) -> TranscriptEvent: + """ + Append an event to a transcript + """ + transcript.upsert_topic(topic) + await self.update( + transcript, + {"topics": transcript.topics_dump()}, + mutate=False, + ) + + async def move_mp3_to_storage(self, transcript: Transcript): + """ + Move mp3 file to storage + """ + + # store the audio on external storage + await get_storage().put_file( + transcript.storage_audio_path, + transcript.audio_mp3_filename.read_bytes(), + ) + + # indicate on the transcript that the audio is now on storage + await self.update(transcript, {"audio_location": "storage"}) + + # unlink the local file + transcript.audio_mp3_filename.unlink(missing_ok=True) + + async def upsert_participant( + self, + transcript: Transcript, + participant: TranscriptParticipant, + ) -> TranscriptParticipant: + """ + Add/update a 
participant to a transcript + """ + result = transcript.upsert_participant(participant) + await self.update( + transcript, + {"participants": transcript.participants_dump()}, + mutate=False, + ) + return result + + async def delete_participant( + self, + transcript: Transcript, + participant_id: str, + ): + """ + Delete a participant from a transcript + """ + transcript.delete_participant(participant_id) + await self.update( + transcript, + {"participants": transcript.participants_dump()}, + mutate=False, + ) + + +transcripts_controller = TranscriptController() diff --git a/server/reflector/llm/llm_banana.py b/server/reflector/llm/llm_banana.py deleted file mode 100644 index e0384770..00000000 --- a/server/reflector/llm/llm_banana.py +++ /dev/null @@ -1,54 +0,0 @@ -import httpx - -from reflector.llm.base import LLM -from reflector.settings import settings -from reflector.utils.retry import retry - - -class BananaLLM(LLM): - def __init__(self): - super().__init__() - self.timeout = settings.LLM_TIMEOUT - self.headers = { - "X-Banana-API-Key": settings.LLM_BANANA_API_KEY, - "X-Banana-Model-Key": settings.LLM_BANANA_MODEL_KEY, - } - - async def _generate( - self, prompt: str, gen_schema: dict | None, gen_cfg: dict | None, **kwargs - ): - json_payload = {"prompt": prompt} - if gen_schema: - json_payload["gen_schema"] = gen_schema - if gen_cfg: - json_payload["gen_cfg"] = gen_cfg - async with httpx.AsyncClient() as client: - response = await retry(client.post)( - settings.LLM_URL, - headers=self.headers, - json=json_payload, - timeout=self.timeout, - retry_timeout=300, # as per their sdk - ) - response.raise_for_status() - text = response.json()["text"] - return text - - -LLM.register("banana", BananaLLM) - -if __name__ == "__main__": - from reflector.logger import logger - - async def main(): - llm = BananaLLM() - prompt = llm.create_prompt( - instruct="Complete the following task", - text="Tell me a joke about programming.", - ) - result = await llm.generate(prompt=prompt, logger=logger) - print(result) - - import asyncio - - asyncio.run(main()) diff --git a/server/reflector/llm/llm_modal.py b/server/reflector/llm/llm_modal.py index 220730e5..4b81c5a0 100644 --- a/server/reflector/llm/llm_modal.py +++ b/server/reflector/llm/llm_modal.py @@ -47,6 +47,7 @@ class ModalLLM(LLM): json=json_payload, timeout=self.timeout, retry_timeout=60 * 5, + follow_redirects=True, ) response.raise_for_status() text = response.json()["text"] diff --git a/server/reflector/pipelines/main_live_pipeline.py b/server/reflector/pipelines/main_live_pipeline.py new file mode 100644 index 00000000..20743391 --- /dev/null +++ b/server/reflector/pipelines/main_live_pipeline.py @@ -0,0 +1,680 @@ +""" +Main reflector pipeline for live streaming +========================================== + +This is the default pipeline used in the API. + +It is decoupled into: +- PipelineMainLive: limited processing during the live session +- PipelineMainPost: heavy lifting after the live session + +It is directly linked to our data model. 
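+ +When the live part ends, PipelineMainLive.on_ended schedules pipeline_post: +a Celery chord that runs the waveform/mp3/diarization chain and the +title/short-summary task in parallel, then the final summaries.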
+""" + +import asyncio +import functools +from contextlib import asynccontextmanager + +from celery import chord, group, shared_task +from pydantic import BaseModel +from reflector.db.transcripts import ( + Transcript, + TranscriptDuration, + TranscriptFinalLongSummary, + TranscriptFinalShortSummary, + TranscriptFinalTitle, + TranscriptText, + TranscriptTopic, + TranscriptWaveform, + transcripts_controller, +) +from reflector.logger import logger +from reflector.pipelines.runner import PipelineRunner +from reflector.processors import ( + AudioChunkerProcessor, + AudioDiarizationAutoProcessor, + AudioFileWriterProcessor, + AudioMergeProcessor, + AudioTranscriptAutoProcessor, + BroadcastProcessor, + Pipeline, + TranscriptFinalLongSummaryProcessor, + TranscriptFinalShortSummaryProcessor, + TranscriptFinalTitleProcessor, + TranscriptLinerProcessor, + TranscriptTopicDetectorProcessor, + TranscriptTranslatorProcessor, +) +from reflector.processors.audio_waveform_processor import AudioWaveformProcessor +from reflector.processors.types import AudioDiarizationInput +from reflector.processors.types import ( + TitleSummaryWithId as TitleSummaryWithIdProcessorType, +) +from reflector.processors.types import Transcript as TranscriptProcessorType +from reflector.settings import settings +from reflector.ws_manager import WebsocketManager, get_ws_manager +from structlog import BoundLogger as Logger + + +def asynctask(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + coro = f(*args, **kwargs) + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + if loop and loop.is_running(): + return loop.run_until_complete(coro) + return asyncio.run(coro) + + return wrapper + + +def broadcast_to_sockets(func): + """ + Decorator to broadcast transcript event to websockets + concerning this transcript + """ + + async def wrapper(self, *args, **kwargs): + resp = await func(self, *args, **kwargs) + if resp is None: + return + await self.ws_manager.send_json( + room_id=self.ws_room_id, + message=resp.model_dump(mode="json"), + ) + + return wrapper + + +def get_transcript(func): + """ + Decorator to fetch the transcript from the database from the first argument + """ + + async def wrapper(**kwargs): + transcript_id = kwargs.pop("transcript_id") + transcript = await transcripts_controller.get_by_id(transcript_id=transcript_id) + if not transcript: + raise Exception("Transcript {transcript_id} not found") + tlogger = logger.bind(transcript_id=transcript.id) + try: + return await func(transcript=transcript, logger=tlogger, **kwargs) + except Exception as exc: + tlogger.error("Pipeline error", exc_info=exc) + raise + + return wrapper + + +class StrValue(BaseModel): + value: str + + +class PipelineMainBase(PipelineRunner): + transcript_id: str + ws_room_id: str | None = None + ws_manager: WebsocketManager | None = None + + def prepare(self): + # prepare websocket + self._lock = asyncio.Lock() + self.ws_room_id = f"ts:{self.transcript_id}" + self.ws_manager = get_ws_manager() + + async def get_transcript(self) -> Transcript: + # fetch the transcript + result = await transcripts_controller.get_by_id( + transcript_id=self.transcript_id + ) + if not result: + raise Exception("Transcript not found") + return result + + def get_transcript_topics(self, transcript: Transcript) -> list[TranscriptTopic]: + return [ + TitleSummaryWithIdProcessorType( + id=topic.id, + title=topic.title, + summary=topic.summary, + timestamp=topic.timestamp, + duration=topic.duration, + 
transcript=TranscriptProcessorType(words=topic.words), + ) + for topic in transcript.topics + ] + + @asynccontextmanager + async def transaction(self): + async with self._lock: + async with transcripts_controller.transaction(): + yield + + @broadcast_to_sockets + async def on_status(self, status): + # if it's the first part, update the status of the transcript + # but do not set the ended status yet. + if isinstance(self, PipelineMainLive): + status_mapping = { + "started": "recording", + "push": "recording", + "flush": "processing", + "error": "error", + } + elif isinstance(self, PipelineMainFinalSummaries): + status_mapping = { + "push": "processing", + "flush": "processing", + "error": "error", + "ended": "ended", + } + else: + # intermediate pipelines don't update the status + return + + # map the pipeline status to the model status + status = status_mapping.get(status) + if not status: + return + + # when the status of the pipeline changes, update the transcript + async with self.transaction(): + transcript = await self.get_transcript() + if status == transcript.status: + return + resp = await transcripts_controller.append_event( + transcript=transcript, + event="STATUS", + data=StrValue(value=status), + ) + await transcripts_controller.update( + transcript, + { + "status": status, + }, + ) + return resp + + @broadcast_to_sockets + async def on_transcript(self, data): + async with self.transaction(): + transcript = await self.get_transcript() + return await transcripts_controller.append_event( + transcript=transcript, + event="TRANSCRIPT", + data=TranscriptText(text=data.text, translation=data.translation), + ) + + @broadcast_to_sockets + async def on_topic(self, data): + topic = TranscriptTopic( + title=data.title, + summary=data.summary, + timestamp=data.timestamp, + transcript=data.transcript.text, + words=data.transcript.words, + ) + if isinstance(data, TitleSummaryWithIdProcessorType): + topic.id = data.id + async with self.transaction(): + transcript = await self.get_transcript() + await transcripts_controller.upsert_topic(transcript, topic) + return await transcripts_controller.append_event( + transcript=transcript, + event="TOPIC", + data=topic, + ) + + @broadcast_to_sockets + async def on_title(self, data): + final_title = TranscriptFinalTitle(title=data.title) + async with self.transaction(): + transcript = await self.get_transcript() + if not transcript.title: + await transcripts_controller.update( + transcript, + { + "title": final_title.title, + }, + ) + return await transcripts_controller.append_event( + transcript=transcript, + event="FINAL_TITLE", + data=final_title, + ) + + @broadcast_to_sockets + async def on_long_summary(self, data): + final_long_summary = TranscriptFinalLongSummary(long_summary=data.long_summary) + async with self.transaction(): + transcript = await self.get_transcript() + await transcripts_controller.update( + transcript, + { + "long_summary": final_long_summary.long_summary, + }, + ) + return await transcripts_controller.append_event( + transcript=transcript, + event="FINAL_LONG_SUMMARY", + data=final_long_summary, + ) + + @broadcast_to_sockets + async def on_short_summary(self, data): + final_short_summary = TranscriptFinalShortSummary( + short_summary=data.short_summary + ) + async with self.transaction(): + transcript = await self.get_transcript() + await transcripts_controller.update( + transcript, + { + "short_summary": final_short_summary.short_summary, + }, + ) + return await transcripts_controller.append_event( + transcript=transcript, + 
event="FINAL_SHORT_SUMMARY", + data=final_short_summary, + ) + + @broadcast_to_sockets + async def on_duration(self, data): + async with self.transaction(): + duration = TranscriptDuration(duration=data) + + transcript = await self.get_transcript() + await transcripts_controller.update( + transcript, + { + "duration": duration.duration, + }, + ) + return await transcripts_controller.append_event( + transcript=transcript, event="DURATION", data=duration + ) + + @broadcast_to_sockets + async def on_waveform(self, data): + async with self.transaction(): + waveform = TranscriptWaveform(waveform=data) + + transcript = await self.get_transcript() + + return await transcripts_controller.append_event( + transcript=transcript, event="WAVEFORM", data=waveform + ) + + +class PipelineMainLive(PipelineMainBase): + """ + Main pipeline for live streaming, attach to RTC connection + Any long post process should be done in the post pipeline + """ + + async def create(self) -> Pipeline: + # create a context for the whole rtc transaction + # add a customised logger to the context + self.prepare() + transcript = await self.get_transcript() + + processors = [ + AudioFileWriterProcessor( + path=transcript.audio_wav_filename, + on_duration=self.on_duration, + ), + AudioChunkerProcessor(), + AudioMergeProcessor(), + AudioTranscriptAutoProcessor.as_threaded(), + TranscriptLinerProcessor(), + TranscriptTranslatorProcessor.as_threaded(callback=self.on_transcript), + TranscriptTopicDetectorProcessor.as_threaded(callback=self.on_topic), + ] + pipeline = Pipeline(*processors) + pipeline.options = self + pipeline.set_pref("audio:source_language", transcript.source_language) + pipeline.set_pref("audio:target_language", transcript.target_language) + pipeline.logger.bind(transcript_id=transcript.id) + pipeline.logger.info("Pipeline main live created") + + return pipeline + + async def on_ended(self): + # when the pipeline ends, connect to the post pipeline + logger.info("Pipeline main live ended", transcript_id=self.transcript_id) + logger.info("Scheduling pipeline main post", transcript_id=self.transcript_id) + pipeline_post(transcript_id=self.transcript_id) + + +class PipelineMainDiarization(PipelineMainBase): + """ + Diarize the audio and update topics + """ + + async def create(self) -> Pipeline: + # create a context for the whole rtc transaction + # add a customised logger to the context + self.prepare() + pipeline = Pipeline( + AudioDiarizationAutoProcessor(callback=self.on_topic), + ) + pipeline.options = self + + # now let's start the pipeline by pushing information to the + # first processor diarization processor + # XXX translation is lost when converting our data model to the processor model + transcript = await self.get_transcript() + + # diarization works only if the file is uploaded to an external storage + if transcript.audio_location == "local": + pipeline.logger.info("Audio is local, skipping diarization") + return + + topics = self.get_transcript_topics(transcript) + audio_url = await transcript.get_audio_url() + audio_diarization_input = AudioDiarizationInput( + audio_url=audio_url, + topics=topics, + ) + + # as tempting to use pipeline.push, prefer to use the runner + # to let the start just do one job. 
+ pipeline.logger.bind(transcript_id=transcript.id) + pipeline.logger.info("Diarization pipeline created") + self.push(audio_diarization_input) + self.flush() + + return pipeline + + +class PipelineMainFromTopics(PipelineMainBase): + """ + Pseudo class for generating a pipeline from topics + """ + + def get_processors(self) -> list: + raise NotImplementedError + + async def create(self) -> Pipeline: + self.prepare() + + # get transcript + self._transcript = transcript = await self.get_transcript() + + # create pipeline + processors = self.get_processors() + pipeline = Pipeline(*processors) + pipeline.options = self + pipeline.logger.bind(transcript_id=transcript.id) + pipeline.logger.info(f"{self.__class__.__name__} pipeline created") + + # push topics + topics = self.get_transcript_topics(transcript) + for topic in topics: + self.push(topic) + + self.flush() + + return pipeline + + +class PipelineMainTitleAndShortSummary(PipelineMainFromTopics): + """ + Generate title from the topics + """ + + def get_processors(self) -> list: + return [ + BroadcastProcessor( + processors=[ + TranscriptFinalTitleProcessor.as_threaded(callback=self.on_title), + TranscriptFinalShortSummaryProcessor.as_threaded( + callback=self.on_short_summary + ), + ] + ) + ] + + +class PipelineMainFinalSummaries(PipelineMainFromTopics): + """ + Generate summaries from the topics + """ + + def get_processors(self) -> list: + return [ + BroadcastProcessor( + processors=[ + TranscriptFinalLongSummaryProcessor.as_threaded( + callback=self.on_long_summary + ), + TranscriptFinalShortSummaryProcessor.as_threaded( + callback=self.on_short_summary + ), + ] + ), + ] + + +class PipelineMainWaveform(PipelineMainFromTopics): + """ + Generate waveform + """ + + def get_processors(self) -> list: + return [ + AudioWaveformProcessor.as_threaded( + audio_path=self._transcript.audio_wav_filename, + waveform_path=self._transcript.audio_waveform_filename, + on_waveform=self.on_waveform, + ), + ] + + +@get_transcript +async def pipeline_remove_upload(transcript: Transcript, logger: Logger): + logger.info("Starting remove upload") + uploads = transcript.data_path.glob("upload.*") + for upload in uploads: + upload.unlink(missing_ok=True) + logger.info("Remove upload done") + + +@get_transcript +async def pipeline_waveform(transcript: Transcript, logger: Logger): + logger.info("Starting waveform") + runner = PipelineMainWaveform(transcript_id=transcript.id) + await runner.run() + logger.info("Waveform done") + + +@get_transcript +async def pipeline_convert_to_mp3(transcript: Transcript, logger: Logger): + logger.info("Starting convert to mp3") + + # If the audio wav is not available, just skip + wav_filename = transcript.audio_wav_filename + if not wav_filename.exists(): + logger.warning("Wav file not found, may be already converted") + return + + # Convert to mp3 + mp3_filename = transcript.audio_mp3_filename + + import av + + with av.open(wav_filename.as_posix()) as in_container: + in_stream = in_container.streams.audio[0] + with av.open(mp3_filename.as_posix(), "w") as out_container: + out_stream = out_container.add_stream("mp3") + for frame in in_container.decode(in_stream): + for packet in out_stream.encode(frame): + out_container.mux(packet) + + # Delete the wav file + transcript.audio_wav_filename.unlink(missing_ok=True) + + logger.info("Convert to mp3 done") + + +@get_transcript +async def pipeline_upload_mp3(transcript: Transcript, logger: Logger): + if not settings.TRANSCRIPT_STORAGE_BACKEND: + logger.info("No storage backend 
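The `@get_transcript` decorator used throughout these task functions is not defined in this diff; it apparently resolves a `transcript_id` keyword into a `Transcript` row plus a bound logger. A hedged sketch of the pattern — the lookup and error handling are assumptions:

```python
import functools

from reflector.db.transcripts import transcripts_controller
from reflector.logger import logger


def get_transcript(func):
    """Sketch: resolve transcript_id and bind a contextual logger.

    The real decorator lives elsewhere in the codebase and may differ
    (error handling, locking, etc.).
    """

    @functools.wraps(func)
    async def wrapper(*, transcript_id: str):
        transcript = await transcripts_controller.get_by_id(transcript_id)
        if transcript is None:
            raise ValueError(f"Transcript {transcript_id} not found")
        bound_logger = logger.bind(transcript_id=transcript_id)
        return await func(transcript=transcript, logger=bound_logger)

    return wrapper
```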
configured, skipping mp3 upload") + return + + logger.info("Starting upload mp3") + + # If the audio mp3 is not available, just skip + mp3_filename = transcript.audio_mp3_filename + if not mp3_filename.exists(): + logger.warning("Mp3 file not found, may be already uploaded") + return + + # Upload to external storage and delete the file + await transcripts_controller.move_mp3_to_storage(transcript) + + logger.info("Upload mp3 done") + + +@get_transcript +async def pipeline_diarization(transcript: Transcript, logger: Logger): + logger.info("Starting diarization") + runner = PipelineMainDiarization(transcript_id=transcript.id) + await runner.run() + logger.info("Diarization done") + + +@get_transcript +async def pipeline_title_and_short_summary(transcript: Transcript, logger: Logger): + logger.info("Starting title and short summary") + runner = PipelineMainTitleAndShortSummary(transcript_id=transcript.id) + await runner.run() + logger.info("Title and short summary done") + + +@get_transcript +async def pipeline_summaries(transcript: Transcript, logger: Logger): + logger.info("Starting summaries") + runner = PipelineMainFinalSummaries(transcript_id=transcript.id) + await runner.run() + logger.info("Summaries done") + + +# =================================================================== +# Celery tasks that can be called from the API +# =================================================================== + + +@shared_task +@asynctask +async def task_pipeline_remove_upload(*, transcript_id: str): + await pipeline_remove_upload(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_waveform(*, transcript_id: str): + await pipeline_waveform(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_convert_to_mp3(*, transcript_id: str): + await pipeline_convert_to_mp3(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_upload_mp3(*, transcript_id: str): + await pipeline_upload_mp3(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_diarization(*, transcript_id: str): + await pipeline_diarization(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_title_and_short_summary(*, transcript_id: str): + await pipeline_title_and_short_summary(transcript_id=transcript_id) + + +@shared_task +@asynctask +async def task_pipeline_final_summaries(*, transcript_id: str): + await pipeline_summaries(transcript_id=transcript_id) + + +def pipeline_post(*, transcript_id: str): + """ + Run the post pipeline + """ + chain_mp3_and_diarize = ( + task_pipeline_waveform.si(transcript_id=transcript_id) + | task_pipeline_convert_to_mp3.si(transcript_id=transcript_id) + | task_pipeline_upload_mp3.si(transcript_id=transcript_id) + | task_pipeline_remove_upload.si(transcript_id=transcript_id) + | task_pipeline_diarization.si(transcript_id=transcript_id) + ) + chain_title_preview = task_pipeline_title_and_short_summary.si( + transcript_id=transcript_id + ) + chain_final_summaries = task_pipeline_final_summaries.si( + transcript_id=transcript_id + ) + + chain = chord( + group(chain_mp3_and_diarize, chain_title_preview), + chain_final_summaries, + ) + chain.delay() + + +@get_transcript +async def pipeline_upload(transcript: Transcript, logger: Logger): + import av + + try: + # open audio + upload_filename = next(transcript.data_path.glob("upload.*")) + container = av.open(upload_filename.as_posix()) + + # create pipeline + pipeline = PipelineMainLive(transcript_id=transcript.id) + 
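`pipeline_post` above uses Celery canvas primitives: the two inner chains run in parallel inside a `group`, and the `chord` body fires only once both branches finish. A toy demonstration of the same semantics with stand-in tasks (the broker/backend URLs are assumptions matching the defaults in `settings.py`):

```python
# Requires a running broker, e.g. redis://localhost:6379/1
from celery import Celery, chord, group

app = Celery(broker="redis://localhost:6379/1", backend="redis://localhost:6379/1")


@app.task
def step(name):
    return name


@app.task
def done(results):
    print("all branches finished:", results)


# two branches run in parallel; `done` fires only after both complete,
# mirroring chord(group(chain_mp3_and_diarize, chain_title_preview), summaries)
workflow = chord(
    group(
        step.si("waveform") | step.si("mp3") | step.si("diarize"),
        step.si("title_and_short_summary"),
    ),
    done.s(),
)
workflow.delay()
```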
+        pipeline.start()
+
+        # push audio to pipeline
+        try:
+            logger.info("Start pushing audio into the pipeline")
+            for frame in container.decode(audio=0):
+                pipeline.push(frame)
+        finally:
+            logger.info("Flushing the pipeline")
+            pipeline.flush()
+
+        logger.info("Waiting for the pipeline to end")
+        await pipeline.join()
+
+    except Exception as exc:
+        logger.error("Pipeline error", exc_info=exc)
+        await transcripts_controller.update(
+            transcript,
+            {
+                "status": "error",
+            },
+        )
+        raise
+
+    logger.info("Pipeline ended")
+
+
+@shared_task
+@asynctask
+async def task_pipeline_upload(*, transcript_id: str):
+    return await pipeline_upload(transcript_id=transcript_id)
diff --git a/server/reflector/pipelines/runner.py b/server/reflector/pipelines/runner.py
new file mode 100644
index 00000000..0edf156c
--- /dev/null
+++ b/server/reflector/pipelines/runner.py
@@ -0,0 +1,152 @@
+"""
+Pipeline Runner
+===============
+
+Pipeline runner designed to be executed in an asyncio task.
+
+It is meant to be subclassed; implement a create() method
+that exposes/returns a Pipeline instance.
+
+During its lifecycle, it will emit the following statuses:
+- started: the pipeline has been started
+- push: the pipeline received at least one piece of data
+- flush: the pipeline is flushing
+- ended: the pipeline has ended
+- error: the pipeline has ended with an error
+"""
+
+import asyncio
+
+from pydantic import BaseModel, ConfigDict
+from reflector.logger import logger
+from reflector.processors import Pipeline
+
+
+class PipelineRunner(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    status: str = "idle"
+    pipeline: Pipeline | None = None
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self._task = None
+        self._q_cmd = asyncio.Queue(maxsize=4096)
+        self._ev_done = asyncio.Event()
+        self._is_first_push = True
+        self._logger = logger.bind(
+            runner=id(self),
+            runner_cls=self.__class__.__name__,
+        )
+
+    def create(self) -> Pipeline:
+        """
+        Create the pipeline if not specified earlier.
+        Should be implemented in a subclass
+        """
+        raise NotImplementedError()
+
+    def start(self):
+        """
+        Start the pipeline as a coroutine task
+        """
+        self._task = asyncio.get_event_loop().create_task(self.run())
+
+    async def join(self):
+        """
+        Wait for the pipeline to finish
+        """
+        if self._task:
+            await self._task
+
+    def start_sync(self):
+        """
+        Start the pipeline synchronously (for non-asyncio apps)
+        """
+        coro = self.run()
+        asyncio.run(coro)
+
+    def push(self, data):
+        """
+        Push data to the pipeline
+        """
+        self._add_cmd("PUSH", data)
+
+    def flush(self):
+        """
+        Flush the pipeline
+        """
+        self._add_cmd("FLUSH", None)
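`push()` and `flush()` above only enqueue commands; the `run()` loop below consumes and dispatches them. A self-contained toy of that producer/consumer handoff:

```python
import asyncio


async def main():
    q: asyncio.Queue = asyncio.Queue(maxsize=4096)

    # producer side: what push()/flush() do - enqueue and return immediately
    q.put_nowait(["PUSH", b"chunk-1"])
    q.put_nowait(["FLUSH", None])

    # consumer side: what run() does - dispatch on the command name
    while True:
        cmd, data = await q.get()
        print(f"dispatching cmd_{cmd.lower()}", data)
        if cmd == "FLUSH":
            break


asyncio.run(main())
```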
+    async def on_status(self, status):
+        """
+        Called when the status of the pipeline changes
+        """
+        pass
+
+    async def on_ended(self):
+        """
+        Called when the pipeline ends
+        """
+        pass
+
+    def _add_cmd(self, cmd: str, data):
+        """
+        Enqueue a command to be executed in the runner.
+        Currently supported commands: PUSH, FLUSH
+        """
+        self._q_cmd.put_nowait([cmd, data])
+
+    async def _set_status(self, status):
+        self._logger.debug("Runner status updated", status=status)
+        self.status = status
+        if self.on_status:
+            try:
+                await self.on_status(status)
+            except Exception:
+                self._logger.exception("Runner error while setting status")
+
+    async def run(self):
+        try:
+            # create the pipeline if not yet done
+            await self._set_status("init")
+            self._is_first_push = True
+            if not self.pipeline:
+                self.pipeline = await self.create()
+
+            if not self.pipeline:
+                # no pipeline created by create(), just finish now
+                await self._set_status("ended")
+                self._ev_done.set()
+                if self.on_ended:
+                    await self.on_ended()
+                return
+
+            # start the loop
+            await self._set_status("started")
+            while not self._ev_done.is_set():
+                cmd, data = await self._q_cmd.get()
+                func = getattr(self, f"cmd_{cmd.lower()}")
+                if func:
+                    await func(data)
+                else:
+                    raise Exception(f"Unknown command {cmd}")
+        except Exception:
+            self._logger.exception("Runner error")
+            await self._set_status("error")
+            self._ev_done.set()
+            raise
+
+    async def cmd_push(self, data):
+        if self._is_first_push:
+            await self._set_status("push")
+            self._is_first_push = False
+        await self.pipeline.push(data)
+
+    async def cmd_flush(self, data):
+        await self._set_status("flush")
+        await self.pipeline.flush()
+        await self._set_status("ended")
+        self._ev_done.set()
+        if self.on_ended:
+            await self.on_ended()
diff --git a/server/reflector/processors/__init__.py b/server/reflector/processors/__init__.py
index 96a3941d..1c88d6c5 100644
--- a/server/reflector/processors/__init__.py
+++ b/server/reflector/processors/__init__.py
@@ -1,9 +1,16 @@
 from .audio_chunker import AudioChunkerProcessor  # noqa: F401
+from .audio_diarization_auto import AudioDiarizationAutoProcessor  # noqa: F401
 from .audio_file_writer import AudioFileWriterProcessor  # noqa: F401
 from .audio_merge import AudioMergeProcessor  # noqa: F401
 from .audio_transcript import AudioTranscriptProcessor  # noqa: F401
 from .audio_transcript_auto import AudioTranscriptAutoProcessor  # noqa: F401
-from .base import Pipeline, PipelineEvent, Processor, ThreadedProcessor  # noqa: F401
+from .base import (  # noqa: F401
+    BroadcastProcessor,
+    Pipeline,
+    PipelineEvent,
+    Processor,
+    ThreadedProcessor,
+)
 from .transcript_final_long_summary import (  # noqa: F401
     TranscriptFinalLongSummaryProcessor,
 )
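Putting the runner API together, a subclass only needs to implement `create()`. A hedged usage sketch — `EchoProcessor` is hypothetical, and the `Processor` base-class details (default flush behavior in particular) are assumed from elsewhere in this diff:

```python
import asyncio

from reflector.pipelines.runner import PipelineRunner
from reflector.processors import Pipeline, Processor


class EchoProcessor(Processor):
    """Hypothetical processor: forwards whatever it receives."""

    async def _push(self, data):
        await self.emit(data)

    async def _flush(self):
        pass


class MyRunner(PipelineRunner):
    async def create(self) -> Pipeline:
        return Pipeline(EchoProcessor())


async def main():
    runner = MyRunner()
    runner.start()        # spawns the run() loop as a task
    runner.push("hello")  # enqueues PUSH
    runner.flush()        # enqueues FLUSH; run() then emits "ended"
    await runner.join()
    print(runner.status)  # -> "ended"


asyncio.run(main())
```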
diff --git a/server/reflector/processors/audio_diarization.py b/server/reflector/processors/audio_diarization.py
new file mode 100644
index 00000000..69eab5b7
--- /dev/null
+++ b/server/reflector/processors/audio_diarization.py
@@ -0,0 +1,181 @@
+from reflector.processors.base import Processor
+from reflector.processors.types import AudioDiarizationInput, TitleSummary, Word
+
+
+class AudioDiarizationProcessor(Processor):
+    INPUT_TYPE = AudioDiarizationInput
+    OUTPUT_TYPE = TitleSummary
+
+    async def _push(self, data: AudioDiarizationInput):
+        try:
+            self.logger.info("Diarization started", audio_file_url=data.audio_url)
+            diarization = await self._diarize(data)
+            self.logger.info("Diarization finished")
+        except Exception:
+            self.logger.exception("Diarization failed after retrying")
+            raise
+
+        # now reapply speaker to topics (if any)
+        # topics is a list[BaseModel] with an attribute words
+        # words is a list[BaseModel] with text, start and speaker attributes
+
+        # create a view of words based on topics
+        # the current algorithm uses word indexes, so we cannot use a generator
+        words = list(self.iter_words_from_topics(data.topics))
+
+        # assign speaker to words (mutates the words list)
+        self.assign_speaker(words, diarization)
+
+        # emit them
+        for topic in data.topics:
+            await self.emit(topic)
+
+    async def _diarize(self, data: AudioDiarizationInput):
+        raise NotImplementedError
+
+    def assign_speaker(self, words: list[Word], diarization: list[dict]):
+        self._diarization_remove_overlap(diarization)
+        self._diarization_remove_segment_without_words(words, diarization)
+        self._diarization_merge_same_speaker(words, diarization)
+        self._diarization_assign_speaker(words, diarization)
+
+    def iter_words_from_topics(self, topics: TitleSummary):
+        for topic in topics:
+            for word in topic.transcript.words:
+                yield word
+
+    def is_word_continuation(self, word_prev, word):
+        """
+        Return True if the word is a continuation of the previous word,
+        by checking whether the previous word ends with punctuation
+        or the current word starts with a capital letter
+        """
+        # is word_prev ending with a punctuation ?
+        if word_prev.text and word_prev.text[-1] in ".?!":
+            return False
+        elif word.text and word.text[0].isupper():
+            return False
+        return True
+
+    def _diarization_remove_overlap(self, diarization: list[dict]):
+        """
+        Remove overlap in diarization results
+
+        When using a diarization algorithm, it's possible to have overlapping
+        segments. This function removes the overlap by keeping the longest segment.
+
+        Warning: this function mutates the diarization list
+        """
+        # remove overlap by keeping the longest segment
+        diarization_idx = 0
+        while diarization_idx < len(diarization) - 1:
+            d = diarization[diarization_idx]
+            dnext = diarization[diarization_idx + 1]
+            if d["end"] > dnext["start"]:
+                # remove the shortest segment
+                if d["end"] - d["start"] > dnext["end"] - dnext["start"]:
+                    # remove next segment
+                    diarization.pop(diarization_idx + 1)
+                else:
+                    # remove current segment
+                    diarization.pop(diarization_idx)
+            else:
+                diarization_idx += 1
+
+    def _diarization_remove_segment_without_words(
+        self, words: list[Word], diarization: list[dict]
+    ):
+        """
+        Remove diarization segments without words
+
+        Warning: this function mutates the diarization list
+        """
+        # count the number of words for each diarization segment
+        diarization_count = []
+        for d in diarization:
+            start = d["start"]
+            end = d["end"]
+            count = 0
+            for word in words:
+                if start <= word.start < end:
+                    count += 1
+                elif start < word.end <= end:
+                    count += 1
+            diarization_count.append(count)
+
+        # remove diarization segments with no words
+        diarization_idx = 0
+        while diarization_idx < len(diarization):
+            if diarization_count[diarization_idx] == 0:
+                diarization.pop(diarization_idx)
+                diarization_count.pop(diarization_idx)
+            else:
+                diarization_idx += 1
+
+    def _diarization_merge_same_speaker(
+        self, words: list[Word], diarization: list[dict]
+    ):
+        """
+        Merge contiguous diarization segments with the same speaker
+
+        Warning: this function mutates the diarization list
+        """
+        # merge segment with same speaker
+        diarization_idx = 0
+        while diarization_idx < len(diarization) - 1:
+            d = diarization[diarization_idx]
+            dnext = diarization[diarization_idx + 1]
+            if d["speaker"] == dnext["speaker"]:
+                diarization[diarization_idx]["end"] = dnext["end"]
+                diarization.pop(diarization_idx + 1)
+            else:
+                diarization_idx += 1
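A quick demonstration of the overlap rule implemented in `_diarization_remove_overlap` above, on toy data (same algorithm, inlined):

```python
diarization = [
    {"start": 0.0, "end": 5.0, "speaker": 0},
    {"start": 4.0, "end": 4.5, "speaker": 1},  # overlaps and is shorter -> dropped
    {"start": 6.0, "end": 9.0, "speaker": 1},
]

idx = 0
while idx < len(diarization) - 1:
    d, dnext = diarization[idx], diarization[idx + 1]
    if d["end"] > dnext["start"]:
        # keep the longer of the two overlapping segments
        if d["end"] - d["start"] > dnext["end"] - dnext["start"]:
            diarization.pop(idx + 1)
        else:
            diarization.pop(idx)
    else:
        idx += 1

print(diarization)
# [{'start': 0.0, 'end': 5.0, 'speaker': 0}, {'start': 6.0, 'end': 9.0, 'speaker': 1}]
```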
+    def _diarization_assign_speaker(self, words: list[Word], diarization: list[dict]):
+        """
+        Assign speaker to words based on diarization
+
+        Warning: this function mutates the words list
+        """
+
+        word_idx = 0
+        last_speaker = None
+        for d in diarization:
+            start = d["start"]
+            end = d["end"]
+            speaker = d["speaker"]
+
+            # diarization may start after the first set of words
+            # in this case, we assign the last speaker
+            for word in words[word_idx:]:
+                if word.start < start:
+                    # the speaker changed, but which assignment makes sense here?
+                    # If it's a new sentence, assign the new speaker
+                    # If it's a continuation, assign the last speaker
+                    is_continuation = False
+                    if word_idx > 0 and word_idx < len(words) - 1:
+                        is_continuation = self.is_word_continuation(
+                            *words[word_idx - 1 : word_idx + 1]
+                        )
+                    if is_continuation:
+                        word.speaker = last_speaker
+                    else:
+                        word.speaker = speaker
+                        last_speaker = speaker
+                    word_idx += 1
+                else:
+                    break
+
+            # now continue to assign speaker until the word starts after the end
+            for word in words[word_idx:]:
+                if start <= word.start < end:
+                    last_speaker = speaker
+                    word.speaker = speaker
+                    word_idx += 1
+                elif word.start > end:
+                    break
+
+        # no more diarization available,
+        # assign last speaker to all words without speaker
+        for word in words[word_idx:]:
+            word.speaker = last_speaker
diff --git a/server/reflector/processors/audio_diarization_auto.py b/server/reflector/processors/audio_diarization_auto.py
new file mode 100644
index 00000000..0e7bfc5c
--- /dev/null
+++ b/server/reflector/processors/audio_diarization_auto.py
@@ -0,0 +1,33 @@
+import importlib
+
+from reflector.processors.audio_diarization import AudioDiarizationProcessor
+from reflector.settings import settings
+
+
+class AudioDiarizationAutoProcessor(AudioDiarizationProcessor):
+    _registry = {}
+
+    @classmethod
+    def register(cls, name, kclass):
+        cls._registry[name] = kclass
+
+    def __new__(cls, name: str | None = None, **kwargs):
+        if name is None:
+            name = settings.DIARIZATION_BACKEND
+
+        if name not in cls._registry:
+            module_name = f"reflector.processors.audio_diarization_{name}"
+            importlib.import_module(module_name)
+
+        # gather specific configuration for the processor
+        # search `DIARIZATION_<NAME>_XXX_YYY`, push to constructor as `<name>_xxx_yyy`
+        config = {}
+        name_upper = name.upper()
+        settings_prefix = "DIARIZATION_"
+        config_prefix = f"{settings_prefix}{name_upper}_"
+        for key, value in settings:
+            if key.startswith(config_prefix):
+                config_name = key[len(settings_prefix) :].lower()
+                config[config_name] = value
+
+        return cls._registry[name](**config | kwargs)
diff --git a/server/reflector/processors/audio_diarization_modal.py b/server/reflector/processors/audio_diarization_modal.py
new file mode 100644
index 00000000..511b7f70
--- /dev/null
+++ b/server/reflector/processors/audio_diarization_modal.py
@@ -0,0 +1,37 @@
+import httpx
+from reflector.processors.audio_diarization import AudioDiarizationProcessor
+from reflector.processors.audio_diarization_auto import AudioDiarizationAutoProcessor
+from reflector.processors.types import AudioDiarizationInput, TitleSummary
+from reflector.settings import settings
+
+
+class AudioDiarizationModalProcessor(AudioDiarizationProcessor):
+    INPUT_TYPE = AudioDiarizationInput
+    OUTPUT_TYPE = TitleSummary
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.diarization_url = settings.DIARIZATION_URL + "/diarize"
+        self.headers = {
+            "Authorization": f"Bearer {settings.LLM_MODAL_API_KEY}",
+        }
+
+    async def _diarize(self, data: AudioDiarizationInput):
+        # Gather diarization data
+        params = {
+            "audio_file_url": data.audio_url,
+            "timestamp": 0,
+        }
+        async with httpx.AsyncClient() as client:
+            response = await
client.post( + self.diarization_url, + headers=self.headers, + params=params, + timeout=None, + follow_redirects=True, + ) + response.raise_for_status() + return response.json()["diarization"] + + +AudioDiarizationAutoProcessor.register("modal", AudioDiarizationModalProcessor) diff --git a/server/reflector/processors/audio_file_writer.py b/server/reflector/processors/audio_file_writer.py index d34dc3f0..36ee4263 100644 --- a/server/reflector/processors/audio_file_writer.py +++ b/server/reflector/processors/audio_file_writer.py @@ -12,8 +12,8 @@ class AudioFileWriterProcessor(Processor): INPUT_TYPE = av.AudioFrame OUTPUT_TYPE = av.AudioFrame - def __init__(self, path: Path | str): - super().__init__() + def __init__(self, path: Path | str, **kwargs): + super().__init__(**kwargs) if isinstance(path, str): path = Path(path) if path.suffix not in (".mp3", ".wav"): @@ -21,6 +21,7 @@ class AudioFileWriterProcessor(Processor): self.path = path self.out_container = None self.out_stream = None + self.last_packet = None async def _push(self, data: av.AudioFrame): if not self.out_container: @@ -40,12 +41,30 @@ class AudioFileWriterProcessor(Processor): raise ValueError("Only mp3 and wav files are supported") for packet in self.out_stream.encode(data): self.out_container.mux(packet) + self.last_packet = packet await self.emit(data) async def _flush(self): if self.out_container: for packet in self.out_stream.encode(): self.out_container.mux(packet) + self.last_packet = packet + try: + if self.last_packet is not None: + duration = round( + float( + (self.last_packet.pts * self.last_packet.duration) + * self.last_packet.time_base + ), + 2, + ) + except Exception: + self.logger.exception("Failed to get duration") + duration = 0 + self.out_container.close() self.out_container = None self.out_stream = None + + if duration > 0: + await self.emit(duration, name="duration") diff --git a/server/reflector/processors/audio_transcript.py b/server/reflector/processors/audio_transcript.py index f029b587..3f9dc85b 100644 --- a/server/reflector/processors/audio_transcript.py +++ b/server/reflector/processors/audio_transcript.py @@ -1,6 +1,4 @@ -from profanityfilter import ProfanityFilter from prometheus_client import Counter, Histogram - from reflector.processors.base import Processor from reflector.processors.types import AudioFile, Transcript @@ -40,8 +38,6 @@ class AudioTranscriptProcessor(Processor): self.m_transcript_call = self.m_transcript_call.labels(name) self.m_transcript_success = self.m_transcript_success.labels(name) self.m_transcript_failure = self.m_transcript_failure.labels(name) - self.profanity_filter = ProfanityFilter() - self.profanity_filter.set_censor("*") super().__init__(*args, **kwargs) async def _push(self, data: AudioFile): @@ -60,9 +56,3 @@ class AudioTranscriptProcessor(Processor): async def _transcript(self, data: AudioFile): raise NotImplementedError - - def filter_profanity(self, text: str) -> str: - """ - Remove censored words from the transcript - """ - return self.profanity_filter.censor(text) diff --git a/server/reflector/processors/audio_transcript_auto.py b/server/reflector/processors/audio_transcript_auto.py index f223a52d..ac79ced0 100644 --- a/server/reflector/processors/audio_transcript_auto.py +++ b/server/reflector/processors/audio_transcript_auto.py @@ -1,8 +1,6 @@ import importlib from reflector.processors.audio_transcript import AudioTranscriptProcessor -from reflector.processors.base import Pipeline, Processor -from reflector.processors.types import AudioFile from 
reflector.settings import settings @@ -13,8 +11,9 @@ class AudioTranscriptAutoProcessor(AudioTranscriptProcessor): def register(cls, name, kclass): cls._registry[name] = kclass - @classmethod - def get_instance(cls, name): + def __new__(cls, name: str | None = None, **kwargs): + if name is None: + name = settings.TRANSCRIPT_BACKEND if name not in cls._registry: module_name = f"reflector.processors.audio_transcript_{name}" importlib.import_module(module_name) @@ -30,30 +29,4 @@ class AudioTranscriptAutoProcessor(AudioTranscriptProcessor): config_name = key[len(settings_prefix) :].lower() config[config_name] = value - return cls._registry[name](**config) - - def __init__(self, **kwargs): - self.processor = self.get_instance(settings.TRANSCRIPT_BACKEND) - super().__init__(**kwargs) - - def set_pipeline(self, pipeline: Pipeline): - super().set_pipeline(pipeline) - self.processor.set_pipeline(pipeline) - - def connect(self, processor: Processor): - self.processor.connect(processor) - - def disconnect(self, processor: Processor): - self.processor.disconnect(processor) - - def on(self, callback): - self.processor.on(callback) - - def off(self, callback): - self.processor.off(callback) - - async def _push(self, data: AudioFile): - return await self.processor._push(data) - - async def _flush(self): - return await self.processor._flush() + return cls._registry[name](**config | kwargs) diff --git a/server/reflector/processors/audio_transcript_banana.py b/server/reflector/processors/audio_transcript_banana.py deleted file mode 100644 index fe339eea..00000000 --- a/server/reflector/processors/audio_transcript_banana.py +++ /dev/null @@ -1,86 +0,0 @@ -""" -Implementation using the GPU service from banana. - -API will be a POST request to TRANSCRIPT_URL: - -```json -{ - "audio_url": "https://...", - "audio_ext": "wav", - "timestamp": 123.456 - "language": "en" -} -``` - -""" - -from pathlib import Path - -import httpx -from reflector.processors.audio_transcript import AudioTranscriptProcessor -from reflector.processors.audio_transcript_auto import AudioTranscriptAutoProcessor -from reflector.processors.types import AudioFile, Transcript, Word -from reflector.settings import settings -from reflector.storage import Storage -from reflector.utils.retry import retry - - -class AudioTranscriptBananaProcessor(AudioTranscriptProcessor): - def __init__(self, banana_api_key: str, banana_model_key: str): - super().__init__() - self.transcript_url = settings.TRANSCRIPT_URL - self.timeout = settings.TRANSCRIPT_TIMEOUT - self.storage = Storage.get_instance( - settings.TRANSCRIPT_STORAGE_BACKEND, "TRANSCRIPT_STORAGE_" - ) - self.headers = { - "X-Banana-API-Key": banana_api_key, - "X-Banana-Model-Key": banana_model_key, - } - - async def _transcript(self, data: AudioFile): - async with httpx.AsyncClient() as client: - print(f"Uploading audio {data.path.name} to S3") - url = await self._upload_file(data.path) - - print(f"Try to transcribe audio {data.path.name}") - request_data = { - "audio_url": url, - "audio_ext": data.path.suffix[1:], - "timestamp": float(round(data.timestamp, 2)), - } - response = await retry(client.post)( - self.transcript_url, - json=request_data, - headers=self.headers, - timeout=self.timeout, - ) - - print(f"Transcript response: {response.status_code} {response.content}") - response.raise_for_status() - result = response.json() - transcript = Transcript( - text=result["text"], - words=[ - Word(text=word["text"], start=word["start"], end=word["end"]) - for word in result["words"] - ], - ) - - # 
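Both `AudioTranscriptAutoProcessor` and `AudioDiarizationAutoProcessor` now share the same pattern: `__new__` looks up a backend class in a registry, imports it lazily, and forwards prefixed settings as constructor kwargs. A stripped-down toy of the dispatch (names illustrative, lazy import and settings scan omitted):

```python
class AutoBackend:
    _registry = {}

    @classmethod
    def register(cls, name, kclass):
        cls._registry[name] = kclass

    def __new__(cls, name=None, **kwargs):
        name = name or "whisper"  # stands in for settings.TRANSCRIPT_BACKEND
        # instantiating the concrete class here means callers always get
        # the selected backend, never the Auto wrapper itself
        return cls._registry[name](**kwargs)


class WhisperBackend:
    def __init__(self, **kwargs):
        self.config = kwargs


AutoBackend.register("whisper", WhisperBackend)

backend = AutoBackend(model_size="small")
print(type(backend).__name__, backend.config)  # WhisperBackend {'model_size': 'small'}
```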
remove audio file from S3 - await self._delete_file(data.path) - - return transcript - - @retry - async def _upload_file(self, path: Path) -> str: - upload_result = await self.storage.put_file(path.name, open(path, "rb")) - return upload_result.url - - @retry - async def _delete_file(self, path: Path): - await self.storage.delete_file(path.name) - return True - - -AudioTranscriptAutoProcessor.register("banana", AudioTranscriptBananaProcessor) diff --git a/server/reflector/processors/audio_transcript_modal.py b/server/reflector/processors/audio_transcript_modal.py index 201ed9d4..0ca4710f 100644 --- a/server/reflector/processors/audio_transcript_modal.py +++ b/server/reflector/processors/audio_transcript_modal.py @@ -41,6 +41,7 @@ class AudioTranscriptModalProcessor(AudioTranscriptProcessor): timeout=self.timeout, headers=self.headers, params=json_payload, + follow_redirects=True, ) self.logger.debug( @@ -48,10 +49,7 @@ class AudioTranscriptModalProcessor(AudioTranscriptProcessor): ) response.raise_for_status() result = response.json() - text = result["text"][source_language] - text = self.filter_profanity(text) transcript = Transcript( - text=text, words=[ Word( text=word["text"], diff --git a/server/reflector/processors/audio_transcript_whisper.py b/server/reflector/processors/audio_transcript_whisper.py index e3bd595b..cd96e01a 100644 --- a/server/reflector/processors/audio_transcript_whisper.py +++ b/server/reflector/processors/audio_transcript_whisper.py @@ -30,7 +30,6 @@ class AudioTranscriptWhisperProcessor(AudioTranscriptProcessor): ts = data.timestamp for segment in segments: - transcript.text += segment.text for word in segment.words: transcript.words.append( Word( diff --git a/server/reflector/processors/audio_waveform_processor.py b/server/reflector/processors/audio_waveform_processor.py new file mode 100644 index 00000000..f1a24ffd --- /dev/null +++ b/server/reflector/processors/audio_waveform_processor.py @@ -0,0 +1,36 @@ +import json +from pathlib import Path + +from reflector.processors.base import Processor +from reflector.processors.types import TitleSummary +from reflector.utils.audio_waveform import get_audio_waveform + + +class AudioWaveformProcessor(Processor): + """ + Write the waveform for the final audio + """ + + INPUT_TYPE = TitleSummary + + def __init__(self, audio_path: Path | str, waveform_path: str, **kwargs): + super().__init__(**kwargs) + if isinstance(audio_path, str): + audio_path = Path(audio_path) + if audio_path.suffix not in (".mp3", ".wav"): + raise ValueError("Only mp3 and wav files are supported") + self.audio_path = audio_path + self.waveform_path = waveform_path + + async def _flush(self): + self.waveform_path.parent.mkdir(parents=True, exist_ok=True) + self.logger.info("Waveform Processing Started") + waveform = get_audio_waveform(path=self.audio_path, segments_count=255) + + with open(self.waveform_path, "w") as fd: + json.dump(waveform, fd) + self.logger.info("Waveform Processing Finished") + await self.emit(waveform, name="waveform") + + async def _push(_self, _data): + return diff --git a/server/reflector/processors/base.py b/server/reflector/processors/base.py index 6771e11e..00f0223b 100644 --- a/server/reflector/processors/base.py +++ b/server/reflector/processors/base.py @@ -14,7 +14,42 @@ class PipelineEvent(BaseModel): data: Any -class Processor: +class Emitter: + def __init__(self, **kwargs): + self._callbacks = {} + + # register callbacks from kwargs (on_*) + for key, value in kwargs.items(): + if key.startswith("on_"): + 
self.on(value, name=key[3:]) + + def on(self, callback, name="default"): + """ + Register a callback to be called when data is emitted + """ + # ensure callback is asynchronous + if not asyncio.iscoroutinefunction(callback): + raise ValueError("Callback must be a coroutine function") + if name not in self._callbacks: + self._callbacks[name] = [] + self._callbacks[name].append(callback) + + def off(self, callback, name="default"): + """ + Unregister a callback to be called when data is emitted + """ + if name not in self._callbacks: + return + self._callbacks[name].remove(callback) + + async def emit(self, data, name="default"): + if name not in self._callbacks: + return + for callback in self._callbacks[name]: + await callback(data) + + +class Processor(Emitter): INPUT_TYPE: type = None OUTPUT_TYPE: type = None @@ -59,7 +94,8 @@ class Processor: ["processor"], ) - def __init__(self, callback=None, custom_logger=None): + def __init__(self, callback=None, custom_logger=None, **kwargs): + super().__init__(**kwargs) self.name = name = self.__class__.__name__ self.m_processor = self.m_processor.labels(name) self.m_processor_call = self.m_processor_call.labels(name) @@ -70,9 +106,11 @@ class Processor: self.m_processor_flush_success = self.m_processor_flush_success.labels(name) self.m_processor_flush_failure = self.m_processor_flush_failure.labels(name) self._processors = [] - self._callbacks = [] + + # register callbacks if callback: self.on(callback) + self.uid = uuid4().hex self.flushed = False self.logger = (custom_logger or logger).bind(processor=self.__class__.__name__) @@ -100,21 +138,6 @@ class Processor: """ self._processors.remove(processor) - def on(self, callback): - """ - Register a callback to be called when data is emitted - """ - # ensure callback is asynchronous - if not asyncio.iscoroutinefunction(callback): - raise ValueError("Callback must be a coroutine function") - self._callbacks.append(callback) - - def off(self, callback): - """ - Unregister a callback to be called when data is emitted - """ - self._callbacks.remove(callback) - def get_pref(self, key: str, default: Any = None): """ Get a preference from the pipeline prefs @@ -123,15 +146,16 @@ class Processor: return self.pipeline.get_pref(key, default) return default - async def emit(self, data): - if self.pipeline: - await self.pipeline.emit( - PipelineEvent(processor=self.name, uid=self.uid, data=data) - ) - for callback in self._callbacks: - await callback(data) - for processor in self._processors: - await processor.push(data) + async def emit(self, data, name="default"): + if name == "default": + if self.pipeline: + await self.pipeline.emit( + PipelineEvent(processor=self.name, uid=self.uid, data=data) + ) + await super().emit(data, name=name) + if name == "default": + for processor in self._processors: + await processor.push(data) async def push(self, data): """ @@ -254,11 +278,11 @@ class ThreadedProcessor(Processor): def disconnect(self, processor: Processor): self.processor.disconnect(processor) - def on(self, callback): - self.processor.on(callback) + def on(self, callback, name="default"): + self.processor.on(callback, name=name) - def off(self, callback): - self.processor.off(callback) + def off(self, callback, name="default"): + self.processor.off(callback, name=name) def describe(self, level=0): super().describe(level) @@ -290,12 +314,12 @@ class BroadcastProcessor(Processor): processor.set_pipeline(pipeline) async def _push(self, data): - for processor in self.processors: - await processor.push(data) + 
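The new `Emitter` base registers any `on_<name>` constructor kwarg as a named callback, and `emit(data, name=...)` fans out to that channel only. A condensed, runnable restatement of that behavior (the coroutine check and `off()` are omitted for brevity):

```python
import asyncio


class Emitter:
    def __init__(self, **kwargs):
        self._callbacks = {}
        # register callbacks from kwargs (on_*)
        for key, value in kwargs.items():
            if key.startswith("on_"):
                self.on(value, name=key[3:])

    def on(self, callback, name="default"):
        self._callbacks.setdefault(name, []).append(callback)

    async def emit(self, data, name="default"):
        for callback in self._callbacks.get(name, []):
            await callback(data)


async def on_duration(value):
    print("duration:", value)


async def main():
    emitter = Emitter(on_duration=on_duration)
    await emitter.emit(12.5, name="duration")  # -> "duration: 12.5"
    await emitter.emit("ignored")              # no "default" callbacks registered


asyncio.run(main())
```

This is how `AudioFileWriterProcessor(..., on_duration=...)` and `AudioWaveformProcessor(..., on_waveform=...)` receive their side-channel events without interfering with the default processor-to-processor data flow.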
coros = [processor.push(data) for processor in self.processors] + await asyncio.gather(*coros) async def _flush(self): - for processor in self.processors: - await processor.flush() + coros = [processor.flush() for processor in self.processors] + await asyncio.gather(*coros) def connect(self, processor: Processor): for processor in self.processors: @@ -305,13 +329,13 @@ class BroadcastProcessor(Processor): for processor in self.processors: processor.disconnect(processor) - def on(self, callback): + def on(self, callback, name="default"): for processor in self.processors: - processor.on(callback) + processor.on(callback, name=name) - def off(self, callback): + def off(self, callback, name="default"): for processor in self.processors: - processor.off(callback) + processor.off(callback, name=name) def describe(self, level=0): super().describe(level) @@ -333,6 +357,7 @@ class Pipeline(Processor): self.logger.info("Pipeline created") self.processors = processors + self.options = None self.prefs = {} for processor in processors: diff --git a/server/reflector/processors/transcript_liner.py b/server/reflector/processors/transcript_liner.py index c1aa14a0..b4e7b5e3 100644 --- a/server/reflector/processors/transcript_liner.py +++ b/server/reflector/processors/transcript_liner.py @@ -36,7 +36,6 @@ class TranscriptLinerProcessor(Processor): # cut to the next . partial = Transcript(words=[]) for word in self.transcript.words[:]: - partial.text += word.text partial.words.append(word) if not self.is_sentence_terminated(word.text): continue diff --git a/server/reflector/processors/transcript_translator.py b/server/reflector/processors/transcript_translator.py index 77b8f5be..fbb07164 100644 --- a/server/reflector/processors/transcript_translator.py +++ b/server/reflector/processors/transcript_translator.py @@ -16,6 +16,7 @@ class TranscriptTranslatorProcessor(Processor): def __init__(self, **kwargs): super().__init__(**kwargs) + self.transcript = None self.translate_url = settings.TRANSLATE_URL self.timeout = settings.TRANSLATE_TIMEOUT self.headers = {"Authorization": f"Bearer {settings.LLM_MODAL_API_KEY}"} @@ -50,6 +51,7 @@ class TranscriptTranslatorProcessor(Processor): headers=self.headers, params=json_payload, timeout=self.timeout, + follow_redirects=True, ) response.raise_for_status() result = response.json()["text"] diff --git a/server/reflector/processors/types.py b/server/reflector/processors/types.py index e867becf..cedb23f9 100644 --- a/server/reflector/processors/types.py +++ b/server/reflector/processors/types.py @@ -1,8 +1,16 @@ import io +import re import tempfile from pathlib import Path +from profanityfilter import ProfanityFilter from pydantic import BaseModel, PrivateAttr +from reflector.redis_cache import redis_cache + +PUNC_RE = re.compile(r"[.;:?!…]") + +profanity_filter = ProfanityFilter() +profanity_filter.set_censor("*") class AudioFile(BaseModel): @@ -43,13 +51,34 @@ class Word(BaseModel): text: str start: float end: float + speaker: int = 0 + + +class TranscriptSegment(BaseModel): + text: str + start: float + end: float + speaker: int = 0 class Transcript(BaseModel): - text: str = "" translation: str | None = None words: list[Word] = None + @property + def raw_text(self): + # Uncensored text + return "".join([word.text for word in self.words]) + + @redis_cache(prefix="profanity", duration=3600 * 24 * 7) + def _get_censored_text(self, text: str): + return profanity_filter.censor(text).strip() + + @property + def text(self): + # Censored text + return 
self._get_censored_text(self.raw_text) + @property def human_timestamp(self): minutes = int(self.timestamp / 60) @@ -74,7 +103,6 @@ class Transcript(BaseModel): self.words = other.words else: self.words.extend(other.words) - self.text += other.text def add_offset(self, offset: float): for word in self.words: @@ -87,6 +115,51 @@ class Transcript(BaseModel): ] return Transcript(text=self.text, translation=self.translation, words=words) + def as_segments(self) -> list[TranscriptSegment]: + # from a list of word, create a list of segments + # join the word that are less than 2 seconds apart + # but separate if the speaker changes, or if the punctuation is a . , ; : ? ! + segments = [] + current_segment = None + MAX_SEGMENT_LENGTH = 120 + + for word in self.words: + if current_segment is None: + current_segment = TranscriptSegment( + text=word.text, + start=word.start, + end=word.end, + speaker=word.speaker, + ) + continue + + # If the word is attach to another speaker, push the current segment + # and start a new one + if word.speaker != current_segment.speaker: + segments.append(current_segment) + current_segment = TranscriptSegment( + text=word.text, + start=word.start, + end=word.end, + speaker=word.speaker, + ) + continue + + # if the word is the end of a sentence, and we have enough content, + # add the word to the current segment and push it + current_segment.text += word.text + current_segment.end = word.end + + have_punc = PUNC_RE.search(word.text) + if have_punc and (len(current_segment.text) > MAX_SEGMENT_LENGTH): + segments.append(current_segment) + current_segment = None + + if current_segment: + segments.append(current_segment) + + return segments + class TitleSummary(BaseModel): title: str @@ -103,6 +176,10 @@ class TitleSummary(BaseModel): return f"{minutes:02d}:{seconds:02d}.{milliseconds:03d}" +class TitleSummaryWithId(TitleSummary): + id: str + + class FinalLongSummary(BaseModel): long_summary: str duration: float @@ -318,3 +395,8 @@ class TranslationLanguages(BaseModel): def is_supported(self, lang_id: str) -> bool: return lang_id in self.supported_languages + + +class AudioDiarizationInput(BaseModel): + audio_url: str + topics: list[TitleSummaryWithId] diff --git a/server/reflector/redis_cache.py b/server/reflector/redis_cache.py new file mode 100644 index 00000000..c31471cf --- /dev/null +++ b/server/reflector/redis_cache.py @@ -0,0 +1,50 @@ +import functools +import json + +import redis +from reflector.settings import settings + +redis_clients = {} + + +def get_redis_client(db=0): + """ + Get a Redis client for the specified database. + """ + if db not in redis_clients: + redis_clients[db] = redis.StrictRedis( + host=settings.REDIS_HOST, + port=settings.REDIS_PORT, + db=db, + ) + return redis_clients[db] + + +def redis_cache(prefix="cache", duration=3600, db=settings.REDIS_CACHE_DB, argidx=1): + """ + Cache the result of a function in Redis. 
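Usage sketch for the `redis_cache` decorator (assumes a local Redis instance; the example class and method are hypothetical): the string argument at position `argidx` becomes the cache key suffix, so repeated calls with the same text skip the computation.

```python
from reflector.redis_cache import redis_cache


class Demo:
    @redis_cache(prefix="profanity", duration=60, argidx=1)
    def censor(self, text: str) -> str:
        print("computing (not cached)")
        return text.replace("damn", "d***")


d = Demo()
print(d.censor("well damn"))  # computes, stored under key "profanity:well damn"
print(d.censor("well damn"))  # served from Redis, no "computing" print
```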
+ """ + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + # Check if the first argument is a string + if len(args) < (argidx + 1) or not isinstance(args[argidx], str): + return func(*args, **kwargs) + + # Compute the cache key based on the arguments and prefix + cache_key = prefix + ":" + args[argidx] + redis_client = get_redis_client(db=db) + cached_result = redis_client.get(cache_key) + + if cached_result: + return json.loads(cached_result.decode("utf-8")) + + # If the result is not cached, call the original function + result = func(*args, **kwargs) + redis_client.setex(cache_key, duration, json.dumps(result)) + return result + + return wrapper + + return decorator diff --git a/server/reflector/settings.py b/server/reflector/settings.py index e61c6d96..d0ddc91a 100644 --- a/server/reflector/settings.py +++ b/server/reflector/settings.py @@ -2,7 +2,11 @@ from pydantic_settings import BaseSettings, SettingsConfigDict class Settings(BaseSettings): - model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8") + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + extra="ignore", + ) OPENMP_KMP_DUPLICATE_LIB_OK: bool = False @@ -37,7 +41,7 @@ class Settings(BaseSettings): AUDIO_BUFFER_SIZE: int = 256 * 960 # Audio Transcription - # backends: whisper, banana, modal + # backends: whisper, modal TRANSCRIPT_BACKEND: str = "whisper" TRANSCRIPT_URL: str | None = None TRANSCRIPT_TIMEOUT: int = 90 @@ -46,24 +50,20 @@ class Settings(BaseSettings): TRANSLATE_URL: str | None = None TRANSLATE_TIMEOUT: int = 90 - # Audio transcription banana.dev configuration - TRANSCRIPT_BANANA_API_KEY: str | None = None - TRANSCRIPT_BANANA_MODEL_KEY: str | None = None - # Audio transcription modal.com configuration TRANSCRIPT_MODAL_API_KEY: str | None = None # Audio transcription storage - TRANSCRIPT_STORAGE_BACKEND: str = "aws" + TRANSCRIPT_STORAGE_BACKEND: str | None = None # Storage configuration for AWS - TRANSCRIPT_STORAGE_AWS_BUCKET_NAME: str = "reflector-bucket/chunks" + TRANSCRIPT_STORAGE_AWS_BUCKET_NAME: str = "reflector-bucket" TRANSCRIPT_STORAGE_AWS_REGION: str = "us-east-1" TRANSCRIPT_STORAGE_AWS_ACCESS_KEY_ID: str | None = None TRANSCRIPT_STORAGE_AWS_SECRET_ACCESS_KEY: str | None = None # LLM - # available backend: openai, banana, modal, oobabooga + # available backend: openai, modal, oobabooga LLM_BACKEND: str = "oobabooga" # LLM common configuration @@ -78,13 +78,14 @@ class Settings(BaseSettings): LLM_TEMPERATURE: float = 0.7 ZEPHYR_LLM_URL: str | None = None - # LLM Banana configuration - LLM_BANANA_API_KEY: str | None = None - LLM_BANANA_MODEL_KEY: str | None = None - # LLM Modal configuration LLM_MODAL_API_KEY: str | None = None + # Diarization + DIARIZATION_ENABLED: bool = True + DIARIZATION_BACKEND: str = "modal" + DIARIZATION_URL: str | None = None + # Sentry SENTRY_DSN: str | None = None @@ -109,5 +110,26 @@ class Settings(BaseSettings): # Min transcript length to generate topic + summary MIN_TRANSCRIPT_LENGTH: int = 750 + # Celery + CELERY_BROKER_URL: str = "redis://localhost:6379/1" + CELERY_RESULT_BACKEND: str = "redis://localhost:6379/1" + + # Redis + REDIS_HOST: str = "localhost" + REDIS_PORT: int = 6379 + REDIS_CACHE_DB: int = 2 + + # Secret key + SECRET_KEY: str = "changeme-f02f86fd8b3e4fd892c6043e5a298e21" + + # Current hosting/domain + BASE_URL: str = "http://localhost:1250" + + # Profiling + PROFILING: bool = False + + # Healthcheck + HEALTHCHECK_URL: str | None = None + settings = Settings() diff --git 
a/server/reflector/storage/base.py b/server/reflector/storage/base.py index 5cdafdbf..a457ddf8 100644 --- a/server/reflector/storage/base.py +++ b/server/reflector/storage/base.py @@ -1,6 +1,7 @@ +import importlib + from pydantic import BaseModel from reflector.settings import settings -import importlib class FileResult(BaseModel): @@ -17,7 +18,7 @@ class Storage: cls._registry[name] = kclass @classmethod - def get_instance(cls, name, settings_prefix=""): + def get_instance(cls, name: str, settings_prefix: str = ""): if name not in cls._registry: module_name = f"reflector.storage.storage_{name}" importlib.import_module(module_name) @@ -45,3 +46,9 @@ class Storage: async def _delete_file(self, filename: str): raise NotImplementedError + + async def get_file_url(self, filename: str) -> str: + return await self._get_file_url(filename) + + async def _get_file_url(self, filename: str) -> str: + raise NotImplementedError diff --git a/server/reflector/storage/storage_aws.py b/server/reflector/storage/storage_aws.py index 09a9c383..d2313293 100644 --- a/server/reflector/storage/storage_aws.py +++ b/server/reflector/storage/storage_aws.py @@ -1,6 +1,6 @@ import aioboto3 -from reflector.storage.base import Storage, FileResult from reflector.logger import logger +from reflector.storage.base import FileResult, Storage class AwsStorage(Storage): @@ -44,16 +44,18 @@ class AwsStorage(Storage): Body=data, ) + async def _get_file_url(self, filename: str) -> FileResult: + bucket = self.aws_bucket_name + folder = self.aws_folder + s3filename = f"{folder}/{filename}" if folder else filename + async with self.session.client("s3") as client: presigned_url = await client.generate_presigned_url( "get_object", Params={"Bucket": bucket, "Key": s3filename}, ExpiresIn=3600, ) - return FileResult( - filename=filename, - url=presigned_url, - ) + return presigned_url async def _delete_file(self, filename: str): bucket = self.aws_bucket_name diff --git a/server/reflector/tools/start_post_main_live_pipeline.py b/server/reflector/tools/start_post_main_live_pipeline.py new file mode 100644 index 00000000..859f03a4 --- /dev/null +++ b/server/reflector/tools/start_post_main_live_pipeline.py @@ -0,0 +1,14 @@ +import argparse + +from reflector.app import celery_app # noqa +from reflector.pipelines.main_live_pipeline import task_pipeline_main_post + +parser = argparse.ArgumentParser() +parser.add_argument("transcript_id", type=str) +parser.add_argument("--delay", action="store_true") +args = parser.parse_args() + +if args.delay: + task_pipeline_main_post.delay(args.transcript_id) +else: + task_pipeline_main_post(args.transcript_id) diff --git a/server/reflector/views/_range_requests_response.py b/server/reflector/views/_range_requests_response.py index f0c628e9..2fac632d 100644 --- a/server/reflector/views/_range_requests_response.py +++ b/server/reflector/views/_range_requests_response.py @@ -1,7 +1,7 @@ import os from typing import BinaryIO -from fastapi import HTTPException, Request, status +from fastapi import HTTPException, Request, Response, status from fastapi.responses import StreamingResponse @@ -57,6 +57,9 @@ def range_requests_response( ), } + if request.method == "HEAD": + return Response(headers=headers) + if content_disposition: headers["Content-Disposition"] = content_disposition diff --git a/server/reflector/views/rtc_offer.py b/server/reflector/views/rtc_offer.py index 5662d989..386ada9c 100644 --- a/server/reflector/views/rtc_offer.py +++ b/server/reflector/views/rtc_offer.py @@ -1,7 +1,5 @@ import asyncio 
-from enum import StrEnum -from json import dumps, loads -from pathlib import Path +from json import loads import av from aiortc import MediaStreamTrack, RTCPeerConnection, RTCSessionDescription @@ -10,25 +8,7 @@ from prometheus_client import Gauge from pydantic import BaseModel from reflector.events import subscribers_shutdown from reflector.logger import logger -from reflector.processors import ( - AudioChunkerProcessor, - AudioFileWriterProcessor, - AudioMergeProcessor, - AudioTranscriptAutoProcessor, - FinalLongSummary, - FinalShortSummary, - Pipeline, - TitleSummary, - Transcript, - TranscriptFinalLongSummaryProcessor, - TranscriptFinalShortSummaryProcessor, - TranscriptFinalTitleProcessor, - TranscriptLinerProcessor, - TranscriptTopicDetectorProcessor, - TranscriptTranslatorProcessor, -) -from reflector.processors.base import BroadcastProcessor -from reflector.processors.types import FinalTitle +from reflector.pipelines.runner import PipelineRunner sessions = [] router = APIRouter() @@ -38,7 +18,7 @@ m_rtc_sessions = Gauge("rtc_sessions", "Number of active RTC sessions") class TranscriptionContext(object): def __init__(self, logger): self.logger = logger - self.pipeline = None + self.pipeline_runner = None self.data_channel = None self.status = "idle" self.topics = [] @@ -60,7 +40,7 @@ class AudioStreamTrack(MediaStreamTrack): ctx = self.ctx frame = await self.track.recv() try: - await ctx.pipeline.push(frame) + ctx.pipeline_runner.push(frame) except Exception as e: ctx.logger.error("Pipeline error", error=e) return frame @@ -71,27 +51,10 @@ class RtcOffer(BaseModel): type: str -class StrValue(BaseModel): - value: str - - -class PipelineEvent(StrEnum): - TRANSCRIPT = "TRANSCRIPT" - TOPIC = "TOPIC" - FINAL_LONG_SUMMARY = "FINAL_LONG_SUMMARY" - STATUS = "STATUS" - FINAL_SHORT_SUMMARY = "FINAL_SHORT_SUMMARY" - FINAL_TITLE = "FINAL_TITLE" - - async def rtc_offer_base( params: RtcOffer, request: Request, - event_callback=None, - event_callback_args=None, - audio_filename: Path | None = None, - source_language: str = "en", - target_language: str = "en", + pipeline_runner: PipelineRunner, ): # build an rtc session offer = RTCSessionDescription(sdp=params.sdp, type=params.type) @@ -101,146 +64,10 @@ async def rtc_offer_base( clientid = f"{peername[0]}:{peername[1]}" ctx = TranscriptionContext(logger=logger.bind(client=clientid)) - async def update_status(status: str): - changed = ctx.status != status - if changed: - ctx.status = status - if event_callback: - await event_callback( - event=PipelineEvent.STATUS, - args=event_callback_args, - data=StrValue(value=status), - ) - - # build pipeline callback - async def on_transcript(transcript: Transcript): - ctx.logger.info("Transcript", transcript=transcript) - - # send to RTC - if ctx.data_channel.readyState == "open": - result = { - "cmd": "SHOW_TRANSCRIPTION", - "text": transcript.text, - } - ctx.data_channel.send(dumps(result)) - - # send to callback (eg. 
websocket) - if event_callback: - await event_callback( - event=PipelineEvent.TRANSCRIPT, - args=event_callback_args, - data=transcript, - ) - - async def on_topic(topic: TitleSummary): - # FIXME: make it incremental with the frontend, not send everything - ctx.logger.info("Topic", topic=topic) - ctx.topics.append( - { - "title": topic.title, - "timestamp": topic.timestamp, - "transcript": topic.transcript.text, - "desc": topic.summary, - } - ) - - # send to RTC - if ctx.data_channel.readyState == "open": - result = {"cmd": "UPDATE_TOPICS", "topics": ctx.topics} - ctx.data_channel.send(dumps(result)) - - # send to callback (eg. websocket) - if event_callback: - await event_callback( - event=PipelineEvent.TOPIC, args=event_callback_args, data=topic - ) - - async def on_final_short_summary(summary: FinalShortSummary): - ctx.logger.info("FinalShortSummary", final_short_summary=summary) - - # send to RTC - if ctx.data_channel.readyState == "open": - result = { - "cmd": "DISPLAY_FINAL_SHORT_SUMMARY", - "summary": summary.short_summary, - "duration": summary.duration, - } - ctx.data_channel.send(dumps(result)) - - # send to callback (eg. websocket) - if event_callback: - await event_callback( - event=PipelineEvent.FINAL_SHORT_SUMMARY, - args=event_callback_args, - data=summary, - ) - - async def on_final_long_summary(summary: FinalLongSummary): - ctx.logger.info("FinalLongSummary", final_summary=summary) - - # send to RTC - if ctx.data_channel.readyState == "open": - result = { - "cmd": "DISPLAY_FINAL_LONG_SUMMARY", - "summary": summary.long_summary, - "duration": summary.duration, - } - ctx.data_channel.send(dumps(result)) - - # send to callback (eg. websocket) - if event_callback: - await event_callback( - event=PipelineEvent.FINAL_LONG_SUMMARY, - args=event_callback_args, - data=summary, - ) - - async def on_final_title(title: FinalTitle): - ctx.logger.info("FinalTitle", final_title=title) - - # send to RTC - if ctx.data_channel.readyState == "open": - result = {"cmd": "DISPLAY_FINAL_TITLE", "title": title.title} - ctx.data_channel.send(dumps(result)) - - # send to callback (eg. websocket) - if event_callback: - await event_callback( - event=PipelineEvent.FINAL_TITLE, - args=event_callback_args, - data=title, - ) - - # create a context for the whole rtc transaction - # add a customised logger to the context - processors = [] - if audio_filename is not None: - processors += [AudioFileWriterProcessor(path=audio_filename)] - processors += [ - AudioChunkerProcessor(), - AudioMergeProcessor(), - AudioTranscriptAutoProcessor.as_threaded(), - TranscriptLinerProcessor(), - TranscriptTranslatorProcessor.as_threaded(callback=on_transcript), - TranscriptTopicDetectorProcessor.as_threaded(callback=on_topic), - BroadcastProcessor( - processors=[ - TranscriptFinalTitleProcessor.as_threaded(callback=on_final_title), - TranscriptFinalLongSummaryProcessor.as_threaded( - callback=on_final_long_summary - ), - TranscriptFinalShortSummaryProcessor.as_threaded( - callback=on_final_short_summary - ), - ] - ), - ] - ctx.pipeline = Pipeline(*processors) - ctx.pipeline.set_pref("audio:source_language", source_language) - ctx.pipeline.set_pref("audio:target_language", target_language) - # handle RTC peer connection pc = RTCPeerConnection() + ctx.pipeline_runner = pipeline_runner + ctx.pipeline_runner.start() async def flush_pipeline_and_quit(close=True): # may be called twice @@ -249,12 +76,10 @@ async def rtc_offer_base( # - when we receive the close event, we do nothing. # 2. 
or the client close the connection # and there is nothing to do because it is already closed - await update_status("processing") - await ctx.pipeline.flush() + ctx.pipeline_runner.flush() if close: ctx.logger.debug("Closing peer connection") await pc.close() - await update_status("ended") if pc in sessions: sessions.remove(pc) m_rtc_sessions.dec() @@ -287,7 +112,6 @@ async def rtc_offer_base( def on_track(track): ctx.logger.info(f"Track {track.kind} received") pc.addTrack(AudioStreamTrack(ctx, track)) - asyncio.get_event_loop().create_task(update_status("recording")) await pc.setRemoteDescription(offer) @@ -308,8 +132,3 @@ async def rtc_clean_sessions(_): logger.debug(f"Closing session {pc}") await pc.close() sessions.clear() - - -@router.post("/offer") -async def rtc_offer(params: RtcOffer, request: Request): - return await rtc_offer_base(params, request) diff --git a/server/reflector/views/transcripts.py b/server/reflector/views/transcripts.py index a9e017c4..171e04d7 100644 --- a/server/reflector/views/transcripts.py +++ b/server/reflector/views/transcripts.py @@ -1,213 +1,33 @@ -import json -from datetime import datetime -from pathlib import Path -from typing import Annotated, Optional -from uuid import uuid4 +from datetime import datetime, timedelta +from typing import Annotated, Literal, Optional import reflector.auth as auth -from fastapi import ( - APIRouter, - Depends, - HTTPException, - Request, - WebSocket, - WebSocketDisconnect, -) -from fastapi_pagination import Page, paginate +from fastapi import APIRouter, Depends, HTTPException +from fastapi_pagination import Page +from fastapi_pagination.ext.databases import paginate +from jose import jwt from pydantic import BaseModel, Field -from reflector.db import database, transcripts -from reflector.logger import logger +from reflector.db.transcripts import ( + TranscriptParticipant, + TranscriptTopic, + transcripts_controller, +) +from reflector.processors.types import Transcript as ProcessorTranscript +from reflector.processors.types import Word from reflector.settings import settings -from reflector.utils.audio_waveform import get_audio_waveform -from starlette.concurrency import run_in_threadpool - -from ._range_requests_response import range_requests_response -from .rtc_offer import PipelineEvent, RtcOffer, rtc_offer_base router = APIRouter() -# ============================================================== -# Models to move to a database, but required for the API to work -# ============================================================== +ALGORITHM = "HS256" +DOWNLOAD_EXPIRE_MINUTES = 60 -def generate_uuid4(): - return str(uuid4()) - - -def generate_transcript_name(): - now = datetime.utcnow() - return f"Transcript {now.strftime('%Y-%m-%d %H:%M:%S')}" - - -class AudioWaveform(BaseModel): - data: list[float] - - -class TranscriptText(BaseModel): - text: str - translation: str | None - - -class TranscriptTopic(BaseModel): - id: str = Field(default_factory=generate_uuid4) - title: str - summary: str - transcript: str - timestamp: float - - -class TranscriptFinalShortSummary(BaseModel): - short_summary: str - - -class TranscriptFinalLongSummary(BaseModel): - long_summary: str - - -class TranscriptFinalTitle(BaseModel): - title: str - - -class TranscriptEvent(BaseModel): - event: str - data: dict - - -class Transcript(BaseModel): - id: str = Field(default_factory=generate_uuid4) - user_id: str | None = None - name: str = Field(default_factory=generate_transcript_name) - status: str = "idle" - locked: bool = False - duration: float = 0 
- created_at: datetime = Field(default_factory=datetime.utcnow) - title: str | None = None - short_summary: str | None = None - long_summary: str | None = None - topics: list[TranscriptTopic] = [] - events: list[TranscriptEvent] = [] - source_language: str = "en" - target_language: str = "en" - - def add_event(self, event: str, data: BaseModel) -> TranscriptEvent: - ev = TranscriptEvent(event=event, data=data.model_dump()) - self.events.append(ev) - return ev - - def upsert_topic(self, topic: TranscriptTopic): - existing_topic = next((t for t in self.topics if t.id == topic.id), None) - if existing_topic: - existing_topic.update_from(topic) - else: - self.topics.append(topic) - - def events_dump(self, mode="json"): - return [event.model_dump(mode=mode) for event in self.events] - - def topics_dump(self, mode="json"): - return [topic.model_dump(mode=mode) for topic in self.topics] - - def convert_audio_to_waveform(self, segments_count=256): - fn = self.audio_waveform_filename - if fn.exists(): - return - waveform = get_audio_waveform( - path=self.audio_mp3_filename, segments_count=segments_count - ) - try: - with open(fn, "w") as fd: - json.dump(waveform, fd) - except Exception: - # remove file if anything happen during the write - fn.unlink(missing_ok=True) - raise - return waveform - - def unlink(self): - self.data_path.unlink(missing_ok=True) - - @property - def data_path(self): - return Path(settings.DATA_DIR) / self.id - - @property - def audio_mp3_filename(self): - return self.data_path / "audio.mp3" - - @property - def audio_waveform_filename(self): - return self.data_path / "audio.json" - - @property - def audio_waveform(self): - try: - with open(self.audio_waveform_filename) as fd: - data = json.load(fd) - except json.JSONDecodeError: - # unlink file if it's corrupted - self.audio_waveform_filename.unlink(missing_ok=True) - return None - - return AudioWaveform(data=data) - - -class TranscriptController: - async def get_all(self, user_id: str | None = None) -> list[Transcript]: - query = transcripts.select().where(transcripts.c.user_id == user_id) - results = await database.fetch_all(query) - return results - - async def get_by_id(self, transcript_id: str, **kwargs) -> Transcript | None: - query = transcripts.select().where(transcripts.c.id == transcript_id) - if "user_id" in kwargs: - query = query.where(transcripts.c.user_id == kwargs["user_id"]) - result = await database.fetch_one(query) - if not result: - return None - return Transcript(**result) - - async def add( - self, - name: str, - source_language: str = "en", - target_language: str = "en", - user_id: str | None = None, - ): - transcript = Transcript( - name=name, - source_language=source_language, - target_language=target_language, - user_id=user_id, - ) - query = transcripts.insert().values(**transcript.model_dump()) - await database.execute(query) - return transcript - - async def update(self, transcript: Transcript, values: dict): - query = ( - transcripts.update() - .where(transcripts.c.id == transcript.id) - .values(**values) - ) - await database.execute(query) - for key, value in values.items(): - setattr(transcript, key, value) - - async def remove_by_id( - self, transcript_id: str, user_id: str | None = None - ) -> None: - transcript = await self.get_by_id(transcript_id, user_id=user_id) - if not transcript: - return - if user_id is not None and transcript.user_id != user_id: - return - transcript.unlink() - query = transcripts.delete().where(transcripts.c.id == transcript_id) - await database.execute(query) - - 
-transcripts_controller = TranscriptController() +def create_access_token(data: dict, expires_delta: timedelta): + to_encode = data.copy() + expire = datetime.utcnow() + expires_delta + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt # ============================================================== @@ -217,16 +37,20 @@ transcripts_controller = TranscriptController() class GetTranscript(BaseModel): id: str + user_id: str | None name: str status: str locked: bool - duration: int + duration: float title: str | None short_summary: str | None long_summary: str | None created_at: datetime - source_language: str - target_language: str + share_mode: str = Field("private") + source_language: str | None + target_language: str | None + participants: list[TranscriptParticipant] | None + reviewed: bool class CreateTranscript(BaseModel): @@ -241,6 +65,9 @@ class UpdateTranscript(BaseModel): title: Optional[str] = Field(None) short_summary: Optional[str] = Field(None) long_summary: Optional[str] = Field(None) + share_mode: Optional[Literal["public", "semi-private", "private"]] = Field(None) + participants: Optional[list[TranscriptParticipant]] = Field(None) + reviewed: Optional[bool] = Field(None) class DeletionStatus(BaseModel): @@ -251,11 +78,20 @@ class DeletionStatus(BaseModel): async def transcripts_list( user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], ): + from reflector.db import database + if not user and not settings.PUBLIC_MODE: raise HTTPException(status_code=401, detail="Not authenticated") user_id = user["sub"] if user else None - return paginate(await transcripts_controller.get_all(user_id=user_id)) + return await paginate( + database, + await transcripts_controller.get_all( + user_id=user_id, + order_by="-created_at", + return_query=True, + ), + ) @router.post("/transcripts", response_model=GetTranscript) @@ -277,16 +113,117 @@ async def transcripts_create( # ============================================================== +class GetTranscriptSegmentTopic(BaseModel): + text: str + start: float + speaker: int + + +class GetTranscriptTopic(BaseModel): + id: str + title: str + summary: str + timestamp: float + duration: float | None + transcript: str + segments: list[GetTranscriptSegmentTopic] = [] + + @classmethod + def from_transcript_topic(cls, topic: TranscriptTopic): + if not topic.words: + # In previous version, words were missing + # Just output a segment with speaker 0 + text = topic.transcript + duration = None + segments = [ + GetTranscriptSegmentTopic( + text=topic.transcript, + start=topic.timestamp, + speaker=0, + ) + ] + else: + # New versions include words + transcript = ProcessorTranscript(words=topic.words) + text = transcript.text + duration = transcript.duration + segments = [ + GetTranscriptSegmentTopic( + text=segment.text, + start=segment.start, + speaker=segment.speaker, + ) + for segment in transcript.as_segments() + ] + return cls( + id=topic.id, + title=topic.title, + summary=topic.summary, + timestamp=topic.timestamp, + transcript=text, + segments=segments, + duration=duration, + ) + + +class GetTranscriptTopicWithWords(GetTranscriptTopic): + words: list[Word] = [] + + @classmethod + def from_transcript_topic(cls, topic: TranscriptTopic): + instance = super().from_transcript_topic(topic) + if topic.words: + instance.words = topic.words + return instance + + +class SpeakerWords(BaseModel): + speaker: int + words: list[Word] + + +class 
GetTranscriptTopicWithWordsPerSpeaker(GetTranscriptTopic): + words_per_speaker: list[SpeakerWords] = [] + + @classmethod + def from_transcript_topic(cls, topic: TranscriptTopic): + instance = super().from_transcript_topic(topic) + if topic.words: + words_per_speakers = [] + # group words by speaker + words = [] + for word in topic.words: + if words and words[-1].speaker != word.speaker: + words_per_speakers.append( + SpeakerWords( + speaker=words[-1].speaker, + words=words, + ) + ) + words = [] + words.append(word) + if words: + words_per_speakers.append( + SpeakerWords( + speaker=words[-1].speaker, + words=words, + ) + ) + + instance.words_per_speaker = words_per_speakers + + return instance + + @router.get("/transcripts/{transcript_id}", response_model=GetTranscript) async def transcript_get( transcript_id: str, user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], ): user_id = user["sub"] if user else None - transcript = await transcripts_controller.get_by_id(transcript_id, user_id=user_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - return transcript + return await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) @router.patch("/transcripts/{transcript_id}", response_model=GetTranscript) @@ -299,32 +236,7 @@ async def transcript_update( transcript = await transcripts_controller.get_by_id(transcript_id, user_id=user_id) if not transcript: raise HTTPException(status_code=404, detail="Transcript not found") - values = {"events": []} - if info.name is not None: - values["name"] = info.name - if info.locked is not None: - values["locked"] = info.locked - if info.long_summary is not None: - values["long_summary"] = info.long_summary - for transcript_event in transcript.events: - if transcript_event["event"] == PipelineEvent.FINAL_LONG_SUMMARY: - transcript_event["long_summary"] = info.long_summary - break - values["events"].extend(transcript.events) - if info.short_summary is not None: - values["short_summary"] = info.short_summary - for transcript_event in transcript.events: - if transcript_event["event"] == PipelineEvent.FINAL_SHORT_SUMMARY: - transcript_event["short_summary"] = info.short_summary - break - values["events"].extend(transcript.events) - if info.title is not None: - values["title"] = info.title - for transcript_event in transcript.events: - if transcript_event["event"] == PipelineEvent.FINAL_TITLE: - transcript_event["title"] = info.title - break - values["events"].extend(transcript.events) + values = info.dict(exclude_unset=True) await transcripts_controller.update(transcript, values) return transcript @@ -342,255 +254,63 @@ async def transcript_delete( return DeletionStatus(status="ok") -@router.get("/transcripts/{transcript_id}/audio/mp3") -async def transcript_get_audio_mp3( - request: Request, - transcript_id: str, - user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], -): - user_id = user["sub"] if user else None - transcript = await transcripts_controller.get_by_id(transcript_id, user_id=user_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - - if not transcript.audio_mp3_filename.exists(): - raise HTTPException(status_code=404, detail="Audio not found") - - truncated_id = str(transcript.id).split("-")[0] - filename = f"recording_{truncated_id}.mp3" - - return range_requests_response( - request, - transcript.audio_mp3_filename, - content_type="audio/mpeg", - content_disposition=f"attachment; 
filename={filename}", - ) - - -@router.get("/transcripts/{transcript_id}/audio/waveform") -async def transcript_get_audio_waveform( - transcript_id: str, - user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], -) -> AudioWaveform: - user_id = user["sub"] if user else None - transcript = await transcripts_controller.get_by_id(transcript_id, user_id=user_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - - if not transcript.audio_mp3_filename.exists(): - raise HTTPException(status_code=404, detail="Audio not found") - - await run_in_threadpool(transcript.convert_audio_to_waveform) - - return transcript.audio_waveform - - -@router.get("/transcripts/{transcript_id}/topics", response_model=list[TranscriptTopic]) +@router.get( + "/transcripts/{transcript_id}/topics", + response_model=list[GetTranscriptTopic], +) async def transcript_get_topics( transcript_id: str, user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], ): user_id = user["sub"] if user else None - transcript = await transcripts_controller.get_by_id(transcript_id, user_id=user_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - return transcript.topics + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + # convert to GetTranscriptTopic + return [ + GetTranscriptTopic.from_transcript_topic(topic) for topic in transcript.topics + ] -@router.get("/transcripts/{transcript_id}/events") -async def transcript_get_websocket_events(transcript_id: str): - pass - - -# ============================================================== -# Websocket Manager -# ============================================================== - - -class WebsocketManager: - def __init__(self): - self.active_connections = {} - - async def connect(self, transcript_id: str, websocket: WebSocket): - await websocket.accept() - if transcript_id not in self.active_connections: - self.active_connections[transcript_id] = [] - self.active_connections[transcript_id].append(websocket) - - def disconnect(self, transcript_id: str, websocket: WebSocket): - if transcript_id not in self.active_connections: - return - self.active_connections[transcript_id].remove(websocket) - if not self.active_connections[transcript_id]: - del self.active_connections[transcript_id] - - async def send_json(self, transcript_id: str, message): - if transcript_id not in self.active_connections: - return - for connection in self.active_connections[transcript_id][:]: - try: - await connection.send_json(message) - except Exception: - self.active_connections[transcript_id].remove(connection) - - -ws_manager = WebsocketManager() - - -@router.websocket("/transcripts/{transcript_id}/events") -async def transcript_events_websocket( +@router.get( + "/transcripts/{transcript_id}/topics/with-words", + response_model=list[GetTranscriptTopicWithWords], +) +async def transcript_get_topics_with_words( transcript_id: str, - websocket: WebSocket, - # user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], -): - # user_id = user["sub"] if user else None - transcript = await transcripts_controller.get_by_id(transcript_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - - await ws_manager.connect(transcript_id, websocket) - - # on first connection, send all events - for event in transcript.events: - await websocket.send_json(event.model_dump(mode="json")) - - # XXX if 
transcript is final (locked=True and status=ended) - # XXX send a final event to the client and close the connection - - # endless loop to wait for new events - try: - while True: - await websocket.receive() - except (RuntimeError, WebSocketDisconnect): - ws_manager.disconnect(transcript_id, websocket) - - -# ============================================================== -# Web RTC -# ============================================================== - - -async def handle_rtc_event(event: PipelineEvent, args, data): - # OFC the current implementation is not good, - # but it's just a POC before persistence. It won't query the - # transcript from the database for each event. - # print(f"Event: {event}", args, data) - transcript_id = args - transcript = await transcripts_controller.get_by_id(transcript_id) - if not transcript: - return - - # event send to websocket clients may not be the same as the event - # received from the pipeline. For example, the pipeline will send - # a TRANSCRIPT event with all words, but this is not what we want - # to send to the websocket client. - - # FIXME don't do copy - if event == PipelineEvent.TRANSCRIPT: - resp = transcript.add_event( - event=event, - data=TranscriptText(text=data.text, translation=data.translation), - ) - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - }, - ) - - elif event == PipelineEvent.TOPIC: - topic = TranscriptTopic( - title=data.title, - summary=data.summary, - transcript=data.transcript.text, - timestamp=data.timestamp, - ) - resp = transcript.add_event(event=event, data=topic) - transcript.upsert_topic(topic) - - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - "topics": transcript.topics_dump(), - }, - ) - - elif event == PipelineEvent.FINAL_TITLE: - final_title = TranscriptFinalTitle(title=data.title) - resp = transcript.add_event(event=event, data=final_title) - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - "title": final_title.title, - }, - ) - - elif event == PipelineEvent.FINAL_LONG_SUMMARY: - final_long_summary = TranscriptFinalLongSummary(long_summary=data.long_summary) - resp = transcript.add_event(event=event, data=final_long_summary) - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - "long_summary": final_long_summary.long_summary, - }, - ) - - elif event == PipelineEvent.FINAL_SHORT_SUMMARY: - final_short_summary = TranscriptFinalShortSummary( - short_summary=data.short_summary - ) - resp = transcript.add_event(event=event, data=final_short_summary) - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - "short_summary": final_short_summary.short_summary, - }, - ) - - elif event == PipelineEvent.STATUS: - resp = transcript.add_event(event=event, data=data) - await transcripts_controller.update( - transcript, - { - "events": transcript.events_dump(), - "status": data.value, - }, - ) - - else: - logger.warning(f"Unknown event: {event}") - return - - # transmit to websocket clients - await ws_manager.send_json(transcript_id, resp.model_dump(mode="json")) - - -@router.post("/transcripts/{transcript_id}/record/webrtc") -async def transcript_record_webrtc( - transcript_id: str, - params: RtcOffer, - request: Request, user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], ): user_id = user["sub"] if user else None - transcript = await 
transcripts_controller.get_by_id(transcript_id, user_id=user_id) - if not transcript: - raise HTTPException(status_code=404, detail="Transcript not found") - - if transcript.locked: - raise HTTPException(status_code=400, detail="Transcript is locked") - - # FIXME do not allow multiple recording at the same time - return await rtc_offer_base( - params, - request, - event_callback=handle_rtc_event, - event_callback_args=transcript_id, - audio_filename=transcript.audio_mp3_filename, - source_language=transcript.source_language, - target_language=transcript.target_language, + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id ) + + # convert to GetTranscriptTopicWithWords + return [ + GetTranscriptTopicWithWords.from_transcript_topic(topic) + for topic in transcript.topics + ] + + +@router.get( + "/transcripts/{transcript_id}/topics/{topic_id}/words-per-speaker", + response_model=GetTranscriptTopicWithWordsPerSpeaker, +) +async def transcript_get_topics_with_words_per_speaker( + transcript_id: str, + topic_id: str, + user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], +): + user_id = user["sub"] if user else None + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + # get the topic from the transcript + topic = next((t for t in transcript.topics if t.id == topic_id), None) + if not topic: + raise HTTPException(status_code=404, detail="Topic not found") + + # convert to GetTranscriptTopicWithWordsPerSpeaker + return GetTranscriptTopicWithWordsPerSpeaker.from_transcript_topic(topic) diff --git a/server/reflector/views/transcripts_audio.py b/server/reflector/views/transcripts_audio.py new file mode 100644 index 00000000..7b3655fb --- /dev/null +++ b/server/reflector/views/transcripts_audio.py @@ -0,0 +1,115 @@ +""" +Transcripts audio related endpoints +=================================== + +""" +from typing import Annotated, Optional + +import httpx +import reflector.auth as auth +from fastapi import APIRouter, Depends, HTTPException, Request, Response, status +from jose import jwt +from reflector.db.transcripts import AudioWaveform, transcripts_controller +from reflector.settings import settings +from reflector.views.transcripts import ALGORITHM + +from ._range_requests_response import range_requests_response + +router = APIRouter() + + +@router.get( + "/transcripts/{transcript_id}/audio/mp3", + operation_id="transcript_get_audio_mp3", +) +@router.head( + "/transcripts/{transcript_id}/audio/mp3", + operation_id="transcript_head_audio_mp3", +) +async def transcript_get_audio_mp3( + request: Request, + transcript_id: str, + user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], + token: str | None = None, +): + user_id = user["sub"] if user else None + if not user_id and token: + unauthorized_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[ALGORITHM]) + user_id: str = payload.get("sub") + except jwt.JWTError: + raise unauthorized_exception + + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + if transcript.audio_location == "storage": + # proxy S3 file, to prevent issue with CORS + url = await transcript.get_audio_url() + headers = {} + + copy_headers = ["range", "accept-encoding"] + for header in copy_headers: + if header in 
request.headers:
+                headers[header] = request.headers[header]
+
+        async with httpx.AsyncClient() as client:
+            resp = await client.request(request.method, url, headers=headers)
+            return Response(
+                content=resp.content,
+                status_code=resp.status_code,
+                headers=resp.headers,
+            )
+
+    if not transcript.audio_mp3_filename.exists():
+        raise HTTPException(status_code=500, detail="Audio not found")
+
+    truncated_id = str(transcript.id).split("-")[0]
+    filename = f"recording_{truncated_id}.mp3"
+
+    return range_requests_response(
+        request,
+        transcript.audio_mp3_filename,
+        content_type="audio/mpeg",
+        content_disposition=f"attachment; filename={filename}",
+    )
+
+
+@router.get("/transcripts/{transcript_id}/audio/waveform")
+async def transcript_get_audio_waveform(
+    transcript_id: str,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+) -> AudioWaveform:
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    if not transcript.audio_waveform_filename.exists():
+        raise HTTPException(status_code=404, detail="Audio not found")
+
+    return transcript.audio_waveform
diff --git a/server/reflector/views/transcripts_participants.py b/server/reflector/views/transcripts_participants.py
new file mode 100644
index 00000000..fd08405c
--- /dev/null
+++ b/server/reflector/views/transcripts_participants.py
@@ -0,0 +1,143 @@
+"""
+Transcript participants API endpoints
+=====================================
+
+"""
+from typing import Annotated, Optional
+
+import reflector.auth as auth
+from fastapi import APIRouter, Depends, HTTPException
+from pydantic import BaseModel, ConfigDict, Field
+from reflector.db.transcripts import TranscriptParticipant, transcripts_controller
+from reflector.views.types import DeletionStatus
+
+router = APIRouter()
+
+
+class Participant(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+    id: str
+    speaker: int | None
+    name: str
+
+
+class CreateParticipant(BaseModel):
+    speaker: Optional[int] = Field(None)
+    name: str
+
+
+class UpdateParticipant(BaseModel):
+    speaker: Optional[int] = Field(None)
+    name: Optional[str] = Field(None)
+
+
+@router.get("/transcripts/{transcript_id}/participants")
+async def transcript_get_participants(
+    transcript_id: str,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+) -> list[Participant]:
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    return [
+        Participant.model_validate(participant)
+        for participant in transcript.participants
+    ]
+
+
+@router.post("/transcripts/{transcript_id}/participants")
+async def transcript_add_participant(
+    transcript_id: str,
+    participant: CreateParticipant,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+) -> Participant:
+    user_id = user["sub"] if user else None
+    transcript = await 
transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + # ensure the speaker is unique + if participant.speaker is not None: + for p in transcript.participants: + if p.speaker == participant.speaker: + raise HTTPException( + status_code=400, + detail="Speaker already assigned", + ) + + obj = await transcripts_controller.upsert_participant( + transcript, TranscriptParticipant(**participant.dict()) + ) + return Participant.model_validate(obj) + + +@router.get("/transcripts/{transcript_id}/participants/{participant_id}") +async def transcript_get_participant( + transcript_id: str, + participant_id: str, + user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], +) -> Participant: + user_id = user["sub"] if user else None + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + for p in transcript.participants: + if p.id == participant_id: + return Participant.model_validate(p) + + raise HTTPException(status_code=404, detail="Participant not found") + + +@router.patch("/transcripts/{transcript_id}/participants/{participant_id}") +async def transcript_update_participant( + transcript_id: str, + participant_id: str, + participant: UpdateParticipant, + user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], +) -> Participant: + user_id = user["sub"] if user else None + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + + # ensure the speaker is unique + for p in transcript.participants: + if p.speaker == participant.speaker and p.id != participant_id: + raise HTTPException( + status_code=400, + detail="Speaker already assigned", + ) + + # find the participant + obj = None + for p in transcript.participants: + if p.id == participant_id: + obj = p + break + + if not obj: + raise HTTPException(status_code=404, detail="Participant not found") + + # update participant but just the fields that are set + fields = participant.dict(exclude_unset=True) + obj = obj.copy(update=fields) + + await transcripts_controller.upsert_participant(transcript, obj) + return Participant.model_validate(obj) + + +@router.delete("/transcripts/{transcript_id}/participants/{participant_id}") +async def transcript_delete_participant( + transcript_id: str, + participant_id: str, + user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)], +) -> DeletionStatus: + user_id = user["sub"] if user else None + transcript = await transcripts_controller.get_by_id_for_http( + transcript_id, user_id=user_id + ) + await transcripts_controller.delete_participant(transcript, participant_id) + return DeletionStatus(status="ok") diff --git a/server/reflector/views/transcripts_speaker.py b/server/reflector/views/transcripts_speaker.py new file mode 100644 index 00000000..0bddad5e --- /dev/null +++ b/server/reflector/views/transcripts_speaker.py @@ -0,0 +1,170 @@ +""" +Reassign speakers in a transcript +================================= + +""" +from typing import Annotated, Optional + +import reflector.auth as auth +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel, Field +from reflector.db.transcripts import transcripts_controller + +router = APIRouter() + + +class SpeakerAssignment(BaseModel): + speaker: Optional[int] = Field(None, ge=0) + participant: Optional[str] = Field(None) + timestamp_from: float + timestamp_to: float + + +class SpeakerAssignmentStatus(BaseModel): + status: str + + +class SpeakerMerge(BaseModel): 
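+    # request body for the merge endpoint below: every word currently tagged
+    # with speaker_from is re-tagged as speaker_to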
+    speaker_from: int
+    speaker_to: int
+
+
+@router.patch("/transcripts/{transcript_id}/speaker/assign")
+async def transcript_assign_speaker(
+    transcript_id: str,
+    assignment: SpeakerAssignment,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+) -> SpeakerAssignmentStatus:
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    if not transcript:
+        raise HTTPException(status_code=404, detail="Transcript not found")
+
+    if assignment.speaker is None and assignment.participant is None:
+        raise HTTPException(
+            status_code=400,
+            detail="Either speaker or participant must be provided",
+        )
+
+    if assignment.speaker is not None and assignment.participant is not None:
+        raise HTTPException(
+            status_code=400,
+            detail="Only one of speaker or participant must be provided",
+        )
+
+    # resolve the speaker: either given directly, or looked up via participant
+    if assignment.speaker is not None:
+        speaker = assignment.speaker
+
+    elif assignment.participant is not None:
+        participant = next(
+            (
+                participant
+                for participant in transcript.participants
+                if participant.id == assignment.participant
+            ),
+            None,
+        )
+        if not participant:
+            raise HTTPException(
+                status_code=404,
+                detail="Participant not found",
+            )
+
+        # if the participant does not have a speaker, create one
+        if participant.speaker is None:
+            participant.speaker = transcript.find_empty_speaker()
+            await transcripts_controller.upsert_participant(transcript, participant)
+
+        speaker = participant.speaker
+
+    # reassign speakers from words in the transcript
+    ts_from = assignment.timestamp_from
+    ts_to = assignment.timestamp_to
+    changed_topics = []
+    for topic in transcript.topics:
+        changed = False
+        for word in topic.words:
+            if ts_from <= word.start <= ts_to:
+                word.speaker = speaker
+                changed = True
+        if changed:
+            changed_topics.append(topic)
+
+    # batch changes
+    for topic in changed_topics:
+        transcript.upsert_topic(topic)
+    await transcripts_controller.update(
+        transcript,
+        {
+            "topics": transcript.topics_dump(),
+        },
+    )
+
+    return SpeakerAssignmentStatus(status="ok")
+
+
+@router.patch("/transcripts/{transcript_id}/speaker/merge")
+async def transcript_merge_speaker(
+    transcript_id: str,
+    merge: SpeakerMerge,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+) -> SpeakerAssignmentStatus:
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    if not transcript:
+        raise HTTPException(status_code=404, detail="Transcript not found")
+
+    # ensure the two speakers are not each assigned to a different participant
+    participant_from = next(
+        (
+            participant
+            for participant in transcript.participants
+            if participant.speaker == merge.speaker_from
+        ),
+        None,
+    )
+    participant_to = next(
+        (
+            participant
+            for participant in transcript.participants
+            if participant.speaker == merge.speaker_to
+        ),
+        None,
+    )
+    if participant_from and participant_to:
+        raise HTTPException(
+            status_code=400,
+            detail="Both speakers are assigned to participants",
+        )
+
+    # reassign speakers from words in the transcript
+    speaker_from = merge.speaker_from
+    speaker_to = merge.speaker_to
+    changed_topics = []
+    for topic in transcript.topics:
+        changed = False
+        for word in topic.words:
+            if word.speaker == speaker_from:
+                word.speaker = speaker_to
+                changed = True
+        if changed:
+            changed_topics.append(topic)
+
+    # batch changes
+    for 
topic in changed_topics:
+        transcript.upsert_topic(topic)
+    await transcripts_controller.update(
+        transcript,
+        {
+            "topics": transcript.topics_dump(),
+        },
+    )
+
+    return SpeakerAssignmentStatus(status="ok")
diff --git a/server/reflector/views/transcripts_upload.py b/server/reflector/views/transcripts_upload.py
new file mode 100644
index 00000000..96b82d78
--- /dev/null
+++ b/server/reflector/views/transcripts_upload.py
@@ -0,0 +1,79 @@
+from typing import Annotated, Optional
+
+import av
+import reflector.auth as auth
+from fastapi import APIRouter, Depends, HTTPException, UploadFile
+from pydantic import BaseModel
+from reflector.db.transcripts import transcripts_controller
+from reflector.pipelines.main_live_pipeline import task_pipeline_upload
+
+router = APIRouter()
+
+
+class UploadStatus(BaseModel):
+    status: str
+
+
+@router.post("/transcripts/{transcript_id}/record/upload")
+async def transcript_record_upload(
+    transcript_id: str,
+    file: UploadFile,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    if transcript.locked:
+        raise HTTPException(status_code=400, detail="Transcript is locked")
+
+    # ensure there is no other upload in the directory (searching data_path/upload.*)
+    if any(transcript.data_path.glob("upload.*")):
+        raise HTTPException(
+            status_code=400, detail="There is already an upload in progress"
+        )
+
+    # build the upload filename in the transcript folder, keeping the extension
+    extension = file.filename.split(".")[-1]
+    upload_filename = transcript.data_path / f"upload.{extension}"
+    upload_filename.parent.mkdir(parents=True, exist_ok=True)
+
+    # ensure the file cursor is back at the beginning
+    await file.seek(0)
+
+    # stream the upload to disk in chunks
+    try:
+        with open(upload_filename, "wb") as f:
+            while True:
+                chunk = await file.read(16384)
+                if not chunk:
+                    break
+                f.write(chunk)
+    except Exception:
+        upload_filename.unlink()
+        raise
+
+    # ensure the file has an audio stream, using av
+    # XXX Trying to do this check on the initial UploadFile object is not
+    # possible, unclear why: UploadFile.file has no name.
+    # Trying to pass UploadFile.file with format=extension does not work
+    # either: it never detects an audio stream...
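+    # note: probing the saved file on disk (rather than the in-memory upload)
+    # lets av detect the container format from a real path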
+    container = av.open(upload_filename.as_posix())
+    try:
+        if not len(container.streams.audio):
+            raise HTTPException(status_code=400, detail="File has no audio stream")
+    except Exception:
+        # delete the uploaded file
+        upload_filename.unlink()
+        raise
+    finally:
+        container.close()
+
+    # set the status to "uploaded"
+    await transcripts_controller.update(transcript, {"status": "uploaded"})
+
+    # launch a background task to process the file
+    task_pipeline_upload.delay(transcript_id=transcript_id)
+
+    return UploadStatus(status="ok")
diff --git a/server/reflector/views/transcripts_webrtc.py b/server/reflector/views/transcripts_webrtc.py
new file mode 100644
index 00000000..af451411
--- /dev/null
+++ b/server/reflector/views/transcripts_webrtc.py
@@ -0,0 +1,37 @@
+from typing import Annotated, Optional
+
+import reflector.auth as auth
+from fastapi import APIRouter, Depends, HTTPException, Request
+from reflector.db.transcripts import transcripts_controller
+
+from .rtc_offer import RtcOffer, rtc_offer_base
+
+router = APIRouter()
+
+
+@router.post("/transcripts/{transcript_id}/record/webrtc")
+async def transcript_record_webrtc(
+    transcript_id: str,
+    params: RtcOffer,
+    request: Request,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+    user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id_for_http(
+        transcript_id, user_id=user_id
+    )
+
+    if transcript.locked:
+        raise HTTPException(status_code=400, detail="Transcript is locked")
+
+    # create a pipeline runner
+    from reflector.pipelines.main_live_pipeline import PipelineMainLive
+
+    pipeline_runner = PipelineMainLive(transcript_id=transcript_id)
+
+    # FIXME do not allow multiple recordings at the same time
+    return await rtc_offer_base(
+        params,
+        request,
+        pipeline_runner=pipeline_runner,
+    )
diff --git a/server/reflector/views/transcripts_websocket.py b/server/reflector/views/transcripts_websocket.py
new file mode 100644
index 00000000..65571aab
--- /dev/null
+++ b/server/reflector/views/transcripts_websocket.py
@@ -0,0 +1,53 @@
+"""
+Transcripts websocket API
+=========================
+
+"""
+from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
+from reflector.db.transcripts import transcripts_controller
+from reflector.ws_manager import get_ws_manager
+
+router = APIRouter()
+
+
+@router.get("/transcripts/{transcript_id}/events")
+async def transcript_get_websocket_events(transcript_id: str):
+    pass
+
+
+@router.websocket("/transcripts/{transcript_id}/events")
+async def transcript_events_websocket(
+    transcript_id: str,
+    websocket: WebSocket,
+    # user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+    # user_id = user["sub"] if user else None
+    transcript = await transcripts_controller.get_by_id(transcript_id)
+    if not transcript:
+        raise HTTPException(status_code=404, detail="Transcript not found")
+
+    # connect to websocket manager
+    # use ts:transcript_id as room id
+    room_id = f"ts:{transcript_id}"
+    ws_manager = get_ws_manager()
+    await ws_manager.add_user_to_room(room_id, websocket)
+
+    try:
+        # on first connection, send all events only to the current user
+        for event in transcript.events:
+            # for now, do not send TRANSCRIPT or STATUS events - these are
+            # live events that do not need to be replayed; keep the rest
+            name = event.event
+            if name in ("TRANSCRIPT", "STATUS"):
+                continue
+            await websocket.send_json(event.model_dump(mode="json"))
+
+        # XXX if transcript is final 
(locked=True and status=ended)
+        # XXX send a final event to the client and close the connection
+
+        # endless loop to wait for new events
+        # (there is no command system yet)
+        while True:
+            await websocket.receive()
+    except (RuntimeError, WebSocketDisconnect):
+        await ws_manager.remove_user_from_room(room_id, websocket)
diff --git a/server/reflector/views/types.py b/server/reflector/views/types.py
new file mode 100644
index 00000000..70361131
--- /dev/null
+++ b/server/reflector/views/types.py
@@ -0,0 +1,5 @@
+from pydantic import BaseModel
+
+
+class DeletionStatus(BaseModel):
+    status: str
diff --git a/server/reflector/worker/app.py b/server/reflector/worker/app.py
new file mode 100644
index 00000000..5f1e4e74
--- /dev/null
+++ b/server/reflector/worker/app.py
@@ -0,0 +1,32 @@
+import celery
+import structlog
+from celery import Celery
+from reflector.settings import settings
+
+logger = structlog.get_logger(__name__)
+if celery.current_app.main != "default":
+    logger.info(f"Celery already configured ({celery.current_app})")
+    app = celery.current_app
+else:
+    app = Celery(__name__)
+    app.conf.broker_url = settings.CELERY_BROKER_URL
+    app.conf.result_backend = settings.CELERY_RESULT_BACKEND
+    app.conf.broker_connection_retry_on_startup = True
+    app.autodiscover_tasks(
+        [
+            "reflector.pipelines.main_live_pipeline",
+            "reflector.worker.healthcheck",
+        ]
+    )
+
+    # crontab
+    app.conf.beat_schedule = {}
+
+    if settings.HEALTHCHECK_URL:
+        app.conf.beat_schedule["healthcheck_ping"] = {
+            "task": "reflector.worker.healthcheck.healthcheck_ping",
+            "schedule": 60.0 * 10,
+        }
+        logger.info("Healthcheck enabled", url=settings.HEALTHCHECK_URL)
+    else:
+        logger.warning("Healthcheck disabled, no url configured")
diff --git a/server/reflector/worker/healthcheck.py b/server/reflector/worker/healthcheck.py
new file mode 100644
index 00000000..e4ce6bc3
--- /dev/null
+++ b/server/reflector/worker/healthcheck.py
@@ -0,0 +1,18 @@
+import httpx
+import structlog
+from celery import shared_task
+from reflector.settings import settings
+
+logger = structlog.get_logger(__name__)
+
+
+@shared_task
+def healthcheck_ping():
+    url = settings.HEALTHCHECK_URL
+    if not url:
+        return
+    try:
+        logger.info("pinging healthcheck url", url=url)
+        httpx.get(url, timeout=10)
+    except Exception as e:
+        logger.error("healthcheck_ping", error=str(e))
diff --git a/server/reflector/ws_manager.py b/server/reflector/ws_manager.py
new file mode 100644
index 00000000..a84e3361
--- /dev/null
+++ b/server/reflector/ws_manager.py
@@ -0,0 +1,126 @@
+"""
+Websocket manager
+=================
+
+This module contains the WebsocketManager class, which is responsible for
+managing websockets and handling websocket connections.
+
+It uses the RedisPubSubManager class to subscribe to Redis channels and
+broadcast messages to all connected websockets.
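+
+A minimal usage sketch (illustrative only; the room id and payload below are
+made-up values):
+
+    ws_manager = get_ws_manager()
+    await ws_manager.add_user_to_room("ts:<transcript-id>", websocket)
+    await ws_manager.send_json("ts:<transcript-id>", {"event": "TOPIC", "data": {}})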
+""" + +import asyncio +import json +import threading + +import redis.asyncio as redis +from fastapi import WebSocket +from reflector.settings import settings + + +class RedisPubSubManager: + def __init__(self, host="localhost", port=6379): + self.redis_host = host + self.redis_port = port + self.redis_connection = None + self.pubsub = None + + async def get_redis_connection(self) -> redis.Redis: + return redis.Redis( + host=self.redis_host, + port=self.redis_port, + auto_close_connection_pool=False, + ) + + async def connect(self) -> None: + if self.redis_connection is not None: + return + self.redis_connection = await self.get_redis_connection() + self.pubsub = self.redis_connection.pubsub() + + async def disconnect(self) -> None: + if self.redis_connection is None: + return + await self.redis_connection.close() + self.redis_connection = None + + async def send_json(self, room_id: str, message: str) -> None: + if not self.redis_connection: + await self.connect() + message = json.dumps(message) + await self.redis_connection.publish(room_id, message) + + async def subscribe(self, room_id: str) -> redis.Redis: + await self.pubsub.subscribe(room_id) + return self.pubsub + + async def unsubscribe(self, room_id: str) -> None: + await self.pubsub.unsubscribe(room_id) + + +class WebsocketManager: + def __init__(self, pubsub_client: RedisPubSubManager = None): + self.rooms: dict = {} + self.pubsub_client = pubsub_client + + async def add_user_to_room(self, room_id: str, websocket: WebSocket) -> None: + await websocket.accept() + + if room_id in self.rooms: + self.rooms[room_id].append(websocket) + else: + self.rooms[room_id] = [websocket] + + await self.pubsub_client.connect() + pubsub_subscriber = await self.pubsub_client.subscribe(room_id) + asyncio.create_task(self._pubsub_data_reader(pubsub_subscriber)) + + async def send_json(self, room_id: str, message: dict) -> None: + await self.pubsub_client.send_json(room_id, message) + + async def remove_user_from_room(self, room_id: str, websocket: WebSocket) -> None: + self.rooms[room_id].remove(websocket) + + if len(self.rooms[room_id]) == 0: + del self.rooms[room_id] + await self.pubsub_client.unsubscribe(room_id) + + async def _pubsub_data_reader(self, pubsub_subscriber): + while True: + message = await pubsub_subscriber.get_message( + ignore_subscribe_messages=True + ) + if message is not None: + room_id = message["channel"].decode("utf-8") + all_sockets = self.rooms[room_id] + for socket in all_sockets: + data = json.loads(message["data"].decode("utf-8")) + await socket.send_json(data) + + +def get_ws_manager() -> WebsocketManager: + """ + Returns the WebsocketManager instance for managing websockets. + + This function initializes and returns the WebsocketManager instance, + which is responsible for managing websockets and handling websocket + connections. + + Returns: + WebsocketManager: The initialized WebsocketManager instance. + + Raises: + ImportError: If the 'reflector.settings' module cannot be imported. + RedisConnectionError: If there is an error connecting to the Redis server. 
+ """ + local = threading.local() + if hasattr(local, "ws_manager"): + return local.ws_manager + + pubsub_client = RedisPubSubManager( + host=settings.REDIS_HOST, + port=settings.REDIS_PORT, + ) + ws_manager = WebsocketManager(pubsub_client=pubsub_client) + local.ws_manager = ws_manager + return ws_manager diff --git a/server/runserver.sh b/server/runserver.sh index 38eafe09..31cce123 100755 --- a/server/runserver.sh +++ b/server/runserver.sh @@ -4,4 +4,13 @@ if [ -f "/venv/bin/activate" ]; then source /venv/bin/activate fi alembic upgrade head -python -m reflector.app + +if [ "${ENTRYPOINT}" = "server" ]; then + python -m reflector.app +elif [ "${ENTRYPOINT}" = "worker" ]; then + celery -A reflector.worker.app worker --loglevel=info +elif [ "${ENTRYPOINT}" = "beat" ]; then + celery -A reflector.worker.app beat --loglevel=info +else + echo "Unknown command" +fi diff --git a/server/tests/conftest.py b/server/tests/conftest.py index 76b56abf..d25801bf 100644 --- a/server/tests/conftest.py +++ b/server/tests/conftest.py @@ -1,4 +1,5 @@ from unittest.mock import patch +from tempfile import NamedTemporaryFile import pytest @@ -7,7 +8,6 @@ import pytest @pytest.mark.asyncio async def setup_database(): from reflector.settings import settings - from tempfile import NamedTemporaryFile with NamedTemporaryFile() as f: settings.DATABASE_URL = f"sqlite:///{f.name}" @@ -36,7 +36,13 @@ def dummy_processors(): mock_long_summary.return_value = "LLM LONG SUMMARY" mock_short_summary.return_value = {"short_summary": "LLM SHORT SUMMARY"} mock_translate.return_value = "Bonjour le monde" - yield mock_translate, mock_topic, mock_title, mock_long_summary, mock_short_summary # noqa + yield ( + mock_translate, + mock_topic, + mock_title, + mock_long_summary, + mock_short_summary, + ) # noqa @pytest.fixture @@ -45,28 +51,50 @@ async def dummy_transcript(): from reflector.processors.types import AudioFile, Transcript, Word class TestAudioTranscriptProcessor(AudioTranscriptProcessor): - async def _transcript(self, data: AudioFile): - source_language = self.get_pref("audio:source_language", "en") - print("transcripting", source_language) - print("pipeline", self.pipeline) - print("prefs", self.pipeline.prefs) + _time_idx = 0 + async def _transcript(self, data: AudioFile): + i = self._time_idx + self._time_idx += 2 return Transcript( text="Hello world.", words=[ - Word(start=0.0, end=1.0, text="Hello"), - Word(start=1.0, end=2.0, text=" world."), + Word(start=i, end=i + 1, text="Hello", speaker=0), + Word(start=i + 1, end=i + 2, text=" world.", speaker=0), ], ) with patch( "reflector.processors.audio_transcript_auto" - ".AudioTranscriptAutoProcessor.get_instance" + ".AudioTranscriptAutoProcessor.__new__" ) as mock_audio: mock_audio.return_value = TestAudioTranscriptProcessor() yield +@pytest.fixture +async def dummy_diarization(): + from reflector.processors.audio_diarization import AudioDiarizationProcessor + + class TestAudioDiarizationProcessor(AudioDiarizationProcessor): + _time_idx = 0 + + async def _diarize(self, data): + i = self._time_idx + self._time_idx += 2 + return [ + {"start": i, "end": i + 1, "speaker": 0}, + {"start": i + 1, "end": i + 2, "speaker": 1}, + ] + + with patch( + "reflector.processors.audio_diarization_auto" + ".AudioDiarizationAutoProcessor.__new__" + ) as mock_audio: + mock_audio.return_value = TestAudioDiarizationProcessor() + yield + + @pytest.fixture async def dummy_llm(): from reflector.llm.base import LLM @@ -81,6 +109,25 @@ async def dummy_llm(): yield +@pytest.fixture +async def 
dummy_storage(): + from reflector.storage.base import Storage + + class DummyStorage(Storage): + async def _put_file(self, *args, **kwargs): + pass + + async def _delete_file(self, *args, **kwargs): + pass + + async def _get_file_url(self, *args, **kwargs): + return "http://fake_server/audio.mp3" + + with patch("reflector.storage.base.Storage.get_instance") as mock_storage: + mock_storage.return_value = DummyStorage() + yield + + @pytest.fixture def nltk(): with patch("reflector.llm.base.LLM.ensure_nltk") as mock_nltk: @@ -98,7 +145,96 @@ def ensure_casing(): @pytest.fixture def sentence_tokenize(): with patch( - "reflector.processors.TranscriptFinalLongSummaryProcessor" ".sentence_tokenize" + "reflector.processors.TranscriptFinalLongSummaryProcessor.sentence_tokenize" ) as mock_sent_tokenize: mock_sent_tokenize.return_value = ["LLM LONG SUMMARY"] yield + + +@pytest.fixture(scope="session") +def celery_enable_logging(): + return True + + +@pytest.fixture(scope="session") +def celery_config(): + with NamedTemporaryFile() as f: + yield { + "broker_url": "memory://", + "result_backend": f"db+sqlite:///{f.name}", + } + + +@pytest.fixture(scope="session") +def celery_includes(): + return ["reflector.pipelines.main_live_pipeline"] + + +@pytest.fixture(scope="session") +def fake_mp3_upload(): + with patch( + "reflector.db.transcripts.TranscriptController.move_mp3_to_storage" + ) as mock_move: + mock_move.return_value = True + yield + + +@pytest.fixture +async def fake_transcript_with_topics(tmpdir): + from reflector.settings import settings + from reflector.app import app + from reflector.views.transcripts import transcripts_controller + from reflector.db.transcripts import TranscriptTopic + from reflector.processors.types import Word + from pathlib import Path + from httpx import AsyncClient + import shutil + + settings.DATA_DIR = Path(tmpdir) + + # create a transcript + ac = AsyncClient(app=app, base_url="http://test/v1") + response = await ac.post("/transcripts", json={"name": "Test audio download"}) + assert response.status_code == 200 + tid = response.json()["id"] + + transcript = await transcripts_controller.get_by_id(tid) + assert transcript is not None + + await transcripts_controller.update(transcript, {"status": "finished"}) + + # manually copy a file at the expected location + audio_filename = transcript.audio_mp3_filename + path = Path(__file__).parent / "records" / "test_mathieu_hello.mp3" + audio_filename.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(path, audio_filename) + + # create some topics + await transcripts_controller.upsert_topic( + transcript, + TranscriptTopic( + title="Topic 1", + summary="Topic 1 summary", + timestamp=0, + transcript="Hello world", + words=[ + Word(text="Hello", start=0, end=1, speaker=0), + Word(text="world", start=1, end=2, speaker=0), + ], + ), + ) + await transcripts_controller.upsert_topic( + transcript, + TranscriptTopic( + title="Topic 2", + summary="Topic 2 summary", + timestamp=2, + transcript="Hello world", + words=[ + Word(text="Hello", start=2, end=3, speaker=0), + Word(text="world", start=3, end=4, speaker=0), + ], + ), + ) + + yield transcript diff --git a/server/tests/test_processor_audio_diarization.py b/server/tests/test_processor_audio_diarization.py new file mode 100644 index 00000000..00935a49 --- /dev/null +++ b/server/tests/test_processor_audio_diarization.py @@ -0,0 +1,140 @@ +import pytest +from unittest import mock + + +@pytest.mark.parametrize( + "name,diarization,expected", + [ + [ + "no overlap", + [ + {"start": 
0.0, "end": 1.0, "speaker": "A"}, + {"start": 1.0, "end": 2.0, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "same speaker", + [ + {"start": 0.0, "end": 1.0, "speaker": "A"}, + {"start": 1.0, "end": 2.0, "speaker": "A"}, + ], + ["A", "A", "A", "A"], + ], + [ + # first segment is removed because it overlap + # with the second segment, and it is smaller + "overlap at 0.5s", + [ + {"start": 0.0, "end": 1.0, "speaker": "A"}, + {"start": 0.5, "end": 2.0, "speaker": "B"}, + ], + ["B", "B", "B", "B"], + ], + [ + "junk segment at 0.5s for 0.2s", + [ + {"start": 0.0, "end": 1.0, "speaker": "A"}, + {"start": 0.5, "end": 0.7, "speaker": "B"}, + {"start": 1, "end": 2.0, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "start without diarization", + [ + {"start": 0.5, "end": 1.0, "speaker": "A"}, + {"start": 1.0, "end": 2.0, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "end missing diarization", + [ + {"start": 0.0, "end": 1.0, "speaker": "A"}, + {"start": 1.0, "end": 1.5, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "continuation of next speaker", + [ + {"start": 0.0, "end": 0.9, "speaker": "A"}, + {"start": 1.5, "end": 2.0, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "continuation of previous speaker", + [ + {"start": 0.0, "end": 0.5, "speaker": "A"}, + {"start": 1.0, "end": 2.0, "speaker": "B"}, + ], + ["A", "A", "B", "B"], + ], + [ + "segment without words", + [ + {"start": 0.0, "end": 1.0, "speaker": "A"}, + {"start": 1.0, "end": 2.0, "speaker": "B"}, + {"start": 2.0, "end": 3.0, "speaker": "X"}, + ], + ["A", "A", "B", "B"], + ], + ], +) +@pytest.mark.asyncio +async def test_processors_audio_diarization(event_loop, name, diarization, expected): + from reflector.processors.audio_diarization import AudioDiarizationProcessor + from reflector.processors.types import ( + TitleSummaryWithId, + Transcript, + Word, + AudioDiarizationInput, + ) + + # create fake topic + topics = [ + TitleSummaryWithId( + id="1", + title="Title1", + summary="Summary1", + timestamp=0.0, + duration=1.0, + transcript=Transcript( + words=[ + Word(text="Word1", start=0.0, end=0.5), + Word(text="word2.", start=0.5, end=1.0), + ] + ), + ), + TitleSummaryWithId( + id="2", + title="Title2", + summary="Summary2", + timestamp=0.0, + duration=1.0, + transcript=Transcript( + words=[ + Word(text="Word3", start=1.0, end=1.5), + Word(text="word4.", start=1.5, end=2.0), + ] + ), + ), + ] + + diarizer = AudioDiarizationProcessor() + with mock.patch.object(diarizer, "_diarize") as mock_diarize: + mock_diarize.return_value = diarization + + data = AudioDiarizationInput( + audio_url="https://example.com/audio.mp3", + topics=topics, + ) + await diarizer._push(data) + + # check that the speaker has been assigned to the words + assert topics[0].transcript.words[0].speaker == expected[0] + assert topics[0].transcript.words[1].speaker == expected[1] + assert topics[1].transcript.words[0].speaker == expected[2] + assert topics[1].transcript.words[1].speaker == expected[3] diff --git a/server/tests/test_processor_transcript_segment.py b/server/tests/test_processor_transcript_segment.py new file mode 100644 index 00000000..6fde0dd1 --- /dev/null +++ b/server/tests/test_processor_transcript_segment.py @@ -0,0 +1,161 @@ +def test_processor_transcript_segment(): + from reflector.processors.types import Transcript, Word + + transcript = Transcript( + words=[ + Word(text=" the", start=5.12, end=5.48, speaker=0), + Word(text=" different", start=5.48, end=5.8, speaker=0), + Word(text=" projects", 
start=5.8, end=6.3, speaker=0), + Word(text=" that", start=6.3, end=6.5, speaker=0), + Word(text=" are", start=6.5, end=6.58, speaker=0), + Word(text=" going", start=6.58, end=6.82, speaker=0), + Word(text=" on", start=6.82, end=7.26, speaker=0), + Word(text=" to", start=7.26, end=7.4, speaker=0), + Word(text=" give", start=7.4, end=7.54, speaker=0), + Word(text=" you", start=7.54, end=7.9, speaker=0), + Word(text=" context", start=7.9, end=8.24, speaker=0), + Word(text=" and", start=8.24, end=8.66, speaker=0), + Word(text=" I", start=8.66, end=8.72, speaker=0), + Word(text=" think", start=8.72, end=8.82, speaker=0), + Word(text=" that's", start=8.82, end=9.04, speaker=0), + Word(text=" what", start=9.04, end=9.12, speaker=0), + Word(text=" we'll", start=9.12, end=9.24, speaker=0), + Word(text=" do", start=9.24, end=9.32, speaker=0), + Word(text=" this", start=9.32, end=9.52, speaker=0), + Word(text=" week.", start=9.52, end=9.76, speaker=0), + Word(text=" Um,", start=10.24, end=10.62, speaker=0), + Word(text=" so,", start=11.36, end=11.94, speaker=0), + Word(text=" um,", start=12.46, end=12.92, speaker=0), + Word(text=" what", start=13.74, end=13.94, speaker=0), + Word(text=" we're", start=13.94, end=14.1, speaker=0), + Word(text=" going", start=14.1, end=14.24, speaker=0), + Word(text=" to", start=14.24, end=14.34, speaker=0), + Word(text=" do", start=14.34, end=14.8, speaker=0), + Word(text=" at", start=14.8, end=14.98, speaker=0), + Word(text=" H", start=14.98, end=15.04, speaker=0), + Word(text=" of", start=15.04, end=15.16, speaker=0), + Word(text=" you,", start=15.16, end=15.26, speaker=0), + Word(text=" maybe.", start=15.28, end=15.34, speaker=0), + Word(text=" you", start=15.36, end=15.52, speaker=0), + Word(text=" can", start=15.52, end=15.62, speaker=0), + Word(text=" introduce", start=15.62, end=15.98, speaker=0), + Word(text=" yourself", start=15.98, end=16.42, speaker=0), + Word(text=" to", start=16.42, end=16.68, speaker=0), + Word(text=" the", start=16.68, end=16.72, speaker=0), + Word(text=" team", start=16.72, end=17.52, speaker=0), + Word(text=" quickly", start=17.87, end=18.65, speaker=0), + Word(text=" and", start=18.65, end=19.63, speaker=0), + Word(text=" Oh,", start=20.91, end=21.55, speaker=0), + Word(text=" this", start=21.67, end=21.83, speaker=0), + Word(text=" is", start=21.83, end=22.17, speaker=0), + Word(text=" a", start=22.17, end=22.35, speaker=0), + Word(text=" reflector", start=22.35, end=22.89, speaker=0), + Word(text=" translating", start=22.89, end=23.33, speaker=0), + Word(text=" into", start=23.33, end=23.73, speaker=0), + Word(text=" French", start=23.73, end=23.95, speaker=0), + Word(text=" for", start=23.95, end=24.15, speaker=0), + Word(text=" me.", start=24.15, end=24.43, speaker=0), + Word(text=" This", start=27.87, end=28.19, speaker=0), + Word(text=" is", start=28.19, end=28.45, speaker=0), + Word(text=" all", start=28.45, end=28.79, speaker=0), + Word(text=" the", start=28.79, end=29.15, speaker=0), + Word(text=" way,", start=29.15, end=29.15, speaker=0), + Word(text=" please,", start=29.53, end=29.59, speaker=0), + Word(text=" please,", start=29.73, end=29.77, speaker=0), + Word(text=" please,", start=29.77, end=29.83, speaker=0), + Word(text=" please.", start=29.83, end=29.97, speaker=0), + Word(text=" Yeah,", start=29.97, end=30.17, speaker=0), + Word(text=" that's", start=30.25, end=30.33, speaker=0), + Word(text=" all", start=30.33, end=30.49, speaker=0), + Word(text=" it's", start=30.49, end=30.69, speaker=0), + Word(text=" right.", 
start=30.69, end=30.69, speaker=0), + Word(text=" Right.", start=30.72, end=30.98, speaker=1), + Word(text=" Yeah,", start=31.56, end=31.72, speaker=2), + Word(text=" that's", start=31.86, end=31.98, speaker=2), + Word(text=" right.", start=31.98, end=32.2, speaker=2), + Word(text=" Because", start=32.38, end=32.46, speaker=0), + Word(text=" I", start=32.46, end=32.58, speaker=0), + Word(text=" thought", start=32.58, end=32.78, speaker=0), + Word(text=" I'd", start=32.78, end=33.0, speaker=0), + Word(text=" be", start=33.0, end=33.02, speaker=0), + Word(text=" able", start=33.02, end=33.18, speaker=0), + Word(text=" to", start=33.18, end=33.34, speaker=0), + Word(text=" pull", start=33.34, end=33.52, speaker=0), + Word(text=" out.", start=33.52, end=33.68, speaker=0), + Word(text=" Yeah,", start=33.7, end=33.9, speaker=0), + Word(text=" that", start=33.9, end=34.02, speaker=0), + Word(text=" was", start=34.02, end=34.24, speaker=0), + Word(text=" the", start=34.24, end=34.34, speaker=0), + Word(text=" one", start=34.34, end=34.44, speaker=0), + Word(text=" before", start=34.44, end=34.7, speaker=0), + Word(text=" that.", start=34.7, end=35.24, speaker=0), + Word(text=" Friends,", start=35.84, end=36.46, speaker=0), + Word(text=" if", start=36.64, end=36.7, speaker=0), + Word(text=" you", start=36.7, end=36.7, speaker=0), + Word(text=" have", start=36.7, end=37.24, speaker=0), + Word(text=" tell", start=37.24, end=37.44, speaker=0), + Word(text=" us", start=37.44, end=37.68, speaker=0), + Word(text=" if", start=37.68, end=37.82, speaker=0), + Word(text=" it's", start=37.82, end=38.04, speaker=0), + Word(text=" good,", start=38.04, end=38.58, speaker=0), + Word(text=" exceptionally", start=38.96, end=39.1, speaker=0), + Word(text=" good", start=39.1, end=39.6, speaker=0), + Word(text=" and", start=39.6, end=39.86, speaker=0), + Word(text=" tell", start=39.86, end=40.0, speaker=0), + Word(text=" us", start=40.0, end=40.06, speaker=0), + Word(text=" when", start=40.06, end=40.2, speaker=0), + Word(text=" it's", start=40.2, end=40.34, speaker=0), + Word(text=" exceptionally", start=40.34, end=40.6, speaker=0), + Word(text=" bad.", start=40.6, end=40.94, speaker=0), + Word(text=" We", start=40.96, end=41.26, speaker=0), + Word(text=" don't", start=41.26, end=41.44, speaker=0), + Word(text=" need", start=41.44, end=41.66, speaker=0), + Word(text=" that", start=41.66, end=41.82, speaker=0), + Word(text=" at", start=41.82, end=41.94, speaker=0), + Word(text=" the", start=41.94, end=41.98, speaker=0), + Word(text=" middle", start=41.98, end=42.18, speaker=0), + Word(text=" of", start=42.18, end=42.36, speaker=0), + Word(text=" age.", start=42.36, end=42.7, speaker=0), + Word(text=" Okay,", start=43.26, end=43.44, speaker=0), + Word(text=" yeah,", start=43.68, end=43.76, speaker=0), + Word(text=" that", start=43.78, end=44.3, speaker=0), + Word(text=" sentence", start=44.3, end=44.72, speaker=0), + Word(text=" right", start=44.72, end=45.1, speaker=0), + Word(text=" before.", start=45.1, end=45.56, speaker=0), + Word(text=" it", start=46.08, end=46.36, speaker=0), + Word(text=" realizing", start=46.36, end=47.0, speaker=0), + Word(text=" that", start=47.0, end=47.28, speaker=0), + Word(text=" I", start=47.28, end=47.28, speaker=0), + Word(text=" was", start=47.28, end=47.64, speaker=0), + Word(text=" saying", start=47.64, end=48.06, speaker=0), + Word(text=" that", start=48.06, end=48.44, speaker=0), + Word(text=" it's", start=48.44, end=48.54, speaker=0), + Word(text=" interesting", start=48.54, 
end=48.78, speaker=0),
+            Word(text=" that", start=48.78, end=48.96, speaker=0),
+            Word(text=" it's", start=48.96, end=49.08, speaker=0),
+            Word(text=" translating", start=49.08, end=49.32, speaker=0),
+            Word(text=" the", start=49.32, end=49.56, speaker=0),
+            Word(text=" French", start=49.56, end=49.76, speaker=0),
+            Word(text=" was", start=49.76, end=50.16, speaker=0),
+            Word(text=" completely", start=50.16, end=50.4, speaker=0),
+            Word(text=" wrong.", start=50.4, end=50.7, speaker=0),
+        ]
+    )
+
+    segments = transcript.as_segments()
+    assert len(segments) == 7
+
+    # check speaker order
+    assert segments[0].speaker == 0
+    assert segments[1].speaker == 0
+    assert segments[2].speaker == 0
+    assert segments[3].speaker == 1
+    assert segments[4].speaker == 2
+    assert segments[5].speaker == 0
+    assert segments[6].speaker == 0
+
+    # check the timing (first entry, and first of the other speakers)
+    assert segments[0].start == 5.12
+    assert segments[3].start == 30.72
+    assert segments[4].start == 31.56
+    assert segments[5].start == 32.38
diff --git a/server/tests/test_retry_decorator.py b/server/tests/test_retry_decorator.py
index 22729eac..c60a490f 100644
--- a/server/tests/test_retry_decorator.py
+++ b/server/tests/test_retry_decorator.py
@@ -1,3 +1,4 @@
+import asyncio
 import pytest
 import httpx
 from reflector.utils.retry import (
@@ -8,6 +9,31 @@ from reflector.utils.retry import (
 )
 
 
+@pytest.mark.asyncio
+async def test_retry_redirect(httpx_mock):
+    async def custom_response(request: httpx.Request):
+        if request.url.path == "/hello":
+            await asyncio.sleep(1)
+            return httpx.Response(
+                status_code=303, headers={"location": "https://test_url/redirected"}
+            )
+        elif request.url.path == "/redirected":
+            return httpx.Response(status_code=200, json={"hello": "world"})
+        else:
+            raise Exception("Unexpected path")
+
+    httpx_mock.add_callback(custom_response)
+    async with httpx.AsyncClient() as client:
+        # the timeout should not be triggered, as the request ends up ok
+        # even though the first request is a 303 and took more than 0.5s
+        resp = await retry(client.get)(
+            "https://test_url/hello",
+            retry_timeout=0.5,
+            follow_redirects=True,
+        )
+        assert resp.json() == {"hello": "world"}
+
+
 @pytest.mark.asyncio
 async def test_retry_httpx(httpx_mock):
     # this code should be force a retry
diff --git a/server/tests/test_transcripts.py b/server/tests/test_transcripts.py
index 800d7a5c..c708d57e 100644
--- a/server/tests/test_transcripts.py
+++ b/server/tests/test_transcripts.py
@@ -196,3 +196,29 @@
 
     response = await ac.get(f"/transcripts/{tid}")
     assert response.status_code == 404
+
+
+@pytest.mark.asyncio
+async def test_transcript_mark_reviewed():
+    from reflector.app import app
+
+    async with AsyncClient(app=app, base_url="http://test/v1") as ac:
+        response = await ac.post("/transcripts", json={"name": "test"})
+        assert response.status_code == 200
+        assert response.json()["name"] == "test"
+        assert response.json()["reviewed"] is False
+
+        tid = response.json()["id"]
+
+        response = await ac.get(f"/transcripts/{tid}")
+        assert response.status_code == 200
+        assert response.json()["name"] == "test"
+        assert response.json()["reviewed"] is False
+
+        response = await ac.patch(f"/transcripts/{tid}", json={"reviewed": True})
+        assert response.status_code == 200
+        assert response.json()["reviewed"] is True
+
+        response = await ac.get(f"/transcripts/{tid}")
+        assert response.status_code == 200
+        assert response.json()["reviewed"] is True
diff --git a/server/tests/test_transcripts_audio_download.py
b/server/tests/test_transcripts_audio_download.py index 79cb25bf..28f83fff 100644 --- a/server/tests/test_transcripts_audio_download.py +++ b/server/tests/test_transcripts_audio_download.py @@ -46,6 +46,34 @@ async def test_transcript_audio_download(fake_transcript, url_suffix, content_ty assert response.status_code == 200 assert response.headers["content-type"] == content_type + # test get 404 + ac = AsyncClient(app=app, base_url="http://test/v1") + response = await ac.get(f"/transcripts/{fake_transcript.id}XXX/audio{url_suffix}") + assert response.status_code == 404 + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "url_suffix,content_type", + [ + ["/mp3", "audio/mpeg"], + ], +) +async def test_transcript_audio_download_head( + fake_transcript, url_suffix, content_type +): + from reflector.app import app + + ac = AsyncClient(app=app, base_url="http://test/v1") + response = await ac.head(f"/transcripts/{fake_transcript.id}/audio{url_suffix}") + assert response.status_code == 200 + assert response.headers["content-type"] == content_type + + # test head 404 + ac = AsyncClient(app=app, base_url="http://test/v1") + response = await ac.head(f"/transcripts/{fake_transcript.id}XXX/audio{url_suffix}") + assert response.status_code == 404 + @pytest.mark.asyncio @pytest.mark.parametrize( @@ -90,15 +118,3 @@ async def test_transcript_audio_download_range_with_seek( assert response.status_code == 206 assert response.headers["content-type"] == content_type assert response.headers["content-range"].startswith("bytes 100-") - - -@pytest.mark.asyncio -async def test_transcript_audio_download_waveform(fake_transcript): - from reflector.app import app - - ac = AsyncClient(app=app, base_url="http://test/v1") - response = await ac.get(f"/transcripts/{fake_transcript.id}/audio/waveform") - assert response.status_code == 200 - assert response.headers["content-type"] == "application/json" - assert isinstance(response.json()["data"], list) - assert len(response.json()["data"]) >= 255 diff --git a/server/tests/test_transcripts_participants.py b/server/tests/test_transcripts_participants.py new file mode 100644 index 00000000..b55b16a8 --- /dev/null +++ b/server/tests/test_transcripts_participants.py @@ -0,0 +1,164 @@ +import pytest +from httpx import AsyncClient + + +@pytest.mark.asyncio +async def test_transcript_participants(): + from reflector.app import app + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + response = await ac.post("/transcripts", json={"name": "test"}) + assert response.status_code == 200 + assert response.json()["participants"] == [] + + # create a participant + transcript_id = response.json()["id"] + response = await ac.post( + f"/transcripts/{transcript_id}/participants", json={"name": "test"} + ) + assert response.status_code == 200 + assert response.json()["id"] is not None + assert response.json()["speaker"] is None + assert response.json()["name"] == "test" + + # create another one with a speaker + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test2", "speaker": 1}, + ) + assert response.status_code == 200 + assert response.json()["id"] is not None + assert response.json()["speaker"] == 1 + assert response.json()["name"] == "test2" + + # get all participants via transcript + response = await ac.get(f"/transcripts/{transcript_id}") + assert response.status_code == 200 + assert len(response.json()["participants"]) == 2 + + # get participants via participants endpoint + response = await 
ac.get(f"/transcripts/{transcript_id}/participants") + assert response.status_code == 200 + assert len(response.json()) == 2 + + +@pytest.mark.asyncio +async def test_transcript_participants_same_speaker(): + from reflector.app import app + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + response = await ac.post("/transcripts", json={"name": "test"}) + assert response.status_code == 200 + assert response.json()["participants"] == [] + transcript_id = response.json()["id"] + + # create a participant + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test", "speaker": 1}, + ) + assert response.status_code == 200 + assert response.json()["speaker"] == 1 + + # create another one with the same speaker + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test2", "speaker": 1}, + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def test_transcript_participants_update_name(): + from reflector.app import app + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + response = await ac.post("/transcripts", json={"name": "test"}) + assert response.status_code == 200 + assert response.json()["participants"] == [] + transcript_id = response.json()["id"] + + # create a participant + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test", "speaker": 1}, + ) + assert response.status_code == 200 + assert response.json()["speaker"] == 1 + + # update the participant + participant_id = response.json()["id"] + response = await ac.patch( + f"/transcripts/{transcript_id}/participants/{participant_id}", + json={"name": "test2"}, + ) + assert response.status_code == 200 + assert response.json()["name"] == "test2" + + # verify the participant was updated + response = await ac.get( + f"/transcripts/{transcript_id}/participants/{participant_id}" + ) + assert response.status_code == 200 + assert response.json()["name"] == "test2" + + # verify the participant was updated in transcript + response = await ac.get(f"/transcripts/{transcript_id}") + assert response.status_code == 200 + assert len(response.json()["participants"]) == 1 + assert response.json()["participants"][0]["name"] == "test2" + + +@pytest.mark.asyncio +async def test_transcript_participants_update_speaker(): + from reflector.app import app + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + response = await ac.post("/transcripts", json={"name": "test"}) + assert response.status_code == 200 + assert response.json()["participants"] == [] + transcript_id = response.json()["id"] + + # create a participant + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test", "speaker": 1}, + ) + assert response.status_code == 200 + participant1_id = response.json()["id"] + + # create another participant + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={"name": "test2", "speaker": 2}, + ) + assert response.status_code == 200 + participant2_id = response.json()["id"] + + # update the participant, refused as speaker is already taken + response = await ac.patch( + f"/transcripts/{transcript_id}/participants/{participant2_id}", + json={"speaker": 1}, + ) + assert response.status_code == 400 + + # delete the participant 1 + response = await ac.delete( + f"/transcripts/{transcript_id}/participants/{participant1_id}" + ) + assert response.status_code == 200 + + # update the participant 2 again, should be 
accepted now
+        response = await ac.patch(
+            f"/transcripts/{transcript_id}/participants/{participant2_id}",
+            json={"speaker": 1},
+        )
+        assert response.status_code == 200
+
+        # ensure participant2 name is still there
+        response = await ac.get(
+            f"/transcripts/{transcript_id}/participants/{participant2_id}"
+        )
+        assert response.status_code == 200
+        assert response.json()["name"] == "test2"
+        assert response.json()["speaker"] == 1
diff --git a/server/tests/test_transcripts_rtc_ws.py b/server/tests/test_transcripts_rtc_ws.py
index 50e74231..c607fe06 100644
--- a/server/tests/test_transcripts_rtc_ws.py
+++ b/server/tests/test_transcripts_rtc_ws.py
@@ -32,7 +32,7 @@ class ThreadedUvicorn:
 
 
 @pytest.fixture
-async def appserver(tmpdir):
+async def appserver(tmpdir, setup_database, celery_session_app, celery_session_worker):
     from reflector.settings import settings
     from reflector.app import app
 
@@ -52,12 +52,23 @@ async def appserver(tmpdir):
     settings.DATA_DIR = DATA_DIR
 
 
+@pytest.fixture(scope="session")
+def celery_includes():
+    return ["reflector.pipelines.main_live_pipeline"]
+
+
+@pytest.mark.usefixtures("setup_database")
+@pytest.mark.usefixtures("celery_session_app")
+@pytest.mark.usefixtures("celery_session_worker")
 @pytest.mark.asyncio
 async def test_transcript_rtc_and_websocket(
     tmpdir,
     dummy_llm,
     dummy_transcript,
     dummy_processors,
+    dummy_diarization,
+    dummy_storage,
+    fake_mp3_upload,
     ensure_casing,
     appserver,
     sentence_tokenize,
@@ -95,6 +106,7 @@ async def test_transcript_rtc_and_websocket(
         print("Test websocket: DISCONNECTED")
 
     websocket_task = asyncio.get_event_loop().create_task(websocket_task())
+    print("Test websocket: TASK CREATED", websocket_task)
 
     # create stream client
     import argparse
@@ -121,14 +133,20 @@ async def test_transcript_rtc_and_websocket(
     # XXX aiortc is long to close the connection
     # instead of waiting a long time, we just send a STOP
     client.channel.send(json.dumps({"cmd": "STOP"}))
-
-    # wait the processing to finish
-    await asyncio.sleep(2)
-
     await client.stop()
 
     # wait the processing to finish
-    await asyncio.sleep(2)
+    timeout = 20
+    for _ in range(timeout):
+        # fetch the transcript and check if it is ended
+        resp = await ac.get(f"/transcripts/{tid}")
+        assert resp.status_code == 200
+        if resp.json()["status"] in ("ended", "error"):
+            break
+        await asyncio.sleep(1)
+
+    if resp.json()["status"] != "ended":
+        raise TimeoutError("Timeout while waiting for transcript to be ended")
 
     # stop websocket task
     websocket_task.cancel()
@@ -167,31 +185,47 @@ async def test_transcript_rtc_and_websocket(
     ev = events[eventnames.index("FINAL_TITLE")]
     assert ev["data"]["title"] == "LLM TITLE"
 
+    assert "WAVEFORM" in eventnames
+    ev = events[eventnames.index("WAVEFORM")]
+    assert isinstance(ev["data"]["waveform"], list)
+    assert len(ev["data"]["waveform"]) >= 250
+    waveform_resp = await ac.get(f"/transcripts/{tid}/audio/waveform")
+    assert waveform_resp.status_code == 200
+    assert waveform_resp.headers["content-type"] == "application/json"
+    assert isinstance(waveform_resp.json()["data"], list)
+    assert len(waveform_resp.json()["data"]) >= 250
+
     # check status order
     statuses = [e["data"]["value"] for e in events if e["event"] == "STATUS"]
-    assert statuses == ["recording", "processing", "ended"]
+    assert statuses.index("recording") < statuses.index("processing")
+    assert statuses.index("processing") < statuses.index("ended")
 
     # ensure the last event received is ended
     assert events[-1]["event"] == "STATUS"
     assert events[-1]["data"]["value"] == "ended"
 
-    # check that transcript status in model is
updated
-    resp = await ac.get(f"/transcripts/{tid}")
-    assert resp.status_code == 200
-    assert resp.json()["status"] == "ended"
+    # check on the latest response that the audio duration is > 0
+    assert resp.json()["duration"] > 0
+    assert "DURATION" in eventnames
 
     # check that audio/mp3 is available
-    resp = await ac.get(f"/transcripts/{tid}/audio/mp3")
-    assert resp.status_code == 200
-    assert resp.headers["Content-Type"] == "audio/mpeg"
+    audio_resp = await ac.get(f"/transcripts/{tid}/audio/mp3")
+    assert audio_resp.status_code == 200
+    assert audio_resp.headers["Content-Type"] == "audio/mpeg"
 
 
+@pytest.mark.usefixtures("setup_database")
+@pytest.mark.usefixtures("celery_session_app")
+@pytest.mark.usefixtures("celery_session_worker")
 @pytest.mark.asyncio
 async def test_transcript_rtc_and_websocket_and_fr(
     tmpdir,
     dummy_llm,
     dummy_transcript,
     dummy_processors,
+    dummy_diarization,
+    dummy_storage,
+    fake_mp3_upload,
     ensure_casing,
     appserver,
     sentence_tokenize,
@@ -232,6 +266,7 @@ async def test_transcript_rtc_and_websocket_and_fr(
         print("Test websocket: DISCONNECTED")
 
     websocket_task = asyncio.get_event_loop().create_task(websocket_task())
+    print("Test websocket: TASK CREATED", websocket_task)
 
     # create stream client
     import argparse
@@ -265,6 +300,18 @@ async def test_transcript_rtc_and_websocket_and_fr(
     await client.stop()
 
     # wait the processing to finish
+    timeout = 20
+    for _ in range(timeout):
+        # fetch the transcript and check if it is ended
+        resp = await ac.get(f"/transcripts/{tid}")
+        assert resp.status_code == 200
+        if resp.json()["status"] in ("ended", "error"):
+            break
+        await asyncio.sleep(1)
+
+    if resp.json()["status"] != "ended":
+        raise TimeoutError("Timeout while waiting for transcript to be ended")
+
     await asyncio.sleep(2)
 
     # stop websocket task
@@ -306,7 +353,8 @@ async def test_transcript_rtc_and_websocket_and_fr(
 
     # check status order
     statuses = [e["data"]["value"] for e in events if e["event"] == "STATUS"]
-    assert statuses == ["recording", "processing", "ended"]
+    assert statuses.index("recording") < statuses.index("processing")
+    assert statuses.index("processing") < statuses.index("ended")
 
     # ensure the last event received is ended
     assert events[-1]["event"] == "STATUS"
diff --git a/server/tests/test_transcripts_speaker.py b/server/tests/test_transcripts_speaker.py
new file mode 100644
index 00000000..e3e8034a
--- /dev/null
+++ b/server/tests/test_transcripts_speaker.py
@@ -0,0 +1,401 @@
+import pytest
+from httpx import AsyncClient
+
+
+@pytest.mark.asyncio
+async def test_transcript_reassign_speaker(fake_transcript_with_topics):
+    from reflector.app import app
+
+    transcript_id = fake_transcript_with_topics.id
+
+    async with AsyncClient(app=app, base_url="http://test/v1") as ac:
+        # check the transcript exists
+        response = await ac.get(f"/transcripts/{transcript_id}")
+        assert response.status_code == 200
+
+        # check initial topics of the transcript
+        response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words")
+        assert response.status_code == 200
+        topics = response.json()
+        assert len(topics) == 2
+
+        # check through words
+        assert topics[0]["words"][0]["speaker"] == 0
+        assert topics[0]["words"][1]["speaker"] == 0
+        assert topics[1]["words"][0]["speaker"] == 0
+        assert topics[1]["words"][1]["speaker"] == 0
+        # check through segments
+        assert len(topics[0]["segments"]) == 1
+        assert topics[0]["segments"][0]["speaker"] == 0
+        assert len(topics[1]["segments"]) == 1
+        assert topics[1]["segments"][0]["speaker"] == 0
+
+        # reassign speaker
+        response = await ac.patch(
f"/transcripts/{transcript_id}/speaker/assign", + json={ + "speaker": 1, + "timestamp_from": 0, + "timestamp_to": 1, + }, + ) + assert response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 1 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + # check segments + assert len(topics[0]["segments"]) == 1 + assert topics[0]["segments"][0]["speaker"] == 1 + assert len(topics[1]["segments"]) == 1 + assert topics[1]["segments"][0]["speaker"] == 0 + + # reassign speaker, middle of 2 topics + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "speaker": 2, + "timestamp_from": 1, + "timestamp_to": 2.5, + }, + ) + assert response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 2 + assert topics[1]["words"][0]["speaker"] == 2 + assert topics[1]["words"][1]["speaker"] == 0 + # check segments + assert len(topics[0]["segments"]) == 2 + assert topics[0]["segments"][0]["speaker"] == 1 + assert topics[0]["segments"][1]["speaker"] == 2 + assert len(topics[1]["segments"]) == 2 + assert topics[1]["segments"][0]["speaker"] == 2 + assert topics[1]["segments"][1]["speaker"] == 0 + + # reassign speaker, everything + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "speaker": 4, + "timestamp_from": 0, + "timestamp_to": 100, + }, + ) + assert response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 4 + assert topics[0]["words"][1]["speaker"] == 4 + assert topics[1]["words"][0]["speaker"] == 4 + assert topics[1]["words"][1]["speaker"] == 4 + # check segments + assert len(topics[0]["segments"]) == 1 + assert topics[0]["segments"][0]["speaker"] == 4 + assert len(topics[1]["segments"]) == 1 + assert topics[1]["segments"][0]["speaker"] == 4 + + +@pytest.mark.asyncio +async def test_transcript_merge_speaker(fake_transcript_with_topics): + from reflector.app import app + + transcript_id = fake_transcript_with_topics.id + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + # check the transcript exists + response = await ac.get(f"/transcripts/{transcript_id}") + assert response.status_code == 200 + + # check initial topics of the transcript + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 0 + assert topics[0]["words"][1]["speaker"] == 0 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + + # reassign speaker + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "speaker": 1, + "timestamp_from": 0, + "timestamp_to": 1, + }, + ) + assert 
response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 1 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + + # merge speakers + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/merge", + json={ + "speaker_from": 1, + "speaker_to": 0, + }, + ) + assert response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 0 + assert topics[0]["words"][1]["speaker"] == 0 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + + +@pytest.mark.asyncio +async def test_transcript_reassign_with_participant(fake_transcript_with_topics): + from reflector.app import app + + transcript_id = fake_transcript_with_topics.id + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + # check the transcript exists + response = await ac.get(f"/transcripts/{transcript_id}") + assert response.status_code == 200 + transcript = response.json() + assert len(transcript["participants"]) == 0 + + # create 2 participants + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={ + "name": "Participant 1", + }, + ) + assert response.status_code == 200 + participant1_id = response.json()["id"] + + response = await ac.post( + f"/transcripts/{transcript_id}/participants", + json={ + "name": "Participant 2", + }, + ) + assert response.status_code == 200 + participant2_id = response.json()["id"] + + # check participants speakers + response = await ac.get(f"/transcripts/{transcript_id}/participants") + assert response.status_code == 200 + participants = response.json() + assert len(participants) == 2 + assert participants[0]["name"] == "Participant 1" + assert participants[0]["speaker"] is None + assert participants[1]["name"] == "Participant 2" + assert participants[1]["speaker"] is None + + # check initial topics of the transcript + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 0 + assert topics[0]["words"][1]["speaker"] == 0 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + # check through segments + assert len(topics[0]["segments"]) == 1 + assert topics[0]["segments"][0]["speaker"] == 0 + assert len(topics[1]["segments"]) == 1 + assert topics[1]["segments"][0]["speaker"] == 0 + + # reassign speaker from a participant + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "participant": participant1_id, + "timestamp_from": 0, + "timestamp_to": 1, + }, + ) + assert response.status_code == 200 + + # check participants if speaker has been assigned + # first participant should have 1, because it's not used yet. 
+ response = await ac.get(f"/transcripts/{transcript_id}/participants") + assert response.status_code == 200 + participants = response.json() + assert len(participants) == 2 + assert participants[0]["name"] == "Participant 1" + assert participants[0]["speaker"] == 1 + assert participants[1]["name"] == "Participant 2" + assert participants[1]["speaker"] is None + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 1 + assert topics[1]["words"][0]["speaker"] == 0 + assert topics[1]["words"][1]["speaker"] == 0 + # check segments + assert len(topics[0]["segments"]) == 1 + assert topics[0]["segments"][0]["speaker"] == 1 + assert len(topics[1]["segments"]) == 1 + assert topics[1]["segments"][0]["speaker"] == 0 + + # reassign participant, middle of 2 topics + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "participant": participant2_id, + "timestamp_from": 1, + "timestamp_to": 2.5, + }, + ) + assert response.status_code == 200 + + # check participants if speaker has been assigned + # first participant should have 1, because it's not used yet. + response = await ac.get(f"/transcripts/{transcript_id}/participants") + assert response.status_code == 200 + participants = response.json() + assert len(participants) == 2 + assert participants[0]["name"] == "Participant 1" + assert participants[0]["speaker"] == 1 + assert participants[1]["name"] == "Participant 2" + assert participants[1]["speaker"] == 2 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 2 + assert topics[1]["words"][0]["speaker"] == 2 + assert topics[1]["words"][1]["speaker"] == 0 + # check segments + assert len(topics[0]["segments"]) == 2 + assert topics[0]["segments"][0]["speaker"] == 1 + assert topics[0]["segments"][1]["speaker"] == 2 + assert len(topics[1]["segments"]) == 2 + assert topics[1]["segments"][0]["speaker"] == 2 + assert topics[1]["segments"][1]["speaker"] == 0 + + # reassign speaker, everything + response = await ac.patch( + f"/transcripts/{transcript_id}/speaker/assign", + json={ + "participant": participant1_id, + "timestamp_from": 0, + "timestamp_to": 100, + }, + ) + assert response.status_code == 200 + + # check topics again + response = await ac.get(f"/transcripts/{transcript_id}/topics/with-words") + assert response.status_code == 200 + topics = response.json() + assert len(topics) == 2 + + # check through words + assert topics[0]["words"][0]["speaker"] == 1 + assert topics[0]["words"][1]["speaker"] == 1 + assert topics[1]["words"][0]["speaker"] == 1 + assert topics[1]["words"][1]["speaker"] == 1 + # check segments + assert len(topics[0]["segments"]) == 1 + assert topics[0]["segments"][0]["speaker"] == 1 + assert len(topics[1]["segments"]) == 1 + assert topics[1]["segments"][0]["speaker"] == 1 + + +@pytest.mark.asyncio +async def test_transcript_reassign_edge_cases(fake_transcript_with_topics): + from reflector.app import app + + transcript_id = fake_transcript_with_topics.id + + async with AsyncClient(app=app, base_url="http://test/v1") as ac: + # check the transcript 
exists
+        response = await ac.get(f"/transcripts/{transcript_id}")
+        assert response.status_code == 200
+        transcript = response.json()
+        assert len(transcript["participants"]) == 0
+
+        # try reassigning without any participant_id or speaker
+        response = await ac.patch(
+            f"/transcripts/{transcript_id}/speaker/assign",
+            json={
+                "timestamp_from": 0,
+                "timestamp_to": 1,
+            },
+        )
+        assert response.status_code == 400
+
+        # try reassigning with both participant_id and speaker
+        response = await ac.patch(
+            f"/transcripts/{transcript_id}/speaker/assign",
+            json={
+                "participant": "123",
+                "speaker": 1,
+                "timestamp_from": 0,
+                "timestamp_to": 1,
+            },
+        )
+        assert response.status_code == 400
+
+        # try reassigning with a non-existing participant_id
+        response = await ac.patch(
+            f"/transcripts/{transcript_id}/speaker/assign",
+            json={
+                "participant": "123",
+                "timestamp_from": 0,
+                "timestamp_to": 1,
+            },
+        )
+        assert response.status_code == 404
diff --git a/server/tests/test_transcripts_topics.py b/server/tests/test_transcripts_topics.py
new file mode 100644
index 00000000..cd845b3f
--- /dev/null
+++ b/server/tests/test_transcripts_topics.py
@@ -0,0 +1,26 @@
+import pytest
+from httpx import AsyncClient
+
+
+@pytest.mark.asyncio
+async def test_transcript_topics(fake_transcript_with_topics):
+    from reflector.app import app
+
+    transcript_id = fake_transcript_with_topics.id
+
+    async with AsyncClient(app=app, base_url="http://test/v1") as ac:
+        # check the transcript exists
+        response = await ac.get(f"/transcripts/{transcript_id}/topics")
+        assert response.status_code == 200
+        assert len(response.json()) == 2
+        topic_id = response.json()[0]["id"]
+
+        # get words per speakers
+        response = await ac.get(
+            f"/transcripts/{transcript_id}/topics/{topic_id}/words-per-speaker"
+        )
+        assert response.status_code == 200
+        data = response.json()
+        assert len(data["words_per_speaker"]) == 1
+        assert data["words_per_speaker"][0]["speaker"] == 0
+        assert len(data["words_per_speaker"][0]["words"]) == 2
diff --git a/server/tests/test_transcripts_upload.py b/server/tests/test_transcripts_upload.py
new file mode 100644
index 00000000..3cb482c1
--- /dev/null
+++ b/server/tests/test_transcripts_upload.py
@@ -0,0 +1,61 @@
+import pytest
+import asyncio
+from httpx import AsyncClient
+
+
+@pytest.mark.usefixtures("setup_database")
+@pytest.mark.usefixtures("celery_session_app")
+@pytest.mark.usefixtures("celery_session_worker")
+@pytest.mark.asyncio
+async def test_transcript_upload_file(
+    tmpdir,
+    ensure_casing,
+    dummy_llm,
+    dummy_processors,
+    dummy_diarization,
+    dummy_storage,
+):
+    from reflector.app import app
+
+    ac = AsyncClient(app=app, base_url="http://test/v1")
+
+    # create a transcript
+    response = await ac.post("/transcripts", json={"name": "test"})
+    assert response.status_code == 200
+    assert response.json()["status"] == "idle"
+    tid = response.json()["id"]
+
+    # upload an audio file (wav, sent with an mp3 mime type)
+    response = await ac.post(
+        f"/transcripts/{tid}/record/upload",
+        files={
+            "file": (
+                "test_short.wav",
+                open("tests/records/test_short.wav", "rb"),
+                "audio/mpeg",
+            )
+        },
+    )
+    assert response.status_code == 200
+    assert response.json()["status"] == "ok"
+
+    # wait for the processing to finish (bounded, to avoid hanging forever)
+    for _ in range(60):
+        # fetch the transcript and check if it is ended
+        resp = await ac.get(f"/transcripts/{tid}")
+        assert resp.status_code == 200
+        if resp.json()["status"] in ("ended", "error"):
+            break
+        await asyncio.sleep(1)
+
+    # check the transcript is ended
+    transcript = resp.json()
+    assert transcript["status"] == "ended"
+    assert
transcript["short_summary"] == "LLM SHORT SUMMARY" + assert transcript["title"] == "LLM TITLE" + + # check topics and transcript + response = await ac.get(f"/transcripts/{tid}/topics") + assert response.status_code == 200 + assert len(response.json()) == 1 + assert "want to share" in response.json()[0]["transcript"] diff --git a/www/.env_template b/www/.env_template new file mode 100644 index 00000000..182b1639 --- /dev/null +++ b/www/.env_template @@ -0,0 +1,2 @@ +FIEF_CLIENT_SECRET= +ZULIP_API_KEY= diff --git a/www/.gitignore b/www/.gitignore index e52822e0..895fbb29 100644 --- a/www/.gitignore +++ b/www/.gitignore @@ -39,3 +39,5 @@ next-env.d.ts # Sentry Auth Token .sentryclirc + +config.ts \ No newline at end of file diff --git a/www/app/(auth)/fiefWrapper.tsx b/www/app/(auth)/fiefWrapper.tsx index 187fef7c..bb38f5ee 100644 --- a/www/app/(auth)/fiefWrapper.tsx +++ b/www/app/(auth)/fiefWrapper.tsx @@ -1,11 +1,18 @@ "use client"; import { FiefAuthProvider } from "@fief/fief/nextjs/react"; +import { createContext } from "react"; -export default function FiefWrapper({ children }) { +export const CookieContext = createContext<{ hasAuthCookie: boolean }>({ + hasAuthCookie: false, +}); + +export default function FiefWrapper({ children, hasAuthCookie }) { return ( - - {children} - + + + {children} + + ); } diff --git a/www/app/(auth)/userInfo.tsx b/www/app/(auth)/userInfo.tsx index 0f01a85e..76e55acb 100644 --- a/www/app/(auth)/userInfo.tsx +++ b/www/app/(auth)/userInfo.tsx @@ -1,29 +1,23 @@ "use client"; -import { - useFiefIsAuthenticated, - useFiefUserinfo, -} from "@fief/fief/nextjs/react"; +import { useFiefIsAuthenticated } from "@fief/fief/nextjs/react"; import Link from "next/link"; export default function UserInfo() { const isAuthenticated = useFiefIsAuthenticated(); - const userinfo = useFiefUserinfo(); return !isAuthenticated ? ( - - Log in or create account + + Log in ) : ( - {userinfo?.email} ( - + Log out - ) ); } diff --git a/www/app/(errors)/errorContext.tsx b/www/app/(errors)/errorContext.tsx index d8a80c04..d541c6f0 100644 --- a/www/app/(errors)/errorContext.tsx +++ b/www/app/(errors)/errorContext.tsx @@ -3,7 +3,8 @@ import React, { createContext, useContext, useState } from "react"; interface ErrorContextProps { error: Error | null; - setError: React.Dispatch>; + humanMessage?: string; + setError: (error: Error, humanMessage?: string) => void; } const ErrorContext = createContext(undefined); @@ -22,9 +23,16 @@ interface ErrorProviderProps { export const ErrorProvider: React.FC = ({ children }) => { const [error, setError] = useState(null); + const [humanMessage, setHumanMessage] = useState(); + const declareError = (error, humanMessage?) 
=> { + setError(error); + setHumanMessage(humanMessage); + }; return ( - + {children} ); diff --git a/www/app/(errors)/errorMessage.tsx b/www/app/(errors)/errorMessage.tsx index 8b410c4c..6d198650 100644 --- a/www/app/(errors)/errorMessage.tsx +++ b/www/app/(errors)/errorMessage.tsx @@ -4,29 +4,51 @@ import { useEffect, useState } from "react"; import * as Sentry from "@sentry/react"; const ErrorMessage: React.FC = () => { - const { error, setError } = useError(); + const { error, setError, humanMessage } = useError(); const [isVisible, setIsVisible] = useState(false); + // Setup Shortcuts + useEffect(() => { + const handleKeyPress = (event: KeyboardEvent) => { + switch (event.key) { + case "^": + throw new Error("Unhandled Exception thrown by '^' shortcut"); + case "$": + setError( + new Error("Unhandled Exception thrown by '$' shortcut"), + "You did this to yourself", + ); + } + }; + + document.addEventListener("keydown", handleKeyPress); + return () => document.removeEventListener("keydown", handleKeyPress); + }, []); + useEffect(() => { if (error) { - setIsVisible(true); - Sentry.captureException(error); - console.error("Error", error.message, error); + if (humanMessage) { + setIsVisible(true); + Sentry.captureException(Error(humanMessage, { cause: error })); + } else { + Sentry.captureException(error); + } + + console.error("Error", error); } }, [error]); - if (!isVisible || !error) return null; + if (!isVisible || !humanMessage) return null; return ( ); }; diff --git a/www/app/[domain]/browse/page.tsx b/www/app/[domain]/browse/page.tsx new file mode 100644 index 00000000..46a81295 --- /dev/null +++ b/www/app/[domain]/browse/page.tsx @@ -0,0 +1,94 @@ +"use client"; +import React, { useState } from "react"; + +import { GetTranscript } from "../../api"; +import { Title } from "../../lib/textComponents"; +import Pagination from "./pagination"; +import Link from "next/link"; +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { faGear } from "@fortawesome/free-solid-svg-icons"; +import useTranscriptList from "../transcripts/useTranscriptList"; + +export default function TranscriptBrowser() { + const [page, setPage] = useState(1); + const { loading, response } = useTranscriptList(page); + + return ( +
+ {/* +
+ +
+ */} + +
+ Past transcripts + +
+ + {loading && ( +
+ +
+ )} + {!loading && !response && ( +
+ No transcripts found, but you can  + + record a meeting + +  to get started. +
+ )} +
+
+ {response?.items.map((item: GetTranscript) => ( +
+
+
+ + {item.title || item.name} + + + {item.locked ? ( +
+ Locked +
+ ) : ( + <> + )} + + {item.source_language ? ( +
+ {item.source_language} +
+ ) : ( + <> + )} +
+
+ {new Date(item.created_at).toLocaleDateString("en-US")} +
+
{item.short_summary}
+
+
+ ))} +
+
+
+ ); +} diff --git a/www/app/[domain]/browse/pagination.tsx b/www/app/[domain]/browse/pagination.tsx new file mode 100644 index 00000000..e10d5321 --- /dev/null +++ b/www/app/[domain]/browse/pagination.tsx @@ -0,0 +1,75 @@ +type PaginationProps = { + page: number; + setPage: (page: number) => void; + total: number; + size: number; +}; + +export default function Pagination(props: PaginationProps) { + const { page, setPage, total, size } = props; + const totalPages = Math.ceil(total / size); + + const pageNumbers = Array.from( + { length: totalPages }, + (_, i) => i + 1, + ).filter((pageNumber) => { + if (totalPages <= 3) { + // If there are 3 or fewer total pages, show all pages. + return true; + } else if (page <= 2) { + // For the first two pages, show the first 3 pages. + return pageNumber <= 3; + } else if (page >= totalPages - 1) { + // For the last two pages, show the last 3 pages. + return pageNumber >= totalPages - 2; + } else { + // For all other cases, show 3 pages centered around the current page. + return pageNumber >= page - 1 && pageNumber <= page + 1; + } + }); + + const canGoPrevious = page > 1; + const canGoNext = page < totalPages; + + const handlePageChange = (newPage: number) => { + if (newPage >= 1 && newPage <= totalPages) { + setPage(newPage); + } + }; + + return ( +
+ + + {pageNumbers.map((pageNumber) => ( + + ))} + + +
+ ); +} diff --git a/www/app/[domain]/domainContext.tsx b/www/app/[domain]/domainContext.tsx new file mode 100644 index 00000000..6b6d65b5 --- /dev/null +++ b/www/app/[domain]/domainContext.tsx @@ -0,0 +1,50 @@ +"use client"; +import { createContext, useContext, useEffect, useState } from "react"; +import { DomainConfig } from "../lib/edgeConfig"; + +type DomainContextType = Omit; + +export const DomainContext = createContext({ + features: { + requireLogin: false, + privacy: true, + browse: false, + sendToZulip: false, + }, + api_url: "", + websocket_url: "", + zulip_streams: "", +}); + +export const DomainContextProvider = ({ + config, + children, +}: { + config: DomainConfig; + children: any; +}) => { + const [context, setContext] = useState(); + + useEffect(() => { + if (!config) return; + const { auth_callback_url, ...others } = config; + setContext(others); + }, [config]); + + if (!context) return; + + return ( + {children} + ); +}; + +// Get feature config client-side with +export const featureEnabled = ( + featureName: "requireLogin" | "privacy" | "browse" | "sendToZulip", +) => { + const context = useContext(DomainContext); + + return context.features[featureName] as boolean | undefined; +}; + +// Get config server-side (out of react) : see lib/edgeConfig. diff --git a/www/app/[domain]/layout.tsx b/www/app/[domain]/layout.tsx new file mode 100644 index 00000000..ebdfbe5d --- /dev/null +++ b/www/app/[domain]/layout.tsx @@ -0,0 +1,166 @@ +import "../styles/globals.scss"; +import { Poppins } from "next/font/google"; +import { Metadata, Viewport } from "next"; +import FiefWrapper from "../(auth)/fiefWrapper"; +import UserInfo from "../(auth)/userInfo"; +import { ErrorProvider } from "../(errors)/errorContext"; +import ErrorMessage from "../(errors)/errorMessage"; +import Image from "next/image"; +import Link from "next/link"; +import About from "../(aboutAndPrivacy)/about"; +import Privacy from "../(aboutAndPrivacy)/privacy"; +import { DomainContextProvider } from "./domainContext"; +import { getConfig } from "../lib/edgeConfig"; +import { ErrorBoundary } from "@sentry/nextjs"; +import { cookies } from "next/dist/client/components/headers"; +import { SESSION_COOKIE_NAME } from "../lib/fief"; + +const poppins = Poppins({ subsets: ["latin"], weight: ["200", "400", "600"] }); + +export const viewport: Viewport = { + themeColor: "black", + width: "device-width", + initialScale: 1, + maximumScale: 1, +}; + +export const metadata: Metadata = { + metadataBase: new URL(process.env.DEV_URL || "https://reflector.media"), + title: { + template: "%s – Reflector", + default: "Reflector - AI-Powered Meeting Transcriptions by Monadical", + }, + description: + "Reflector is an AI-powered tool that transcribes your meetings with unparalleled accuracy, divides content by topics, and provides insightful summaries. Maximize your productivity with Reflector, brought to you by Monadical. Capture the signal, not the noise", + applicationName: "Reflector", + referrer: "origin-when-cross-origin", + keywords: ["Reflector", "Monadical", "AI", "Meetings", "Transcription"], + authors: [{ name: "Monadical Team", url: "https://monadical.com/team.html" }], + formatDetection: { + email: false, + address: false, + telephone: false, + }, + + openGraph: { + title: "Reflector", + description: + "Reflector is an AI-powered tool that transcribes your meetings with unparalleled accuracy, divides content by topics, and provides insightful summaries. Maximize your productivity with Reflector, brought to you by Monadical. 
Capture the signal, not the noise.", + type: "website", + }, + + twitter: { + card: "summary_large_image", + title: "Reflector", + description: + "Reflector is an AI-powered tool that transcribes your meetings with unparalleled accuracy, divides content by topics, and provides insightful summaries. Maximize your productivity with Reflector, brought to you by Monadical. Capture the signal, not the noise.", + images: ["/r-icon.png"], + }, + + icons: { + icon: "/r-icon.png", + shortcut: "/r-icon.png", + apple: "/r-icon.png", + }, + robots: { index: false, follow: false, noarchive: true, noimageindex: true }, +}; + +type LayoutProps = { + params: { + domain: string; + }; + children: any; +}; + +export default async function RootLayout({ children, params }: LayoutProps) { + const config = await getConfig(params.domain); + const { requireLogin, privacy, browse } = config.features; + const hasAuthCookie = !!cookies().get(SESSION_COOKIE_NAME); + + return ( + + + + + "something went really wrong"

}> + + +
+
+ {/* Logo on the left */} + + Reflector +
+

+ Reflector +

+

+ Capture the signal, not the noise +

+
+ +
+ {/* Text link on the right */} + + Create + + {browse ? ( + <> +  ·  + + Browse + + + ) : ( + <> + )} +  ·  + + {privacy ? ( + <> +  ·  + + + ) : ( + <> + )} + {requireLogin ? ( + <> +  ·  + + + ) : ( + <> + )} +
+
+ + {children} +
+
+
+
+
+ + + ); +} diff --git a/www/app/page.tsx b/www/app/[domain]/page.tsx similarity index 100% rename from www/app/page.tsx rename to www/app/[domain]/page.tsx diff --git a/www/app/[domain]/transcripts/[transcriptId]/page.tsx b/www/app/[domain]/transcripts/[transcriptId]/page.tsx new file mode 100644 index 00000000..1e1eaf8f --- /dev/null +++ b/www/app/[domain]/transcripts/[transcriptId]/page.tsx @@ -0,0 +1,154 @@ +"use client"; +import Modal from "../modal"; +import useTranscript from "../useTranscript"; +import useTopics from "../useTopics"; +import useWaveform from "../useWaveform"; +import useMp3 from "../useMp3"; +import { TopicList } from "../topicList"; +import { Topic } from "../webSocketTypes"; +import React, { useEffect, useState } from "react"; +import "../../../styles/button.css"; +import FinalSummary from "../finalSummary"; +import ShareLink from "../shareLink"; +import QRCode from "react-qr-code"; +import TranscriptTitle from "../transcriptTitle"; +import ShareModal from "./shareModal"; +import Player from "../player"; +import WaveformLoading from "../waveformLoading"; +import { useRouter } from "next/navigation"; +import { featureEnabled } from "../../domainContext"; +import { toShareMode } from "../../../lib/shareMode"; + +type TranscriptDetails = { + params: { + transcriptId: string; + }; +}; + +export default function TranscriptDetails(details: TranscriptDetails) { + const transcriptId = details.params.transcriptId; + const router = useRouter(); + + const transcript = useTranscript(transcriptId); + const topics = useTopics(transcriptId); + const waveform = useWaveform(transcriptId); + const useActiveTopic = useState(null); + const mp3 = useMp3(transcriptId); + const [showModal, setShowModal] = useState(false); + + useEffect(() => { + const statusToRedirect = ["idle", "recording", "processing"]; + if (statusToRedirect.includes(transcript.response?.status)) { + const newUrl = "/transcripts/" + details.params.transcriptId + "/record"; + // Shallow redirection does not work on NextJS 13 + // https://github.com/vercel/next.js/discussions/48110 + // https://github.com/vercel/next.js/discussions/49540 + router.push(newUrl, undefined); + // history.replaceState({}, "", newUrl); + } + }, [transcript.response?.status]); + + const fullTranscript = + topics.topics + ?.map((topic) => topic.transcript) + .join("\n\n") + .replace(/ +/g, " ") + .trim() || ""; + + if (transcript && transcript.response) { + if (transcript.error || topics?.error) { + return ( + + ); + } + + if (!transcriptId || transcript?.loading || topics?.loading) { + return ; + } + + return ( + <> + {featureEnabled("sendToZulip") && ( + setShowModal(v)} + /> + )} +
+ {transcript?.response?.title && ( + + )} + {waveform.waveform && mp3.media ? ( + + ) : waveform.error ? ( +
"error loading this recording"
+ ) : ( + + )} +
+
+ + +
+
+ {transcript.response.long_summary ? ( + setShowModal(true)} + /> + ) : ( +
+ {transcript.response.status == "processing" ? ( +

Loading Transcript

+ ) : ( +

+ There was an error generating the final summary, please + come back later +

+ )} +
+ )} +
+ +
+
+ +
+
+ +
+
+
+
+ + ); + } +} diff --git a/www/app/transcripts/[transcriptId]/record/page.tsx b/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx similarity index 65% rename from www/app/transcripts/[transcriptId]/record/page.tsx rename to www/app/[domain]/transcripts/[transcriptId]/record/page.tsx index 2e212c2e..d4bb6b3e 100644 --- a/www/app/transcripts/[transcriptId]/record/page.tsx +++ b/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx @@ -6,14 +6,17 @@ import useWebRTC from "../../useWebRTC"; import useTranscript from "../../useTranscript"; import { useWebSockets } from "../../useWebSockets"; import useAudioDevice from "../../useAudioDevice"; -import "../../../styles/button.css"; +import "../../../../styles/button.css"; import { Topic } from "../../webSocketTypes"; -import getApi from "../../../lib/getApi"; import LiveTrancription from "../../liveTranscription"; import DisconnectedIndicator from "../../disconnectedIndicator"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faGear } from "@fortawesome/free-solid-svg-icons"; -import { lockWakeState, releaseWakeState } from "../../../lib/wakeLock"; +import { lockWakeState, releaseWakeState } from "../../../../lib/wakeLock"; +import { useRouter } from "next/navigation"; +import Player from "../../player"; +import useMp3 from "../../useMp3"; +import WaveformLoading from "../../waveformLoading"; type TranscriptDetails = { params: { @@ -37,14 +40,17 @@ const TranscriptRecord = (details: TranscriptDetails) => { }, []); const transcript = useTranscript(details.params.transcriptId); - const api = getApi(); - const webRTC = useWebRTC(stream, details.params.transcriptId, api); + const webRTC = useWebRTC(stream, details.params.transcriptId); const webSockets = useWebSockets(details.params.transcriptId); const { audioDevices, getAudioStream } = useAudioDevice(); - const [hasRecorded, setHasRecorded] = useState(false); + const [recordedTime, setRecordedTime] = useState(0); + const [startTime, setStartTime] = useState(0); const [transcriptStarted, setTranscriptStarted] = useState(false); + let mp3 = useMp3(details.params.transcriptId, true); + + const router = useRouter(); useEffect(() => { if (!transcriptStarted && webSockets.transcriptText.length !== 0) @@ -52,15 +58,25 @@ const TranscriptRecord = (details: TranscriptDetails) => { }, [webSockets.transcriptText]); useEffect(() => { - if (transcript?.response?.longSummary) { - const newUrl = `/transcripts/${transcript.response.id}`; + const statusToRedirect = ["ended", "error"]; + + //TODO if has no topic and is error, get back to new + if ( + statusToRedirect.includes(transcript.response?.status) || + statusToRedirect.includes(webSockets.status.value) + ) { + const newUrl = "/transcripts/" + details.params.transcriptId; // Shallow redirection does not work on NextJS 13 // https://github.com/vercel/next.js/discussions/48110 // https://github.com/vercel/next.js/discussions/49540 - // router.push(newUrl, undefined, { shallow: true }); - history.replaceState({}, "", newUrl); - } - }); + router.replace(newUrl); + // history.replaceState({}, "", newUrl); + } // history.replaceState({}, "", newUrl); + }, [webSockets.status.value, transcript.response?.status]); + + useEffect(() => { + if (transcript.response?.status === "ended") mp3.getNow(); + }, [transcript.response]); useEffect(() => { lockWakeState(); @@ -71,19 +87,32 @@ const TranscriptRecord = (details: TranscriptDetails) => { return ( <> - { - setStream(null); - setHasRecorded(true); - webRTC?.send(JSON.stringify({ 
cmd: "STOP" })); - }} - topics={webSockets.topics} - getAudioStream={getAudioStream} - useActiveTopic={useActiveTopic} - isPastMeeting={false} - audioDevices={audioDevices} - /> + {webSockets.waveform && webSockets.duration && mp3?.media ? ( + + ) : recordedTime ? ( + + ) : ( + { + setStream(null); + setRecordedTime(Date.now() - startTime); + webRTC?.send(JSON.stringify({ cmd: "STOP" })); + }} + onRecord={() => { + setStartTime(Date.now()); + }} + getAudioStream={getAudioStream} + audioDevices={audioDevices} + transcriptId={details.params.transcriptId} + /> + )}
{
- {!hasRecorded ? ( + {!recordedTime ? ( <> {transcriptStarted && (

Transcription

@@ -129,6 +158,7 @@ const TranscriptRecord = (details: TranscriptDetails) => { couple of minutes. Please do not navigate away from the page during this time.

+ {/* NTH If login required remove last sentence */}
)} diff --git a/www/app/[domain]/transcripts/[transcriptId]/shareModal.tsx b/www/app/[domain]/transcripts/[transcriptId]/shareModal.tsx new file mode 100644 index 00000000..339f2769 --- /dev/null +++ b/www/app/[domain]/transcripts/[transcriptId]/shareModal.tsx @@ -0,0 +1,159 @@ +import React, { useContext, useState, useEffect } from "react"; +import SelectSearch from "react-select-search"; +import { getZulipMessage, sendZulipMessage } from "../../../lib/zulip"; +import { GetTranscript, GetTranscriptTopic } from "../../../api"; +import "react-select-search/style.css"; +import { DomainContext } from "../../domainContext"; + +type ShareModal = { + show: boolean; + setShow: (show: boolean) => void; + transcript: GetTranscript | null; + topics: GetTranscriptTopic[] | null; +}; + +interface Stream { + id: number; + name: string; + topics: string[]; +} + +interface SelectSearchOption { + name: string; + value: string; +} + +const ShareModal = (props: ShareModal) => { + const [stream, setStream] = useState(undefined); + const [topic, setTopic] = useState(undefined); + const [includeTopics, setIncludeTopics] = useState(false); + const [isLoading, setIsLoading] = useState(true); + const [streams, setStreams] = useState([]); + const { zulip_streams } = useContext(DomainContext); + + useEffect(() => { + fetch(zulip_streams + "/streams.json") + .then((response) => { + if (!response.ok) { + throw new Error("Network response was not ok"); + } + return response.json(); + }) + .then((data) => { + data = data.sort((a: Stream, b: Stream) => + a.name.localeCompare(b.name), + ); + setStreams(data); + setIsLoading(false); + // data now contains the JavaScript object decoded from JSON + }) + .catch((error) => { + console.error("There was a problem with your fetch operation:", error); + }); + }, []); + + const handleSendToZulip = () => { + if (!props.transcript) return; + + const msg = getZulipMessage(props.transcript, props.topics, includeTopics); + + if (stream && topic) sendZulipMessage(stream, topic, msg); + }; + + if (props.show && isLoading) { + return
Loading...
; + } + + let streamOptions: SelectSearchOption[] = []; + if (streams) { + streams.forEach((stream) => { + const value = stream.name; + streamOptions.push({ name: value, value: value }); + }); + } + + return ( +
+      {props.show && (
+
+
+
+
+            Send to Zulip
+
+
+          {/* Checkbox for 'Include Topics' */}
+
+
+
+            #
+            {
+              setTopic(undefined);
+              setStream(val.toString());
+            }}
+            placeholder="Pick a stream"
+            />
+
+
+          {stream && (
+            <>
+
+                #
+                s.name == stream)
+                  ?.topics.sort((a: string, b: string) =>
+                    a.localeCompare(b),
+                  )
+                  .map((t) => ({ name: t, value: t })) || []
+                }
+                value={topic}
+                onChange={(val) => setTopic(val.toString())}
+                placeholder="Pick a topic"
+                />
+
+            )}
+
+
+
+
+
+
+
+      )}
+
+  );
+};
+
+export default ShareModal;
diff --git a/www/app/transcripts/audioInputsDropdown.tsx b/www/app/[domain]/transcripts/audioInputsDropdown.tsx
similarity index 100%
rename from www/app/transcripts/audioInputsDropdown.tsx
rename to www/app/[domain]/transcripts/audioInputsDropdown.tsx
diff --git a/www/app/[domain]/transcripts/createTranscript.ts b/www/app/[domain]/transcripts/createTranscript.ts
new file mode 100644
index 00000000..8435e6c2
--- /dev/null
+++ b/www/app/[domain]/transcripts/createTranscript.ts
@@ -0,0 +1,44 @@
+import { useState } from "react";
+import { useError } from "../../(errors)/errorContext";
+import { GetTranscript, CreateTranscript } from "../../api";
+import useApi from "../../lib/useApi";
+
+type UseTranscript = {
+  transcript: GetTranscript | null;
+  loading: boolean;
+  error: Error | null;
+  create: (transcriptCreationDetails: CreateTranscript) => void;
+};
+
+const useCreateTranscript = (): UseTranscript => {
+  const [transcript, setTranscript] = useState<GetTranscript | null>(null);
+  const [loading, setLoading] = useState(false);
+  const [error, setErrorState] = useState<Error | null>(null);
+  const { setError } = useError();
+  const api = useApi();
+
+  const create = (transcriptCreationDetails: CreateTranscript) => {
+    if (loading || !api) return;
+
+    setLoading(true);
+
+    api
+      .v1TranscriptsCreate(transcriptCreationDetails)
+      .then((transcript) => {
+        setTranscript(transcript);
+        setLoading(false);
+      })
+      .catch((err) => {
+        setError(
+          err,
+          "There was an issue creating a transcript, please try again.",
+        );
+        setErrorState(err);
+        setLoading(false);
+      });
+  };
+
+  return { transcript, loading, error, create };
+};
+
+export default useCreateTranscript;
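A minimal consumer sketch for the `useCreateTranscript` hook added above. The component and the `name` payload field are hypothetical and only illustrate the call shape; the redirect target matches the `/transcripts/<id>` routes used elsewhere in this diff:

```tsx
// Hypothetical consumer of useCreateTranscript (illustration only).
import { useEffect } from "react";
import { useRouter } from "next/navigation";
import useCreateTranscript from "./createTranscript";

export default function NewTranscriptButton() {
  const { transcript, loading, error, create } = useCreateTranscript();
  const router = useRouter();

  // Once the API call resolves, navigate to the new transcript's page.
  useEffect(() => {
    if (transcript) router.push(`/transcripts/${transcript.id}`);
  }, [transcript]);

  return (
    <button disabled={loading} onClick={() => create({ name: "Untitled" })}>
      {loading ? "Creating..." : error ? "Retry" : "New transcript"}
    </button>
  );
}
```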
"../../api"; +import useApi from "../../lib/useApi"; + +type FinalSummaryProps = { + summary: string; + fullTranscript: string; + transcriptId: string; + openZulipModal: () => void; +}; + +export default function FinalSummary(props: FinalSummaryProps) { + const finalSummaryRef = useRef(null); + const [isCopiedSummary, setIsCopiedSummary] = useState(false); + const [isCopiedTranscript, setIsCopiedTranscript] = useState(false); + const [isEditMode, setIsEditMode] = useState(false); + const [preEditSummary, setPreEditSummary] = useState(props.summary); + const [editedSummary, setEditedSummary] = useState(props.summary); + + const updateSummary = async (newSummary: string, transcriptId: string) => { + try { + const api = useApi(); + const requestBody: UpdateTranscript = { + long_summary: newSummary, + }; + const updatedTranscript = await api?.v1TranscriptUpdate( + transcriptId, + requestBody, + ); + console.log("Updated long summary:", updatedTranscript); + } catch (err) { + console.error("Failed to update long summary:", err); + } + }; + + const onCopySummaryClick = () => { + let text_to_copy = finalSummaryRef.current?.innerText; + + text_to_copy && + navigator.clipboard.writeText(text_to_copy).then(() => { + setIsCopiedSummary(true); + // Reset the copied state after 2 seconds + setTimeout(() => setIsCopiedSummary(false), 2000); + }); + }; + + const onCopyTranscriptClick = () => { + let text_to_copy = props.fullTranscript; + + text_to_copy && + navigator.clipboard.writeText(text_to_copy).then(() => { + setIsCopiedTranscript(true); + // Reset the copied state after 2 seconds + setTimeout(() => setIsCopiedTranscript(false), 2000); + }); + }; + + const onEditClick = () => { + setPreEditSummary(editedSummary); + setIsEditMode(true); + }; + + const onDiscardClick = () => { + setEditedSummary(preEditSummary); + setIsEditMode(false); + }; + + const onSaveClick = () => { + updateSummary(editedSummary, props.transcriptId); + setIsEditMode(false); + }; + + const handleTextAreaKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Escape") { + onDiscardClick(); + } + + if (e.key === "Enter" && e.shiftKey) { + onSaveClick(); + e.preventDefault(); // prevent the default action of adding a new line + } + }; + + return ( +
diff --git a/www/app/[domain]/transcripts/finalSummary.tsx b/www/app/[domain]/transcripts/finalSummary.tsx
new file mode 100644
index 00000000..6ad90b14
--- /dev/null
+++ b/www/app/[domain]/transcripts/finalSummary.tsx
@@ -0,0 +1,186 @@
+import { useRef, useState } from "react";
+import React from "react";
+import Markdown from "react-markdown";
+import "../../styles/markdown.css";
+import { featureEnabled } from "../domainContext";
+import { UpdateTranscript } from "../../api";
+import useApi from "../../lib/useApi";
+
+type FinalSummaryProps = {
+  summary: string;
+  fullTranscript: string;
+  transcriptId: string;
+  openZulipModal: () => void;
+};
+
+export default function FinalSummary(props: FinalSummaryProps) {
+  const finalSummaryRef = useRef<HTMLDivElement>(null);
+  const [isCopiedSummary, setIsCopiedSummary] = useState(false);
+  const [isCopiedTranscript, setIsCopiedTranscript] = useState(false);
+  const [isEditMode, setIsEditMode] = useState(false);
+  const [preEditSummary, setPreEditSummary] = useState(props.summary);
+  const [editedSummary, setEditedSummary] = useState(props.summary);
+  // Hooks must be called at the top level of the component, not inside an
+  // event handler like updateSummary, so the API client is created here.
+  const api = useApi();
+
+  const updateSummary = async (newSummary: string, transcriptId: string) => {
+    try {
+      const requestBody: UpdateTranscript = {
+        long_summary: newSummary,
+      };
+      const updatedTranscript = await api?.v1TranscriptUpdate(
+        transcriptId,
+        requestBody,
+      );
+      console.log("Updated long summary:", updatedTranscript);
+    } catch (err) {
+      console.error("Failed to update long summary:", err);
+    }
+  };
+
+  const onCopySummaryClick = () => {
+    let text_to_copy = finalSummaryRef.current?.innerText;
+
+    text_to_copy &&
+      navigator.clipboard.writeText(text_to_copy).then(() => {
+        setIsCopiedSummary(true);
+        // Reset the copied state after 2 seconds
+        setTimeout(() => setIsCopiedSummary(false), 2000);
+      });
+  };
+
+  const onCopyTranscriptClick = () => {
+    let text_to_copy = props.fullTranscript;
+
+    text_to_copy &&
+      navigator.clipboard.writeText(text_to_copy).then(() => {
+        setIsCopiedTranscript(true);
+        // Reset the copied state after 2 seconds
+        setTimeout(() => setIsCopiedTranscript(false), 2000);
+      });
+  };
+
+  const onEditClick = () => {
+    setPreEditSummary(editedSummary);
+    setIsEditMode(true);
+  };
+
+  const onDiscardClick = () => {
+    setEditedSummary(preEditSummary);
+    setIsEditMode(false);
+  };
+
+  const onSaveClick = () => {
+    updateSummary(editedSummary, props.transcriptId);
+    setIsEditMode(false);
+  };
+
+  const handleTextAreaKeyDown = (e: React.KeyboardEvent) => {
+    if (e.key === "Escape") {
+      onDiscardClick();
+    }
+
+    if (e.key === "Enter" && e.shiftKey) {
+      onSaveClick();
+      e.preventDefault(); // prevent the default action of adding a new line
+    }
+  };
+
+  return (
+
+
+
+        Final Summary
+
+
+
+      {isEditMode && (
+        <>
+
+
+        </>
+      )}
+
+      {!isEditMode && (
+        <>
+          {featureEnabled("sendToZulip") && (
+
+          )}
+
+
+
+
+        </>
+      )}
+
+
+
+      {isEditMode ? (
+