From d76bb83fe0a12f2bf85af9219be0ee879cf09a17 Mon Sep 17 00:00:00 2001
From: Mathieu Virbel
Date: Tue, 22 Aug 2023 17:10:36 +0200
Subject: [PATCH] modal: fix schema passing issue with shadowing
 BaseModel.schema default

---
 server/gpu/modal/reflector_llm.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/server/gpu/modal/reflector_llm.py b/server/gpu/modal/reflector_llm.py
index fd8a4aae..1a3f77d6 100644
--- a/server/gpu/modal/reflector_llm.py
+++ b/server/gpu/modal/reflector_llm.py
@@ -155,7 +155,7 @@ class LLM:
 def web():
     from fastapi import Depends, FastAPI, HTTPException, status
     from fastapi.security import OAuth2PasswordBearer
-    from pydantic import BaseModel
+    from pydantic import BaseModel, Field
 
     llmstub = LLM()
 
@@ -172,14 +172,14 @@ def web():
 
     class LLMRequest(BaseModel):
         prompt: str
-        schema: Optional[dict] = None
+        schema_: Optional[dict] = Field(None, alias="schema")
 
     @app.post("/llm", dependencies=[Depends(apikey_auth)])
     async def llm(
         req: LLMRequest,
     ):
-        if req.schema:
-            func = llmstub.generate.spawn(prompt=req.prompt, schema=json.dumps(req.schema))
+        if req.schema_:
+            func = llmstub.generate.spawn(prompt=req.prompt, schema=json.dumps(req.schema_))
         else:
             func = llmstub.generate.spawn(prompt=req.prompt)
         result = func.get()
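
Note: a minimal standalone sketch of the aliasing pattern this patch relies on, assuming Pydantic v1-style parse_obj; the prompt and schema values below are made up for illustration and are not part of the patch.

    # A field literally named "schema" collides with BaseModel.schema, so the
    # request model exposes it internally as "schema_" while still accepting
    # "schema" in the JSON body through the alias.
    import json
    from typing import Optional

    from pydantic import BaseModel, Field


    class LLMRequest(BaseModel):
        prompt: str
        schema_: Optional[dict] = Field(None, alias="schema")


    # Clients keep sending {"prompt": ..., "schema": ...}; Pydantic populates
    # schema_ through the alias, so the wire format does not change.
    req = LLMRequest.parse_obj(
        {"prompt": "Summarize this call.", "schema": {"type": "object"}}
    )
    assert req.schema_ == {"type": "object"}
    print(json.dumps(req.schema_))

    # When the client omits "schema", the default is a real None, so the
    # truthiness check in the /llm endpoint takes the no-schema branch.
    req_no_schema = LLMRequest.parse_obj({"prompt": "Summarize this call."})
    assert req_no_schema.schema_ is None

FastAPI parses request bodies by alias, so the /llm endpoint keeps accepting the same payloads while the handler reads req.schema_ instead of req.schema.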