mirror of https://github.com/Monadical-SAS/reflector.git
pass schema as dict
@@ -172,13 +172,16 @@ def web():
 class LLMRequest(BaseModel):
     prompt: str
-    schema: Optional[str] = None
+    schema: Optional[dict] = None
 
 @app.post("/llm", dependencies=[Depends(apikey_auth)])
 async def llm(
     req: LLMRequest,
 ):
-    func = llmstub.generate.spawn(prompt=req.prompt, schema=req.schema)
+    if req.schema:
+        func = llmstub.generate.spawn(prompt=req.prompt, schema=json.dumps(req.schema))
+    else:
+        func = llmstub.generate.spawn(prompt=req.prompt)
     result = func.get()
     return result
 
 
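Taken together: the /llm endpoint now accepts `schema` as a JSON object and re-encodes it with `json.dumps` only when spawning the worker, which evidently still expects a string. Below is a minimal sketch of calling the updated endpoint; the deployment URL, auth header, and example schema are assumptions for illustration, only the request shape comes from this commit.

# Sketch of a client call against the updated endpoint (hedged: the
# URL and auth header below are assumptions, not part of the commit).
import httpx

schema = {
    "type": "object",
    "properties": {"summary": {"type": "string"}},
}

response = httpx.post(
    "https://example-app.modal.run/llm",  # hypothetical deployment URL
    headers={"x-api-key": "..."},         # apikey_auth details are not shown in the diff
    json={
        "prompt": "Summarize the meeting.",
        "schema": schema,  # now a JSON object, not a pre-encoded string
    },
)
print(response.json())
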
@@ -1,5 +1,3 @@
-import json
-
 import httpx
 from reflector.llm.base import LLM
 from reflector.settings import settings
@@ -18,7 +16,7 @@ class BananaLLM(LLM):
     async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
-            json_payload["schema"] = json.dumps(schema)
+            json_payload["schema"] = schema
         async with httpx.AsyncClient() as client:
             response = await retry(client.post)(
                 settings.LLM_URL,
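In the Banana client (and identically in the Modal and Oobabooga clients below), `_generate` previously double-encoded the schema: `json.dumps(schema)` produced a string that httpx's `json=` encoding then escaped again inside the request body. A small sketch of the difference on the wire, with an illustrative schema:

import json

schema = {"type": "object"}

before = {"prompt": "hi", "schema": json.dumps(schema)}  # schema nested as an escaped string
after = {"prompt": "hi", "schema": schema}               # schema as a plain JSON object

print(json.dumps(before))  # {"prompt": "hi", "schema": "{\"type\": \"object\"}"}
print(json.dumps(after))   # {"prompt": "hi", "schema": {"type": "object"}}
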
@@ -1,5 +1,3 @@
-import json
-
 import httpx
 from reflector.llm.base import LLM
 from reflector.settings import settings
@@ -28,7 +26,7 @@ class ModalLLM(LLM):
     async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
-            json_payload["schema"] = json.dumps(schema)
+            json_payload["schema"] = schema
         async with httpx.AsyncClient() as client:
             response = await retry(client.post)(
                 self.llm_url,
@@ -1,5 +1,3 @@
-import json
-
 import httpx
 from reflector.llm.base import LLM
 from reflector.settings import settings
@@ -9,7 +7,7 @@ class OobaboogaLLM(LLM):
     async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
-            json_payload["schema"] = json.dumps(schema)
+            json_payload["schema"] = schema
         async with httpx.AsyncClient() as client:
             response = await client.post(
                 settings.LLM_URL,
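Note the asymmetry: the HTTP clients now send the dict as-is, while the endpoint in the first hunk still serializes it with `json.dumps` before spawning the worker. A hypothetical sketch of the worker-side counterpart, which is not part of this diff; the signature is assumed from the `llmstub.generate.spawn(...)` call:

# Hypothetical worker entry point (assumption: not shown in this commit).
# Since the endpoint re-encodes the dict with json.dumps before spawning,
# the worker would decode it back into a dict before use.
import json
from typing import Optional

def generate(prompt: str, schema: Optional[str] = None):
    parsed = json.loads(schema) if schema else None  # back to a dict
    ...  # run constrained generation with `parsed`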