correct schema typing from str to dict

This commit is contained in:
Gokul Mohanarangan
2023-08-17 20:57:31 +05:30
parent 9103c8cca8
commit b08724a191
7 changed files with 12 additions and 8 deletions

View File

@@ -46,7 +46,11 @@ class LLM:
         pass

     async def generate(
-        self, prompt: str, logger: reflector_logger, schema: str | None = None, **kwargs
+        self,
+        prompt: str,
+        logger: reflector_logger,
+        schema: dict | None = None,
+        **kwargs,
     ) -> dict:
         logger.info("LLM generate", prompt=repr(prompt))
         try:
@@ -62,7 +66,7 @@ class LLM:
         return result

-    async def _generate(self, prompt: str, schema: str | None, **kwargs) -> str:
+    async def _generate(self, prompt: str, schema: dict | None, **kwargs) -> str:
         raise NotImplementedError

     def _parse_json(self, result: str) -> dict:

View File

@@ -15,7 +15,7 @@ class BananaLLM(LLM):
             "X-Banana-Model-Key": settings.LLM_BANANA_MODEL_KEY,
         }

-    async def _generate(self, prompt: str, schema: str | None, **kwargs):
+    async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
             json_payload["schema"] = json.dumps(schema)

View File

@@ -25,7 +25,7 @@ class ModalLLM(LLM):
         )
         response.raise_for_status()

-    async def _generate(self, prompt: str, schema: str | None, **kwargs):
+    async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
             json_payload["schema"] = json.dumps(schema)

View File

@@ -6,7 +6,7 @@ from reflector.settings import settings
 class OobaboogaLLM(LLM):
-    async def _generate(self, prompt: str, schema: str | None, **kwargs):
+    async def _generate(self, prompt: str, schema: dict | None, **kwargs):
         json_payload = {"prompt": prompt}
         if schema:
             json_payload["schema"] = json.dumps(schema)

View File

@@ -15,7 +15,7 @@ class OpenAILLM(LLM):
         self.max_tokens = settings.LLM_MAX_TOKENS
         logger.info(f"LLM use openai backend at {self.openai_url}")

-    async def _generate(self, prompt: str, schema: str | None, **kwargs) -> str:
+    async def _generate(self, prompt: str, schema: dict | None, **kwargs) -> str:
         headers = {
             "Content-Type": "application/json",
             "Authorization": f"Bearer {self.openai_key}",

View File

@@ -15,7 +15,7 @@ async def test_basic_process(event_loop):
     settings.TRANSCRIPT_BACKEND = "whisper"

     class LLMTest(LLM):
-        async def _generate(self, prompt: str, schema: str | None, **kwargs) -> str:
+        async def _generate(self, prompt: str, schema: dict | None, **kwargs) -> str:
             return {
                 "title": "TITLE",
                 "summary": "SUMMARY",

View File

@@ -61,7 +61,7 @@ async def dummy_llm():
     from reflector.llm.base import LLM

     class TestLLM(LLM):
-        async def _generate(self, prompt: str, schema: str | None, **kwargs):
+        async def _generate(self, prompt: str, schema: dict | None, **kwargs):
             return json.dumps({"title": "LLM TITLE", "summary": "LLM SUMMARY"})

     with patch("reflector.llm.base.LLM.get_instance") as mock_llm: