llama-index
107 строк · 3.3 Кб
1from functools import partial2from typing import TYPE_CHECKING, Any, Optional, Type, cast3
4from llama_index.legacy.bridge.pydantic import BaseModel5from llama_index.legacy.program.llm_prompt_program import BaseLLMFunctionProgram6from llama_index.legacy.prompts.base import PromptTemplate7from llama_index.legacy.prompts.guidance_utils import (8parse_pydantic_from_guidance_program,9)
10
11if TYPE_CHECKING:12from guidance.models import Model as GuidanceLLM13
14
class GuidancePydanticProgram(BaseLLMFunctionProgram["GuidanceLLM"]):
    """
    A guidance-based function that returns a pydantic model.

    Note: this interface is not yet stable.
    """

    def __init__(
        self,
        output_cls: Type[BaseModel],
        prompt_template_str: str,
        guidance_llm: Optional["GuidanceLLM"] = None,
        verbose: bool = False,
    ):
        """Initialize the program.

        Args:
            output_cls: Pydantic model class the LLM output is parsed into.
            prompt_template_str: Prompt template containing ``{{tools_str}}``
                and ``{{query_str}}`` placeholders.
            guidance_llm: Guidance LLM to run the program with. Defaults to
                ``OpenAIChat("gpt-3.5-turbo")`` when not provided.
            verbose: If True, the guidance program is not run silently.

        Raises:
            ImportError: If the ``guidance`` package is not installed.
        """
        try:
            from guidance.models import OpenAIChat
        except ImportError as e:
            raise ImportError(
                "guidance package not found. Please run `pip install guidance`"
            ) from e

        # BUG FIX: the original branches were inverted — when no LLM was
        # passed, `llm` was set to None (the falsy `guidance_llm`) instead of
        # the OpenAIChat default, and a caller-supplied LLM was discarded in
        # favor of the default. Use the caller's LLM when given, else default.
        llm = guidance_llm if guidance_llm else OpenAIChat("gpt-3.5-turbo")

        self._full_str = prompt_template_str + "\n"
        # Bind llm/silent now so __call__ only needs the template kwargs.
        self._guidance_program = partial(self.program, llm=llm, silent=not verbose)
        self._output_cls = output_cls
        self._verbose = verbose

    def program(
        self,
        llm: "GuidanceLLM",
        silent: bool,
        tools_str: str,
        query_str: str,
        **kwargs: dict,
    ) -> "GuidanceLLM":
        """A wrapper to execute the program with new guidance version."""
        from guidance import assistant, gen, user

        # Substitute the handlebars-style placeholders by plain string
        # replacement (new guidance versions no longer interpolate templates).
        given_query = self._full_str.replace("{{tools_str}}", tools_str).replace(
            "{{query_str}}", query_str
        )
        with user():
            llm = llm + given_query

        with assistant():
            llm = llm + gen(stop=".")

        return llm  # noqa: RET504

    @classmethod
    def from_defaults(
        cls,
        output_cls: Type[BaseModel],
        prompt_template_str: Optional[str] = None,
        prompt: Optional[PromptTemplate] = None,
        llm: Optional["GuidanceLLM"] = None,
        **kwargs: Any,
    ) -> "BaseLLMFunctionProgram":
        """Build a program from defaults.

        Exactly one of ``prompt`` or ``prompt_template_str`` must be given.

        Raises:
            ValueError: If neither or both of ``prompt`` and
                ``prompt_template_str`` are provided.
        """
        if prompt is None and prompt_template_str is None:
            raise ValueError("Must provide either prompt or prompt_template_str.")
        if prompt is not None and prompt_template_str is not None:
            # FIX: this branch previously raised with the same "must provide
            # either" message, which misdescribes the error (both were given).
            raise ValueError(
                "Must provide only one of prompt or prompt_template_str."
            )
        if prompt is not None:
            prompt_template_str = prompt.template
        prompt_template_str = cast(str, prompt_template_str)
        return cls(
            output_cls,
            prompt_template_str,
            guidance_llm=llm,
            **kwargs,
        )

    @property
    def output_cls(self) -> Type[BaseModel]:
        """Pydantic class that program output is parsed into."""
        return self._output_cls

    def __call__(
        self,
        *args: Any,
        **kwargs: Any,
    ) -> BaseModel:
        """Run the guidance program and parse its text output into output_cls.

        Keyword arguments are forwarded to the bound guidance program
        (``tools_str`` and ``query_str``); positional args are ignored.
        """
        executed_program = self._guidance_program(**kwargs)
        response = str(executed_program)

        return parse_pydantic_from_guidance_program(
            response=response, cls=self._output_cls
        )