llama-index
62 lines · 2.2 KB
from typing import TYPE_CHECKING, Optional, Union

# Langchain is an optional dependency: import it only for type checking here;
# resolve_llm() re-imports it lazily at runtime inside a try/except.
if TYPE_CHECKING:
    from langchain.base_language import BaseLanguageModel

from llama_index.legacy.llms.llama_cpp import LlamaCPP
from llama_index.legacy.llms.llama_utils import (
    completion_to_prompt,
    messages_to_prompt,
)
from llama_index.legacy.llms.llm import LLM
from llama_index.legacy.llms.mock import MockLLM
from llama_index.legacy.llms.openai import OpenAI
from llama_index.legacy.llms.openai_utils import validate_openai_api_key

# Anything resolve_llm() accepts: a selector string ("default" / "local[:path]"),
# a llama-index LLM, or a langchain model (forward ref — langchain is optional).
LLMType = Union[str, LLM, "BaseLanguageModel"]
15
def resolve_llm(llm: Optional[LLMType] = None) -> LLM:
    """Resolve LLM from string or LLM instance.

    Accepted values:
    - ``"default"``: construct an ``OpenAI`` LLM (validates the API key and
      raises ``ValueError`` with guidance when it is missing/invalid).
    - ``"local"`` or ``"local:<model_path>"``: construct a ``LlamaCPP`` LLM.
    - a langchain ``BaseLanguageModel``: wrapped in ``LangChainLLM``
      (only when langchain is installed).
    - ``None``: returns a ``MockLLM`` (LLM explicitly disabled).
    - any other ``LLM`` instance: returned unchanged.
    """
    # Langchain is optional; when it is absent, the langchain branch below
    # is skipped because BaseLanguageModel stays None.
    try:
        from langchain.base_language import BaseLanguageModel

        from llama_index.legacy.llms.langchain import LangChainLLM
    except ImportError:
        BaseLanguageModel = None  # type: ignore

    if llm == "default":
        # "default" selects OpenAI; fail fast with a readable message when
        # the API key cannot be validated, rather than erroring mid-query.
        try:
            llm = OpenAI()
            validate_openai_api_key(llm.api_key)
        except ValueError as e:
            raise ValueError(
                "\n******\n"
                "Could not load OpenAI model. "
                "If you intended to use OpenAI, please check your OPENAI_API_KEY.\n"
                "Original error:\n"
                f"{e!s}"
                "\nTo disable the LLM entirely, set llm=None."
                "\n******"
            )

    if isinstance(llm, str):
        # Any other string must be of the form "local" or "local:<model_path>".
        prefix, sep, remainder = llm.partition(":")
        if prefix != "local":
            raise ValueError(
                "llm must start with str 'local' or of type LLM or BaseLanguageModel"
            )
        llm = LlamaCPP(
            # No ":" present -> let LlamaCPP pick its default model path.
            model_path=remainder if sep else None,
            messages_to_prompt=messages_to_prompt,
            completion_to_prompt=completion_to_prompt,
            model_kwargs={"n_gpu_layers": 1},
        )
    elif BaseLanguageModel is not None and isinstance(llm, BaseLanguageModel):
        # Wrap langchain models so they expose the llama-index LLM interface.
        llm = LangChainLLM(llm=llm)
    elif llm is None:
        print("LLM is explicitly disabled. Using MockLLM.")
        llm = MockLLM()

    return llm