llama-index
24 строки · 1.1 Кб
1from llama_index.legacy.llm_predictor.base import BaseLLMPredictor, LLMPredictor2from llama_index.legacy.llm_predictor.mock import MockLLMPredictor3from llama_index.legacy.llm_predictor.structured import StructuredLLMPredictor4from llama_index.legacy.llm_predictor.vellum.predictor import VellumPredictor5
6
def load_predictor(data: dict) -> BaseLLMPredictor:
    """Reconstruct a predictor from its serialized dict representation.

    A value that is already a live ``BaseLLMPredictor`` instance is passed
    through unchanged; otherwise the ``class_name`` field of *data* selects
    which predictor class deserializes the payload via ``from_dict``.

    Raises:
        ValueError: if ``class_name`` is absent or names no known predictor.
    """
    if isinstance(data, BaseLLMPredictor):
        # Caller handed us an already-constructed predictor, not serialized state.
        return data

    predictor_name = data.get("class_name", None)
    if predictor_name is None:
        raise ValueError("Predictor loading requires a class_name")

    # Dispatch on the serialized class name, in the same order the original
    # if/elif chain checked the candidate classes.
    candidate_classes = (
        LLMPredictor,
        StructuredLLMPredictor,
        MockLLMPredictor,
        VellumPredictor,
    )
    for predictor_cls in candidate_classes:
        if predictor_name == predictor_cls.class_name():
            return predictor_cls.from_dict(data)

    raise ValueError(f"Invalid predictor name: {predictor_name}")