llama-index
141 lines · 5.1 KB
1from typing import List, Sequence2
3from llama_index.legacy.constants import AI21_J2_CONTEXT_WINDOW, COHERE_CONTEXT_WINDOW4from llama_index.legacy.core.llms.types import ChatMessage, LLMMetadata, MessageRole5from llama_index.legacy.llms.anyscale_utils import anyscale_modelname_to_contextsize6from llama_index.legacy.llms.openai_utils import openai_modelname_to_contextsize7
8
class LC:
    """Namespace for LangChain symbols re-exported via the llama-index bridge.

    Grouping the bridged names as class attributes keeps them behind the
    ``LC.`` prefix, so LangChain's ``ChatMessage`` does not collide with the
    llama-index ``ChatMessage`` imported at module level.
    """

    # Model classes.
    from llama_index.legacy.bridge.langchain import (
        AI21,
        ChatAnyscale,
        ChatOpenAI,
        Cohere,
        OpenAI,
    )

    # Message classes and shared base types.
    from llama_index.legacy.bridge.langchain import (
        AIMessage,
        BaseChatModel,
        BaseLanguageModel,
        BaseMessage,
        ChatMessage,
        FunctionMessage,
        HumanMessage,
        SystemMessage,
    )
26
def is_chat_model(llm: LC.BaseLanguageModel) -> bool:
    """Return whether *llm* exposes LangChain's chat-model interface."""
    chat_base = LC.BaseChatModel
    return isinstance(llm, chat_base)
30
def to_lc_messages(messages: Sequence[ChatMessage]) -> List[LC.BaseMessage]:
    """Convert llama-index chat messages into LangChain message objects.

    Maps each message role to the matching LangChain message class, then
    promotes any fields that class declares as required (e.g. ``role`` on
    ``LC.ChatMessage``) out of ``additional_kwargs`` into constructor kwargs.

    Raises:
        ValueError: if a message has an unknown role, if
            ``additional_kwargs`` is not a dict when a required field must be
            promoted, or if a required field is missing entirely.
    """
    lc_messages: List[LC.BaseMessage] = []
    for message in messages:
        lc_kw = {
            "content": message.content,
            "additional_kwargs": message.additional_kwargs,
        }
        # MessageRole is a str-mixin enum, so these compare equal to the
        # plain string literals used previously.
        if message.role == MessageRole.USER:
            LC_MessageClass = LC.HumanMessage
        elif message.role == MessageRole.ASSISTANT:
            LC_MessageClass = LC.AIMessage
        elif message.role == MessageRole.FUNCTION:
            LC_MessageClass = LC.FunctionMessage
        elif message.role == MessageRole.SYSTEM:
            LC_MessageClass = LC.SystemMessage
        elif message.role == MessageRole.CHATBOT:
            LC_MessageClass = LC.ChatMessage
        else:
            raise ValueError(f"Invalid role: {message.role}")

        # Default to [] — pydantic omits the "required" key entirely when the
        # model has no required fields, and iterating None would raise.
        for req_key in LC_MessageClass.schema().get("required", []):
            if req_key not in lc_kw:
                more_kw = lc_kw.get("additional_kwargs")
                if not isinstance(more_kw, dict):
                    raise ValueError(
                        f"additional_kwargs must be a dict, got {type(more_kw)}"
                    )
                if req_key not in more_kw:
                    raise ValueError(f"{req_key} needed for {LC_MessageClass}")
                # Copy before popping so the caller's
                # message.additional_kwargs is not mutated in place.
                more_kw = dict(more_kw)
                lc_kw[req_key] = more_kw.pop(req_key)
                lc_kw["additional_kwargs"] = more_kw

        lc_messages.append(LC_MessageClass(**lc_kw))

    return lc_messages
67
def from_lc_messages(lc_messages: Sequence[LC.BaseMessage]) -> List[ChatMessage]:
    """Convert LangChain message objects back into llama-index ChatMessages."""
    # Checked in order; first matching class wins (same order as before).
    role_by_type = (
        (LC.HumanMessage, MessageRole.USER),
        (LC.AIMessage, MessageRole.ASSISTANT),
        (LC.FunctionMessage, MessageRole.FUNCTION),
        (LC.SystemMessage, MessageRole.SYSTEM),
        (LC.ChatMessage, MessageRole.CHATBOT),
    )
    converted: List[ChatMessage] = []
    for lc_message in lc_messages:
        for lc_cls, role in role_by_type:
            if isinstance(lc_message, lc_cls):
                break
        else:
            raise ValueError(f"Invalid message type: {type(lc_message)}")
        converted.append(
            ChatMessage(
                role=role,
                content=lc_message.content,
                additional_kwargs=lc_message.additional_kwargs,
            )
        )
    return converted
91
def get_llm_metadata(llm: LC.BaseLanguageModel) -> LLMMetadata:
    """Get LLM metadata from llm."""
    if not isinstance(llm, LC.BaseLanguageModel):
        raise ValueError("llm must be instance of LangChain BaseLanguageModel")

    chat_capable = is_chat_model(llm)

    if isinstance(llm, LC.OpenAI):
        return LLMMetadata(
            context_window=openai_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens,
            is_chat_model=chat_capable,
            model_name=llm.model_name,
        )

    # NOTE(review): keep this check ahead of ChatOpenAI — ChatAnyscale
    # presumably subclasses ChatOpenAI, so the order matters; confirm.
    if isinstance(llm, LC.ChatAnyscale):
        return LLMMetadata(
            context_window=anyscale_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens or -1,
            is_chat_model=chat_capable,
            model_name=llm.model_name,
        )

    if isinstance(llm, LC.ChatOpenAI):
        return LLMMetadata(
            context_window=openai_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens or -1,
            is_chat_model=chat_capable,
            model_name=llm.model_name,
        )

    if isinstance(llm, LC.Cohere):
        # June 2023: Cohere's supported max input size for Generation models
        # is 2048. Reference: <https://docs.cohere.com/docs/tokens>
        return LLMMetadata(
            context_window=COHERE_CONTEXT_WINDOW,
            num_output=llm.max_tokens,
            is_chat_model=chat_capable,
            model_name=llm.model,
        )

    if isinstance(llm, LC.AI21):
        # June 2023: AI21's J2 models accept up to 8K (8192) input tokens.
        # Reference: <https://docs.ai21.com/changelog/increased-context-length-for-j2-foundation-models>
        return LLMMetadata(
            context_window=AI21_J2_CONTEXT_WINDOW,
            num_output=llm.maxTokens,
            is_chat_model=chat_capable,
            model_name=llm.model,
        )

    # Unrecognized LangChain model type: fall back to default metadata.
    return LLMMetadata(is_chat_model=chat_capable)