llama-index
Fork
0
141 lines · 5.1 KB
1
from typing import List, Sequence
2

3
from llama_index.legacy.constants import AI21_J2_CONTEXT_WINDOW, COHERE_CONTEXT_WINDOW
4
from llama_index.legacy.core.llms.types import ChatMessage, LLMMetadata, MessageRole
5
from llama_index.legacy.llms.anyscale_utils import anyscale_modelname_to_contextsize
6
from llama_index.legacy.llms.openai_utils import openai_modelname_to_contextsize
7

8

9
class LC:
    """Namespace holder for LangChain symbols.

    The import runs once at module-import time; grouping the names inside a
    class lets the rest of this module reference them uniformly as
    ``LC.<Name>`` and keeps them out of the module's top-level namespace.
    """

    from llama_index.legacy.bridge.langchain import (
        AI21,
        AIMessage,
        BaseChatModel,
        BaseLanguageModel,
        BaseMessage,
        ChatAnyscale,
        ChatMessage,
        ChatOpenAI,
        Cohere,
        FunctionMessage,
        HumanMessage,
        OpenAI,
        SystemMessage,
    )
25

26

27
def is_chat_model(llm: LC.BaseLanguageModel) -> bool:
    """Check whether a LangChain LLM is a chat-style model.

    Args:
        llm: Any LangChain language model.

    Returns:
        True if ``llm`` is an instance of ``BaseChatModel``, else False.
    """
    chat_base = LC.BaseChatModel
    return isinstance(llm, chat_base)
29

30

31
def to_lc_messages(messages: Sequence[ChatMessage]) -> List[LC.BaseMessage]:
    """Convert llama-index chat messages to their LangChain equivalents.

    Args:
        messages: Sequence of llama-index ``ChatMessage`` objects.

    Returns:
        A list of LangChain message instances, one per input message, with
        the class chosen from each message's role.

    Raises:
        ValueError: If a message has an unrecognized role, if
            ``additional_kwargs`` is not a dict when a schema-required field
            must be pulled from it, or if a required field is missing.
    """
    lc_messages: List[LC.BaseMessage] = []
    for message in messages:
        lc_kw = {
            "content": message.content,
            "additional_kwargs": message.additional_kwargs,
        }
        if message.role == "user":
            LC_MessageClass = LC.HumanMessage
        elif message.role == "assistant":
            LC_MessageClass = LC.AIMessage
        elif message.role == "function":
            LC_MessageClass = LC.FunctionMessage
        elif message.role == "system":
            LC_MessageClass = LC.SystemMessage
        elif message.role == "chatbot":
            LC_MessageClass = LC.ChatMessage
        else:
            raise ValueError(f"Invalid role: {message.role}")

        # BUGFIX: pydantic's schema() omits "required" entirely when every
        # field has a default — `.get("required")` then returns None and the
        # original code raised TypeError iterating it. Fall back to [].
        for req_key in LC_MessageClass.schema().get("required") or []:
            if req_key not in lc_kw:
                # Required fields not set above (e.g. a message's `name`)
                # must be supplied through additional_kwargs; move them into
                # the constructor kwargs.
                more_kw = lc_kw.get("additional_kwargs")
                if not isinstance(more_kw, dict):
                    raise ValueError(
                        f"additional_kwargs must be a dict, got {type(more_kw)}"
                    )
                if req_key not in more_kw:
                    raise ValueError(f"{req_key} needed for {LC_MessageClass}")
                lc_kw[req_key] = more_kw.pop(req_key)

        lc_messages.append(LC_MessageClass(**lc_kw))

    return lc_messages
66

67

68
def from_lc_messages(lc_messages: Sequence[LC.BaseMessage]) -> List[ChatMessage]:
    """Convert LangChain messages into llama-index chat messages.

    Args:
        lc_messages: Sequence of LangChain ``BaseMessage`` instances.

    Returns:
        A list of llama-index ``ChatMessage`` objects with roles mapped from
        the concrete LangChain message types.

    Raises:
        ValueError: If a message is of an unrecognized type.
    """
    # Checked in order; the first isinstance match determines the role.
    type_role_pairs = (
        (LC.HumanMessage, MessageRole.USER),
        (LC.AIMessage, MessageRole.ASSISTANT),
        (LC.FunctionMessage, MessageRole.FUNCTION),
        (LC.SystemMessage, MessageRole.SYSTEM),
        (LC.ChatMessage, MessageRole.CHATBOT),
    )

    converted: List[ChatMessage] = []
    for lc_message in lc_messages:
        for lc_type, role in type_role_pairs:
            if isinstance(lc_message, lc_type):
                break
        else:
            raise ValueError(f"Invalid message type: {type(lc_message)}")

        converted.append(
            ChatMessage(
                role=role,
                content=lc_message.content,
                additional_kwargs=lc_message.additional_kwargs,
            )
        )

    return converted
90

91

92
def get_llm_metadata(llm: LC.BaseLanguageModel) -> LLMMetadata:
    """Build an ``LLMMetadata`` describing a LangChain LLM.

    Args:
        llm: A LangChain ``BaseLanguageModel`` instance.

    Returns:
        Metadata carrying the model's context window, output token budget,
        chat-model flag, and model name. Unrecognized model types fall back
        to default metadata with only the chat-model flag set.

    Raises:
        ValueError: If ``llm`` is not a LangChain ``BaseLanguageModel``.
    """
    if not isinstance(llm, LC.BaseLanguageModel):
        raise ValueError("llm must be instance of LangChain BaseLanguageModel")

    chat_flag = is_chat_model(llm)

    # NOTE(review): ChatAnyscale is tested before ChatOpenAI — presumably it
    # subclasses ChatOpenAI, so this branch order is deliberate; confirm
    # before reordering.
    if isinstance(llm, LC.OpenAI):
        return LLMMetadata(
            context_window=openai_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens,
            is_chat_model=chat_flag,
            model_name=llm.model_name,
        )

    if isinstance(llm, LC.ChatAnyscale):
        return LLMMetadata(
            context_window=anyscale_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens or -1,
            is_chat_model=chat_flag,
            model_name=llm.model_name,
        )

    if isinstance(llm, LC.ChatOpenAI):
        return LLMMetadata(
            context_window=openai_modelname_to_contextsize(llm.model_name),
            num_output=llm.max_tokens or -1,
            is_chat_model=chat_flag,
            model_name=llm.model_name,
        )

    if isinstance(llm, LC.Cohere):
        # June 2023: Cohere's supported max input size for Generation models
        # is 2048 tokens. Reference: <https://docs.cohere.com/docs/tokens>
        return LLMMetadata(
            context_window=COHERE_CONTEXT_WINDOW,
            num_output=llm.max_tokens,
            is_chat_model=chat_flag,
            model_name=llm.model,
        )

    if isinstance(llm, LC.AI21):
        # June 2023: AI21's J2 models support an 8K (8192-token) context.
        # Reference:
        # <https://docs.ai21.com/changelog/increased-context-length-for-j2-foundation-models>
        return LLMMetadata(
            context_window=AI21_J2_CONTEXT_WINDOW,
            num_output=llm.maxTokens,  # AI21's client uses camelCase here
            is_chat_model=chat_flag,
            model_name=llm.model,
        )

    return LLMMetadata(is_chat_model=chat_flag)
142

Cookie usage

We use cookies in accordance with the Privacy Policy and the Cookie Policy.

By clicking "Accept", you give SberTech JSC consent to process your personal data in order to improve our website and the GitVerse service and to make them more convenient to use.

You can block the use of cookies yourself in your browser settings.