llama-index

Форк
0
139 строк · 4.4 Кб
1
"""OpenAI Agent.
2

3
Simple wrapper around AgentRunner + OpenAIAgentWorker.
4

5
For the legacy implementation see:
6
```python
7
from llama_index.legacy.agent.legacy.openai.base import OpenAIAgent
8
```
9
"""
10

11
from typing import (
12
    Any,
13
    List,
14
    Optional,
15
    Type,
16
)
17

18
from llama_index.legacy.agent.openai.step import OpenAIAgentWorker
19
from llama_index.legacy.agent.runner.base import AgentRunner
20
from llama_index.legacy.callbacks import (
21
    CallbackManager,
22
)
23
from llama_index.legacy.llms.base import ChatMessage
24
from llama_index.legacy.llms.llm import LLM
25
from llama_index.legacy.llms.openai import OpenAI
26
from llama_index.legacy.memory.chat_memory_buffer import ChatMemoryBuffer
27
from llama_index.legacy.memory.types import BaseMemory
28
from llama_index.legacy.objects.base import ObjectRetriever
29
from llama_index.legacy.tools import BaseTool
30

31
DEFAULT_MODEL_NAME = "gpt-3.5-turbo-0613"
32

33
DEFAULT_MAX_FUNCTION_CALLS = 5
34

35

36
class OpenAIAgent(AgentRunner):
    """OpenAI function-calling agent.

    Thin convenience wrapper: builds an ``OpenAIAgentWorker`` from the
    given tools and hands it to the ``AgentRunner`` base class.

    For the legacy implementation see:
    ```python
    from llama_index.legacy.agent.legacy.openai.base import OpenAIAgent
    ```
    """

    def __init__(
        self,
        tools: List[BaseTool],
        llm: OpenAI,
        memory: BaseMemory,
        prefix_messages: List[ChatMessage],
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        default_tool_choice: str = "auto",
        callback_manager: Optional[CallbackManager] = None,
        tool_retriever: Optional[ObjectRetriever[BaseTool]] = None,
    ) -> None:
        """Init params.

        Args:
            tools: Tools the agent may invoke.
            llm: OpenAI LLM driving the agent.
            memory: Conversation memory shared with the runner.
            prefix_messages: Messages prepended to every chat turn.
            verbose: Print step-by-step progress when True.
            max_function_calls: Cap on tool invocations per run.
            default_tool_choice: Tool-choice mode passed to the runner.
            callback_manager: Overrides the LLM's callback manager if given.
            tool_retriever: Optional dynamic tool retriever.
        """
        # Fall back to the LLM's own callback manager when none is supplied.
        manager = callback_manager if callback_manager is not None else llm.callback_manager
        worker = OpenAIAgentWorker.from_tools(
            tools=tools,
            tool_retriever=tool_retriever,
            llm=llm,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=manager,
            prefix_messages=prefix_messages,
        )
        super().__init__(
            worker,
            memory=memory,
            llm=llm,
            callback_manager=manager,
            default_tool_choice=default_tool_choice,
        )

    @classmethod
    def from_tools(
        cls,
        tools: Optional[List[BaseTool]] = None,
        tool_retriever: Optional[ObjectRetriever[BaseTool]] = None,
        llm: Optional[LLM] = None,
        chat_history: Optional[List[ChatMessage]] = None,
        memory: Optional[BaseMemory] = None,
        memory_cls: Type[BaseMemory] = ChatMemoryBuffer,
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        default_tool_choice: str = "auto",
        callback_manager: Optional[CallbackManager] = None,
        system_prompt: Optional[str] = None,
        prefix_messages: Optional[List[ChatMessage]] = None,
        **kwargs: Any,
    ) -> "OpenAIAgent":
        """Create an OpenAIAgent from a list of tools.

        Similar to `from_defaults` in other classes, this method will
        infer defaults for a variety of parameters, including the LLM,
        if they are not specified.

        Raises:
            ValueError: If the LLM is not an ``OpenAI`` instance, if the
                model does not support the function-calling API, or if
                both ``system_prompt`` and ``prefix_messages`` are given.
        """
        if not tools:
            tools = []
        if not chat_history:
            chat_history = []
        if not llm:
            llm = OpenAI(model=DEFAULT_MODEL_NAME)

        # The worker relies on OpenAI-specific function-calling support.
        if not isinstance(llm, OpenAI):
            raise ValueError("llm must be a OpenAI instance")

        if callback_manager is not None:
            llm.callback_manager = callback_manager

        if memory is None:
            memory = memory_cls.from_defaults(chat_history, llm=llm)

        if not llm.metadata.is_function_calling_model:
            raise ValueError(
                f"Model name {llm.model} does not support function calling API. "
            )

        if system_prompt is not None:
            # system_prompt is sugar for a single system prefix message;
            # the two options are mutually exclusive.
            if prefix_messages is not None:
                raise ValueError(
                    "Cannot specify both system_prompt and prefix_messages"
                )
            prefix_messages = [ChatMessage(content=system_prompt, role="system")]

        if not prefix_messages:
            prefix_messages = []

        return cls(
            tools=tools,
            tool_retriever=tool_retriever,
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
            default_tool_choice=default_tool_choice,
        )
140

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.