llama-index

import asyncio
from inspect import signature
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Type

if TYPE_CHECKING:
    from llama_index.legacy.bridge.langchain import StructuredTool, Tool
from llama_index.legacy.bridge.pydantic import BaseModel
from llama_index.legacy.tools.types import AsyncBaseTool, ToolMetadata, ToolOutput
from llama_index.legacy.tools.utils import create_schema_from_function

AsyncCallable = Callable[..., Awaitable[Any]]


def sync_to_async(fn: Callable[..., Any]) -> AsyncCallable:
    """Sync to async."""

    async def _async_wrapped_fn(*args: Any, **kwargs: Any) -> Any:
        # Run the synchronous function in the default thread-pool executor so it
        # can be awaited without blocking the event loop.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, lambda: fn(*args, **kwargs))

    return _async_wrapped_fn


class FunctionTool(AsyncBaseTool):
    """Function Tool.

    A tool that takes in a function.

    """

    def __init__(
        self,
        fn: Callable[..., Any],
        metadata: ToolMetadata,
        async_fn: Optional[AsyncCallable] = None,
    ) -> None:
        self._fn = fn
        if async_fn is not None:
            self._async_fn = async_fn
        else:
            self._async_fn = sync_to_async(self._fn)
        self._metadata = metadata

    @classmethod
    def from_defaults(
        cls,
        fn: Callable[..., Any],
        name: Optional[str] = None,
        description: Optional[str] = None,
        fn_schema: Optional[Type[BaseModel]] = None,
        async_fn: Optional[AsyncCallable] = None,
        tool_metadata: Optional[ToolMetadata] = None,
    ) -> "FunctionTool":
        if tool_metadata is None:
            # Derive the tool name, description, and argument schema from the
            # function's name, signature, and docstring when not supplied.
            name = name or fn.__name__
            docstring = fn.__doc__
            description = description or f"{name}{signature(fn)}\n{docstring}"
            if fn_schema is None:
                fn_schema = create_schema_from_function(
                    f"{name}", fn, additional_fields=None
                )
            tool_metadata = ToolMetadata(
                name=name, description=description, fn_schema=fn_schema
            )
        return cls(fn=fn, metadata=tool_metadata, async_fn=async_fn)

    @property
    def metadata(self) -> ToolMetadata:
        """Metadata."""
        return self._metadata

    @property
    def fn(self) -> Callable[..., Any]:
        """Function."""
        return self._fn

    @property
    def async_fn(self) -> AsyncCallable:
        """Async function."""
        return self._async_fn

    def call(self, *args: Any, **kwargs: Any) -> ToolOutput:
        """Call."""
        tool_output = self._fn(*args, **kwargs)
        return ToolOutput(
            content=str(tool_output),
            tool_name=self.metadata.name,
            raw_input={"args": args, "kwargs": kwargs},
            raw_output=tool_output,
        )

    async def acall(self, *args: Any, **kwargs: Any) -> ToolOutput:
        """Async call."""
        tool_output = await self._async_fn(*args, **kwargs)
        return ToolOutput(
            content=str(tool_output),
            tool_name=self.metadata.name,
            raw_input={"args": args, "kwargs": kwargs},
            raw_output=tool_output,
        )

    def to_langchain_tool(
        self,
        **langchain_tool_kwargs: Any,
    ) -> "Tool":
        """To langchain tool."""
        from llama_index.legacy.bridge.langchain import Tool

        langchain_tool_kwargs = self._process_langchain_tool_kwargs(
            langchain_tool_kwargs
        )
        return Tool.from_function(
            func=self.fn,
            coroutine=self.async_fn,
            **langchain_tool_kwargs,
        )

    def to_langchain_structured_tool(
        self,
        **langchain_tool_kwargs: Any,
    ) -> "StructuredTool":
        """To langchain structured tool."""
        from llama_index.legacy.bridge.langchain import StructuredTool

        langchain_tool_kwargs = self._process_langchain_tool_kwargs(
            langchain_tool_kwargs
        )
        return StructuredTool.from_function(
            func=self.fn,
            coroutine=self.async_fn,
            **langchain_tool_kwargs,
        )

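A minimal usage sketch (not part of the original file): it wraps a plain Python function in a FunctionTool and calls it both synchronously and asynchronously. The add function below is an illustrative assumption, and the import path assumes this module is importable as llama_index.legacy.tools.function_tool.

import asyncio

from llama_index.legacy.tools.function_tool import FunctionTool


def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


# from_defaults derives the tool name, description, and fn_schema from the
# function's name, signature, and docstring.
tool = FunctionTool.from_defaults(fn=add)

print(tool.metadata.name)                     # "add"
print(tool.call(1, 2).content)                # "3"
print(asyncio.run(tool.acall(3, 4)).content)  # "7" - the sync fn runs in an executor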