openai-python

test_response.py 
159 lines · 4.5 KB
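# Tests for the internal `openai._response` response classes: resolving the
# response payload type from generic type arguments via `extract_response_type`,
# and re-parsing a raw response into custom models and streams with `.parse(to=...)`.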
import json
from typing import List

import httpx
import pytest
import pydantic

from openai import OpenAI, BaseModel, AsyncOpenAI
from openai._response import (
    APIResponse,
    BaseAPIResponse,
    AsyncAPIResponse,
    BinaryAPIResponse,
    AsyncBinaryAPIResponse,
    extract_response_type,
)
from openai._streaming import Stream
from openai._base_client import FinalRequestOptions


class ConcreteBaseAPIResponse(APIResponse[bytes]):
    ...


class ConcreteAPIResponse(APIResponse[List[str]]):
    ...


class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]):
    ...


def test_extract_response_type_direct_classes() -> None:
    assert extract_response_type(BaseAPIResponse[str]) == str
    assert extract_response_type(APIResponse[str]) == str
    assert extract_response_type(AsyncAPIResponse[str]) == str


def test_extract_response_type_direct_class_missing_type_arg() -> None:
    with pytest.raises(
        RuntimeError,
        match="Expected type <class 'openai._response.AsyncAPIResponse'> to have a type argument at index 0 but it did not",
    ):
        extract_response_type(AsyncAPIResponse)


def test_extract_response_type_concrete_subclasses() -> None:
    assert extract_response_type(ConcreteBaseAPIResponse) == bytes
    assert extract_response_type(ConcreteAPIResponse) == List[str]
    assert extract_response_type(ConcreteAsyncAPIResponse) == httpx.Response


def test_extract_response_type_binary_response() -> None:
    assert extract_response_type(BinaryAPIResponse) == bytes
    assert extract_response_type(AsyncBinaryAPIResponse) == bytes


class PydanticModel(pydantic.BaseModel):
    ...


def test_response_parse_mismatched_basemodel(client: OpenAI) -> None:
    response = APIResponse(
        raw=httpx.Response(200, content=b"foo"),
        client=client,
        stream=False,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    with pytest.raises(
        TypeError,
        match="Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`",
    ):
        response.parse(to=PydanticModel)


@pytest.mark.asyncio
async def test_async_response_parse_mismatched_basemodel(async_client: AsyncOpenAI) -> None:
    response = AsyncAPIResponse(
        raw=httpx.Response(200, content=b"foo"),
        client=async_client,
        stream=False,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    with pytest.raises(
        TypeError,
        match="Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`",
    ):
        await response.parse(to=PydanticModel)


def test_response_parse_custom_stream(client: OpenAI) -> None:
    response = APIResponse(
        raw=httpx.Response(200, content=b"foo"),
        client=client,
        stream=True,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    stream = response.parse(to=Stream[int])
    assert stream._cast_to == int


@pytest.mark.asyncio
async def test_async_response_parse_custom_stream(async_client: AsyncOpenAI) -> None:
    response = AsyncAPIResponse(
        raw=httpx.Response(200, content=b"foo"),
        client=async_client,
        stream=True,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    stream = await response.parse(to=Stream[int])
    assert stream._cast_to == int


class CustomModel(BaseModel):
    foo: str
    bar: int


def test_response_parse_custom_model(client: OpenAI) -> None:
    response = APIResponse(
        raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})),
        client=client,
        stream=False,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    obj = response.parse(to=CustomModel)
    assert obj.foo == "hello!"
    assert obj.bar == 2


@pytest.mark.asyncio
async def test_async_response_parse_custom_model(async_client: AsyncOpenAI) -> None:
    response = AsyncAPIResponse(
        raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})),
        client=async_client,
        stream=False,
        stream_cls=None,
        cast_to=str,
        options=FinalRequestOptions.construct(method="get", url="/foo"),
    )

    obj = await response.parse(to=CustomModel)
    assert obj.foo == "hello!"
    assert obj.bar == 2
