cohere-python

# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing
import urllib.parse
from json.decoder import JSONDecodeError

from .. import core
from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.datetime_utils import serialize_datetime
from ..core.jsonable_encoder import jsonable_encoder
from ..core.remove_none_from_dict import remove_none_from_dict
from ..core.request_options import RequestOptions
from ..errors.too_many_requests_error import TooManyRequestsError
from ..types.dataset_type import DatasetType
from .types.datasets_create_response import DatasetsCreateResponse
from .types.datasets_get_response import DatasetsGetResponse
from .types.datasets_get_usage_response import DatasetsGetUsageResponse
from .types.datasets_list_response import DatasetsListResponse

try:
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)


class DatasetsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def list(
        self,
        *,
        dataset_type: typing.Optional[str] = None,
        before: typing.Optional[dt.datetime] = None,
        after: typing.Optional[dt.datetime] = None,
        limit: typing.Optional[float] = None,
        offset: typing.Optional[float] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsListResponse:
        """
        List datasets that have been created.

        Parameters:
            - dataset_type: typing.Optional[str]. Optional filter by dataset type.

            - before: typing.Optional[dt.datetime]. Optional filter before a date.

            - after: typing.Optional[dt.datetime]. Optional filter after a date.

            - limit: typing.Optional[float]. Optional limit to the number of results.

            - offset: typing.Optional[float]. Optional offset to the start of results.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.list()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "datasetType": dataset_type,
                        "before": serialize_datetime(before) if before is not None else None,
                        "after": serialize_datetime(after) if after is not None else None,
                        "limit": limit,
                        "offset": offset,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsListResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
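
    # Usage sketch (illustrative, not auto-generated): listing datasets created
    # in the last week. The filter values are assumptions for illustration;
    # `list` accepts any combination of the parameters documented above.
    #
    #   import datetime as dt
    #   from cohere.client import Client
    #
    #   client = Client(client_name="YOUR_CLIENT_NAME", token="YOUR_TOKEN")
    #   recent = client.datasets.list(
    #       dataset_type="embed-input",
    #       after=dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=7),
    #       limit=10,
    #   )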

    def create(
        self,
        *,
        name: str,
        type: DatasetType,
        keep_original_file: typing.Optional[bool] = None,
        skip_malformed_input: typing.Optional[bool] = None,
        keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        text_separator: typing.Optional[str] = None,
        csv_delimiter: typing.Optional[str] = None,
        data: core.File,
        eval_data: typing.Optional[core.File] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsCreateResponse:
        """
        Create a dataset by uploading a file. See ['Dataset Creation'](https://docs.cohere.com/docs/datasets#dataset-creation) for more information.

        Parameters:
            - name: str. The name of the uploaded dataset.

            - type: DatasetType. The dataset type, which is used to validate the data.

            - keep_original_file: typing.Optional[bool]. Indicates if the original file should be stored.

            - skip_malformed_input: typing.Optional[bool]. Indicates whether rows with malformed input should be dropped (instead of failing the validation check). Dropped rows will be returned in the warnings field.

            - keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `keep_fields` are missing from the uploaded file, Dataset validation will fail.

            - optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `optional_fields` are missing from the uploaded file, Dataset validation will pass.

            - text_separator: typing.Optional[str]. Raw .txt uploads will be split into entries using the text_separator value.

            - csv_delimiter: typing.Optional[str]. The delimiter used for .csv uploads.

            - data: core.File. See core.File for more documentation.

            - eval_data: typing.Optional[core.File]. See core.File for more documentation.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.create(
            name="string",
            type="embed-input",
            keep_original_file=True,
            skip_malformed_input=True,
            keep_fields="string",
            optional_fields="string",
            text_separator="string",
            csv_delimiter="string",
            data=open("your_file.jsonl", "rb"),  # required; path is illustrative
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "name": name,
                        "type": type,
                        "keep_original_file": keep_original_file,
                        "skip_malformed_input": skip_malformed_input,
                        "keep_fields": keep_fields,
                        "optional_fields": optional_fields,
                        "text_separator": text_separator,
                        "csv_delimiter": csv_delimiter,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            data=jsonable_encoder(remove_none_from_dict({}))
            if request_options is None or request_options.get("additional_body_parameters") is None
            else {
                **jsonable_encoder(remove_none_from_dict({})),
                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
            },
            files=core.convert_file_dict_to_httpx_tuples(remove_none_from_dict({"data": data, "eval_data": eval_data})),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
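
    # Upload sketch (illustrative, not auto-generated): `data` takes a
    # `core.File`, which in Fern-generated clients typically accepts an open
    # binary file handle. The path and dataset name below are assumptions.
    #
    #   from cohere.client import Client
    #
    #   client = Client(client_name="YOUR_CLIENT_NAME", token="YOUR_TOKEN")
    #   with open("embed_data.jsonl", "rb") as f:
    #       created = client.datasets.create(
    #           name="my-embed-dataset",
    #           type="embed-input",
    #           data=f,
    #       )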

    def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetUsageResponse:
        """
        View the dataset storage usage for your Organization. Each Organization can have up to 10GB of storage across all their users.

        Parameters:
            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.get_usage()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets/usage"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
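
    # RequestOptions sketch (illustrative, not auto-generated): every method in
    # this class reads request_options with dict-style .get calls
    # (timeout_in_seconds, max_retries, additional_headers,
    # additional_query_parameters), so a plain mapping with those keys works.
    # The values below are assumptions for illustration.
    #
    #   usage = client.datasets.get_usage(
    #       request_options={
    #           "timeout_in_seconds": 30,
    #           "max_retries": 3,
    #           "additional_headers": {"X-Request-Source": "docs-example"},
    #       },
    #   )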

    def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetResponse:
        """
        Retrieve a dataset by ID. See ['Datasets'](https://docs.cohere.com/docs/datasets) for more information.

        Parameters:
            - id: str.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.get(
            id="id",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def delete(
        self, id: str, *, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Any]:
        """
        Delete a dataset by ID. Datasets are automatically deleted after 30 days, but they can also be deleted manually.

        Parameters:
            - id: str.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.delete(
            id="id",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
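
    # Error-handling sketch (illustrative, not auto-generated): all methods
    # raise TooManyRequestsError on HTTP 429 and ApiError for other non-2xx
    # responses, so callers can back off and retry. The retry policy below is
    # an assumption, not a recommendation.
    #
    #   import time
    #   from cohere.errors.too_many_requests_error import TooManyRequestsError
    #
    #   try:
    #       client.datasets.delete(id="id")
    #   except TooManyRequestsError:
    #       time.sleep(5)  # simple fixed backoff before one retry
    #       client.datasets.delete(id="id")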


class AsyncDatasetsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def list(
        self,
        *,
        dataset_type: typing.Optional[str] = None,
        before: typing.Optional[dt.datetime] = None,
        after: typing.Optional[dt.datetime] = None,
        limit: typing.Optional[float] = None,
        offset: typing.Optional[float] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsListResponse:
        """
        List datasets that have been created.

        Parameters:
            - dataset_type: typing.Optional[str]. Optional filter by dataset type.

            - before: typing.Optional[dt.datetime]. Optional filter before a date.

            - after: typing.Optional[dt.datetime]. Optional filter after a date.

            - limit: typing.Optional[float]. Optional limit to the number of results.

            - offset: typing.Optional[float]. Optional offset to the start of results.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.list()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "datasetType": dataset_type,
                        "before": serialize_datetime(before) if before is not None else None,
                        "after": serialize_datetime(after) if after is not None else None,
                        "limit": limit,
                        "offset": offset,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsListResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def create(
        self,
        *,
        name: str,
        type: DatasetType,
        keep_original_file: typing.Optional[bool] = None,
        skip_malformed_input: typing.Optional[bool] = None,
        keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        text_separator: typing.Optional[str] = None,
        csv_delimiter: typing.Optional[str] = None,
        data: core.File,
        eval_data: typing.Optional[core.File] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsCreateResponse:
        """
        Create a dataset by uploading a file. See ['Dataset Creation'](https://docs.cohere.com/docs/datasets#dataset-creation) for more information.

        Parameters:
            - name: str. The name of the uploaded dataset.

            - type: DatasetType. The dataset type, which is used to validate the data.

            - keep_original_file: typing.Optional[bool]. Indicates if the original file should be stored.

            - skip_malformed_input: typing.Optional[bool]. Indicates whether rows with malformed input should be dropped (instead of failing the validation check). Dropped rows will be returned in the warnings field.

            - keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `keep_fields` are missing from the uploaded file, Dataset validation will fail.

            - optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `optional_fields` are missing from the uploaded file, Dataset validation will pass.

            - text_separator: typing.Optional[str]. Raw .txt uploads will be split into entries using the text_separator value.

            - csv_delimiter: typing.Optional[str]. The delimiter used for .csv uploads.

            - data: core.File. See core.File for more documentation.

            - eval_data: typing.Optional[core.File]. See core.File for more documentation.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.create(
            name="string",
            type="embed-input",
            keep_original_file=True,
            skip_malformed_input=True,
            keep_fields="string",
            optional_fields="string",
            text_separator="string",
            csv_delimiter="string",
            data=open("your_file.jsonl", "rb"),  # required; path is illustrative
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "name": name,
                        "type": type,
                        "keep_original_file": keep_original_file,
                        "skip_malformed_input": skip_malformed_input,
                        "keep_fields": keep_fields,
                        "optional_fields": optional_fields,
                        "text_separator": text_separator,
                        "csv_delimiter": csv_delimiter,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            data=jsonable_encoder(remove_none_from_dict({}))
            if request_options is None or request_options.get("additional_body_parameters") is None
            else {
                **jsonable_encoder(remove_none_from_dict({})),
                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
            },
            files=core.convert_file_dict_to_httpx_tuples(remove_none_from_dict({"data": data, "eval_data": eval_data})),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetUsageResponse:
        """
        View the dataset storage usage for your Organization. Each Organization can have up to 10GB of storage across all their users.

        Parameters:
            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.get_usage()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets/usage"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetResponse:
        """
        Retrieve a dataset by ID. See ['Datasets'](https://docs.cohere.com/docs/datasets) for more information.

        Parameters:
            - id: str.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.get(
            id="id",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def delete(
        self, id: str, *, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Any]:
        """
        Delete a dataset by ID. Datasets are automatically deleted after 30 days, but they can also be deleted manually.

        Parameters:
            - id: str.

            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.delete(
            id="id",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
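
# Async usage sketch (illustrative, not auto-generated): AsyncDatasetsClient
# mirrors the sync client method-for-method, but every call must be awaited
# inside an event loop; `asyncio.run` below is one illustrative way to drive it.
#
#   import asyncio
#   from cohere.client import AsyncClient
#
#   async def main() -> None:
#       client = AsyncClient(client_name="YOUR_CLIENT_NAME", token="YOUR_TOKEN")
#       usage = await client.datasets.get_usage()
#       print(usage)
#
#   asyncio.run(main())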
