# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing
import urllib.parse
from json.decoder import JSONDecodeError

from .. import core
from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.datetime_utils import serialize_datetime
from ..core.jsonable_encoder import jsonable_encoder
from ..core.remove_none_from_dict import remove_none_from_dict
from ..core.request_options import RequestOptions
from ..errors.too_many_requests_error import TooManyRequestsError
from ..types.dataset_type import DatasetType
from .types.datasets_create_response import DatasetsCreateResponse
from .types.datasets_get_response import DatasetsGetResponse
from .types.datasets_get_usage_response import DatasetsGetUsageResponse
from .types.datasets_list_response import DatasetsListResponse

try:
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore
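
# NOTE: the try/except above prefers the pydantic v1 compatibility layer bundled with
# pydantic>=2 and falls back to a plain `import pydantic` on v1 installs, so the
# `pydantic.parse_obj_as(...)` calls below work against either major version.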

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)


class DatasetsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def list(
        self,
        *,
        dataset_type: typing.Optional[str] = None,
        before: typing.Optional[dt.datetime] = None,
        after: typing.Optional[dt.datetime] = None,
        limit: typing.Optional[float] = None,
        offset: typing.Optional[float] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsListResponse:
        """
        List datasets that have been created.

        Parameters:
        - dataset_type: typing.Optional[str]. optional filter by dataset type

        - before: typing.Optional[dt.datetime]. optional filter before a date

        - after: typing.Optional[dt.datetime]. optional filter after a date

        - limit: typing.Optional[float]. optional limit to number of results

        - offset: typing.Optional[float]. optional offset to start of results

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.list()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "datasetType": dataset_type,
                        "before": serialize_datetime(before) if before is not None else None,
                        "after": serialize_datetime(after) if after is not None else None,
                        "limit": limit,
                        "offset": offset,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsListResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
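
    # Illustrative (not auto-generated) usage sketch for `list`: all filters are optional and
    # are sent as query parameters. The token, dataset type, and the assumption that the
    # response exposes an optional `datasets` list are placeholders, not guarantees.
    #
    #   client = Client(client_name="YOUR_CLIENT_NAME", token="YOUR_TOKEN")
    #   page = client.datasets.list(dataset_type="embed-input", limit=10, offset=0)
    #   for dataset in page.datasets or []:
    #       print(dataset.id, dataset.name)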

    def create(
        self,
        *,
        name: str,
        type: DatasetType,
        keep_original_file: typing.Optional[bool] = None,
        skip_malformed_input: typing.Optional[bool] = None,
        keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        text_separator: typing.Optional[str] = None,
        csv_delimiter: typing.Optional[str] = None,
        data: core.File,
        eval_data: typing.Optional[core.File] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsCreateResponse:
        """
        Create a dataset by uploading a file. See ['Dataset Creation'](https://docs.cohere.com/docs/datasets#dataset-creation) for more information.

        Parameters:
        - name: str. The name of the uploaded dataset.

        - type: DatasetType. The dataset type, which is used to validate the data.

        - keep_original_file: typing.Optional[bool]. Indicates if the original file should be stored.

        - skip_malformed_input: typing.Optional[bool]. Indicates whether rows with malformed input should be dropped (instead of failing the validation check). Dropped rows will be returned in the warnings field.

        - keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `keep_fields` are missing from the uploaded file, Dataset validation will fail.

        - optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, Datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `optional_fields` are missing from the uploaded file, Dataset validation will pass.

        - text_separator: typing.Optional[str]. Raw .txt uploads will be split into entries using the text_separator value.

        - csv_delimiter: typing.Optional[str]. The delimiter used for .csv uploads.

        - data: core.File. See core.File for more documentation

        - eval_data: typing.Optional[core.File]. See core.File for more documentation

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.create(
            name="string",
            type="embed-input",
            keep_original_file=True,
            skip_malformed_input=True,
            keep_fields="string",
            optional_fields="string",
            text_separator="string",
            csv_delimiter="string",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "name": name,
                        "type": type,
                        "keep_original_file": keep_original_file,
                        "skip_malformed_input": skip_malformed_input,
                        "keep_fields": keep_fields,
                        "optional_fields": optional_fields,
                        "text_separator": text_separator,
                        "csv_delimiter": csv_delimiter,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            data=jsonable_encoder(remove_none_from_dict({}))
            if request_options is None or request_options.get("additional_body_parameters") is None
            else {
                **jsonable_encoder(remove_none_from_dict({})),
                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
            },
            files=core.convert_file_dict_to_httpx_tuples(remove_none_from_dict({"data": data, "eval_data": eval_data})),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
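
    # Illustrative (not auto-generated) upload sketch for `create`: `data` accepts the same
    # values as httpx file uploads (see core.File), e.g. an open binary file handle. The file
    # path, dataset name, and the assumption that the response carries the new dataset's `id`
    # are placeholders.
    #
    #   with open("./embed_data.jsonl", "rb") as f:
    #       response = client.datasets.create(
    #           name="my-embed-dataset",
    #           type="embed-input",
    #           data=f,
    #       )
    #   print(response.id)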

    def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetUsageResponse:
        """
        View the dataset storage usage for your Organization. Each Organization can have up to 10GB of storage across all their users.

        Parameters:
        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.get_usage()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets/usage"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetResponse:
        """
        Retrieve a dataset by ID. See ['Datasets'](https://docs.cohere.com/docs/datasets) for more information.

        Parameters:
        - id: str.

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.get(
            id="id",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
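
    # Illustrative (not auto-generated) polling sketch: an uploaded dataset is validated
    # asynchronously, so callers often call `get` until validation finishes. The field names
    # (`dataset.validation_status`) and the terminal status strings below are assumptions about
    # the response schema, not guarantees from this module.
    #
    #   import time
    #
    #   while True:
    #       dataset = client.datasets.get(id=response.id).dataset
    #       if dataset.validation_status in ("validated", "failed"):
    #           break
    #       time.sleep(5)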

    def delete(
        self, id: str, *, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Any]:
        """
        Delete a dataset by ID. Datasets are automatically deleted after 30 days, but they can also be deleted manually.

        Parameters:
        - id: str.

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import Client

        client = Client(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        client.datasets.delete(
            id="id",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
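
# Illustrative (not auto-generated) error-handling sketch: every method in this module raises
# TooManyRequestsError on HTTP 429 and ApiError for any other non-2xx response, so callers can
# wrap deletes (or any call) like this. The dataset id is a placeholder.
#
#   try:
#       client.datasets.delete(id="dataset-id")
#   except TooManyRequestsError:
#       ...  # back off and retry
#   except ApiError as e:
#       print(e.status_code, e.body)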


class AsyncDatasetsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def list(
        self,
        *,
        dataset_type: typing.Optional[str] = None,
        before: typing.Optional[dt.datetime] = None,
        after: typing.Optional[dt.datetime] = None,
        limit: typing.Optional[float] = None,
        offset: typing.Optional[float] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsListResponse:
        """
        List datasets that have been created.

        Parameters:
        - dataset_type: typing.Optional[str]. optional filter by dataset type

        - before: typing.Optional[dt.datetime]. optional filter before a date

        - after: typing.Optional[dt.datetime]. optional filter after a date

        - limit: typing.Optional[float]. optional limit to number of results

        - offset: typing.Optional[float]. optional offset to start of results

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.list()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "datasetType": dataset_type,
                        "before": serialize_datetime(before) if before is not None else None,
                        "after": serialize_datetime(after) if after is not None else None,
                        "limit": limit,
                        "offset": offset,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsListResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def create(
        self,
        *,
        name: str,
        type: DatasetType,
        keep_original_file: typing.Optional[bool] = None,
        skip_malformed_input: typing.Optional[bool] = None,
        keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        text_separator: typing.Optional[str] = None,
        csv_delimiter: typing.Optional[str] = None,
        data: core.File,
        eval_data: typing.Optional[core.File] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatasetsCreateResponse:
        """
        Create a dataset by uploading a file. See ['Dataset Creation'](https://docs.cohere.com/docs/datasets#dataset-creation) for more information.

        Parameters:
        - name: str. The name of the uploaded dataset.

        - type: DatasetType. The dataset type, which is used to validate the data.

        - keep_original_file: typing.Optional[bool]. Indicates if the original file should be stored.

        - skip_malformed_input: typing.Optional[bool]. Indicates whether rows with malformed input should be dropped (instead of failing the validation check). Dropped rows will be returned in the warnings field.

        - keep_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `keep_fields` are missing from the uploaded file, Dataset validation will fail.

        - optional_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]]. List of names of fields that will be persisted in the Dataset. By default the Dataset will retain only the required fields indicated in the [schema for the corresponding Dataset type](https://docs.cohere.com/docs/datasets#dataset-types). For example, Datasets of type `embed-input` will drop all fields other than the required `text` field. If any of the fields in `optional_fields` are missing from the uploaded file, Dataset validation will pass.

        - text_separator: typing.Optional[str]. Raw .txt uploads will be split into entries using the text_separator value.

        - csv_delimiter: typing.Optional[str]. The delimiter used for .csv uploads.

        - data: core.File. See core.File for more documentation

        - eval_data: typing.Optional[core.File]. See core.File for more documentation

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.create(
            name="string",
            type="embed-input",
            keep_original_file=True,
            skip_malformed_input=True,
            keep_fields="string",
            optional_fields="string",
            text_separator="string",
            csv_delimiter="string",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets"),
            params=jsonable_encoder(
                remove_none_from_dict(
                    {
                        "name": name,
                        "type": type,
                        "keep_original_file": keep_original_file,
                        "skip_malformed_input": skip_malformed_input,
                        "keep_fields": keep_fields,
                        "optional_fields": optional_fields,
                        "text_separator": text_separator,
                        "csv_delimiter": csv_delimiter,
                        **(
                            request_options.get("additional_query_parameters", {})
                            if request_options is not None
                            else {}
                        ),
                    }
                )
            ),
            data=jsonable_encoder(remove_none_from_dict({}))
            if request_options is None or request_options.get("additional_body_parameters") is None
            else {
                **jsonable_encoder(remove_none_from_dict({})),
                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
            },
            files=core.convert_file_dict_to_httpx_tuples(remove_none_from_dict({"data": data, "eval_data": eval_data})),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetUsageResponse:
        """
        View the dataset storage usage for your Organization. Each Organization can have up to 10GB of storage across all their users.

        Parameters:
        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.get_usage()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "datasets/usage"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetsGetResponse:
        """
        Retrieve a dataset by ID. See ['Datasets'](https://docs.cohere.com/docs/datasets) for more information.

        Parameters:
        - id: str.

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.get(
            id="id",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DatasetsGetResponse, _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def delete(
        self, id: str, *, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Any]:
        """
        Delete a dataset by ID. Datasets are automatically deleted after 30 days, but they can also be deleted manually.

        Parameters:
        - id: str.

        - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
        ---
        from cohere.client import AsyncClient

        client = AsyncClient(
            client_name="YOUR_CLIENT_NAME",
            token="YOUR_TOKEN",
        )
        await client.datasets.delete(
            id="id",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"datasets/{jsonable_encoder(id)}"),
            params=jsonable_encoder(
                request_options.get("additional_query_parameters") if request_options is not None else None
            ),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self._client_wrapper.get_headers(),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
                )
            ),
            timeout=request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self._client_wrapper.get_timeout(),
            retries=0,
            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json())  # type: ignore
        if _response.status_code == 429:
            raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
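
# Illustrative (not auto-generated) async usage sketch: AsyncDatasetsClient mirrors the sync
# client one-to-one, with every method awaited. The token and the printed fields below are
# placeholders, not part of this module.
#
#   import asyncio
#   from cohere.client import AsyncClient
#
#   async def main() -> None:
#       client = AsyncClient(client_name="YOUR_CLIENT_NAME", token="YOUR_TOKEN")
#       usage = await client.datasets.get_usage()
#       page = await client.datasets.list(limit=5)
#       print(usage, page)
#
#   asyncio.run(main())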