llama-index
46 строк · 1.3 Кб
1"""DashScope api utils."""
2
3from http import HTTPStatus4from typing import Any, Dict, List, Sequence5
6from llama_index.legacy.core.llms.types import (7ChatMessage,8ChatResponse,9CompletionResponse,10)
11
12
def dashscope_response_to_completion_response(
    response: Any, stream: bool = False
) -> CompletionResponse:
    """Convert a raw DashScope API response dict into a `CompletionResponse`.

    Args:
        response: Raw DashScope response mapping; expected to carry
            ``status_code`` and, on success, ``output.choices[0].message.content``.
        stream: Present for interface compatibility; not used here.

    Returns:
        A ``CompletionResponse`` with the generated text on HTTP 200,
        or with empty text (raw response preserved) on any error status.
    """
    if response["status_code"] != HTTPStatus.OK:
        # Non-OK status: surface an empty completion but keep the raw
        # payload so callers can inspect the error details.
        return CompletionResponse(text="", raw=response)
    # Coalesce a falsy content value (e.g. None) to an empty string.
    text = response["output"]["choices"][0]["message"]["content"] or ""
    return CompletionResponse(text=text, raw=response)
24
def dashscope_response_to_chat_response(
    response: Any,
) -> ChatResponse:
    """Convert a raw DashScope API response dict into a `ChatResponse`.

    Args:
        response: Raw DashScope response mapping; expected to carry
            ``status_code`` and, on success, a message with ``content``
            and ``role`` under ``output.choices[0]``.

    Returns:
        A ``ChatResponse`` wrapping the returned message on HTTP 200,
        or one with a default-constructed message (raw response preserved)
        on any error status.
    """
    if response["status_code"] != HTTPStatus.OK:
        # Error path: empty message, raw payload kept for diagnostics.
        return ChatResponse(message=ChatMessage(), raw=response)
    # Look up the choice message once and read both fields from it.
    message = response["output"]["choices"][0]["message"]
    # Coalesce falsy content (e.g. None) to an empty string.
    return ChatResponse(
        message=ChatMessage(role=message["role"], content=message["content"] or ""),
        raw=response,
    )
39
def chat_message_to_dashscope_messages(
    chat_messages: Sequence[ChatMessage],
) -> List[Dict]:
    """Convert llama-index chat messages to DashScope message dicts.

    Args:
        chat_messages: Sequence of ``ChatMessage`` objects to convert.

    Returns:
        A list of ``{"role": ..., "content": ...}`` dicts in the same order,
        with each role taken from the message role enum's ``.value``.
    """
    return [
        {"role": message.role.value, "content": message.content}
        for message in chat_messages
    ]