Langchain-Chatchat
95 lines · 3.4 KB
1import sys
2import os
3from fastchat.conversation import Conversation
4from server.model_workers.base import *
5from server.utils import get_httpx_client
6from fastchat import conversation as conv
7import json
8from typing import List, Dict
9from configs import logger, log_verbose
10
11
class AzureWorker(ApiModelWorker):
    """Model worker that proxies chat requests to the Azure OpenAI service.

    Registers itself with the fastchat controller under ``model_names`` and
    streams chat completions from an Azure OpenAI deployment.
    """

    def __init__(
        self,
        *,
        controller_addr: str = None,
        worker_addr: str = None,
        model_names: List[str] = None,
        version: str = "gpt-35-turbo",
        **kwargs,
    ):
        # Avoid a shared mutable default argument; fall back to the
        # historical default name when the caller passes nothing.
        if model_names is None:
            model_names = ["azure-api"]
        kwargs.update(model_names=model_names, controller_addr=controller_addr, worker_addr=worker_addr)
        super().__init__(**kwargs)
        self.version = version

    def do_chat(self, params: ApiChatParams) -> Dict:
        """Stream a chat completion from Azure OpenAI.

        Yields dicts of the form ``{"error_code": 0, "text": <accumulated text>}``
        where ``text`` grows with each received delta chunk (fastchat's
        streaming convention).
        """
        params.load_config(self.model_names[0])

        data = dict(
            messages=params.messages,
            temperature=params.temperature,
            max_tokens=params.max_tokens if params.max_tokens else None,
            stream=True,
        )
        url = ("https://{}.openai.azure.com/openai/deployments/{}/chat/completions?api-version={}"
               .format(params.resource_name, params.deployment_name, params.api_version))
        headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'api-key': params.api_key,
        }

        text = ""
        if log_verbose:
            logger.info(f'{self.__class__.__name__}:url: {url}')
            logger.info(f'{self.__class__.__name__}:headers: {headers}')
            logger.info(f'{self.__class__.__name__}:data: {data}')

        with get_httpx_client() as client:
            with client.stream("POST", url, headers=headers, json=data) as response:
                for line in response.iter_lines():
                    # Skip keep-alive blanks and the SSE terminator sentinel.
                    if not line.strip() or "[DONE]" in line:
                        continue
                    # Server-sent events prefix each payload with "data: ".
                    if line.startswith("data: "):
                        line = line[6:]
                    resp = json.loads(line)
                    if choices := resp["choices"]:
                        if chunk := choices[0].get("delta", {}).get("content"):
                            text += chunk
                            yield {
                                "error_code": 0,
                                "text": text
                            }
                    else:
                        # An SSE payload without choices is an API error response.
                        self.logger.error(f"请求 Azure API 时发生错误:{resp}")

    def get_embeddings(self, params):
        # NOTE(review): embeddings are not implemented for the Azure worker;
        # this is a placeholder required by the ApiModelWorker interface.
        print("embedding")
        print(params)

    def make_conv_template(self, conv_template: str = None, model_path: str = None) -> Conversation:
        """Build the fastchat conversation template used for this worker."""
        return conv.Conversation(
            name=self.model_names[0],
            system_message="You are a helpful, respectful and honest assistant.",
            messages=[],
            roles=["user", "assistant"],
            sep="\n### ",
            stop_str="###",
        )
82
83
if __name__ == "__main__":
    import uvicorn
    from server.utils import MakeFastAPIOffline
    from fastchat.serve.base_model_worker import app

    # Standalone launch for local testing of the Azure worker.
    CONTROLLER_ADDR = "http://127.0.0.1:20001"
    WORKER_PORT = 21008

    azure_worker = AzureWorker(
        controller_addr=CONTROLLER_ADDR,
        worker_addr=f"http://127.0.0.1:{WORKER_PORT}",
    )
    # fastchat's base worker app resolves the global ``worker`` through this
    # module path, so inject our instance there.
    sys.modules["fastchat.serve.model_worker"].worker = azure_worker
    MakeFastAPIOffline(app)
    uvicorn.run(app, port=WORKER_PORT)