llama-index
65 lines · 2.0 KB
1from typing import Any, Dict
2
3import requests
4
5from llama_index.legacy.core.base_query_engine import BaseQueryEngine
6from llama_index.legacy.core.response.schema import Response
7from llama_index.legacy.schema import QueryBundle
8
9
class CogniswitchQueryEngine(BaseQueryEngine):
    """Query engine that answers questions via the Cogniswitch knowledge-request API.

    Sends each query over HTTPS to the Cogniswitch ``knowledgeRequest``
    endpoint and wraps the returned answer (or error message) in a
    :class:`Response`.
    """

    def __init__(self, cs_token: str, OAI_token: str, apiKey: str) -> None:
        """Store credentials and build the request endpoint/headers.

        Args:
            cs_token (str): Cogniswitch platform token.
            OAI_token (str): OpenAI token.
            apiKey (str): OAuth token (sent as the ``apiKey`` header).
        """
        # Initialize the base engine so self.callback_manager exists;
        # BaseQueryEngine.query() uses it for tracing and would otherwise
        # fail with AttributeError. None lets the base class supply its
        # default callback manager.
        super().__init__(callback_manager=None)
        self.cs_token = cs_token
        self.OAI_token = OAI_token
        self.apiKey = apiKey
        self.knowledge_request_endpoint = (
            "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeRequest"
        )
        # All three credentials travel as HTTP headers on every request.
        self.headers = {
            "apiKey": self.apiKey,
            "platformToken": self.cs_token,
            "openAIToken": self.OAI_token,
        }

    def query_knowledge(self, query: str) -> Response:
        """
        Send a query to the Cogniswitch service and retrieve the response.

        Args:
            query (str): Query to be answered.

        Returns:
            Response: The service's answer on HTTP 200, otherwise the
                service's (or a synthesized) error message.
        """
        data = {"query": query}
        # NOTE(security): verify=False disables TLS certificate
        # verification, exposing credentials in the headers to MITM
        # attacks. Kept for compatibility with the service's certificate
        # setup — confirm and switch to verify=True if possible.
        response = requests.post(
            self.knowledge_request_endpoint,
            headers=self.headers,
            verify=False,
            data=data,
        )
        if response.status_code == 200:
            # Guard against malformed / non-JSON success payloads rather
            # than raising an unhandled KeyError or decode error.
            try:
                answer = response.json()["data"]["answer"]
            except (ValueError, KeyError, TypeError):
                return Response(
                    response="Invalid response from Cogniswitch: "
                    "answer payload missing."
                )
            return Response(response=answer)
        # Error replies are expected to carry a "message" field, but fall
        # back to a generic message when the body is not conforming JSON.
        try:
            error_message = response.json()["message"]
        except (ValueError, KeyError, TypeError):
            error_message = (
                f"Cogniswitch request failed with status "
                f"{response.status_code}."
            )
        return Response(response=error_message)

    def _query(self, query_bundle: QueryBundle) -> Response:
        """Answer a query bundle synchronously via the Cogniswitch service."""
        return self.query_knowledge(query_bundle.query_str)

    async def _aquery(self, query_bundle: QueryBundle) -> Response:
        """Async entry point; the underlying HTTP call is still blocking."""
        return self.query_knowledge(query_bundle.query_str)

    def _get_prompt_modules(self) -> Dict[str, Any]:
        """Get prompts (none — this engine delegates entirely to the API)."""
        return {}
66