MemGPT
69 lines · 2.4 KB
1import gzip2import json3from typing import List4
5from memgpt.config import MemGPTConfig6from memgpt.constants import LLM_MAX_TOKENS7from memgpt.data_types import EmbeddingConfig, LLMConfig8
9
def load_gzipped_file(file_path):
    """Lazily yield one parsed JSON object per line of a gzipped JSONL file."""
    with gzip.open(file_path, "rt", encoding="utf-8") as handle:
        yield from (json.loads(raw_line) for raw_line in handle)
15
def read_jsonl(filename) -> List[dict]:
    """Read a JSONL file eagerly and return its records as a list."""
    with open(filename, "r") as handle:
        return [json.loads(raw_line.strip()) for raw_line in handle]
23
def get_experiment_config(postgres_uri, endpoint_type="openai", model="gpt-4"):
    """Build a MemGPTConfig for an experiment, backed entirely by Postgres.

    Args:
        postgres_uri: Postgres connection URI used for archival, recall, and
            metadata storage.
        endpoint_type: "openai" selects the OpenAI API; any other value selects
            the hosted vLLM endpoint (which supports only the dolphin model).
        model: Model name; for OpenAI it must be a key in LLM_MAX_TOKENS.

    Returns:
        A new MemGPTConfig with Postgres storage and default LLM/embedding
        configs for the chosen endpoint.

    Raises:
        AssertionError: if endpoint_type is not "openai" and model is not
            "ehartford/dolphin-2.5-mixtral-8x7b".
        KeyError: if endpoint_type is "openai" and model is missing from
            LLM_MAX_TOKENS.
    """
    # BUGFIX(review): the original loaded the config and mutated its
    # archival_storage_* fields, then discarded those mutations by building a
    # brand-new MemGPTConfig below. Only anon_clientid was actually reused, so
    # we load solely for that value.
    base_config = MemGPTConfig.load()

    if endpoint_type == "openai":
        llm_config = LLMConfig(
            model=model,
            model_endpoint_type="openai",
            model_endpoint="https://api.openai.com/v1",
            context_window=LLM_MAX_TOKENS[model],
        )
        embedding_config = EmbeddingConfig(
            embedding_endpoint_type="openai",
            embedding_endpoint="https://api.openai.com/v1",
            embedding_dim=1536,
            embedding_model="text-embedding-ada-002",
            embedding_chunk_size=300,  # TODO: fix this
        )
    else:
        # NOTE(review): `assert` is stripped under `python -O`; kept as-is so
        # callers expecting AssertionError are not broken.
        assert model == "ehartford/dolphin-2.5-mixtral-8x7b", "Only model supported is ehartford/dolphin-2.5-mixtral-8x7b"
        llm_config = LLMConfig(
            model="ehartford/dolphin-2.5-mixtral-8x7b",
            model_endpoint_type="vllm",
            model_endpoint="https://api.memgpt.ai",
            model_wrapper="chatml",
            context_window=16384,
        )
        embedding_config = EmbeddingConfig(
            embedding_endpoint_type="hugging-face",
            embedding_endpoint="https://embeddings.memgpt.ai",
            embedding_dim=1024,
            embedding_model="BAAI/bge-large-en-v1.5",
            embedding_chunk_size=300,
        )

    config = MemGPTConfig(
        anon_clientid=base_config.anon_clientid,
        archival_storage_type="postgres",
        archival_storage_uri=postgres_uri,
        recall_storage_type="postgres",
        recall_storage_uri=postgres_uri,
        metadata_storage_type="postgres",
        metadata_storage_uri=postgres_uri,
        default_llm_config=llm_config,
        default_embedding_config=embedding_config,
    )
    print("Config model", config.default_llm_config.model)
    return config