instructor
74 lines · 2.0 KB
import redis
import functools
import inspect
import instructor

from pydantic import BaseModel
from openai import OpenAI

# Patch the OpenAI client so completions can return Pydantic models
# via the `response_model=` argument.
client = instructor.from_openai(OpenAI())
# Local Redis instance used as the cache backend for instructor_cache.
cache = redis.Redis("localhost")
12
def instructor_cache(func):
    """Cache a function that returns a Pydantic model in Redis.

    The wrapped function's return annotation must be a ``BaseModel``
    subclass. Results are stored as JSON under a key derived from the
    function name and its call arguments, and re-validated into the
    model on a cache hit.

    Raises:
        ValueError: if the return annotation is missing or is not a
            Pydantic ``BaseModel`` subclass.
    """
    return_type = inspect.signature(func).return_annotation
    # Guard with isinstance(..., type) first: issubclass() raises
    # TypeError on non-class annotations (typing generics, or the
    # Signature.empty sentinel when the annotation is absent).
    if not (isinstance(return_type, type) and issubclass(return_type, BaseModel)):
        raise ValueError("The return type must be a Pydantic model")

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Build a deterministic key from the call arguments.
        # functools._make_key is a private CPython detail with no
        # stability guarantee, so use plain reprs instead; kwargs are
        # sorted so keyword order does not change the key.
        key = f"{func.__name__}-{args!r}-{sorted(kwargs.items())!r}"

        # Cache hit: deserialize the stored JSON back into the model.
        if (cached := cache.get(key)) is not None:
            return return_type.model_validate_json(cached)

        # Cache miss: call the function and store its serialized result.
        result = func(*args, **kwargs)
        cache.set(key, result.model_dump_json())
        return result

    return wrapper
36
37
class UserDetail(BaseModel):
    """Structured output extracted from free text by the LLM."""

    name: str  # person's name as found in the text
    age: int  # person's age in years
41
42
@instructor_cache
def extract(data: str) -> UserDetail:
    """Extract a UserDetail from *data* via the LLM.

    Results are cached in Redis by @instructor_cache, keyed on the
    function name and arguments, so repeated calls with the same
    prompt skip the API entirely.
    """
    # The instructor-patched client returns a UserDetail instance
    # directly because response_model is set.
    return client.chat.completions.create(
        model="gpt-3.5-turbo",
        response_model=UserDetail,
        messages=[
            {"role": "user", "content": data},
        ],
    )
53
54
def test_extract():
    """Call extract() twice with the same prompt and time each call.

    The first call hits the API; the second should be served from the
    Redis cache and be orders of magnitude faster.
    """
    import time

    for _ in range(2):
        start = time.perf_counter()
        model = extract("Extract jason is 25 years old")
        assert model.name.lower() == "jason"
        assert model.age == 25
        print(f"Time taken: {time.perf_counter() - start}")
69
70
if __name__ == "__main__":
    test_extract()
    # Example output -- the second call is served from the cache:
    # Time taken: 0.798335583996959
    # Time taken: 0.00017016706988215446
75