"""Sentence Transformer Finetuning Engine."""

from typing import Any, Optional

from llama_index.legacy.embeddings.base import BaseEmbedding
from llama_index.legacy.embeddings.utils import resolve_embed_model
from llama_index.legacy.finetuning.embeddings.common import (
    EmbeddingQAFinetuneDataset,
)
from llama_index.legacy.finetuning.types import BaseEmbeddingFinetuneEngine


class SentenceTransformersFinetuneEngine(BaseEmbeddingFinetuneEngine):
    """Sentence Transformers Finetune Engine."""

    def __init__(
        self,
        dataset: EmbeddingQAFinetuneDataset,
        model_id: str = "BAAI/bge-small-en",
        model_output_path: str = "exp_finetune",
        batch_size: int = 10,
        val_dataset: Optional[EmbeddingQAFinetuneDataset] = None,
        loss: Optional[Any] = None,
        epochs: int = 2,
        show_progress_bar: bool = True,
        evaluation_steps: int = 50,
        use_all_docs: bool = False,
    ) -> None:
        """Init params."""
        from sentence_transformers import InputExample, SentenceTransformer, losses
        from torch.utils.data import DataLoader

        self.dataset = dataset

        self.model_id = model_id
        self.model_output_path = model_output_path
        self.model = SentenceTransformer(model_id)

        self.use_all_docs = use_all_docs
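
        # Each training example is a (query, positive passage) pair. With
        # use_all_docs=True, every relevant document of a query becomes a pair;
        # otherwise only the first relevant document per query is used.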
        examples: Any = []
        for query_id, query in dataset.queries.items():
            if use_all_docs:
                for node_id in dataset.relevant_docs[query_id]:
                    text = dataset.corpus[node_id]
                    example = InputExample(texts=[query, text])
                    examples.append(example)
            else:
                node_id = dataset.relevant_docs[query_id][0]
                text = dataset.corpus[node_id]
                example = InputExample(texts=[query, text])
                examples.append(example)

        self.examples = examples

        self.loader: DataLoader = DataLoader(examples, batch_size=batch_size)

        # define evaluator
        from sentence_transformers.evaluation import InformationRetrievalEvaluator

        evaluator: Optional[InformationRetrievalEvaluator] = None
        if val_dataset is not None:
            evaluator = InformationRetrievalEvaluator(
                val_dataset.queries, val_dataset.corpus, val_dataset.relevant_docs
            )
        self.evaluator = evaluator

        # define loss
        self.loss = loss or losses.MultipleNegativesRankingLoss(self.model)

        self.epochs = epochs
        self.show_progress_bar = show_progress_bar
        self.evaluation_steps = evaluation_steps
        self.warmup_steps = int(len(self.loader) * epochs * 0.1)

    def finetune(self, **train_kwargs: Any) -> None:
        """Finetune model."""
        self.model.fit(
            train_objectives=[(self.loader, self.loss)],
            epochs=self.epochs,
            warmup_steps=self.warmup_steps,
            output_path=self.model_output_path,
            show_progress_bar=self.show_progress_bar,
            evaluator=self.evaluator,
            evaluation_steps=self.evaluation_steps,
        )

    def get_finetuned_model(self, **model_kwargs: Any) -> BaseEmbedding:
        """Gets finetuned model."""
        embed_model_str = "local:" + self.model_output_path
        return resolve_embed_model(embed_model_str)
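

# Usage sketch (not part of the original module), assuming an
# EmbeddingQAFinetuneDataset can be constructed directly from `queries`,
# `corpus`, and `relevant_docs` mappings as the constructor above consumes
# them. The tiny inline dataset and paths below are illustrative placeholders.
if __name__ == "__main__":
    train_dataset = EmbeddingQAFinetuneDataset(
        queries={"q1": "Which model does the engine finetune?"},
        corpus={"n1": "The engine finetunes a sentence-transformers embedding model."},
        relevant_docs={"q1": ["n1"]},
    )

    finetune_engine = SentenceTransformersFinetuneEngine(
        dataset=train_dataset,
        model_id="BAAI/bge-small-en",
        model_output_path="exp_finetune",
        batch_size=10,
        epochs=2,
    )
    finetune_engine.finetune()

    # get_finetuned_model resolves the saved weights from model_output_path
    # via the "local:" prefix and returns a BaseEmbedding.
    embed_model = finetune_engine.get_finetuned_model()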