llama-index

import asyncio
import multiprocessing
import re
import warnings
from concurrent.futures import ProcessPoolExecutor
from enum import Enum
from functools import partial, reduce
from hashlib import sha256
from itertools import repeat
from pathlib import Path
from typing import Any, Generator, List, Optional, Sequence, Union

from fsspec import AbstractFileSystem

from llama_index.legacy.bridge.pydantic import BaseModel, Field
from llama_index.legacy.embeddings.utils import resolve_embed_model
from llama_index.legacy.ingestion.cache import DEFAULT_CACHE_NAME, IngestionCache
from llama_index.legacy.node_parser import SentenceSplitter
from llama_index.legacy.readers.base import ReaderConfig
from llama_index.legacy.schema import (
    BaseNode,
    Document,
    MetadataMode,
    TransformComponent,
)
from llama_index.legacy.service_context import ServiceContext
from llama_index.legacy.storage.docstore import BaseDocumentStore, SimpleDocumentStore
from llama_index.legacy.storage.storage_context import DOCSTORE_FNAME
from llama_index.legacy.utils import concat_dirs
from llama_index.legacy.vector_stores.types import BasePydanticVectorStore


def remove_unstable_values(s: str) -> str:
    """Remove unstable key/value pairs.

    Examples include:
    - <__main__.Test object at 0x7fb9f3793f50>
    - <function test_fn at 0x7fb9f37a8900>
    """
    pattern = r"<[\w\s_\. ]+ at 0x[a-z0-9]+>"
    return re.sub(pattern, "", s)


def get_transformation_hash(
    nodes: List[BaseNode], transformation: TransformComponent
) -> str:
    """Get the hash of a transformation."""
    nodes_str = "".join(
        [str(node.get_content(metadata_mode=MetadataMode.ALL)) for node in nodes]
    )

    transformation_dict = transformation.to_dict()
    transform_string = remove_unstable_values(str(transformation_dict))

    return sha256((nodes_str + transform_string).encode("utf-8")).hexdigest()


def run_transformations(
    nodes: List[BaseNode],
    transformations: Sequence[TransformComponent],
    in_place: bool = True,
    cache: Optional[IngestionCache] = None,
    cache_collection: Optional[str] = None,
    **kwargs: Any,
) -> List[BaseNode]:
    """Run a series of transformations on a set of nodes.

    Args:
        nodes: The nodes to transform.
        transformations: The transformations to apply to the nodes.

    Returns:
        The transformed nodes.
    """
    if not in_place:
        nodes = list(nodes)

    for transform in transformations:
        if cache is not None:
            hash = get_transformation_hash(nodes, transform)
            cached_nodes = cache.get(hash, collection=cache_collection)
            if cached_nodes is not None:
                nodes = cached_nodes
            else:
                nodes = transform(nodes, **kwargs)
                cache.put(hash, nodes, collection=cache_collection)
        else:
            nodes = transform(nodes, **kwargs)

    return nodes

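
# A minimal, hypothetical sketch (not part of the library) of calling
# run_transformations directly: split a document into sentence chunks and cache
# the result, so a second call with identical inputs is served from the cache.
def _example_run_transformations() -> List[BaseNode]:
    docs = [Document(text="The quick brown fox. It jumped over the lazy dog.")]
    splitter = SentenceSplitter()
    cache = IngestionCache()

    # First call computes the split and stores it under a key derived from the
    # node contents plus the transformation's config (see get_transformation_hash).
    nodes = run_transformations(docs, [splitter], cache=cache)

    # Same nodes + same transformation -> same hash -> cache hit, no recompute.
    nodes = run_transformations(docs, [splitter], cache=cache)
    return nodes
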
async def arun_transformations(
    nodes: List[BaseNode],
    transformations: Sequence[TransformComponent],
    in_place: bool = True,
    cache: Optional[IngestionCache] = None,
    cache_collection: Optional[str] = None,
    **kwargs: Any,
) -> List[BaseNode]:
    """Run a series of transformations on a set of nodes.

    Args:
        nodes: The nodes to transform.
        transformations: The transformations to apply to the nodes.

    Returns:
        The transformed nodes.
    """
    if not in_place:
        nodes = list(nodes)

    for transform in transformations:
        if cache is not None:
            hash = get_transformation_hash(nodes, transform)

            cached_nodes = cache.get(hash, collection=cache_collection)
            if cached_nodes is not None:
                nodes = cached_nodes
            else:
                nodes = await transform.acall(nodes, **kwargs)
                cache.put(hash, nodes, collection=cache_collection)
        else:
            nodes = await transform.acall(nodes, **kwargs)

    return nodes


def arun_transformations_wrapper(
    nodes: List[BaseNode],
    transformations: Sequence[TransformComponent],
    in_place: bool = True,
    cache: Optional[IngestionCache] = None,
    cache_collection: Optional[str] = None,
    **kwargs: Any,
) -> List[BaseNode]:
    """Wrapper for the async arun_transformations. To be used with
    loop.run_in_executor within a ProcessPoolExecutor.
    """
    loop = asyncio.new_event_loop()
    nodes = loop.run_until_complete(
        arun_transformations(
            nodes=nodes,
            transformations=transformations,
            in_place=in_place,
            cache=cache,
            cache_collection=cache_collection,
            **kwargs,
        )
    )
    loop.close()
    return nodes

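
# A hypothetical sketch (not part of the library) of the usage described in the
# wrapper's docstring: farm batches of nodes out to worker processes with
# loop.run_in_executor, each process driving arun_transformations in its own
# event loop. IngestionPipeline.arun below does the same thing with caching
# and de-duplication added.
async def _example_parallel_transform(
    nodes: List[BaseNode],
    transformations: Sequence[TransformComponent],
    num_workers: int = 2,
) -> List[BaseNode]:
    loop = asyncio.get_event_loop()
    batch_size = max(1, len(nodes) // num_workers)
    batches = [nodes[i : i + batch_size] for i in range(0, len(nodes), batch_size)]
    with ProcessPoolExecutor(max_workers=num_workers) as pool:
        tasks = [
            loop.run_in_executor(
                pool,
                partial(arun_transformations_wrapper, transformations=transformations),
                batch,
            )
            for batch in batches
        ]
        results = await asyncio.gather(*tasks)
    # Flatten the per-batch results back into a single node list.
    return reduce(lambda x, y: x + y, results, [])
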
class DocstoreStrategy(str, Enum):
    """Document de-duplication strategy."""

    UPSERTS = "upserts"
    DUPLICATES_ONLY = "duplicates_only"
    UPSERTS_AND_DELETE = "upserts_and_delete"

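# How the strategies behave (see _handle_upserts / _handle_duplicates below):
# - UPSERTS: look up each incoming ref_doc_id in the docstore; skip unchanged
#   documents and re-ingest changed ones after deleting the stale copy from the
#   docstore and vector store.
# - DUPLICATES_ONLY: skip any node whose content hash is already stored;
#   nothing is ever deleted.
# - UPSERTS_AND_DELETE: same as UPSERTS, plus documents present in the docstore
#   but missing from the new input are deleted from the docstore and vector store.
# UPSERTS and UPSERTS_AND_DELETE require an attached vector store; without one,
# run()/arun() fall back to DUPLICATES_ONLY.
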
class IngestionPipeline(BaseModel):
    """An ingestion pipeline that can be applied to data."""

    transformations: List[TransformComponent] = Field(
        description="Transformations to apply to the data"
    )

    documents: Optional[Sequence[Document]] = Field(description="Documents to ingest")
    reader: Optional[ReaderConfig] = Field(description="Reader to use to read the data")
    vector_store: Optional[BasePydanticVectorStore] = Field(
        description="Vector store to use to store the data"
    )
    cache: IngestionCache = Field(
        default_factory=IngestionCache,
        description="Cache to use to store the data",
    )
    docstore: Optional[BaseDocumentStore] = Field(
        default=None,
        description="Document store to use for de-duping with a vector store.",
    )
    docstore_strategy: DocstoreStrategy = Field(
        default=DocstoreStrategy.UPSERTS, description="Document de-dup strategy."
    )
    disable_cache: bool = Field(default=False, description="Disable the cache")

    class Config:
        arbitrary_types_allowed = True

    def __init__(
        self,
        transformations: Optional[List[TransformComponent]] = None,
        reader: Optional[ReaderConfig] = None,
        documents: Optional[Sequence[Document]] = None,
        vector_store: Optional[BasePydanticVectorStore] = None,
        cache: Optional[IngestionCache] = None,
        docstore: Optional[BaseDocumentStore] = None,
        docstore_strategy: DocstoreStrategy = DocstoreStrategy.UPSERTS,
        disable_cache: bool = False,
    ) -> None:
        if transformations is None:
            transformations = self._get_default_transformations()

        super().__init__(
            transformations=transformations,
            reader=reader,
            documents=documents,
            vector_store=vector_store,
            cache=cache or IngestionCache(),
            docstore=docstore,
            docstore_strategy=docstore_strategy,
            disable_cache=disable_cache,
        )

    @classmethod
    def from_service_context(
        cls,
        service_context: ServiceContext,
        reader: Optional[ReaderConfig] = None,
        documents: Optional[Sequence[Document]] = None,
        vector_store: Optional[BasePydanticVectorStore] = None,
        cache: Optional[IngestionCache] = None,
        docstore: Optional[BaseDocumentStore] = None,
        disable_cache: bool = False,
    ) -> "IngestionPipeline":
        transformations = [
            *service_context.transformations,
            service_context.embed_model,
        ]

        return cls(
            transformations=transformations,
            reader=reader,
            documents=documents,
            vector_store=vector_store,
            cache=cache,
            docstore=docstore,
            disable_cache=disable_cache,
        )

    def persist(
        self,
        persist_dir: str = "./pipeline_storage",
        fs: Optional[AbstractFileSystem] = None,
        cache_name: str = DEFAULT_CACHE_NAME,
        docstore_name: str = DOCSTORE_FNAME,
    ) -> None:
        """Persist the pipeline to disk."""
        if fs is not None:
            persist_dir = str(persist_dir)  # NOTE: doesn't support Windows here
            docstore_path = concat_dirs(persist_dir, docstore_name)
            cache_path = concat_dirs(persist_dir, cache_name)
        else:
            persist_path = Path(persist_dir)
            docstore_path = str(persist_path / docstore_name)
            cache_path = str(persist_path / cache_name)

        self.cache.persist(cache_path, fs=fs)
        if self.docstore is not None:
            self.docstore.persist(docstore_path, fs=fs)

    def load(
        self,
        persist_dir: str = "./pipeline_storage",
        fs: Optional[AbstractFileSystem] = None,
        cache_name: str = DEFAULT_CACHE_NAME,
        docstore_name: str = DOCSTORE_FNAME,
    ) -> None:
        """Load the pipeline from disk."""
        if fs is not None:
            self.cache = IngestionCache.from_persist_path(
                concat_dirs(persist_dir, cache_name), fs=fs
            )
            self.docstore = SimpleDocumentStore.from_persist_path(
                concat_dirs(persist_dir, docstore_name), fs=fs
            )
        else:
            self.cache = IngestionCache.from_persist_path(
                str(Path(persist_dir) / cache_name)
            )
            self.docstore = SimpleDocumentStore.from_persist_path(
                str(Path(persist_dir) / docstore_name)
            )

    def _get_default_transformations(self) -> List[TransformComponent]:
        return [
            SentenceSplitter(),
            resolve_embed_model("default"),
        ]

    def _prepare_inputs(
        self, documents: Optional[List[Document]], nodes: Optional[List[BaseNode]]
    ) -> List[BaseNode]:
        input_nodes: List[BaseNode] = []
        if documents is not None:
            input_nodes += documents

        if nodes is not None:
            input_nodes += nodes

        if self.documents is not None:
            input_nodes += self.documents

        if self.reader is not None:
            input_nodes += self.reader.read()

        return input_nodes

    def _handle_duplicates(
        self,
        nodes: List[BaseNode],
        store_doc_text: bool = True,
    ) -> List[BaseNode]:
        """Handle docstore duplicates by checking all hashes."""
        assert self.docstore is not None

        existing_hashes = self.docstore.get_all_document_hashes()
        current_hashes = []
        nodes_to_run = []
        for node in nodes:
            if node.hash not in existing_hashes and node.hash not in current_hashes:
                self.docstore.set_document_hash(node.id_, node.hash)
                nodes_to_run.append(node)
                current_hashes.append(node.hash)

        self.docstore.add_documents(nodes_to_run, store_text=store_doc_text)

        return nodes_to_run

    def _handle_upserts(
        self,
        nodes: List[BaseNode],
        store_doc_text: bool = True,
    ) -> List[BaseNode]:
        """Handle docstore upserts by checking hashes and ids."""
        assert self.docstore is not None

        existing_doc_ids_before = set(self.docstore.get_all_document_hashes().values())
        doc_ids_from_nodes = set()
        deduped_nodes_to_run = {}
        for node in nodes:
            ref_doc_id = node.ref_doc_id if node.ref_doc_id else node.id_
            doc_ids_from_nodes.add(ref_doc_id)
            existing_hash = self.docstore.get_document_hash(ref_doc_id)
            if not existing_hash:
                # document doesn't exist, so add it
                self.docstore.set_document_hash(ref_doc_id, node.hash)
                deduped_nodes_to_run[ref_doc_id] = node
            elif existing_hash and existing_hash != node.hash:
                self.docstore.delete_ref_doc(ref_doc_id, raise_error=False)

                if self.vector_store is not None:
                    self.vector_store.delete(ref_doc_id)

                self.docstore.set_document_hash(ref_doc_id, node.hash)

                deduped_nodes_to_run[ref_doc_id] = node
            else:
                continue  # document exists and is unchanged, so skip it

        if self.docstore_strategy == DocstoreStrategy.UPSERTS_AND_DELETE:
            # Identify missing docs and delete them from docstore and vector store
            doc_ids_to_delete = existing_doc_ids_before - doc_ids_from_nodes
            for ref_doc_id in doc_ids_to_delete:
                self.docstore.delete_document(ref_doc_id)

                if self.vector_store is not None:
                    self.vector_store.delete(ref_doc_id)

        nodes_to_run = list(deduped_nodes_to_run.values())
        self.docstore.add_documents(nodes_to_run, store_text=store_doc_text)

        return nodes_to_run

    @staticmethod
    def _node_batcher(
        num_batches: int, nodes: Union[List[BaseNode], List[Document]]
    ) -> Generator[Union[List[BaseNode], List[Document]], Any, Any]:
        """Yield successive chunks of the node list, one per batch."""
        batch_size = max(1, int(len(nodes) / num_batches))
        for i in range(0, len(nodes), batch_size):
            yield nodes[i : i + batch_size]

    def run(
        self,
        show_progress: bool = False,
        documents: Optional[List[Document]] = None,
        nodes: Optional[List[BaseNode]] = None,
        cache_collection: Optional[str] = None,
        in_place: bool = True,
        store_doc_text: bool = True,
        num_workers: Optional[int] = None,
        **kwargs: Any,
    ) -> Sequence[BaseNode]:
        """
        Args:
            show_progress (bool, optional): Shows execution progress bar(s). Defaults to False.
            documents (Optional[List[Document]], optional): Set of documents to be transformed. Defaults to None.
            nodes (Optional[List[BaseNode]], optional): Set of nodes to be transformed. Defaults to None.
            cache_collection (Optional[str], optional): Cache collection to use for transformations. Defaults to None.
            in_place (bool, optional): Whether transformations should work on the node list passed to
                `run_transformations` in place, or on a copy. Defaults to True.
            store_doc_text (bool, optional): Whether to store the document text in the docstore. Defaults to True.
            num_workers (Optional[int], optional): The number of parallel processes to use.
                If set to None, then sequential compute is used. Defaults to None.

        Returns:
            Sequence[BaseNode]: The set of transformed Nodes/Documents
        """
        input_nodes = self._prepare_inputs(documents, nodes)

        # check if we need to dedup
        if self.docstore is not None and self.vector_store is not None:
            if self.docstore_strategy in (
                DocstoreStrategy.UPSERTS,
                DocstoreStrategy.UPSERTS_AND_DELETE,
            ):
                nodes_to_run = self._handle_upserts(
                    input_nodes, store_doc_text=store_doc_text
                )
            elif self.docstore_strategy == DocstoreStrategy.DUPLICATES_ONLY:
                nodes_to_run = self._handle_duplicates(
                    input_nodes, store_doc_text=store_doc_text
                )
            else:
                raise ValueError(f"Invalid docstore strategy: {self.docstore_strategy}")
        elif self.docstore is not None and self.vector_store is None:
            if self.docstore_strategy == DocstoreStrategy.UPSERTS:
                print(
                    "Docstore strategy set to upserts, but no vector store. "
                    "Switching to duplicates_only strategy."
                )
                self.docstore_strategy = DocstoreStrategy.DUPLICATES_ONLY
            elif self.docstore_strategy == DocstoreStrategy.UPSERTS_AND_DELETE:
                print(
                    "Docstore strategy set to upserts and delete, but no vector store. "
                    "Switching to duplicates_only strategy."
                )
                self.docstore_strategy = DocstoreStrategy.DUPLICATES_ONLY
            nodes_to_run = self._handle_duplicates(
                input_nodes, store_doc_text=store_doc_text
            )
        else:
            nodes_to_run = input_nodes

        if num_workers and num_workers > 1:
            if num_workers > multiprocessing.cpu_count():
                warnings.warn(
                    "Specified num_workers exceeds the number of CPUs in the system. "
                    "Setting `num_workers` down to the maximum CPU count."
                )
                num_workers = multiprocessing.cpu_count()

            with multiprocessing.get_context("spawn").Pool(num_workers) as p:
                node_batches = self._node_batcher(
                    num_batches=num_workers, nodes=nodes_to_run
                )
                nodes_parallel = p.starmap(
                    run_transformations,
                    zip(
                        node_batches,
                        repeat(self.transformations),
                        repeat(in_place),
                        repeat(self.cache if not self.disable_cache else None),
                        repeat(cache_collection),
                    ),
                )
                nodes = reduce(lambda x, y: x + y, nodes_parallel, [])
        else:
            nodes = run_transformations(
                nodes_to_run,
                self.transformations,
                show_progress=show_progress,
                cache=self.cache if not self.disable_cache else None,
                cache_collection=cache_collection,
                in_place=in_place,
                **kwargs,
            )

        if self.vector_store is not None:
            self.vector_store.add([n for n in nodes if n.embedding is not None])

        return nodes

    # ------ async methods ------

    async def _ahandle_duplicates(
        self,
        nodes: List[BaseNode],
        store_doc_text: bool = True,
    ) -> List[BaseNode]:
        """Handle docstore duplicates by checking all hashes."""
        assert self.docstore is not None

        existing_hashes = await self.docstore.aget_all_document_hashes()
        current_hashes = []
        nodes_to_run = []
        for node in nodes:
            if node.hash not in existing_hashes and node.hash not in current_hashes:
                await self.docstore.aset_document_hash(node.id_, node.hash)
                nodes_to_run.append(node)
                current_hashes.append(node.hash)

        await self.docstore.async_add_documents(nodes_to_run, store_text=store_doc_text)

        return nodes_to_run

    async def _ahandle_upserts(
        self,
        nodes: List[BaseNode],
        store_doc_text: bool = True,
    ) -> List[BaseNode]:
        """Handle docstore upserts by checking hashes and ids."""
        assert self.docstore is not None

        existing_doc_ids_before = set(
            (await self.docstore.aget_all_document_hashes()).values()
        )
        doc_ids_from_nodes = set()
        deduped_nodes_to_run = {}
        for node in nodes:
            ref_doc_id = node.ref_doc_id if node.ref_doc_id else node.id_
            doc_ids_from_nodes.add(ref_doc_id)
            existing_hash = await self.docstore.aget_document_hash(ref_doc_id)
            if not existing_hash:
                # document doesn't exist, so add it
                await self.docstore.aset_document_hash(ref_doc_id, node.hash)
                deduped_nodes_to_run[ref_doc_id] = node
            elif existing_hash and existing_hash != node.hash:
                await self.docstore.adelete_ref_doc(ref_doc_id, raise_error=False)

                if self.vector_store is not None:
                    await self.vector_store.adelete(ref_doc_id)

                await self.docstore.aset_document_hash(ref_doc_id, node.hash)

                deduped_nodes_to_run[ref_doc_id] = node
            else:
                continue  # document exists and is unchanged, so skip it

        if self.docstore_strategy == DocstoreStrategy.UPSERTS_AND_DELETE:
            # Identify missing docs and delete them from docstore and vector store
            doc_ids_to_delete = existing_doc_ids_before - doc_ids_from_nodes
            for ref_doc_id in doc_ids_to_delete:
                await self.docstore.adelete_document(ref_doc_id)

                if self.vector_store is not None:
                    await self.vector_store.adelete(ref_doc_id)

        nodes_to_run = list(deduped_nodes_to_run.values())
        await self.docstore.async_add_documents(nodes_to_run, store_text=store_doc_text)

        return nodes_to_run

    async def arun(
        self,
        show_progress: bool = False,
        documents: Optional[List[Document]] = None,
        nodes: Optional[List[BaseNode]] = None,
        cache_collection: Optional[str] = None,
        in_place: bool = True,
        store_doc_text: bool = True,
        num_workers: Optional[int] = None,
        **kwargs: Any,
    ) -> Sequence[BaseNode]:
        input_nodes = self._prepare_inputs(documents, nodes)

        # check if we need to dedup
        if self.docstore is not None and self.vector_store is not None:
            if self.docstore_strategy in (
                DocstoreStrategy.UPSERTS,
                DocstoreStrategy.UPSERTS_AND_DELETE,
            ):
                nodes_to_run = await self._ahandle_upserts(
                    input_nodes, store_doc_text=store_doc_text
                )
            elif self.docstore_strategy == DocstoreStrategy.DUPLICATES_ONLY:
                nodes_to_run = await self._ahandle_duplicates(
                    input_nodes, store_doc_text=store_doc_text
                )
            else:
                raise ValueError(f"Invalid docstore strategy: {self.docstore_strategy}")
        elif self.docstore is not None and self.vector_store is None:
            if self.docstore_strategy == DocstoreStrategy.UPSERTS:
                print(
                    "Docstore strategy set to upserts, but no vector store. "
                    "Switching to duplicates_only strategy."
                )
                self.docstore_strategy = DocstoreStrategy.DUPLICATES_ONLY
            elif self.docstore_strategy == DocstoreStrategy.UPSERTS_AND_DELETE:
                print(
                    "Docstore strategy set to upserts and delete, but no vector store. "
                    "Switching to duplicates_only strategy."
                )
                self.docstore_strategy = DocstoreStrategy.DUPLICATES_ONLY
            nodes_to_run = await self._ahandle_duplicates(
                input_nodes, store_doc_text=store_doc_text
            )
        else:
            nodes_to_run = input_nodes

        if num_workers and num_workers > 1:
            if num_workers > multiprocessing.cpu_count():
                warnings.warn(
                    "Specified num_workers exceeds the number of CPUs in the system. "
                    "Setting `num_workers` down to the maximum CPU count."
                )
                num_workers = multiprocessing.cpu_count()

            loop = asyncio.get_event_loop()
            with ProcessPoolExecutor(max_workers=num_workers) as p:
                node_batches = self._node_batcher(
                    num_batches=num_workers, nodes=nodes_to_run
                )
                tasks = [
                    loop.run_in_executor(
                        p,
                        partial(
                            arun_transformations_wrapper,
                            transformations=self.transformations,
                            in_place=in_place,
                            cache=self.cache if not self.disable_cache else None,
                            cache_collection=cache_collection,
                        ),
                        batch,
                    )
                    for batch in node_batches
                ]
                result: List[List[BaseNode]] = await asyncio.gather(*tasks)
                nodes = reduce(lambda x, y: x + y, result, [])
        else:
            nodes = await arun_transformations(
                nodes_to_run,
                self.transformations,
                show_progress=show_progress,
                cache=self.cache if not self.disable_cache else None,
                cache_collection=cache_collection,
                in_place=in_place,
                **kwargs,
            )

        if self.vector_store is not None:
            await self.vector_store.async_add(
                [n for n in nodes if n.embedding is not None]
            )

        return nodes

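
# A minimal, hypothetical usage sketch (not part of the library): build a
# pipeline with a sentence splitter, run it with docstore-based de-duplication,
# then persist and reload the cache and docstore. An embedding transformation
# and a vector store would normally be added as well; they are omitted here so
# the sketch runs without any API keys.
if __name__ == "__main__":
    pipeline = IngestionPipeline(
        transformations=[SentenceSplitter()],
        docstore=SimpleDocumentStore(),
        docstore_strategy=DocstoreStrategy.DUPLICATES_ONLY,
    )
    docs = [Document(text="hello world", doc_id="doc_1")]

    produced = pipeline.run(documents=docs)
    print(f"first run produced {len(produced)} nodes")

    # Re-running with unchanged documents yields no new nodes: their hashes are
    # already in the docstore, so they are skipped.
    rerun = pipeline.run(documents=docs)
    print(f"re-run produced {len(rerun)} nodes (duplicates were skipped)")

    # Persist the cache and docstore, then restore them into a fresh pipeline.
    pipeline.persist("./pipeline_storage")
    restored = IngestionPipeline(transformations=[SentenceSplitter()])
    restored.load("./pipeline_storage")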