datasets

Форк
0
/
test_imagefolder.py 
509 строк · 21.7 Кб
1
import shutil
2
import textwrap
3

4
import numpy as np
5
import pytest
6

7
from datasets import ClassLabel, Features, Image, Value
8
from datasets.data_files import DataFilesDict, get_data_patterns
9
from datasets.download.streaming_download_manager import StreamingDownloadManager
10
from datasets.packaged_modules.imagefolder.imagefolder import ImageFolder
11

12
from ..utils import require_pil
13

14

15
@pytest.fixture
def cache_dir(tmp_path):
    """Per-test cache directory for `ImageFolder` builders."""
    return str(tmp_path.joinpath("imagefolder_cache_dir"))
18

19

20
@pytest.fixture
def data_files_with_labels_no_metadata(tmp_path, image_file):
    """Two class subdirectories ("cat" and "dog"), one image each, and no metadata files."""
    root = tmp_path / "data_files_with_labels_no_metadata"
    root.mkdir(parents=True, exist_ok=True)
    for class_name in ("cat", "dog"):
        class_dir = root / class_name
        class_dir.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(image_file, class_dir / f"image_{class_name}.jpg")

    return DataFilesDict.from_patterns(get_data_patterns(str(root)), root.as_posix())
39

40

41
@pytest.fixture
def image_files_with_labels_and_duplicated_label_key_in_metadata(tmp_path, image_file):
    """Class subdirectories plus a metadata.jsonl whose rows also carry a "label" key.

    Returns (cat_image_path, dog_image_path, metadata_path) as strings.
    """
    data_dir = tmp_path / "image_files_with_labels_and_label_key_in_metadata"
    data_dir.mkdir(parents=True, exist_ok=True)
    subdir_class_0 = data_dir / "cat"
    subdir_class_0.mkdir(parents=True, exist_ok=True)
    subdir_class_1 = data_dir / "dog"
    subdir_class_1.mkdir(parents=True, exist_ok=True)

    image_filename = subdir_class_0 / "image_cat.jpg"
    shutil.copyfile(image_file, image_filename)
    image_filename2 = subdir_class_1 / "image_dog.jpg"
    shutil.copyfile(image_file, image_filename2)

    # `data_dir` is already an absolute path under `tmp_path`, so the previous
    # `tmp_path / data_dir` join was a no-op (pathlib discards the left operand
    # when the right one is absolute) that only obscured intent.
    image_metadata_filename = data_dir / "metadata.jsonl"
    image_metadata = textwrap.dedent(
        """\
        {"file_name": "cat/image_cat.jpg", "caption": "Nice image of a cat", "label": "Cat"}
        {"file_name": "dog/image_dog.jpg", "caption": "Nice image of a dog", "label": "Dog"}
        """
    )
    with open(image_metadata_filename, "w", encoding="utf-8") as f:
        f.write(image_metadata)

    return str(image_filename), str(image_filename2), str(image_metadata_filename)
66

67

68
@pytest.fixture
def image_file_with_metadata(tmp_path, image_file):
    """One image next to a metadata.jsonl describing it; returns both paths as strings."""
    image_path = tmp_path / "image_rgb.jpg"
    shutil.copyfile(image_file, image_path)
    metadata_path = tmp_path / "metadata.jsonl"
    metadata_path.write_text(
        '{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n',
        encoding="utf-8",
    )
    return str(image_path), str(metadata_path)
81

82

83
@pytest.fixture
def image_files_with_metadata_that_misses_one_image(tmp_path, image_file):
    """Two images, but the metadata.jsonl only describes the first one."""
    first_image = tmp_path / "image_rgb.jpg"
    second_image = tmp_path / "image_rgb2.jpg"
    for destination in (first_image, second_image):
        shutil.copyfile(image_file, destination)
    metadata_path = tmp_path / "metadata.jsonl"
    metadata_path.write_text(
        '{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n',
        encoding="utf-8",
    )
    return str(first_image), str(second_image), str(metadata_path)
98

99

100
@pytest.fixture(params=["jsonl", "csv"])
def data_files_with_one_split_and_metadata(request, tmp_path, image_file):
    """Single ("train") split: three images (one in a subdir) plus one metadata file.

    Parametrized over the jsonl and csv metadata formats.
    """
    data_dir = tmp_path / "imagefolder_data_dir_with_metadata_one_split"
    data_dir.mkdir(parents=True, exist_ok=True)
    subdir = data_dir / "subdir"
    subdir.mkdir(parents=True, exist_ok=True)

    image_paths = (
        data_dir / "image_rgb.jpg",
        data_dir / "image_rgb2.jpg",
        subdir / "image_rgb3.jpg",  # in subdir
    )
    for image_path in image_paths:
        shutil.copyfile(image_file, image_path)

    if request.param == "jsonl":
        metadata_content = (
            '{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n'
            '{"file_name": "image_rgb2.jpg", "caption": "Nice second image"}\n'
            '{"file_name": "subdir/image_rgb3.jpg", "caption": "Nice third image"}\n'
        )
    else:
        metadata_content = (
            "file_name,caption\n"
            "image_rgb.jpg,Nice image\n"
            "image_rgb2.jpg,Nice second image\n"
            "subdir/image_rgb3.jpg,Nice third image\n"
        )
    with open(data_dir / f"metadata.{request.param}", "w", encoding="utf-8") as f:
        f.write(metadata_content)

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    # one "train" split containing 3 images + 1 metadata file
    assert len(data_files) == 1
    assert len(data_files["train"]) == 4
    return data_files
141

142

143
@pytest.fixture(params=["jsonl", "csv"])
def data_files_with_two_splits_and_metadata(request, tmp_path, image_file):
    """Two splits ("train"/"test"), each with its own images and metadata file.

    Parametrized over the jsonl and csv metadata formats.
    """
    data_dir = tmp_path / "imagefolder_data_dir_with_metadata_two_splits"
    data_dir.mkdir(parents=True, exist_ok=True)
    train_dir = data_dir / "train"
    train_dir.mkdir(parents=True, exist_ok=True)
    test_dir = data_dir / "test"
    test_dir.mkdir(parents=True, exist_ok=True)

    image_paths = (
        train_dir / "image_rgb.jpg",  # train image
        train_dir / "image_rgb2.jpg",  # train image
        test_dir / "image_rgb3.jpg",  # test image
    )
    for image_path in image_paths:
        shutil.copyfile(image_file, image_path)

    if request.param == "jsonl":
        train_metadata = (
            '{"file_name": "image_rgb.jpg", "caption": "Nice train image"}\n'
            '{"file_name": "image_rgb2.jpg", "caption": "Nice second train image"}\n'
        )
        test_metadata = '{"file_name": "image_rgb3.jpg", "caption": "Nice test image"}\n'
    else:
        train_metadata = (
            "file_name,caption\n"
            "image_rgb.jpg,Nice train image\n"
            "image_rgb2.jpg,Nice second train image\n"
        )
        test_metadata = "file_name,caption\nimage_rgb3.jpg,Nice test image\n"
    for split_dir, metadata_content in ((train_dir, train_metadata), (test_dir, test_metadata)):
        with open(split_dir / f"metadata.{request.param}", "w", encoding="utf-8") as f:
            f.write(metadata_content)

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    # each split counts its images + its metadata file
    assert len(data_files) == 2
    assert len(data_files["train"]) == 3
    assert len(data_files["test"]) == 2
    return data_files
202

203

204
@pytest.fixture
def data_files_with_zip_archives(tmp_path, image_file):
    """One zip archive containing two pixel-distinct images (one in a subdir) and metadata.jsonl."""
    from PIL import Image, ImageOps

    data_dir = tmp_path / "imagefolder_data_dir_with_zip_archives"
    data_dir.mkdir(parents=True, exist_ok=True)
    archive_dir = data_dir / "archive"
    archive_dir.mkdir(parents=True, exist_ok=True)
    subdir = archive_dir / "subdir"
    subdir.mkdir(parents=True, exist_ok=True)

    shutil.copyfile(image_file, archive_dir / "image_rgb.jpg")
    # The second image is flipped so the two differ at the pixel level: the
    # archive is not extracted in streaming mode, so image.filename can't be
    # used to tell samples apart.
    ImageOps.flip(Image.open(image_file)).save(subdir / "image_rgb2.jpg")

    metadata_content = (
        '{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n'
        '{"file_name": "subdir/image_rgb2.jpg", "caption": "Nice second image"}\n'
    )
    with open(archive_dir / "metadata.jsonl", "w", encoding="utf-8") as f:
        f.write(metadata_content)

    # zip up the directory, then drop the extracted copy so only the archive remains
    shutil.make_archive(archive_dir, "zip", archive_dir)
    shutil.rmtree(str(archive_dir))

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    assert len(data_files) == 1
    assert len(data_files["train"]) == 1
    return data_files
240

241

242
@require_pil
def test_generate_examples_with_labels(data_files_with_labels_no_metadata, cache_dir):
    """Labels are inferred from directory names (there are no metadata files here)."""
    builder = ImageFolder(data_files=data_files_with_labels_no_metadata, cache_dir=cache_dir, drop_labels=False)
    builder.download_and_prepare()
    assert builder.info.features == Features({"image": Image(), "label": ClassLabel(names=["cat", "dog"])})
    examples = list(builder.as_dataset()["train"])
    label_feature = builder.info.features["label"]

    assert examples[0]["label"] == label_feature._str2int["cat"]
    assert examples[1]["label"] == label_feature._str2int["dog"]
254

255

256
@require_pil
@pytest.mark.parametrize("drop_metadata", [None, True, False])
@pytest.mark.parametrize("drop_labels", [None, True, False])
def test_generate_examples_duplicated_label_key(
    image_files_with_labels_and_duplicated_label_key_in_metadata, drop_metadata, drop_labels, cache_dir, caplog
):
    """When metadata rows carry a "label" key that clashes with directory-inferred
    labels, the winner depends on the drop_labels/drop_metadata combination."""
    cat_image_file, dog_image_file, image_metadata_file = image_files_with_labels_and_duplicated_label_key_in_metadata
    builder = ImageFolder(
        drop_metadata=drop_metadata,
        drop_labels=drop_labels,
        data_files=[cat_image_file, dog_image_file, image_metadata_file],
        cache_dir=cache_dir,
    )
    builder.download_and_prepare()
    if drop_labels is False:
        # labels are inferred from directories even if metadata files are found;
        # a warning about the ignored metadata columns is expected unless metadata was dropped
        warning_in_logs = any("ignoring metadata columns" in record.msg.lower() for record in caplog.records)
        assert warning_in_logs if drop_metadata is not True else not warning_in_logs
        dataset = builder.as_dataset()["train"]
        assert builder.info.features["label"] == ClassLabel(names=["cat", "dog"])
        assert all(example["label"] in builder.info.features["label"]._str2int.values() for example in dataset)
    else:
        dataset = builder.as_dataset()["train"]
        if drop_metadata is not True:
            # labels come from the metadata "label" column
            assert builder.info.features["label"] == Value("string")
            assert all(example["label"] in ["Cat", "Dog"] for example in dataset)
        else:
            # both labels and metadata are dropped
            assert builder.info.features == Features({"image": Image()})
            assert all(example.keys() == {"image"} for example in dataset)
288

289

290
@require_pil
@pytest.mark.parametrize("drop_metadata", [None, True, False])
@pytest.mark.parametrize("drop_labels", [None, True, False])
def test_generate_examples_drop_labels(data_files_with_labels_no_metadata, drop_metadata, drop_labels):
    """Without metadata files, labels are produced unless drop_labels is truthy."""
    builder = ImageFolder(
        drop_metadata=drop_metadata, drop_labels=drop_labels, data_files=data_files_with_labels_no_metadata
    )
    gen_kwargs = builder._split_generators(StreamingDownloadManager())[0].gen_kwargs
    # removing the labels explicitly requires drop_labels=True
    assert gen_kwargs["add_labels"] is not bool(drop_labels)
    # there are no metadata files in this dataset, so nothing can be added
    assert gen_kwargs["add_metadata"] is False
    expected_keys = {"image"} if drop_labels else {"image", "label"}
    for _, example in builder._generate_examples(**gen_kwargs):
        assert example.keys() == expected_keys
        assert all(value is not None for value in example.values())
312

313

314
@require_pil
@pytest.mark.parametrize("drop_metadata", [None, True, False])
@pytest.mark.parametrize("drop_labels", [None, True, False])
def test_generate_examples_drop_metadata(image_file_with_metadata, drop_metadata, drop_labels):
    """With a metadata file present, metadata is kept by default and labels must be opted into."""
    image_file, image_metadata_file = image_file_with_metadata
    builder = ImageFolder(
        drop_metadata=drop_metadata, drop_labels=drop_labels, data_files={"train": [image_file, image_metadata_file]}
    )
    gen_kwargs = builder._split_generators(StreamingDownloadManager())[0].gen_kwargs
    # since the dataset has metadata, removing the metadata explicitly requires drop_metadata=True
    assert gen_kwargs["add_metadata"] is not bool(drop_metadata)
    # since the dataset has metadata, adding the labels explicitly requires drop_labels=False
    assert gen_kwargs["add_labels"] is (drop_labels is False)

    expected_columns = {"image"}
    if gen_kwargs["add_metadata"]:
        expected_columns.add("caption")
    if gen_kwargs["add_labels"]:
        expected_columns.add("label")
    examples = [example for _, example in builder._generate_examples(**gen_kwargs)]
    assert len(examples) == 1
    (example,) = examples
    assert example.keys() == expected_columns
    assert all(example[column] is not None for column in expected_columns)
339

340

341
@require_pil
@pytest.mark.parametrize("drop_metadata", [None, True, False])
def test_generate_examples_with_metadata_in_wrong_location(image_file, image_file_with_metadata, drop_metadata):
    """A metadata file that doesn't sit next to its images raises unless metadata is dropped."""
    _, image_metadata_file = image_file_with_metadata
    builder = ImageFolder(drop_metadata=drop_metadata, data_files={"train": [image_file, image_metadata_file]})
    gen_kwargs = builder._split_generators(StreamingDownloadManager())[0].gen_kwargs
    generator = builder._generate_examples(**gen_kwargs)
    if drop_metadata:
        for _, example in generator:
            assert example.keys() == {"image"}
            assert all(value is not None for value in example.values())
    else:
        with pytest.raises(ValueError):
            list(generator)
356

357

358
@require_pil
@pytest.mark.parametrize("drop_metadata", [None, True, False])
def test_generate_examples_with_metadata_that_misses_one_image(
    image_files_with_metadata_that_misses_one_image, drop_metadata
):
    """Metadata that omits one of the images raises unless metadata is dropped."""
    image_file, image_file2, image_metadata_file = image_files_with_metadata_that_misses_one_image
    if drop_metadata:
        features = Features({"image": Image()})
    else:
        features = Features({"image": Image(), "caption": Value("string")})
    builder = ImageFolder(
        drop_metadata=drop_metadata,
        features=features,
        data_files={"train": [image_file, image_file2, image_metadata_file]},
    )
    gen_kwargs = builder._split_generators(StreamingDownloadManager())[0].gen_kwargs
    generator = builder._generate_examples(**gen_kwargs)
    if drop_metadata:
        for _, example in generator:
            assert example.keys() == {"image"}
            assert all(value is not None for value in example.values())
    else:
        with pytest.raises(ValueError):
            list(generator)
383

384

385
@require_pil
@pytest.mark.parametrize("streaming", [False, True])
def test_data_files_with_metadata_and_single_split(streaming, cache_dir, data_files_with_one_split_and_metadata):
    """End-to-end check of a single-split dataset with metadata, streaming and not."""
    data_files = data_files_with_one_split_and_metadata
    imagefolder = ImageFolder(data_files=data_files, cache_dir=cache_dir)
    imagefolder.download_and_prepare()
    datasets = imagefolder.as_streaming_dataset() if streaming else imagefolder.as_dataset()
    # loop variable renamed: the original rebound `data_files` to each split's
    # file list, shadowing the very dict being iterated
    for split, split_files in data_files.items():
        expected_num_of_images = len(split_files) - 1  # don't count the metadata file
        assert split in datasets
        dataset = list(datasets[split])
        assert len(dataset) == expected_num_of_images
        # make sure each sample has its own image and metadata
        assert len({example["image"].filename for example in dataset}) == expected_num_of_images
        assert len({example["caption"] for example in dataset}) == expected_num_of_images
        assert all(example["caption"] is not None for example in dataset)
401

402

403
@require_pil
@pytest.mark.parametrize("streaming", [False, True])
def test_data_files_with_metadata_and_multiple_splits(streaming, cache_dir, data_files_with_two_splits_and_metadata):
    """End-to-end check of a train/test dataset with metadata, streaming and not."""
    data_files = data_files_with_two_splits_and_metadata
    imagefolder = ImageFolder(data_files=data_files, cache_dir=cache_dir)
    imagefolder.download_and_prepare()
    datasets = imagefolder.as_streaming_dataset() if streaming else imagefolder.as_dataset()
    # loop variable renamed: the original rebound `data_files` to each split's
    # file list, shadowing the very dict being iterated
    for split, split_files in data_files.items():
        expected_num_of_images = len(split_files) - 1  # don't count the metadata file
        assert split in datasets
        dataset = list(datasets[split])
        assert len(dataset) == expected_num_of_images
        # make sure each sample has its own image and metadata
        assert len({example["image"].filename for example in dataset}) == expected_num_of_images
        assert len({example["caption"] for example in dataset}) == expected_num_of_images
        assert all(example["caption"] is not None for example in dataset)
419

420

421
@require_pil
@pytest.mark.parametrize("streaming", [False, True])
def test_data_files_with_metadata_and_archives(streaming, cache_dir, data_files_with_zip_archives):
    """Datasets built from zip archives: two images per archive, metadata.jsonl inside the zip."""
    imagefolder = ImageFolder(data_files=data_files_with_zip_archives, cache_dir=cache_dir)
    imagefolder.download_and_prepare()
    datasets = imagefolder.as_streaming_dataset() if streaming else imagefolder.as_dataset()
    for split, archive_files in data_files_with_zip_archives.items():
        num_of_archives = len(archive_files)  # the metadata file is inside the archive
        expected_num_of_images = 2 * num_of_archives
        assert split in datasets
        dataset = list(datasets[split])
        assert len(dataset) == expected_num_of_images
        # each sample must have its own image and metadata; compare a pixel
        # value since filenames aren't available for archived files
        assert len({np.array(example["image"])[0, 0, 0] for example in dataset}) == expected_num_of_images
        assert len({example["caption"] for example in dataset}) == expected_num_of_images
        assert all(example["caption"] is not None for example in dataset)
437

438

439
@require_pil
def test_data_files_with_wrong_metadata_file_name(cache_dir, tmp_path, image_file):
    """A metadata file with the wrong name is ignored, so no metadata columns appear."""
    data_dir = tmp_path / "data_dir_with_bad_metadata"
    data_dir.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(image_file, data_dir / "image_rgb.jpg")
    bad_metadata_path = data_dir / "bad_metadata.jsonl"  # bad file
    with open(bad_metadata_path, "w", encoding="utf-8") as f:
        f.write('{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n')

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    imagefolder = ImageFolder(data_files=data_files, cache_dir=cache_dir)
    imagefolder.download_and_prepare()
    dataset = imagefolder.as_dataset(split="train")
    # check that there are no metadata, since the metadata file name doesn't have the right name
    assert "caption" not in dataset.column_names
459

460

461
@require_pil
def test_data_files_with_wrong_image_file_name_column_in_metadata_file(cache_dir, tmp_path, image_file):
    """Metadata lacking the required "file_name" column makes preparation fail."""
    data_dir = tmp_path / "data_dir_with_bad_metadata"
    data_dir.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(image_file, data_dir / "image_rgb.jpg")
    metadata_path = data_dir / "metadata.jsonl"
    with open(metadata_path, "w", encoding="utf-8") as f:
        # bad column "bad_file_name" instead of "file_name"
        f.write('{"bad_file_name": "image_rgb.jpg", "caption": "Nice image"}\n')

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    imagefolder = ImageFolder(data_files=data_files, cache_dir=cache_dir)
    with pytest.raises(ValueError) as exc_info:
        imagefolder.download_and_prepare()
    assert "`file_name` must be present" in str(exc_info.value)
480

481

482
@require_pil
def test_data_files_with_with_metadata_in_different_formats(cache_dir, tmp_path, image_file):
    """Mixing metadata.jsonl and metadata.csv in the same directory is rejected."""
    data_dir = tmp_path / "data_dir_with_metadata_in_different_format"
    data_dir.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(image_file, data_dir / "image_rgb.jpg")
    with open(data_dir / "metadata.jsonl", "w", encoding="utf-8") as f:
        f.write('{"file_name": "image_rgb.jpg", "caption": "Nice image"}\n')
    with open(data_dir / "metadata.csv", "w", encoding="utf-8") as f:
        f.write("file_name,caption\nimage_rgb.jpg,Nice image\n")

    data_files = DataFilesDict.from_patterns(get_data_patterns(str(data_dir)), data_dir.as_posix())
    imagefolder = ImageFolder(data_files=data_files, cache_dir=cache_dir)
    with pytest.raises(ValueError) as exc_info:
        imagefolder.download_and_prepare()
    assert "metadata files with different extensions" in str(exc_info.value)
510

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.