datasets

Форк
0
/
benchmark_iterating.py 
98 строк · 3.7 Кб
1
import json
2
import os
3
import tempfile
4

5
import datasets
6
from utils import generate_example_dataset, get_duration
7

8

9
SPEED_TEST_N_EXAMPLES = 50_000
10
SMALL_TEST = 5_000
11

12
RESULTS_BASEPATH, RESULTS_FILENAME = os.path.split(__file__)
13
RESULTS_FILE_PATH = os.path.join(RESULTS_BASEPATH, "results", RESULTS_FILENAME.replace(".py", ".json"))
14

15

16
@get_duration
def read(dataset: datasets.Dataset, length):
    """Time sequential single-example access over the first `length` rows."""
    row = 0
    while row < length:
        _ = dataset[row]
        row += 1
20

21

22
@get_duration
def read_batch(dataset: datasets.Dataset, length, batch_size):
    """Time sliced batch access in steps of `batch_size` over `length` rows.

    Fix: the original iterated over ``range(0, len(dataset), batch_size)``
    and silently ignored the ``length`` parameter; it now honors ``length``,
    consistent with ``read_formatted_batch``. All call sites in this file
    pass ``length == len(dataset)``, so reported timings are unaffected.
    """
    for i in range(0, length, batch_size):
        _ = dataset[i : i + batch_size]
26

27

28
@get_duration
def read_formatted(dataset: datasets.Dataset, length, type):
    """Time single-example access with the given output format active.

    `type` (shadows the builtin, kept for the keyword-calling convention
    used in this file) is the format name, e.g. "numpy" or "torch".
    """
    with dataset.formatted_as(type=type):
        for row_index in range(length):
            _ = dataset[row_index]
33

34

35
@get_duration
def read_formatted_batch(dataset: datasets.Dataset, length, batch_size, type):
    """Time sliced batch access with the given output format active."""
    with dataset.formatted_as(type=type):
        for start in range(0, length, batch_size):
            _ = dataset[start : start + batch_size]
40

41

42
def benchmark_iterating():
    """Benchmark example-access speed on a generated on-disk dataset.

    Times single-example and batched reads (plain and formatted) on a
    freshly generated Arrow dataset, repeats a subset of the measurements
    after ``.shuffle()`` (shuffling makes row access non-contiguous), and
    writes all timings as JSON to ``RESULTS_FILE_PATH``.

    Fixes vs. the original: the unbalanced parenthesis in the
    "second set of iterations" message, and the duplicated result-key
    construction (now in ``_times_key``).
    """
    times = {"num examples": SPEED_TEST_N_EXAMPLES}
    # (function, kwargs) pairs timed on the freshly generated dataset.
    functions = [
        (read, {"length": SMALL_TEST}),
        (read, {"length": SPEED_TEST_N_EXAMPLES}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 10}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 100}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 1_000}),
        (read_formatted, {"type": "numpy", "length": SMALL_TEST}),
        (read_formatted, {"type": "pandas", "length": SMALL_TEST}),
        (read_formatted, {"type": "torch", "length": SMALL_TEST}),
        (read_formatted, {"type": "tensorflow", "length": SMALL_TEST}),
        (read_formatted_batch, {"type": "numpy", "length": SMALL_TEST, "batch_size": 10}),
        (read_formatted_batch, {"type": "numpy", "length": SMALL_TEST, "batch_size": 1_000}),
    ]

    # Subset re-timed after shuffling (the slower formats are skipped).
    functions_shuffled = [
        (read, {"length": SMALL_TEST}),
        (read, {"length": SPEED_TEST_N_EXAMPLES}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 10}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 100}),
        (read_batch, {"length": SPEED_TEST_N_EXAMPLES, "batch_size": 1_000}),
        (read_formatted, {"type": "numpy", "length": SMALL_TEST}),
        (read_formatted_batch, {"type": "numpy", "length": SMALL_TEST, "batch_size": 10}),
        (read_formatted_batch, {"type": "numpy", "length": SMALL_TEST, "batch_size": 1_000}),
    ]

    def _times_key(prefix, func, kwargs):
        # Result-dict key, e.g. "read_batch 50000 10" or "shuffled read 5000".
        return prefix + func.__name__ + " " + " ".join(str(v) for v in kwargs.values())

    with tempfile.TemporaryDirectory() as tmp_dir:
        print("generating dataset")
        features = datasets.Features(
            {"list": datasets.Sequence(datasets.Value("float32")), "numbers": datasets.Value("float32")}
        )
        dataset = generate_example_dataset(
            os.path.join(tmp_dir, "dataset.arrow"),
            features,
            num_examples=SPEED_TEST_N_EXAMPLES,
            seq_shapes={"list": (100,)},
        )
        print("first set of iterations")
        for func, kwargs in functions:
            print(func.__name__, str(kwargs))
            times[_times_key("", func, kwargs)] = func(dataset, **kwargs)

        print("shuffling dataset")
        dataset = dataset.shuffle()
        print("second set of iterations (after shuffling)")  # fixed unbalanced paren
        for func, kwargs in functions_shuffled:
            print("shuffled ", func.__name__, str(kwargs))
            times[_times_key("shuffled ", func, kwargs)] = func(dataset, **kwargs)

    # Text-mode json.dump is equivalent to the original "wb" + manual
    # encode: json output here is ASCII-only.
    with open(RESULTS_FILE_PATH, "w", encoding="utf-8") as f:
        json.dump(times, f)
95

96

97
# Script entry point: run the full benchmark (and write the results file).
if __name__ == "__main__":  # useful to run the profiler
    benchmark_iterating()
99

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.