deepspeed/_config.yml

title: DeepSpeed
email: deepspeed@microsoft.com
description: >-
  DeepSpeed is a deep learning optimization library that makes distributed
  training easy, efficient, and effective.

locale: "en-US"
logo: /assets/images/deepspeed-logo-uppercase-bold-white-1.15.svg

repository: microsoft/DeepSpeed
baseurl: "/" # the subpath of your site, e.g. /blog
url: "https://www.deepspeed.ai" # the base hostname & protocol for your site, e.g. http://example.com

# Build settings
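# The theme is pulled at build time via jekyll-remote-theme and pinned to
# Minimal Mistakes 4.19.0, so theme upgrades must be made explicitly here.
# "air" is one of the theme's built-in skins, and `search: true` enables the
# theme's built-in site search (Lunr by default).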
remote_theme: "mmistakes/minimal-mistakes@4.19.0"
minimal_mistakes_skin: "air"
search: true

plugins:
  - jekyll-feed
  - jekyll-include-cache
  - jekyll-paginate

#paginate: 10
#paginate_path: /blog/page:num
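# Blog pagination is currently disabled: the jekyll-paginate plugin is loaded,
# but the paginate/paginate_path settings above are left commented out.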

include: ["_pages"]
exclude: ["code-docs"]
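
# Tutorials are a Jekyll collection: each Markdown file listed under `order`
# lives in _tutorials/ and, with `output: true`, is rendered as its own page
# at /tutorials/<filename>/ (per the `permalink` pattern). The `order` list
# fixes the sequence of documents wherever the site iterates over the collection.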

collections:
  tutorials:
    output: true
    permalink: /:collection/:path/
    order:
      - advanced-install.md
      - getting-started.md
      - azure.md
      - automatic-tensor-parallelism.md
      - bert-finetuning.md
      - bert-pretraining.md
      - cifar-10.md
      - curriculum-learning.md
      - data-efficiency.md
      - ds4sci_evoformerattention.md
      - flops-profiler.md
      - pytorch-profiler.md
      - autotuning.md
      - gan.md
      - lrrt.md
      - megatron.md
      - mixture-of-experts.md
      - mixture-of-experts-nlg.md
      - mixture-of-experts-inference.md
      - model-compression.md
      - monitor.md
      - comms-logging.md
      - one-cycle.md
      - onebit-adam.md
      - zero-one-adam.md
      - onebit-lamb.md
      - pipeline.md
      - progressive_layer_dropping.md
      - sparse-attention.md
      - transformer_kernel.md
      - zero-offload.md
      - zero.md
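
# Front-matter defaults, applied by scope: every page gets the "single" layout
# with a table of contents and the "lnav" sidebar navigation (expected to be
# defined in _data/navigation.yml); files under _pages/ get /docs/<basename>/
# permalinks; blog posts use the wider "single-full" layout with sharing and a
# sticky TOC; tutorial pages also keep the TOC pinned while scrolling.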

defaults:
  - scope:
      path: ""
    values:
      layout: single
      author_profile: false
      read_time: false
      comments: false
      share: false
      related: false
      sneak_preview: false
      toc: true
      toc_label: "Contents"
      sidebar:
        nav: "lnav"
  - scope:
      path: "_pages"
    values:
      permalink: /docs/:basename/
      toc: true
      toc_label: "Contents"
  - scope:
      path: ""
      type: posts
    values:
      layout: single-full
      author_profile: false
      read_time: false
      comments: false
      share: true
      related: false
      toc: true
      toc_label: "Contents"
      toc_sticky: true
      show_date: true
  - scope:
      path: ""
      type: tutorials
    values:
      layout: single
      toc_sticky: true

analytics:
  provider: "google-gtag"
  google:
    tracking_id: "UA-169781858-1"

timezone: America/Los_Angeles
breadcrumbs: true
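
# Custom site variables: the URLs below are not Jekyll settings, but are
# exposed to pages and posts as e.g. {{ site.press_release_v3 }} for linking
# to the corresponding announcement blog posts.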

press_release_v3: https://www.microsoft.com/en-us/research/blog/deepspeed-extreme-scale-model-training-for-everyone/
press_release_v5: https://www.microsoft.com/en-us/research/blog/deepspeed-powers-8x-larger-moe-model-training-with-high-performance/
press_release_v6: https://www.microsoft.com/en-us/research/blog/deepspeed-advancing-moe-inference-and-training-to-power-next-generation-ai-scale/
