/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package caching

import (
    `sync`
    `sync/atomic`
    `unsafe`

    `github.com/bytedance/sonic/internal/rt`
)

/** Program Map **/

const (
    _LoadFactor   = 0.5
    _InitCapacity = 4096    // must be a power of 2
)
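
// Note: with _LoadFactor = 0.5 and _InitCapacity = 4096, add() grows the
// table (rehash doubles its capacity) once it already holds 2048 entries
// and another entry is being inserted.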

type _ProgramMap struct {
    n uint64            // number of entries currently stored
    m uint32            // bucket mask, always (capacity - 1) with capacity a power of 2
    b []_ProgramEntry   // bucket array of length m + 1
}

type _ProgramEntry struct {
    vt *rt.GoType
    fn interface{}
}

func newProgramMap() *_ProgramMap {
    return &_ProgramMap {
        n: 0,
        m: _InitCapacity - 1,
        b: make([]_ProgramEntry, _InitCapacity),
    }
}

func (self *_ProgramMap) copy() *_ProgramMap {
    fork := &_ProgramMap{
        n: self.n,
        m: self.m,
        b: make([]_ProgramEntry, len(self.b)),
    }
    for i, f := range self.b {
        fork.b[i] = f
    }
    return fork
}

func (self *_ProgramMap) get(vt *rt.GoType) interface{} {
    i := self.m + 1
    p := vt.Hash & self.m

    /* linear probing */
    for ; i > 0; i-- {
        if b := self.b[p]; b.vt == vt {
            return b.fn
        } else if b.vt == nil {
            break
        } else {
            p = (p + 1) & self.m
        }
    }

    /* not found */
    return nil
}

func (self *_ProgramMap) add(vt *rt.GoType, fn interface{}) *_ProgramMap {
    p := self.copy()
    f := float64(atomic.LoadUint64(&p.n) + 1) / float64(p.m + 1)

    /* check for load factor */
    if f > _LoadFactor {
        p = p.rehash()
    }

    /* insert the value */
    p.insert(vt, fn)
    return p
}

func (self *_ProgramMap) rehash() *_ProgramMap {
    c := (self.m + 1) << 1
    r := &_ProgramMap{m: c - 1, b: make([]_ProgramEntry, int(c))}

    /* rehash every entry */
    for i := uint32(0); i <= self.m; i++ {
        if b := self.b[i]; b.vt != nil {
            r.insert(b.vt, b.fn)
        }
    }

    /* rebuild successful */
    return r
}

func (self *_ProgramMap) insert(vt *rt.GoType, fn interface{}) {
    h := vt.Hash
    p := h & self.m

    /* linear probing */
    for i := uint32(0); i <= self.m; i++ {
        if b := &self.b[p]; b.vt != nil {
            p += 1
            p &= self.m
        } else {
            b.vt = vt
            b.fn = fn
            atomic.AddUint64(&self.n, 1)
            return
        }
    }

    /* should never happen */
    panic("no available slots")
}

/** RCU Program Cache **/
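
// ProgramCache is a copy-on-write ("RCU"-style) cache: readers fetch the
// current *_ProgramMap from p with a single atomic load and never take a
// lock, while writers serialize on m, build an updated copy of the map via
// add(), and publish it back to p atomically. A published map is never
// mutated again, so concurrent reads during an update remain safe.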
type ProgramCache struct {
    m sync.Mutex
    p unsafe.Pointer
}

func CreateProgramCache() *ProgramCache {
    return &ProgramCache {
        m: sync.Mutex{},
        p: unsafe.Pointer(newProgramMap()),
    }
}

func (self *ProgramCache) Get(vt *rt.GoType) interface{} {
    return (*_ProgramMap)(atomic.LoadPointer(&self.p)).get(vt)
}

func (self *ProgramCache) Compute(vt *rt.GoType, compute func(*rt.GoType, ...interface{}) (interface{}, error), ex ...interface{}) (interface{}, error) {
    var err error
    var val interface{}

    /* use defer to prevent inlining of this function */
    self.m.Lock()
    defer self.m.Unlock()

    /* double check with write lock held */
    if val = self.Get(vt); val != nil {
        return val, nil
    }

    /* compute the value */
    if val, err = compute(vt, ex...); err != nil {
        return nil, err
    }

    /* update the RCU cache */
    atomic.StorePointer(&self.p, unsafe.Pointer((*_ProgramMap)(atomic.LoadPointer(&self.p)).add(vt, val)))
    return val, nil
}
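
// Usage sketch (illustrative only, not part of this file): callers normally
// try the lock-free Get first and fall back to Compute on a miss. The
// compileProgram helper below is hypothetical.
//
//     var cache = CreateProgramCache()
//
//     func findOrCompile(vt *rt.GoType) (interface{}, error) {
//         if val := cache.Get(vt); val != nil {
//             return val, nil
//         }
//         return cache.Compute(vt, func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
//             return compileProgram(vt)    // hypothetical compiler for vt
//         })
//     }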