/*
Copyright 2020 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package value

// Allocator provides a value object allocation strategy.
// Value objects can be allocated by passing an allocator to the "Using"
// receiver functions on the value interfaces, e.g. Map.ZipUsing(allocator, ...).
// Value objects returned from "Using" functions should be given back to the allocator
// once no longer needed by calling Allocator.Free(Value).
type Allocator interface {
	// Free gives the allocator back any value objects returned by the "Using"
	// receiver functions on the value interfaces.
	// interface{} may be any of: Value, Map, List or Range.
	Free(interface{})

	// The unexported functions are for "Using" receiver functions of the value types
	// to request what they need from the allocator.
	allocValueUnstructured() *valueUnstructured
	allocListUnstructuredRange() *listUnstructuredRange
	allocValueReflect() *valueReflect
	allocMapReflect() *mapReflect
	allocStructReflect() *structReflect
	allocListReflect() *listReflect
	allocListReflectRange() *listReflectRange
}
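
// An illustrative sketch, not part of the original file: it shows the
// acquire-and-release pattern the Allocator documentation describes, using
// only identifiers defined in this package.
func exampleAllocatorUsage(a Allocator) {
	// Acquire a temporary value object through one of the unexported
	// alloc functions (normally done by the "Using" receiver functions).
	vv := a.allocValueUnstructured()
	// ... short-lived use of vv would go here ...
	// Give the object back once it is no longer needed.
	a.Free(vv)
}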

// HeapAllocator simply allocates objects to the heap. It is the default
// allocator used by the receiver functions on the value interfaces that do not
// accept an allocator, and should be used whenever allocating objects that will
// not be given back to an allocator by calling Allocator.Free(Value).
var HeapAllocator = &heapAllocator{}

type heapAllocator struct{}

func (p *heapAllocator) allocValueUnstructured() *valueUnstructured {
	return &valueUnstructured{}
}

func (p *heapAllocator) allocListUnstructuredRange() *listUnstructuredRange {
	return &listUnstructuredRange{vv: &valueUnstructured{}}
}

func (p *heapAllocator) allocValueReflect() *valueReflect {
	return &valueReflect{}
}

func (p *heapAllocator) allocStructReflect() *structReflect {
	return &structReflect{}
}

func (p *heapAllocator) allocMapReflect() *mapReflect {
	return &mapReflect{}
}

func (p *heapAllocator) allocListReflect() *listReflect {
	return &listReflect{}
}

func (p *heapAllocator) allocListReflectRange() *listReflectRange {
	return &listReflectRange{vr: &valueReflect{}}
}

func (p *heapAllocator) Free(_ interface{}) {}
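
// An illustrative sketch, not part of the original file: with HeapAllocator
// every allocation is an ordinary heap allocation and Free is a no-op, so
// freed objects are simply left for the garbage collector.
func exampleHeapAllocatorUsage() {
	v := HeapAllocator.allocValueReflect()
	// ... use v ...
	HeapAllocator.Free(v) // no-op; v is reclaimed by the garbage collector
}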

// NewFreelistAllocator creates a freelist-based allocator.
// This allocator provides fast allocation and freeing of short-lived value objects.
//
// The freelists are bounded in size by freelistMaxSize. If more value objects than this
// are allocated at once, the excess will be returned to the heap for garbage collection when freed.
//
// This allocator is not safe for concurrent use and must not be accessed by multiple goroutines.
//
// This allocator works well for traversal of value data trees. Typical usage is to acquire
// a freelist at the beginning of the traversal and use it throughout
// for all temporary value access.
func NewFreelistAllocator() Allocator {
	return &freelistAllocator{
		valueUnstructured: &freelist{new: func() interface{} {
			return &valueUnstructured{}
		}},
		listUnstructuredRange: &freelist{new: func() interface{} {
			return &listUnstructuredRange{vv: &valueUnstructured{}}
		}},
		valueReflect: &freelist{new: func() interface{} {
			return &valueReflect{}
		}},
		mapReflect: &freelist{new: func() interface{} {
			return &mapReflect{}
		}},
		structReflect: &freelist{new: func() interface{} {
			return &structReflect{}
		}},
		listReflect: &freelist{new: func() interface{} {
			return &listReflect{}
		}},
		listReflectRange: &freelist{new: func() interface{} {
			return &listReflectRange{vr: &valueReflect{}}
		}},
	}
}
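
// An illustrative sketch, not part of the original file, of the traversal
// pattern described in the NewFreelistAllocator documentation: a single
// freelist allocator is created up front and reused for every temporary
// value object needed during the walk.
func exampleFreelistTraversal() {
	a := NewFreelistAllocator()
	// Acquire a temporary object for the current step of the traversal.
	r := a.allocListUnstructuredRange()
	// ... traversal work using r would go here ...
	// Return it so the next step can reuse the same backing object.
	a.Free(r)
}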

// Bound memory usage of freelists. This prevents the processing of very large lists from leaking memory.
// This limit is large enough for endpoints objects containing 1000 IP address entries. Freed objects
// that don't fit into the freelist are orphaned on the heap to be garbage collected.
const freelistMaxSize = 1000

type freelistAllocator struct {
	valueUnstructured     *freelist
	listUnstructuredRange *freelist
	valueReflect          *freelist
	mapReflect            *freelist
	structReflect         *freelist
	listReflect           *freelist
	listReflectRange      *freelist
}

type freelist struct {
	list []interface{}
	new  func() interface{}
}

func (f *freelist) allocate() interface{} {
	var w2 interface{}
	if n := len(f.list); n > 0 {
		// Reuse the most recently freed object.
		w2, f.list = f.list[n-1], f.list[:n-1]
	} else {
		// The freelist is empty; allocate a fresh object.
		w2 = f.new()
	}
	return w2
}

func (f *freelist) free(v interface{}) {
	if len(f.list) < freelistMaxSize {
		f.list = append(f.list, v)
	}
	// If the freelist is already full, v is dropped and left to the
	// garbage collector.
}
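
// An illustrative sketch, not part of the original file, of the freelist
// mechanics above: an object freed back to the list is handed out again by
// the next allocate call instead of triggering a fresh allocation.
func exampleFreelistReuse() {
	f := &freelist{new: func() interface{} { return &valueReflect{} }}
	v := f.allocate() // list is empty, so f.new is invoked
	f.free(v)         // v is pushed onto the list (len < freelistMaxSize)
	reused := f.allocate()
	_ = reused == v // true: the same object is returned
}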

func (w *freelistAllocator) Free(value interface{}) {
	switch v := value.(type) {
	case *valueUnstructured:
		v.Value = nil // don't hold references to unstructured objects
		w.valueUnstructured.free(v)
	case *listUnstructuredRange:
		v.vv.Value = nil // don't hold references to unstructured objects
		w.listUnstructuredRange.free(v)
	case *valueReflect:
		v.ParentMapKey = nil
		v.ParentMap = nil
		w.valueReflect.free(v)
	case *mapReflect:
		w.mapReflect.free(v)
	case *structReflect:
		w.structReflect.free(v)
	case *listReflect:
		w.listReflect.free(v)
	case *listReflectRange:
		v.vr.ParentMapKey = nil
		v.vr.ParentMap = nil
		w.listReflectRange.free(v)
	}
}

func (w *freelistAllocator) allocValueUnstructured() *valueUnstructured {
	return w.valueUnstructured.allocate().(*valueUnstructured)
}

func (w *freelistAllocator) allocListUnstructuredRange() *listUnstructuredRange {
	return w.listUnstructuredRange.allocate().(*listUnstructuredRange)
}

func (w *freelistAllocator) allocValueReflect() *valueReflect {
	return w.valueReflect.allocate().(*valueReflect)
}

func (w *freelistAllocator) allocStructReflect() *structReflect {
	return w.structReflect.allocate().(*structReflect)
}

func (w *freelistAllocator) allocMapReflect() *mapReflect {
	return w.mapReflect.allocate().(*mapReflect)
}

func (w *freelistAllocator) allocListReflect() *listReflect {
	return w.listReflect.allocate().(*listReflect)
}

func (w *freelistAllocator) allocListReflectRange() *listReflectRange {
	return w.listReflectRange.allocate().(*listReflectRange)
}