blob: 8bfac18762f34874c3e541efbcc11ade519a69c6 [file] [log] [blame]
#
2# Copyright 2017 the original author or authors.
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15#
16
17"""
18Immutable classes to store config revision information arranged in a tree.
19
Immutability cannot be enforced in Python, so anyone working with these
21classes directly must obey the rules.
22"""
23
24import weakref
25from copy import copy
26from hashlib import md5
27
28from google.protobuf.descriptor import Descriptor
29from simplejson import dumps
30
31from common.utils.json_format import MessageToJson
32from voltha.protos import third_party
33from voltha.protos import meta_pb2
34
35import structlog
36
37log = structlog.get_logger()
38
def is_proto_message(o):
    """
    Heuristic test for protobuf messages.

    An object is treated as a protobuf message when its DESCRIPTOR
    attribute is a genuine protobuf Descriptor instance; anything else
    (including objects with no DESCRIPTOR at all) is not.
    """
    descriptor = getattr(o, 'DESCRIPTOR', None)
    return isinstance(descriptor, Descriptor)
46
47
def message_to_json_concise(m):
    """
    Serialize protobuf message m to its most concise JSON string form.
    Useful wherever output size matters (e.g., when generating a hash).
    """
    # NOTE(review): the three flags are positional arguments of the
    # project-local MessageToJson wrapper; verify their meaning against
    # common.utils.json_format before changing them.
    json_str = MessageToJson(m, False, True, False)
    return json_str
54
55
# Global cache of revision objects keyed by their 12-hex-digit hash.
# Values are held weakly, so revisions vanish from the cache once nothing
# else references them; _finalize() below uses this to de-duplicate
# identical config content.
_rev_cache = weakref.WeakValueDictionary()  # cache of config revs


# Memoizes, per protobuf message class, the map of externally stored
# ("child_node") field metadata computed by children_fields().
_children_fields_cache = {}  # to memoize externally stored field name info
60
61
62class _ChildType(object):
63 """Used to store key metadata about child_node fields in protobuf messages.
64 """
65 __slots__ = (
66 '_module',
67 '_type',
68 '_is_container',
69 '_key',
70 '_key_from_str'
71 )
72
73 def __init__(self, module, type, is_container,
74 key=None, key_from_str=None):
75 self._module = module
76 self._type = type
77 self._is_container = is_container
78 self._key = key
79 self._key_from_str = key_from_str
80
81 @property
82 def is_container(self):
83 return self._is_container
84
85 @property
86 def key(self):
87 return self._key
88
89 @property
90 def key_from_str(self):
91 return self._key_from_str
92
93 @property
94 def module(self):
95 return self._module
96
97 @property
98 def type(self):
99 return self._type
100
101
def children_fields(cls):
    """
    Return a map of externally stored fields for protobuf message class
    *cls*, keyed by field name.

    What is stored as a branch node is determined by the "child_node"
    annotation in the protobuf definitions. For each such field we record
    whether the field is a container, whether the container is keyed
    (indexed), and the function that converts a path substring back to
    the key. Results are memoized in _children_fields_cache.

    :param cls: a generated protobuf message class
    :return: dict mapping field name -> _ChildType
    :raises NotImplementedError: if a child_node key field has an
        unsupported (non-string, non-integer) type
    """
    names = _children_fields_cache.get(cls)

    if names is None:
        names = {}

        for field in cls.DESCRIPTOR.fields:

            if not field.has_options:
                continue
            options = field.GetOptions()

            if not options.HasExtension(meta_pb2.child_node):
                continue

            # a repeated field is stored as an (optionally keyed) container
            is_container = field.label == field.LABEL_REPEATED
            meta = options.Extensions[meta_pb2.child_node]
            key_from_str = None

            if meta.key:
                key_field = field.message_type.fields_by_name[meta.key]
                key_type = key_field.type

                if key_type == key_field.TYPE_STRING:
                    # path substrings are already strings
                    key_from_str = lambda s: s

                elif key_type in (
                        key_field.TYPE_FIXED32,
                        key_field.TYPE_FIXED64,
                        key_field.TYPE_INT32,
                        key_field.TYPE_INT64,
                        key_field.TYPE_SFIXED32,
                        key_field.TYPE_SFIXED64,
                        key_field.TYPE_SINT32,
                        key_field.TYPE_SINT64,
                        key_field.TYPE_UINT32,
                        key_field.TYPE_UINT64):
                    # int itself converts the path substring; no lambda needed
                    key_from_str = int

                else:
                    raise NotImplementedError(
                        'unsupported child_node key type %s for field %s'
                        % (key_type, field.name))

            field_class = field.message_type._concrete_class
            names[field.name] = _ChildType(
                module=field_class.__module__,
                type=field_class.__name__,
                is_container=is_container,
                key=meta.key,
                key_from_str=key_from_str
            )

        _children_fields_cache[cls] = names

    return names
161
162
# Memoizes, per protobuf message class, the field-name -> access-right
# map computed by access_rights().
_access_right_cache = {}  # to memoize field access right restrictions
164
165
def access_rights(cls):
    """
    Determine the access rights for each annotated field of protobuf
    message class *cls*, memoizing the resulting map for fast retrieval.

    :param cls: a generated protobuf message class
    :return: dict mapping field name -> access-right annotation value
    """
    cached = _access_right_cache.get(cls)
    if cached is not None:
        return cached

    access_map = {}
    for field in cls.DESCRIPTOR.fields:
        if not field.has_options:
            continue
        options = field.GetOptions()
        if options.HasExtension(meta_pb2.access):
            access_map[field.name] = options.Extensions[meta_pb2.access]

    _access_right_cache[cls] = access_map
    return access_map
182
183
class ConfigDataRevision(object):
    """
    A snapshot of the local configuration data of a config node.

    Each snapshot carries a short hash computed from its data, which can
    be used to establish equivalence between snapshots. Instances must be
    treated as immutable, including the nested config data: the entire
    config module depends on the hashes computed over the data, so
    altering it after construction leads to unpredictable detriments.
    Python cannot enforce this, so it is a convention all users must obey.
    """

    __slots__ = (
        '_data',
        '_hash',
        '__weakref__'
    )

    def __init__(self, data):
        self._data = data
        self._hash = self._hash_data(data)

    @property
    def data(self):
        """The wrapped config data (treat as read-only)."""
        return self._data

    @property
    def hash(self):
        """12-hex-digit digest identifying this snapshot's content."""
        return self._hash

    @staticmethod
    def _hash_data(data):
        """Hash function used to track version changes of config nodes."""
        if isinstance(data, (dict, list)):
            # canonical JSON so key order cannot change the digest
            serialized = dumps(data, sort_keys=True)
        elif is_proto_message(data):
            # qualify the serialized payload with the full class identity
            # so identical bytes of different types hash differently
            serialized = ':'.join((
                data.__class__.__module__,
                data.__class__.__name__,
                data.SerializeToString()))
        else:
            serialized = str(hash(data))
        return md5(serialized).hexdigest()[:12]
230
231
class ConfigRevision(object):
    """
    Holds not only the local config data, but also the external children
    reference lists, per field name.
    Recall that externally stored fields are those marked "child_node" in
    the protobuf definition.
    This object must be treated as immutable, including its config data.
    """

    __slots__ = (
        '_config',    # ConfigDataRevision wrapping the local data
        '_children',  # dict: field name -> list of child ConfigRevisions
        '_hash',      # 12-hex-digit digest over config + children hashes
        '_branch',    # branch object this revision belongs to
        '__weakref__' # required so _rev_cache can hold us weakly
    )

    def __init__(self, branch, data, children=None):
        self._branch = branch
        self._config = ConfigDataRevision(data)
        self._children = children
        self._finalize()

    def _finalize(self):
        # Compute the content hash, then intern both this revision and its
        # config-data object in the weak global cache so identical content
        # is shared rather than duplicated across revisions.
        self._hash = self._hash_content()
        if self._hash not in _rev_cache:
            _rev_cache[self._hash] = self
        if self._config._hash not in _rev_cache:
            _rev_cache[self._config._hash] = self._config
        else:
            self._config = _rev_cache[self._config._hash]  # re-use!

    def _hash_content(self):
        # hash is derived from config hash and hashes of all children;
        # child lists are folded in sorted field-name order so the digest
        # is deterministic. NOTE: Python 2 — md5() is fed str objects.
        m = md5('' if self._config is None else self._config._hash)
        if self._children is not None:
            for child_field in sorted(self._children.keys()):
                children = self._children[child_field]
                assert isinstance(children, list)
                m.update(''.join(c._hash for c in children))
        return m.hexdigest()[:12]

    @property
    def hash(self):
        """Digest covering the config data and all child revision hashes."""
        return self._hash

    @property
    def data(self):
        """The local config data, or None if there is no config snapshot."""
        return None if self._config is None else self._config.data

    @property
    def node(self):
        """The config node owning the branch this revision lives on."""
        return self._branch._node

    @property
    def type(self):
        """The protobuf message class of the local config data."""
        return self._config.data.__class__

    def clear_hash(self):
        # Drops the cached hash. NOTE(review): .hash then returns None
        # until _finalize() is run again — confirm callers expect this.
        self._hash = None

    def get(self, depth):
        """
        Get config data of node. If depth > 0, recursively assemble the
        branch nodes. If depth is < 0, this results in a fully exhaustive
        "complete config".
        """
        # work on a copy so the (immutable) stored data is never mutated
        orig_data = self._config.data
        data = orig_data.__class__()
        data.CopyFrom(orig_data)
        if depth:
            # collect children (Python 2: dict.iteritems)
            cfields = children_fields(self.type).iteritems()
            for field_name, field in cfields:
                if field.is_container:
                    # repeated field: add one entry per child revision
                    for rev in self._children[field_name]:
                        child_data = rev.get(depth=depth - 1)
                        child_data_holder = getattr(data, field_name).add()
                        child_data_holder.MergeFrom(child_data)
                else:
                    # singular child: stored as a one-element list
                    rev = self._children[field_name][0]
                    child_data = rev.get(depth=depth - 1)
                    child_data_holder = getattr(data, field_name)
                    child_data_holder.MergeFrom(child_data)
        return data

    def update_data(self, data, branch):
        """Return a NEW revision which is updated for the modified data"""
        # shallow-copy, swap in the new config, then rehash/re-intern
        new_rev = copy(self)
        new_rev._branch = branch
        new_rev._config = self._config.__class__(data)
        new_rev._finalize()
        return new_rev

    def update_children(self, name, children, branch):
        """Return a NEW revision which is updated for the modified children"""
        # copy the children map so the original revision stays untouched
        new_children = self._children.copy()
        new_children[name] = children
        new_rev = copy(self)
        new_rev._branch = branch
        new_rev._children = new_children
        new_rev._finalize()
        return new_rev

    def update_all_children(self, children, branch):
        """Return a NEW revision which is updated for all children entries"""
        new_rev = copy(self)
        new_rev._branch = branch
        new_rev._children = children
        new_rev._finalize()
        return new_rev