Merge branch 'master' of ssh://gerrit.opencord.org:29418/voltha
diff --git a/voltha/core/protos/voltha.proto b/voltha/core/protos/voltha.proto
index a0d65db..647e4f0 100644
--- a/voltha/core/protos/voltha.proto
+++ b/voltha/core/protos/voltha.proto
@@ -10,15 +10,17 @@
option java_outer_classname = "VolthaProtos";
option csharp_namespace = "Opencord.Voltha.Voltha";
+// Empty message
message NullMessage {}
+// Encode health status of a Voltha instance
message HealthStatus {
// Health states
enum HealthState {
- HEALTHY = 0;
- OVERLOADED = 1;
- DYING = 2;
+ HEALTHY = 0; // The instance is healthy
+ OVERLOADED = 1; // The instance is overloaded, decrease query rate
+ DYING = 2; // The instance is in a critical condition, do not use it
}
// Current state of health of this Voltha instance
@@ -26,6 +28,14 @@
}
+// A more complex message type for testing purposes
+message MoreComplex {
+ HealthStatus health = 1; // Embedded health
+ int32 foo_counter = 2; // Counting foos
+ string name = 3; // Name of this thing
+ repeated MoreComplex children = 4; // Nested objects to test recursive types
+}
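+// An illustrative MoreComplex instance in proto text format:
+//   name: "root"
+//   foo_counter: 123
+//   health { state: HEALTHY }
+//   children { name: "child1" }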
+
service HealthService {
// Return current health status of a Voltha instance
diff --git a/voltha/northbound/grpc/grpc_introspect.py b/voltha/northbound/grpc/grpc_introspect.py
new file mode 100755
index 0000000..f0a6877
--- /dev/null
+++ b/voltha/northbound/grpc/grpc_introspect.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Load a protobuf description file an make sense of it"""
+
+# This is very experimental
+import os
+import inspect
+from collections import OrderedDict
+
+from google.protobuf.descriptor import FieldDescriptor, Descriptor
+from google.protobuf.message import Message
+from simplejson import dumps
+
+from google.protobuf import descriptor_pb2
+
+# TODO this hack needs to go
+# Don't worry if the two imports below are flagged by your IDE as unused or
+# unresolvable; they are fine.
+import voltha.northbound.grpc.pb2_loader
+from google.api import http_pb2
+
+
+class InvalidDescriptorError(Exception):
+    pass
+
+
+class DescriptorParser(object):
+
+    def __init__(self, ignore_empty_source_code_info=True):
+        self.ignore_empty_source_code_info = ignore_empty_source_code_info
+        self.catalog = {}
+        self.meta, blob = self.load_root_descriptor()
+        self.load_descriptor(blob)
+
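+    # Note: 'descriptor.desc' is expected to be the serialized
+    # FileDescriptorSet of descriptor.proto itself; it supplies the
+    # meta-schema that find_node_by_path() walks when folding comments in.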
+    def load_root_descriptor(self):
+        """Load descriptor.desc to make things more data driven"""
+        with open('descriptor.desc', 'rb') as f:
+            blob = f.read()
+        proto = descriptor_pb2.FileDescriptorSet()
+        proto.ParseFromString(blob)
+        assert len(proto.file) == 1
+        fdp = proto.file[0]
+
+        return fdp, blob
+
+    def get_catalog(self):
+        return self.catalog
+
+    def load_descriptor(self, descriptor_blob, fold_comments=True):
+
+        # decode the serialized descriptor set
+        file_descriptor_set = descriptor_pb2.FileDescriptorSet()
+        file_descriptor_set.ParseFromString(descriptor_blob)
+
+        d = self.parse(file_descriptor_set)
+        for _file in d['file']:
+            if fold_comments:
+                self.fold_comments_in(_file)
+            self.catalog[_file['package']] = _file
+
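+    # FieldDescriptor label values: 1 = LABEL_OPTIONAL, 2 = LABEL_REQUIRED,
+    # 3 = LABEL_REPEATED; singular fields recurse directly, repeated fields
+    # map over their entries.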
+    def parse_message(self, m):
+        assert isinstance(m, Message)
+        d = OrderedDict()
+        for fd, v in m.ListFields():
+            assert isinstance(fd, FieldDescriptor)
+            if fd.label in (1, 2):
+                d[fd.name] = self.parse(v)
+            elif fd.label == 3:
+                d[fd.name] = [self.parse(x) for x in v]
+            else:
+                raise InvalidDescriptorError()
+
+        return d
+
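+    # scalar leaf values pass through unchanged; parse() dispatches here for
+    # anything that is not an embedded Message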
+    parser_table = {
+        unicode: lambda x: x,
+        int: lambda x: x,
+        bool: lambda x: x,
+    }
+
+    def parse(self, o):
+        if isinstance(o, Message):
+            return self.parse_message(o)
+        else:
+            return self.parser_table[type(o)](o)
+
+    def fold_comments_in(self, descriptor):
+        assert isinstance(descriptor, dict)
+
+        locations = descriptor.get('source_code_info', {}).get('location', [])
+        for location in locations:
+            path = location.get('path', [])
+            comments = ''.join([
+                location.get('leading_comments', '').strip(' '),
+                location.get('trailing_comments', '').strip(' '),
+                ''.join(block.strip(' ') for block
+                        in location.get('leading_detached_comments', []))
+            ]).strip()
+
+            # ignore locations with no comments
+            if not comments:
+                continue
+
+            # we ignore paths with an odd number of entries, since these do
+            # not address our schema nodes, but rather the meta schema
+            if len(path) % 2 == 0:
+                node = self.find_node_by_path(
+                    path, self.meta.DESCRIPTOR, descriptor)
+                assert isinstance(node, dict)
+                node['_description'] = comments
+
+        # remove source_code_info; pop() tolerates its absence
+        descriptor.pop('source_code_info', None)
+
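+    # Source-code-info paths alternate (field number, index) pairs; e.g.
+    # path [4, 0, 2, 1] resolves to file.message_type[0].field[1].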
+    def find_node_by_path(self, path, meta, o):
+
+        # stop recursion when path is empty
+        if not path:
+            return o
+
+        # sanity check
+        assert len(path) >= 2
+        assert isinstance(meta, Descriptor)
+        assert isinstance(o, dict)
+
+        # find field name, then actual field
+        field_number = path.pop(0)
+        field_def = meta.fields_by_number[field_number]
+        field = o[field_def.name]
+
+        # field must be a list; extract the entry with the given index
+        assert isinstance(field, list)
+        index = path.pop(0)
+        child_o = field[index]
+
+        child_meta = field_def.message_type
+        return self.find_node_by_path(path, child_meta, child_o)
+
+
+if __name__ == '__main__':
+
+    # try loading voltha descriptor and turn it into JSON data as a preparation
+    # for generating JSON Schema / swagger file (to be done later)
+    from voltha.core.protos import voltha_pb2
+    desc_dir = os.path.dirname(inspect.getfile(voltha_pb2))
+    desc_file = os.path.join(desc_dir, 'voltha.desc')
+    with open(desc_file, 'rb') as f:
+        descriptor_blob = f.read()
+    parser = DescriptorParser()
+    parser.load_descriptor(descriptor_blob)
+    print dumps(parser.get_catalog(), indent=4)
+
+    # try to see if we can decode binary data into JSON automatically
+    def make_mc(name):
+        mc = voltha_pb2.MoreComplex()
+        mc.name = name
+        mc.foo_counter = 123123123
+        # composite fields cannot be assigned directly; set subfields instead
+        mc.health.state = voltha_pb2.HealthStatus.HEALTHY
+        return mc
+
+    mc = make_mc('root')
+    # exercise the repeated (recursive) children field
+    child1 = mc.children.add()
+    child1.CopyFrom(make_mc('child1'))
+    blob = mc.SerializeToString()
+    print len(blob), 'bytes'
+    mc2 = voltha_pb2.MoreComplex()
+    mc2.ParseFromString(blob)
+    assert mc == mc2
+
+    print dumps(parser.parse(mc), indent=4)
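+# When run directly (with voltha.desc next to voltha_pb2), this prints the
+# parsed catalog keyed by package name, e.g. a JSON object shaped roughly
+# like {"voltha": {"package": "voltha", "message_type": [...], ...}}.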
diff --git a/voltha/northbound/grpc/grpc_server.py b/voltha/northbound/grpc/grpc_server.py
index 7e737ae..166a79d 100644
--- a/voltha/northbound/grpc/grpc_server.py
+++ b/voltha/northbound/grpc/grpc_server.py
@@ -61,7 +61,8 @@
self.server.stop(grace)
-# This is to allow runninf the GRPC server in stand-alone mode
+# This is to allow running the GRPC server in stand-alone mode
+
if __name__ == '__main__':
server = VolthaGrpcServer().run()
diff --git a/voltha/northbound/grpc/pb2_loader.py b/voltha/northbound/grpc/pb2_loader.py
index 12588c6..4e0d41d 100644
--- a/voltha/northbound/grpc/pb2_loader.py
+++ b/voltha/northbound/grpc/pb2_loader.py
@@ -19,6 +19,7 @@
import sys
+# TODO this hack needs to go
#~~~~~~~~~~~~~~~~~~~~ begin import hach ~~~~~~~~~~~~~~~~~~~~~~~~~
# Import hack to allow loading the google.api local files
# without shadowing the google.protoc dependency. We needed