Refactored protobuf framework, added loader

Included changes:

* Initial adapter interface spec
* Adapter loader to auto-load adapters
* Initial adapter NBI
* Better import support in chameleon, allowing more flexible
  protobuf structure (by organizing artifacts and services
  into separate proto files).
* Refactored voltha.proto to split things into logical proto
  modules.
* Some additional cleanup

Change-Id: I75f9883c6992148ea8df430bcdaebf85115fea4a
diff --git a/grpc_client/grpc_client.py b/grpc_client/grpc_client.py
index 75c78eb..9a95330 100644
--- a/grpc_client/grpc_client.py
+++ b/grpc_client/grpc_client.py
@@ -16,7 +16,7 @@
 
 """
 gRPC client meant to connect to a gRPC server endpoint, and query the
-end-point's schema by calling SchemaService.Schema(NullMessage) and all of its
+end-point's schema by calling SchemaService.Schema(Empty) and all of its
 semantics are derived from the recovered schema.
 """
 
@@ -34,7 +34,9 @@
 from werkzeug.exceptions import ServiceUnavailable
 
 from common.utils.asleep import asleep
-from chameleon.protos.schema_pb2 import NullMessage, SchemaServiceStub
+from chameleon.protos.schema_pb2 import SchemaServiceStub
+from google.protobuf.empty_pb2 import Empty
+
 
 log = get_logger()
 
@@ -165,7 +167,7 @@
         assert isinstance(self.channel, grpc.Channel)
         stub = SchemaServiceStub(self.channel)
         # try:
-        schemas = stub.GetSchema(NullMessage())
+        schemas = stub.GetSchema(Empty())
         # except _Rendezvous, e:
         #     if e.code == grpc.StatusCode.UNAVAILABLE:
         #
diff --git a/protoc_plugins/gw_gen.py b/protoc_plugins/gw_gen.py
index 20249c2..4596bef 100755
--- a/protoc_plugins/gw_gen.py
+++ b/protoc_plugins/gw_gen.py
@@ -35,7 +35,14 @@
 from protobuf_to_dict import protobuf_to_dict, dict_to_protobuf
 
 {% set package = file_name.replace('.proto', '') %}
-import {{ package + '_pb2' }} as {{ package }}
+
+{% for pypackage, module in includes %}
+{% if pypackage %}
+from {{ pypackage }} import {{ module }}
+{% else %}
+import {{ module }}
+{% endif %}
+{% endfor %}
 
 log = get_logger()
 
@@ -44,7 +51,7 @@
     pass  # so that if no endpoints are defined, Python is still happy
 
     {% for method in methods %}
-    {% set method_name = method['service'] + '_' + method['method'] %}
+    {% set method_name = method['service'].rpartition('.')[2] + '_' + method['method'] %}
     {% set path = method['path'].replace('{', '<string:').replace('}', '>') %}
     @app.route('{{ path }}', methods=['{{ method['verb'].upper() }}'])
     def {{ method_name }}(server, request, **kw):
@@ -58,12 +65,12 @@
         riase NotImplementedError('cannot handle specific body field list')
         {% endif %}
         try:
-            req = dict_to_protobuf({{ method['input_type'] }}, data)
+            req = dict_to_protobuf({{ type_map[method['input_type']] }}, data)
         except Exception, e:
             log.error('cannot-convert-to-protobuf', e=e, data=data)
             raise
         res = grpc_client.invoke(
-            {{ '.'.join([package, method['service']]) }}Stub,
+            {{ type_map[method['service']] }}Stub,
             '{{ method['method'] }}', req)
         try:
             out_data = protobuf_to_dict(res, use_enum_labels=True)
@@ -128,7 +135,7 @@
                     data = {
                         'package': package,
                         'filename': proto_file.name,
-                        'service': service.name,
+                        'service': proto_file.package + '.' + service.name,
                         'method': method.name,
                         'input_type': input_type,
                         'output_type': output_type,
@@ -140,30 +147,116 @@
                     yield data
 
 
-def generate_gw_code(file_name, methods):
-    return template.render(file_name=file_name, methods=methods)
+def generate_gw_code(file_name, methods, type_map, includes):
+    return template.render(file_name=file_name, methods=methods,
+                           type_map=type_map, includes=includes)
+
+
+class IncludeManager(object):
+    # need to keep track of what files define what message types and
+    # under what package name. Later, when we analyze the methods, we
+    # need to be able to derive the list of files we need to load and we
+    # also need to replace the <proto-package-name>.<artifact-name> in the
+    # templates with <python-package-name>.<artifact-name> so Python can
+    # resolve these.
+    def __init__(self):
+        self.package_to_localname = {}
+        self.fullname_to_filename = {}
+        self.prefix_table = []  # sorted table of top-level symbols in protos
+        self.type_map = {}  # full name as used in .proto -> python name
+        self.includes_needed = set()  # names of files needed to be included
+        self.filename_to_module = {}  # filename -> (package, module)
+
+    def extend_symbol_tables(self, proto_file):
+        # keep track of what file adds what top-level symbol to what abstract
+        # package name
+        package_name = proto_file.package
+        file_name = proto_file.name
+        self._add_filename(file_name)
+        all_defs = list(proto_file.message_type)
+        all_defs.extend(list(proto_file.enum_type))
+        all_defs.extend(list(proto_file.service))
+        for typedef in all_defs:
+            name = typedef.name
+            fullname = package_name + '.' + name
+            self.fullname_to_filename[fullname] = file_name
+            self.package_to_localname.setdefault(package_name, []).append(name)
+        self._update_prefix_table()
+
+    def _add_filename(self, filename):
+        if filename not in self.filename_to_module:
+            python_path = filename.replace('.proto', '_pb2').replace('/', '.')
+            package_name, _, module_name = python_path.rpartition('.')
+            self.filename_to_module[filename] = (package_name, module_name)
+
+    def _update_prefix_table(self):
+        # make a sorted list of symbol prefixes needed to resolve potential
+        # use of nested symbols
+        self.prefix_table = sorted(self.fullname_to_filename.iterkeys(),
+                                   reverse=True)
+
+    def _find_matching_prefix(self, fullname):
+        for prefix in self.prefix_table:
+            if fullname.startswith(prefix):
+                return prefix
+        # This should never happen
+        raise Exception('No match for type name "{}"'.format(fullname))
+
+    def add_needed_symbol(self, fullname):
+        if fullname in self.type_map:
+            return
+        top_level_symbol = self._find_matching_prefix(fullname)
+        name = top_level_symbol.rpartition('.')[2]
+        nested_name = fullname[len(top_level_symbol):]  # may be empty
+        file_name = self.fullname_to_filename[top_level_symbol]
+        self.includes_needed.add(file_name)
+        module_name = self.filename_to_module[file_name][1]
+        python_name = module_name + '.' + name + nested_name
+        self.type_map[fullname] = python_name
+
+    def get_type_map(self):
+        return self.type_map
+
+    def get_includes(self):
+        return sorted(
+            self.filename_to_module[fn] for fn in self.includes_needed)
 
 
 def generate_code(request, response):
 
     assert isinstance(request, plugin.CodeGeneratorRequest)
+
+    include_manager = IncludeManager()
     for proto_file in request.proto_file:
-        output = []
+
+        include_manager.extend_symbol_tables(proto_file)
+
+        methods = []
 
         for data in traverse_methods(proto_file):
-            output.append(data)
+            methods.append(data)
+            include_manager.add_needed_symbol(data['input_type'])
+            include_manager.add_needed_symbol(data['output_type'])
+            include_manager.add_needed_symbol(data['service'])
+
+        type_map = include_manager.get_type_map()
+        includes = include_manager.get_includes()
 
         # as a nice side-effect, generate a json file capturing the essence
         # of the RPC method entries
         f = response.file.add()
         f.name = proto_file.name + '.json'
-        f.content = dumps(output, indent=4)
+        f.content = dumps(dict(
+            type_rename_map=type_map,
+            includes=includes,
+            methods=methods), indent=4)
 
         # generate the real Python code file
         f = response.file.add()
         assert proto_file.name.endswith('.proto')
         f.name = proto_file.name.replace('.proto', '_gw.py')
-        f.content = generate_gw_code(proto_file.name, output)
+        f.content = generate_gw_code(proto_file.name,
+                                     methods, type_map, includes)
 
 
 if __name__ == '__main__':
@@ -175,6 +268,8 @@
     else:
         # read input from stdin
         data = sys.stdin.read()
+        # with file('/tmp/buf', 'wb') as f:
+        #     f.write(data)
 
     # parse request
     request = plugin.CodeGeneratorRequest()
diff --git a/protos/schema.proto b/protos/schema.proto
index 2a1ec08..0c06feb 100644
--- a/protos/schema.proto
+++ b/protos/schema.proto
@@ -2,6 +2,8 @@
 
 package schema;
 
+import "google/protobuf/empty.proto";
+
 // Contains the name and content of a *.proto file
 message ProtoFile {
     string file_name = 1;  // name of proto file
@@ -17,13 +19,10 @@
 
 }
 
-// Empty message
-message NullMessage {}
-
 // Schema services
 service SchemaService {
 
     // Return active grpc schemas
-    rpc GetSchema(NullMessage) returns (Schemas) {}
+    rpc GetSchema(google.protobuf.Empty) returns (Schemas) {}
 
 }
diff --git a/protos/schema_pb2.py b/protos/schema_pb2.py
index f5f17d2..537ba02 100644
--- a/protos/schema_pb2.py
+++ b/protos/schema_pb2.py
@@ -13,14 +13,16 @@
 _sym_db = _symbol_database.Default()
 
 
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='schema.proto',
   package='schema',
   syntax='proto3',
-  serialized_pb=_b('\n\x0cschema.proto\x12\x06schema\"A\n\tProtoFile\x12\x11\n\tfile_name\x18\x01 \x01(\t\x12\r\n\x05proto\x18\x02 \x01(\t\x12\x12\n\ndescriptor\x18\x03 \x01(\x0c\",\n\x07Schemas\x12!\n\x06protos\x18\x01 \x03(\x0b\x32\x11.schema.ProtoFile\"\r\n\x0bNullMessage2D\n\rSchemaService\x12\x33\n\tGetSchema\x12\x13.schema.NullMessage\x1a\x0f.schema.Schemas\"\x00\x62\x06proto3')
-)
+  serialized_pb=_b('\n\x0cschema.proto\x12\x06schema\x1a\x1bgoogle/protobuf/empty.proto\"A\n\tProtoFile\x12\x11\n\tfile_name\x18\x01 \x01(\t\x12\r\n\x05proto\x18\x02 \x01(\t\x12\x12\n\ndescriptor\x18\x03 \x01(\x0c\",\n\x07Schemas\x12!\n\x06protos\x18\x01 \x03(\x0b\x32\x11.schema.ProtoFile2G\n\rSchemaService\x12\x36\n\tGetSchema\x12\x16.google.protobuf.Empty\x1a\x0f.schema.Schemas\"\x00\x62\x06proto3')
+  ,
+  dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
@@ -66,8 +68,8 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=24,
-  serialized_end=89,
+  serialized_start=53,
+  serialized_end=118,
 )
 
 
@@ -97,38 +99,13 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=91,
-  serialized_end=135,
-)
-
-
-_NULLMESSAGE = _descriptor.Descriptor(
-  name='NullMessage',
-  full_name='schema.NullMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=137,
-  serialized_end=150,
+  serialized_start=120,
+  serialized_end=164,
 )
 
 _SCHEMAS.fields_by_name['protos'].message_type = _PROTOFILE
 DESCRIPTOR.message_types_by_name['ProtoFile'] = _PROTOFILE
 DESCRIPTOR.message_types_by_name['Schemas'] = _SCHEMAS
-DESCRIPTOR.message_types_by_name['NullMessage'] = _NULLMESSAGE
 
 ProtoFile = _reflection.GeneratedProtocolMessageType('ProtoFile', (_message.Message,), dict(
   DESCRIPTOR = _PROTOFILE,
@@ -144,13 +121,6 @@
   ))
 _sym_db.RegisterMessage(Schemas)
 
-NullMessage = _reflection.GeneratedProtocolMessageType('NullMessage', (_message.Message,), dict(
-  DESCRIPTOR = _NULLMESSAGE,
-  __module__ = 'schema_pb2'
-  # @@protoc_insertion_point(class_scope:schema.NullMessage)
-  ))
-_sym_db.RegisterMessage(NullMessage)
-
 
 import grpc
 from grpc.beta import implementations as beta_implementations
@@ -171,7 +141,7 @@
     """
     self.GetSchema = channel.unary_unary(
         '/schema.SchemaService/GetSchema',
-        request_serializer=NullMessage.SerializeToString,
+        request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
         response_deserializer=Schemas.FromString,
         )
 
@@ -192,7 +162,7 @@
   rpc_method_handlers = {
       'GetSchema': grpc.unary_unary_rpc_method_handler(
           servicer.GetSchema,
-          request_deserializer=NullMessage.FromString,
+          request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
           response_serializer=Schemas.SerializeToString,
       ),
   }
@@ -222,7 +192,7 @@
 
 def beta_create_SchemaService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
   request_deserializers = {
-    ('schema.SchemaService', 'GetSchema'): NullMessage.FromString,
+    ('schema.SchemaService', 'GetSchema'): google_dot_protobuf_dot_empty__pb2.Empty.FromString,
   }
   response_serializers = {
     ('schema.SchemaService', 'GetSchema'): Schemas.SerializeToString,
@@ -236,7 +206,7 @@
 
 def beta_create_SchemaService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
   request_serializers = {
-    ('schema.SchemaService', 'GetSchema'): NullMessage.SerializeToString,
+    ('schema.SchemaService', 'GetSchema'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
   }
   response_deserializers = {
     ('schema.SchemaService', 'GetSchema'): Schemas.FromString,