This commit consists of:
1) Yang annotations for the protobuf definitions. When added to the
   relevant proto files in Voltha, these annotations allow us to convert
   the Voltha proto schemas into Yang schemas without changing the model
   definitions.
2) Updates to the Yang parser to handle the above annotations.
3) Initial work on the netconf GET RPCs (work in progress).
4) Cleanup.

Change-Id: I5e4f4217850f0beb1c41aca1b2530a41e4f8a809
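
The parser changes below (proto2yang.py) detect these Yang annotations as
custom protobuf options on the message descriptors, by walking the options
with ListFields(). A minimal sketch of that detection pattern, assuming the
compiled option definitions (the yang_options_pb2 module imported in
proto2yang.py) have been loaded so the voltha.yang_* extensions are
registered:

    from google.protobuf.descriptor_pb2 import DescriptorProto

    def find_yang_options(message_type):
        # Collect any voltha.yang_* options set on a message descriptor.
        # Sketch only; mirrors the ListFields() walk used in
        # traverse_message_options() in proto2yang.py below.
        assert isinstance(message_type, DescriptorProto)
        found = []
        if message_type.options:
            for fd, val in message_type.options.ListFields():
                if fd.full_name.startswith('voltha.yang_'):
                    found.append((fd.full_name, val))
        return found
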
diff --git a/netconf/capabilities.py b/netconf/capabilities.py
index f7c2c5a..78f5cf4 100755
--- a/netconf/capabilities.py
+++ b/netconf/capabilities.py
@@ -30,6 +30,7 @@
         return (
             C.NETCONF_BASE_10,
             C.NETCONF_BASE_11,
+            "urn:ietf:params:netconf:capability:writable-running:1.0",
             "urn:opencord:params:xml:ns:voltha:ietf-voltha",
             "urn:opencord:params:xml:ns:voltha:ietf-openflow_13",
             "urn:opencord:params:xml:ns:voltha:ietf-meta",
diff --git a/netconf/grpc_client/grpc_client.py b/netconf/grpc_client/grpc_client.py
index 1c92f71..d65410e 100644
--- a/netconf/grpc_client/grpc_client.py
+++ b/netconf/grpc_client/grpc_client.py
@@ -22,11 +22,9 @@
 
 import os
 import sys
-from random import randint
 from zlib import decompress
 
 import grpc
-from consul import Consul
 from grpc._channel import _Rendezvous
 from structlog import get_logger
 from twisted.internet import reactor
@@ -38,12 +36,10 @@
 from netconf.protos.schema_pb2 import SchemaServiceStub
 from google.protobuf.empty_pb2 import Empty
 from common.utils.consulhelpers import get_endpoint_from_consul
-from netconf.protos.voltha_pb2  import VolthaLocalServiceStub, \
+from netconf.protos.voltha_pb2 import VolthaLocalServiceStub, \
     VolthaGlobalServiceStub
-from twisted.internet import threads
 from google.protobuf import empty_pb2
 from google.protobuf.json_format import MessageToDict, ParseDict
-from simplejson import dumps, load
 
 log = get_logger()
 
@@ -90,17 +86,14 @@
         self.shutting_down = True
         log.info('stopped')
 
-
     def set_on_start_callback(self, on_start_callback):
         self.on_start_callback = on_start_callback
         return self
 
-
     def set_reconnect_callback(self, reconnect_callback):
         self.reconnect_callback = reconnect_callback
         return self
 
-
     def resolve_endpoint(self, endpoint):
         ip_port_endpoint = endpoint
         if endpoint.startswith('@'):
@@ -117,7 +110,6 @@
             host, port = ip_port_endpoint.split(':', 2)
             return host, int(port)
 
-
     @inlineCallbacks
     def connect(self):
         """
@@ -132,11 +124,12 @@
             # If host and port is not set then we will retry
             if host and port:
                 log.info('grpc-endpoint-connecting', host=host, port=port)
-                self.channel = grpc.insecure_channel('{}:{}'.format(host, port))
+                self.channel = grpc.insecure_channel(
+                    '{}:{}'.format(host, port))
 
-                # yang_from = self._retrieve_schema()
-                # log.info('proto-to-yang-schema', file=yang_from)
-                # self._compile_proto_files(yang_from)
+                yang_from = self._retrieve_schema()
+                log.info('proto-to-yang-schema', file=yang_from)
+                self._compile_proto_files(yang_from)
                 self._clear_backoff()
 
                 if self.on_start_callback is not None:
@@ -146,8 +139,8 @@
                 if self.reconnect_callback is not None:
                     reactor.callLater(0, self.reconnect_callback)
 
-                self.local_stub = VolthaLocalServiceStub(self.channel)
-                self.global_stub = VolthaGlobalServiceStub(self.channel)
+                # self.local_stub = voltha_pb2.VolthaLocalServiceStub(self.channel)
+                # self.global_stub = voltha_pb2.VolthaGlobalServiceStub(self.channel)
 
                 return
 
@@ -163,7 +156,7 @@
                 log.exception('cannot-connect', endpoint=_endpoint)
             yield self._backoff('unknown-error')
 
-        reactor.callLater(0, self.connect)
+        reactor.callLater(1, self.connect)
 
     def _backoff(self, msg):
         wait_time = self.RETRY_BACKOFF[min(self.retries,
@@ -202,14 +195,14 @@
             # instead of just one?
             proto_content = proto_file.proto
             log.info('saving-proto', fname=proto_fname, dir=self.work_dir,
-                      length=len(proto_content))
+                     length=len(proto_content))
             with open(os.path.join(self.work_dir, proto_fname), 'w') as f:
                 f.write(proto_content)
 
             desc_content = decompress(proto_file.descriptor)
             desc_fname = proto_fname.replace('.proto', '.desc')
             log.info('saving-descriptor', fname=desc_fname, dir=self.work_dir,
-                      length=len(desc_content))
+                     length=len(desc_content))
             with open(os.path.join(self.work_dir, desc_fname), 'wb') as f:
                 f.write(desc_content)
         return schemas.yang_from
@@ -233,7 +226,6 @@
         ))
         log.info('netconf-dir', dir=netconf_base_dir)
 
-
         for fname in [f for f in os.listdir(self.work_dir)
                       if f.endswith('.proto')]:
             log.info('filename', file=fname)
@@ -265,66 +257,85 @@
             os.system(cmd)
             log.info('compiled', file=fname)
 
-        # # test-load each _pb2 file to see all is right
-        # if self.work_dir not in sys.path:
-        #     sys.path.insert(0, self.work_dir)
-        #
-        # for fname in [f for f in os.listdir(self.work_dir)
-        #               if f.endswith('_pb2.py')]:
-        #     modname = fname[:-len('.py')]
-        #     log.debug('test-import', modname=modname)
-        #     _ = __import__(modname)
+            # # test-load each _pb2 file to see all is right
+            # if self.work_dir not in sys.path:
+            #     sys.path.insert(0, self.work_dir)
+            #
+            # for fname in [f for f in os.listdir(self.work_dir)
+            #               if f.endswith('_pb2.py')]:
+            #     modname = fname[:-len('.py')]
+            #     log.debug('test-import', modname=modname)
+            #     _ = __import__(modname)
 
-        #TODO: find a different way to test the generated yang files
+            # TODO: find a different way to test the generated yang files
 
-    @inlineCallbacks
-    def get_voltha_instance(self):
-        try:
-            res = yield threads.deferToThread(
-                    self.local_stub.GetVolthaInstance, empty_pb2.Empty())
-
-            out_data = MessageToDict(res, True, True)
-            returnValue(out_data)
-        except Exception, e:
-            log.error('failure', exception=repr(e))
-
-
-    #TODO: should be generated code
+    # TODO: should be generated code
+    # Focus for now is issuing a GET request for VolthaGlobalService or VolthaLocalService
     @inlineCallbacks
     def invoke_voltha_api(self, key):
-        # key = ''.join([service, '-', method])
+        # TODO: this should come from the request parameters
+        depth = [('get-depth', '-1')]
         try:
-            if key == 'VolthaGlobalService-GetVoltha':
-                res = yield threads.deferToThread(
-                    self.global_stub.GetVoltha, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-GetVolthaInstance':
-                res = yield threads.deferToThread(
-                    self.local_stub.GetVolthaInstance, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-GetHealth':
-                res = yield threads.deferToThread(
-                    self.local_stub.GetHealth, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-ListAdapters':
-                res = yield threads.deferToThread(
-                    self.local_stub.ListAdapters, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-ListLogicalDevices':
-                res = yield threads.deferToThread(
-                    self.local_stub.ListLogicalDevices, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-ListDevices':
-                res = yield threads.deferToThread(
-                    self.local_stub.ListDevices, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-ListDeviceTypes':
-                res = yield threads.deferToThread(
-                    self.local_stub.ListDeviceTypes, empty_pb2.Empty())
-            elif key == 'VolthaLocalService-ListDeviceGroups':
-                res = yield threads.deferToThread(
-                    self.local_stub.ListDeviceGroups, empty_pb2.Empty())
-            else: # for now just return voltha instance data
-                res = yield threads.deferToThread(
-                    self.local_stub.GetVolthaInstance, empty_pb2.Empty())
+            data = {}
+            req = ParseDict(data, empty_pb2.Empty())
+            service_method = key.split('-')
+            service = service_method[0]
+            method = service_method[1]
+            stub = None
+            # if service == 'VolthaGlobalService':
+            #     stub = VolthaGlobalServiceStub
+            # elif service == 'VolthaLocalService':
+            #     stub = VolthaLocalServiceStub
+            # else:
+            #     raise  # Exception
 
-            out_data = MessageToDict(res, True, True)
-            returnValue(out_data)
+            res, metadata = yield self.invoke(stub, method, req, depth)
+
+            returnValue(MessageToDict(res, True, True))
         except Exception, e:
             log.error('failure', exception=repr(e))
 
+    @inlineCallbacks
+    def invoke(self, stub, method_name, request, metadata, retry=1):
+        """
+        Invoke a gRPC call to the remote server and return the response.
+        :param stub: Reference to the *_pb2 service stub
+        :param method_name: The method name inside the service stub
+        :param request: The request protobuf message
+        :param metadata: [(str, str), (str, str), ...]
+        :return: The response protobuf message and returned trailing metadata
+        """
 
+        if not self.connected:
+            raise ServiceUnavailable()
+
+        try:
+            method = getattr(stub(self.channel), method_name)
+            response, rendezvous = method.with_call(request, metadata=metadata)
+            returnValue((response, rendezvous.trailing_metadata()))
+
+        except grpc._channel._Rendezvous, e:
+            code = e.code()
+            if code == grpc.StatusCode.UNAVAILABLE:
+                e = ServiceUnavailable()
+
+                if self.connected:
+                    self.connected = False
+                    yield self.connect()
+                    if retry > 0:
+                        response = yield self.invoke(stub, method_name,
+                                                     request, metadata,
+                                                     retry=retry - 1)
+                        returnValue(response)
+
+            elif code in (
+                    grpc.StatusCode.NOT_FOUND,
+                    grpc.StatusCode.INVALID_ARGUMENT,
+                    grpc.StatusCode.ALREADY_EXISTS):
+
+                pass  # don't log error, these occur naturally
+
+            else:
+                log.exception(e)
+
+            raise e
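
The new invoke() helper resolves the method on the stub by name, issues the
call with the given metadata, and retries once after reconnecting if the
channel reports UNAVAILABLE. A usage sketch, under the assumption that the
stub dispatch above gets filled in (the commented block suggests
VolthaLocalServiceStub / VolthaGlobalServiceStub; the get-depth pair mirrors
the metadata built in invoke_voltha_api):

    from google.protobuf import empty_pb2
    from twisted.internet.defer import inlineCallbacks, returnValue

    @inlineCallbacks
    def fetch_voltha_instance(client):
        # client is the gRPC client object defined in grpc_client.py;
        # the stub class (not an instance) is passed in, since invoke()
        # instantiates it against the current channel.
        metadata = [('get-depth', '-1')]
        res, trailing = yield client.invoke(
            VolthaLocalServiceStub, 'GetVolthaInstance',
            empty_pb2.Empty(), metadata)
        returnValue(res)
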
diff --git a/netconf/nc_rpc/base/get.py b/netconf/nc_rpc/base/get.py
index 05542dc..7334dba 100644
--- a/netconf/nc_rpc/base/get.py
+++ b/netconf/nc_rpc/base/get.py
@@ -20,40 +20,63 @@
 import netconf.nc_common.error as ncerror
 from netconf.constants import Constants as C
 from netconf.utils import filter_tag_match
+from twisted.internet.defer import inlineCallbacks, returnValue
+import dicttoxml
+from simplejson import dumps, load
 
 log = structlog.get_logger()
 
+
 class Get(Rpc):
+    def __init__(self, rpc_request, rpc_method, voltha_method_ref, grpc_client,
+                 session):
+        super(Get, self).__init__(rpc_request, rpc_method, voltha_method_ref,
+                                  grpc_client, session)
+        self._validate_parameters()
 
-	def __init__(self, rpc_request, rpc_method, grpc_client, session):
-		super(Get, self).__init__(rpc_request, rpc_method, grpc_client,
-								  session)
-		self._validate_parameters()
+    @inlineCallbacks
+    def execute(self):
+        log.info('get-request', session=self.session.session_id,
+                 method=self.rpc_method)
+        if self.rpc_response.is_error:
+            returnValue(self.rpc_response)
 
-	def execute(self):
-		log.info('get-request', session=self.session.session_id)
-		if self.rpc_response.is_error:
-			return self.rpc_response
+        # TODO: for debugging only, assume we are doing a voltha-getinstance
+        self.voltha_method_ref = 'VolthaLocalService-GetVolthaInstance'
+        # Invoke voltha via the grpc client
+        res_dict = yield self.grpc_client.invoke_voltha_api(self.voltha_method_ref)
 
-	def _validate_parameters(self):
-		log.info('validate-parameters',
-				 session=self.session.session_id,
-				 request=self.rpc_request,
-				 method=self.rpc_method
-				 )
-		self.params = self.rpc_method.getchildren()
-		if len(self.params) > 1:
-			self.rpc_response.is_error = True
-			self.rpc_response.node = ncerror.BadMsg(self.rpc_request)
-			return
+        # convert dict to xml
+        xml = dicttoxml.dicttoxml(res_dict, attr_type=False)
+        log.info('voltha-info', res=res_dict, xml=xml)
 
-		if self.params and not filter_tag_match(self.params[0], C.NC_FILTER):
-			self.rpc_response.is_error = True
-			self.rpc_response.node = ncerror.UnknownElement(
-				self.rpc_request, self.params[0])
-			return
+        root_elem = self.get_root_element(xml)
+        root_elem.tag = 'data'
 
-		if not self.params:
-			self.params = [None]
+        log.info('rpc-method', etree.tounicode(self.rpc_method,
+                                               pretty_print=True))
 
+        self.rpc_method.append(root_elem)
+        self.rpc_response.node = self.rpc_method
+        self.rpc_response.is_error = False
+
+        returnValue(self.rpc_response)
+
+
+    def _validate_parameters(self):
+        log.info('validate-parameters', session=self.session.session_id)
+        self.params = self.rpc_method.getchildren()
+        if len(self.params) > 1:
+            self.rpc_response.is_error = True
+            self.rpc_response.node = ncerror.BadMsg(self.rpc_request)
+            return
+
+        if self.params and not filter_tag_match(self.params[0], C.NC_FILTER):
+            self.rpc_response.is_error = True
+            self.rpc_response.node = ncerror.UnknownElement(
+                self.rpc_request, self.params[0])
+            return
+
+        if not self.params:
+            self.params = [None]
 
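The GET handler converts the gRPC response dict to XML with dicttoxml and
re-tags the root element as <data> before attaching it to the rpc reply.
That conversion in isolation, as a sketch over a hand-made dict (Python 2,
matching the codebase):

    import dicttoxml
    from lxml import etree

    def dict_to_data_element(res_dict):
        # attr_type=False drops the type="str"/type="dict" attributes
        # that dicttoxml would otherwise add to every element
        xml = dicttoxml.dicttoxml(res_dict, attr_type=False)
        root = etree.fromstring(xml)
        root.tag = 'data'  # NETCONF replies carry results under <data>
        return root

    elem = dict_to_data_element({'health': {'state': 'HEALTHY'}})
    print etree.tostring(elem, pretty_print=True)
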
diff --git a/netconf/nc_rpc/ext/get_voltha.py b/netconf/nc_rpc/ext/get_voltha.py
index 271a20e..a083401 100644
--- a/netconf/nc_rpc/ext/get_voltha.py
+++ b/netconf/nc_rpc/ext/get_voltha.py
@@ -44,10 +44,8 @@
         # Invoke voltha via the grpc client
         res_dict = yield self.grpc_client.invoke_voltha_api(self.voltha_method_ref)
 
-        # res_dict = yield self.grpc_client.get_voltha_instance()
-
         # convert dict to xml
-        xml = dicttoxml.dicttoxml(res_dict)
+        xml = dicttoxml.dicttoxml(res_dict, attr_type=False)
         log.info('voltha-info', res=res_dict, xml=xml)
 
         root_elem = self.get_root_element(xml)
diff --git a/netconf/nc_rpc/rpc.py b/netconf/nc_rpc/rpc.py
index fb35313..5b579c9 100644
--- a/netconf/nc_rpc/rpc.py
+++ b/netconf/nc_rpc/rpc.py
@@ -31,6 +31,7 @@
         self.voltha_method_ref = voltha_method_ref
         self.session = session
 
+
     def execute(self):
         """ run the command - returns a OperationResponse """
         pass
diff --git a/netconf/nc_rpc/rpc_factory.py b/netconf/nc_rpc/rpc_factory.py
index e409198..aa0ced1 100644
--- a/netconf/nc_rpc/rpc_factory.py
+++ b/netconf/nc_rpc/rpc_factory.py
@@ -80,36 +80,13 @@
 
         rpc_method = rpc_method[0]
 
-        if rpc_method.prefix is None:
-            log.error("rpc-method-has-no-prefix", msg_id=msg_id)
-            raise ncerror.BadMsg(rpc_node)
+        rpc_name = rpc_method.tag.replace(qmap('nc'), "")
 
-        try:
-            # extract the namespace, service and name
-            namespace = ''.join(
-                ['{', rpc_method.nsmap[rpc_method.prefix], '}'])
-            # rpc_name = rpc_method.tag.replace(qmap('nc'), "")
-            rpc = rpc_method.tag.replace(namespace, "").split('-')
-            rpc_service = rpc[0]
-            rpc_name = rpc[1]
-            log.info("rpc-request",
-                     namespace=namespace,
-                     service=rpc_service,
-                     name=rpc_name)
-        except Exception as e:
-            log.error("rpc-parsing-error", exception=repr(e))
-            raise ncerror.BadMsg(rpc_node)
-
-        class_handler = self.get_handler(namespace, rpc_service, rpc_name)
-        if class_handler is None:
-            # TODO: for now just assume anything in voltha namespace will be
-            #  handled by the same api
-            class_handler = self.get_handler(namespace, 'any', 'any')
-
-        voltha_method_ref = ''.join([rpc_service, '-', rpc_name])
+        log.info("rpc-request", rpc=rpc_name)
+        class_handler = self.rpc_class_handlers.get(rpc_name, None)
         if class_handler is not None:
-            return class_handler(rpc_node, rpc_method, voltha_method_ref,
-                                 grpc_channel, session)
+            return class_handler(rpc_node, rpc_method, None, grpc_channel,
+                                 session)
 
         log.error("rpc-not-implemented", rpc=rpc_name)
 
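The factory now dispatches on the bare RPC name, obtained by stripping the
base-namespace qualifier that lxml folds into the element tag. A small
sketch of that stripping, assuming qmap('nc') expands to the lxml-style
'{urn:ietf:params:xml:ns:netconf:base:1.0}' prefix:

    from lxml import etree

    NC_NS = '{urn:ietf:params:xml:ns:netconf:base:1.0}'

    node = etree.fromstring(
        '<get xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"/>')
    rpc_name = node.tag.replace(NC_NS, '')
    print rpc_name  # get
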
diff --git a/netconf/protoc_plugins/addressbook.proto b/netconf/protoc_plugins/addressbook.proto
deleted file mode 100644
index fc1a10f..0000000
--- a/netconf/protoc_plugins/addressbook.proto
+++ /dev/null
@@ -1,34 +0,0 @@
-// See README.txt for information and build instructions.
-
-syntax = "proto3";
-
-package tutorial;
-
-option java_package = "com.example.tutorial";
-option java_outer_classname = "AddressBookProtos";
-option csharp_namespace = "Google.Protobuf.Examples.AddressBook";
-
-message Person {
-  string name = 1;
-  int32 id = 2;        // Unique ID number for this person.
-  string email = 3;
-
-  enum PhoneType {
-    MOBILE = 0;
-    HOME = 1;
-    WORK = 2;
-  }
-
-  message PhoneNumber {
-    string number = 1;
-    PhoneType type = 2;
-  }
-
-  repeated PhoneNumber phones = 4;
-  repeated string khen = 5;
-}
-
-// Our address book file is just one of these.
-message AddressBook {
-  repeated Person people = 1;
-}
diff --git a/netconf/protoc_plugins/proto2yang.py b/netconf/protoc_plugins/proto2yang.py
index ae1999e..adc6338 100755
--- a/netconf/protoc_plugins/proto2yang.py
+++ b/netconf/protoc_plugins/proto2yang.py
@@ -37,7 +37,10 @@
 
 from jinja2 import Template
 from google.protobuf.compiler import plugin_pb2 as plugin
+from google.protobuf.descriptor_pb2 import DescriptorProto, FieldDescriptorProto
 from descriptor_parser import DescriptorParser
+import copy
+import yang_options_pb2
 
 from google.protobuf.descriptor import FieldDescriptor
 
@@ -208,18 +211,98 @@
 }
 """, trim_blocks=True, lstrip_blocks=True)
 
-# def traverse_dependencies(descriptor):
-#     dependencies = []
-#     proto_imports = descriptor.get('dependency', [])
-#     for proto_import in proto_imports:
-#         # If the import file has a directory path to it remove it as it is not
-#         # allowed in Yang.  The proto extension should be removed as well
-#         dependencies.append (
-#             {
-#             'name' : proto_import.split('/')[-1][:-len('.proto')]
-#             }
-#         )
-#     return dependencies
+
+def traverse_field_options(fields, prefix):
+    field_options = []
+    for field in fields:
+        assert isinstance(field, FieldDescriptorProto)
+        full_name = prefix + '-' + field.name
+        option = None
+        if field.type == FieldDescriptor.TYPE_MESSAGE and field.label != \
+                FieldDescriptor.LABEL_REPEATED:
+            if field.options:
+                for fd, val in field.options.ListFields():
+                    if fd.full_name == 'voltha.yang_inline_node':
+                        field_options.append(
+                            {'name' : full_name,
+                             'option' : fd.full_name,
+                             'proto_name' : val.id,
+                             'proto_type' : val.type
+                             }
+                        )
+    return field_options
+
+
+def traverse_message_options(message_types, prefix):
+    message_options = []
+    for message_type in message_types:
+        assert isinstance(message_type, DescriptorProto)
+        full_name = prefix + '-' + message_type.name
+        option_rules = []
+
+        options = message_type.options
+        if options:
+            for fd, val in options.ListFields():
+                if fd.full_name in ['voltha.yang_child_rule',
+                                    'voltha.yang_message_rule']:
+                    option_rules.append({
+                        'name' : fd.full_name,
+                        'value' : val
+                    })
+
+        # parse fields for options
+        field_options = traverse_field_options(message_type.field,
+                                               full_name)
+
+        # parse nested messages
+        nested_messages_options = []
+        nested = message_type.nested_type
+        if nested:
+            nested_messages_options = traverse_message_options(nested,
+                                                              full_name)
+
+        if option_rules or nested_messages_options or field_options:
+            message_options.append(
+                {
+                    'name': full_name,
+                    'options': option_rules,
+                    'field_options' : field_options,
+                    'nested_options': nested_messages_options,
+                }
+            )
+    return message_options
+
+
+def get_message_options(name, options):
+    result = None
+    for opt in options:
+        if opt['name'] == name:
+            return opt['options']
+        if opt['nested_options']:
+            result = get_message_options(name, opt['nested_options'])
+        if result:
+            return result
+
+def get_field_options(name, options):
+    result = None
+    for opt in options:
+        if opt['field_options']:
+            for field_opt in opt['field_options']:
+                if field_opt['name'] == name:
+                    result = field_opt
+        if opt['nested_options']:
+            result = get_field_options(name, opt['nested_options'])
+        if result:
+            return result
+
+
+def traverse_options(proto_file):
+    package = proto_file.name
+    prefix = package.replace('.proto', '')
+    if proto_file.message_type:
+        message_options = traverse_message_options(proto_file.message_type,
+                                                   prefix)
+        return message_options
 
 
 def traverse_messages(message_types, prefix, referenced_messages):
@@ -227,8 +310,8 @@
     for message_type in message_types:
         assert message_type['_type'] == 'google.protobuf.DescriptorProto'
 
-        # full_name = prefix + '-' + message_type['name']
-        full_name = message_type['name']
+        full_name = prefix + '-' + message_type['name']
+        name = message_type['name']
 
         # parse the fields
         fields = traverse_fields(message_type.get('field', []), full_name,
@@ -241,17 +324,16 @@
         nested = message_type.get('nested_type', [])
         nested_messages = traverse_messages(nested, full_name,
                                             referenced_messages)
+
         messages.append(
             {
-                'name': full_name,
+                'full_name': full_name,
+                'name': name,
                 'fields': fields,
                 'enums': enums,
-                # 'extensions': extensions,
                 'messages': nested_messages,
                 'description': remove_unsupported_characters(
                     message_type.get('_description', '')),
-                # 'extension_ranges': extension_ranges,
-                # 'oneof': oneof
             }
         )
     return messages
@@ -271,7 +353,7 @@
 
         fields.append(
             {
-                # 'name': prefix + '-' + field.get('name', ''),
+                'full_name': prefix + '-' + field.get('name', ''),
                 'name': field.get('name', ''),
                 'label': field.get('label', ''),
                 'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
@@ -291,11 +373,12 @@
     enums = []
     for enum in enums_desc:
         assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
-        # full_name = prefix + '-' + enum.get('name', '')
-        full_name = enum.get('name', '')
+        full_name = prefix + '-' + enum.get('name', '')
+        name = enum.get('name', '')
         enums.append(
             {
-                'name': full_name,
+                'full_name': full_name,
+                'name': name,
                 'value': enum.get('value', ''),
                 'description': remove_unsupported_characters(enum.get(
                     '_description', ''))
@@ -364,23 +447,11 @@
     name = rchop(descriptor.get('name', ''), '.proto')
     package = descriptor.get('package', '')
     description = descriptor.get('_description', '')
-    # imports=traverse_dependencies(descriptor)
     messages = traverse_messages(descriptor.get('message_type', []),
-                                 package, referenced_messages)
-    enums = traverse_enums(descriptor.get('enum_type', []), package)
+                                 name, referenced_messages)
+    enums = traverse_enums(descriptor.get('enum_type', []), name)
     services = traverse_services(descriptor.get('service', []),
                                  referenced_messages)
-    # extensions = _traverse_extensions(descriptors)
-    # options = _traverse_options(descriptors)
-    # set_messages_keys(messages)
-    # unique_referred_messages_with_keys = []
-    # for message_name in list(set(referenced_messages)):
-    #     unique_referred_messages_with_keys.append(
-    #         {
-    #             'name': message_name,
-    #             'key': get_message_key(message_name, messages)
-    #         }
-    #     )
 
     # Get a list of type definitions (messages, enums) defined in this
     # descriptor
@@ -391,20 +462,113 @@
         'name': name.split('/')[-1],
         'package': package,
         'description': description,
-        # 'imports' : imports,
         'messages': messages,
         'enums': enums,
         'services': services,
         'defined_types' : defined_types,
         'referenced_messages': list(set(referenced_messages)),
-        # TODO:  simplify for easier jinja2 template use
-        # 'referred_messages_with_keys': unique_referred_messages_with_keys,
-        # 'extensions': extensions,
-        # 'options': options
     }
     return data
 
 
+# For now, annotations are added to first-level messages only, so nested
+# messages do not need to be handled yet.
+def move_message_to_parent_level(message, messages, enums):
+    new_message = []
+    new_enum = copy.deepcopy(enums)
+    for msg in messages:
+        if msg['full_name'] == message['full_name']:
+            # Move all sub messages and enums to top level
+            if msg['messages']:
+                new_message = new_message + copy.deepcopy(msg['messages'])
+            if msg['enums']:
+                new_enum = new_enum + copy.deepcopy(msg['enums'])
+
+            # if the message has some fields then enclose them in a container
+            if msg['fields']:
+                new_message.append(
+                    {
+                        'full_name': msg['full_name'],
+                        'name': msg['name'],
+                        'fields': msg['fields'],
+                        'description': msg['description'],
+                        'messages': [],
+                        'enums': []
+                    }
+                )
+        else:
+            new_message.append(msg)
+
+    return new_message, new_enum
+
+
+def update_messages_per_annotations_rule(options, messages, enums):
+    new_messages = messages
+    new_enums = enums
+    # Used when a message needs to exist both as a type and a container
+    duplicate_messages = []
+    for message in messages:
+        opts = get_message_options(message['full_name'], options)
+        if opts:
+            for opt in opts:
+                if opt['name'] == 'voltha.yang_child_rule':
+                    new_messages, new_enums = move_message_to_parent_level(
+                        message, new_messages, new_enums)
+                elif opt['name'] == 'voltha.yang_message_rule':
+                    # create a duplicate message
+                    # TODO: update references to point to the grouping
+                    duplicate_messages.append(message['name'])
+                    clone = copy.deepcopy(message)
+                    clone['full_name'] = ''.join([clone['full_name'], '_', 'grouping'])
+                    clone['name'] = ''.join([clone['name'], '_', 'grouping'])
+                    new_messages = new_messages + [clone]
+
+    return new_messages, new_enums, duplicate_messages
+
+
+def inline_field(message, field, option, messages):
+    new_message = copy.deepcopy(message)
+    new_message['fields'] = []
+    for f in message['fields']:
+        if f['full_name'] == field['full_name']:
+            # look for the message this field referred to.
+            # Addresses only top-level messages
+            for m in messages:
+                # 'proto_type' is the name of the message type this field
+                # refers to
+                if m['full_name'] == option['proto_type']:
+                    # Copy all content of m into the field
+                    new_message['fields'] = new_message['fields'] + \
+                                            copy.deepcopy(m['fields'])
+                    new_message['enums'] = new_message['enums'] + \
+                                           copy.deepcopy(m['enums'])
+                    new_message['messages'] = new_message['messages'] + \
+                                           copy.deepcopy(m['messages'])
+        else:
+            new_message['fields'].append(f)
+
+    return new_message
+
+# Address only annotations on top-level messages, i.e. no nested messages
+def update_fields_per_annotations_rule(options, messages):
+    new_messages = []
+    for message in messages:
+        new_message = None
+        for field in message['fields']:
+            opt = get_field_options(field['full_name'], options)
+            if opt:
+                if opt['option'] == 'voltha.yang_inline_node':
+                    new_message = inline_field(message, field, opt, messages)
+
+        if new_message:
+            new_messages.append(new_message)
+        else:
+            new_messages.append(message)
+
+    return new_messages
+
+
+
 def set_messages_keys(messages):
     for message in messages:
         message['key'] = _get_message_key(message, messages)
@@ -457,6 +621,40 @@
     module['imports'] = [{'name' : i} for i in used_imports]
 
 
+def update_referred_messages(all_referred_messages, all_duplicate_messages):
+    new_referred_messages = []
+    for ref in all_referred_messages:
+        if ref in all_duplicate_messages:
+            new_referred_messages.append(''.join([ref, '_grouping']))
+        else:
+            new_referred_messages.append(ref)
+
+    return new_referred_messages
+
+def update_message_references_based_on_duplicates(duplicates, messages):
+    # duplicates holds the messages that exist both as a grouping and as
+    # a container.  All references to the container name by existing
+    # fields should be changed to the grouping name instead.
+    for m in messages:
+        for f in m['fields']:
+            if f['type'] in duplicates:
+                f['type'] = ''.join([f['type'], '_grouping'])
+        if m['messages']:
+            update_message_references_based_on_duplicates(duplicates,
+                                                      m['messages'])
+
+def update_service_references_based_on_duplicates(duplicates, services):
+    # duplicates holds the messages that exist both as a grouping and as
+    # a container.  All references to the container name by RPC inputs
+    # and outputs should be changed to the grouping name instead.
+    for s in services:
+        for m in s['methods']:
+            if m['input_ref'] and m['input'] in duplicates:
+                m['input'] = ''.join([m['input'], '_grouping'])
+            if m['output_ref'] and m['output'] in duplicates:
+                m['output'] = ''.join([m['output'], '_grouping'])
+
+
 def generate_code(request, response):
     assert isinstance(request, plugin.CodeGeneratorRequest)
 
@@ -467,13 +665,38 @@
     all_proto_data = []
     all_referred_messages = []
     all_messages = []
+    all_duplicate_messages = []
     for proto_file in request.proto_file:
+        options = traverse_options(proto_file)
+        # print options
+
         native_data = parser.parse_file_descriptor(proto_file,
                                                    type_tag_name='_type',
                                                    fold_comments=True)
 
         # Consolidate the defined types across imports
         yang_data = traverse_desc(native_data)
+
+        duplicates = []
+        if options:
+            new_messages, new_enums, duplicates = \
+                update_messages_per_annotations_rule(
+                options, yang_data['messages'], yang_data['enums'])
+
+            new_messages = update_fields_per_annotations_rule(options,
+                                                            new_messages)
+
+            # TODO: apply this change across all schema files.  Not needed
+            # yet, as annotations are currently single-file based.
+            if duplicates:
+                update_message_references_based_on_duplicates(duplicates,
+                                                        new_messages)
+                update_service_references_based_on_duplicates(duplicates,
+                                                              yang_data['services'])
+
+            yang_data['messages'] = new_messages
+            yang_data['enums'] = new_enums
+
         for type in yang_data['defined_types']:
             all_defined_types.append(
                 {
@@ -482,6 +705,7 @@
                 }
             )
 
+
         all_proto_data.append(
             {
                 'file_name': '{}-{}'.format('ietf', proto_file.name.split(
@@ -490,12 +714,18 @@
             }
         )
 
+        # Consolidate all duplicate messages
+        all_duplicate_messages = all_duplicate_messages + duplicates
+
         # Consolidate referred messages across imports
         all_referred_messages = all_referred_messages + yang_data['referenced_messages']
 
         # consolidate all messages
         all_messages = all_messages + yang_data['messages']
 
+    # Update the referred_messages
+    all_referred_messages = update_referred_messages(all_referred_messages, all_duplicate_messages)
+
     # Set the message keys - required for List definitions (repeated label)
     set_messages_keys(all_messages)
     unique_referred_messages_with_keys = []
@@ -507,6 +737,7 @@
                 }
             )
 
+    # print_referred_msg(unique_referred_messages_with_keys)
     # Create the files
     for proto_data in all_proto_data:
         f = response.file.add()
@@ -514,7 +745,9 @@
         proto_data['module']['data_types'] = all_defined_types
         proto_data['module']['referred_messages'] = all_referred_messages
         proto_data['module']['referred_messages_with_keys'] = unique_referred_messages_with_keys
+        proto_data['module']['duplicates'] = all_duplicate_messages
         update_module_imports(proto_data['module'])
+        # print_message(proto_data['module']['messages'])
         f.content = template_yang.render(module=proto_data['module'])
 
 
@@ -524,8 +757,6 @@
         _type, _ = YANG_TYPE_MAP[type]
         if _type in ['enumeration', 'message', 'group']:
             return field['type_name'].split('.')[-1]
-            # return remove_first_character_if_match(field['type_name'],
-            #                                        '.').replace('.', '-')
         else:
             return _type
     else:
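
Of the annotations handled above, voltha.yang_message_rule is the one that
duplicates a message: the message is cloned under a '_grouping' suffix so it
can exist both as a yang grouping (for references) and as a container, and
field and RPC references are then rewritten to point at the clone. A
condensed sketch of the clone step, using the same dict shape that
traverse_messages() produces:

    import copy

    def clone_as_grouping(message):
        # Mirrors the duplication in update_messages_per_annotations_rule()
        clone = copy.deepcopy(message)
        clone['full_name'] = clone['full_name'] + '_grouping'
        clone['name'] = clone['name'] + '_grouping'
        return clone

    msg = {'full_name': 'voltha-Voltha', 'name': 'Voltha', 'fields': [],
           'enums': [], 'messages': [], 'description': ''}
    print clone_as_grouping(msg)['name']  # Voltha_grouping
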
diff --git a/netconf/protoc_plugins/proto2yang_work_latest.py b/netconf/protoc_plugins/proto2yang_work_latest.py
deleted file mode 100755
index 6a2a3e7..0000000
--- a/netconf/protoc_plugins/proto2yang_work_latest.py
+++ /dev/null
@@ -1,601 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2016 the original author or authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""protoc plugin to convert a protobuf schema to a yang schema
-
-   - basic support for message, fields. enumeration, service, method
-
-   - yang semantic rules needs to be implemented
-
-   - to run this plugin :
-
-   $ python -m grpc.tools.protoc -I.
-   --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto
-
-   - the above will produce a ietf-<proto file>.yang file formatted for yang
-
-   - two examples of proto that can be used in the same directory are
-   yang.proto and addressbook.proto
-
-"""
-
-import sys
-
-from jinja2 import Template
-from google.protobuf.compiler import plugin_pb2 as plugin
-from descriptor_parser import DescriptorParser
-
-from google.protobuf.descriptor import FieldDescriptor
-
-template_yang = Template("""
-module ietf-{{ module.name }} {
-
-    {% macro set_module_prefix(type) %}
-        {% for t in module.data_types %}
-            {% if t.type == type %}
-                {% if t.module != module.name %} {{ t.module }}:{{ type }};
-                {% else %} {{ type }};
-                {% endif %}
-                {% set found=True %}
-            {% endif %}
-        {% if loop.last %}
-            {% if not found %} {{ type }}; {% endif %}
-        {% endif %}
-        {% endfor %}
-    {% endmacro %}
-
-    namespace "urn:opencord:params:xml:ns:voltha:ietf-{{ module.name }}";
-    prefix {{ module.name }};
-
-    {% for imp in module.imports %}
-    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
-    {% endfor %}
-
-    organization "CORD";
-    contact
-        " Any name";
-
-    description
-        "{{ module.description }}";
-
-    revision "2016-11-15" {
-        description "Initial revision.";
-        reference "reference";
-    }
-
-    {% for enum in module.enums %}
-    typedef {{ enum.name }} {
-        type enumeration {
-        {% for v in enum.value %}
-            enum {{ v.name }} {
-                description "{{ v.description }}";
-            }
-        {% endfor %}
-        }
-        description
-            "{{ enum.description }}";
-    }
-    {% endfor %}
-
-    {% for message in module.messages recursive %}
-    {% if message.name in module.referred_messages %}
-    grouping {{ message.name }} {
-    {% else %}
-    container {{ message.name }} {
-    {% endif %}
-        description
-            "{{ message.description }}";
-        {% for field in message.fields %}
-        {% if field.type_ref %}
-        {% for dict_item in module.referred_messages_with_keys %}
-            {% if dict_item.name == field.type %}
-                {% if not field.repeated %}
-        container {{ field.name }} {
-                {% else %}
-        list {{ field.name }} {
-            key "{{ dict_item.key }}";
-            {% if not field.repeated %}
-            max-elements 1;
-            {% endif %}
-            {% endif %}
-            uses {{ set_module_prefix(field.type) }}
-            description
-                "{{ field.description }}";
-        }
-            {% endif %}
-        {% endfor %}
-        {% elif field.repeated %}
-        list {{ field.name }} {
-            key "{{ field.name }}";
-            leaf {{ field.name }} {
-                {% if field.type == "decimal64" %}
-                type {{ field.type }} {
-                   fraction-digits 5;
-                }
-                {% else %}
-                type {{ set_module_prefix(field.type) }}
-                {% endif %}
-                description
-                    "{{ field.description }}";
-            }
-            description
-                "{{ field.description }}";
-        }
-        {% else %}
-        leaf {{ field.name }} {
-            {% if field.type == "decimal64" %}
-            type {{ field.type }} {
-               fraction-digits 5;
-            }
-            {% else %}
-            type {{ set_module_prefix(field.type) }}
-            {% endif %}
-            description
-                "{{ field.description }}";
-        }
-        {% endif %}
-
-        {% endfor %}
-        {% for enum_type in message.enums %}
-        typedef {{ enum_type.name }} {
-            type enumeration {
-            {% for v in enum_type.value %}
-                enum {{ v.name }} {
-                    description "{{ v.description }}";
-                }
-            {% endfor %}
-            }
-            description
-                "{{ enum_type.description }}";
-        }
-
-        {% endfor %}
-    {% if message.messages %}
-    {{ loop (message.messages)|indent(4, false) }}
-    {% endif %}
-    }
-
-    {% endfor %}
-    {% for service in module.services %}
-    {% if service.description %}
-    /*  {{ service.description }}" */
-    {% endif %}
-    {% for method in service.methods %}
-    rpc {{ service.service }}-{{ method.method }} {
-        description
-            "{{ method.description }}";
-        {% if method.input %}
-        input {
-            {% if method.input_ref %}
-            uses {{ set_module_prefix(method.input) }}
-            {% else %}
-            leaf {{ method.input }} {
-                type {{ set_module_prefix(method.input) }}
-            }
-            {% endif %}
-        }
-        {% endif %}
-        {% if method.output %}
-        output {
-            {% if method.output_ref %}
-            uses {{ set_module_prefix(method.output) }}
-            {% else %}
-            leaf {{ method.output }} {
-                type {{ set_module_prefix(method.output) }}
-            }
-            {% endif %}
-        }
-        {% endif %}
-    }
-
-    {% endfor %}
-
-    {% endfor %}
-}
-""", trim_blocks=True, lstrip_blocks=True)
-
-def traverse_dependencies(descriptor):
-    dependencies = []
-    proto_imports = descriptor.get('dependency', [])
-    for proto_import in proto_imports:
-        # If the import file has a directory path to it remove it as it is not
-        # allowed in Yang.  The proto extension should be removed as well
-        dependencies.append (
-            {
-            'name' : proto_import.split('/')[-1][:-len('.proto')]
-            }
-        )
-    return dependencies
-
-
-def traverse_messages(message_types, prefix, referenced_messages):
-    messages = []
-    for message_type in message_types:
-        assert message_type['_type'] == 'google.protobuf.DescriptorProto'
-
-        # full_name = prefix + '-' + message_type['name']
-        full_name = message_type['name']
-
-        # parse the fields
-        fields = traverse_fields(message_type.get('field', []), full_name,
-                                 referenced_messages)
-
-        # parse the enums
-        enums = traverse_enums(message_type.get('enum_type', []), full_name)
-
-        # parse nested messages
-        nested = message_type.get('nested_type', [])
-        nested_messages = traverse_messages(nested, full_name,
-                                            referenced_messages)
-        messages.append(
-            {
-                'name': full_name,
-                'fields': fields,
-                'enums': enums,
-                # 'extensions': extensions,
-                'messages': nested_messages,
-                'description': remove_unsupported_characters(
-                    message_type.get('_description', '')),
-                # 'extension_ranges': extension_ranges,
-                # 'oneof': oneof
-            }
-        )
-    return messages
-
-
-def traverse_fields(fields_desc, prefix, referenced_messages):
-    fields = []
-    for field in fields_desc:
-        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
-        yang_base_type = is_base_type(field['type'])
-        _type = get_yang_type(field)
-        if not yang_base_type:
-            referenced_messages.append(_type)
-
-        fields.append(
-            {
-                # 'name': prefix + '-' + field.get('name', ''),
-                'name': field.get('name', ''),
-                'label': field.get('label', ''),
-                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
-                'number': field.get('number', ''),
-                'options': field.get('options', ''),
-                'type_name': field.get('type_name', ''),
-                'type': _type,
-                'type_ref': not yang_base_type,
-                'description': remove_unsupported_characters(field.get(
-                    '_description', ''))
-            }
-        )
-    return fields
-
-
-def traverse_enums(enums_desc, prefix):
-    enums = []
-    for enum in enums_desc:
-        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
-        # full_name = prefix + '-' + enum.get('name', '')
-        full_name = enum.get('name', '')
-        enums.append(
-            {
-                'name': full_name,
-                'value': enum.get('value', ''),
-                'description': remove_unsupported_characters(enum.get(
-                    '_description', ''))
-            }
-        )
-    return enums
-
-
-def traverse_services(service_desc, referenced_messages):
-    services = []
-    for service in service_desc:
-        methods = []
-        for method in service.get('method', []):
-            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'
-
-            input_name = method.get('input_type')
-            input_ref = False
-            if not is_base_type(input_name):
-                input_name = remove_first_character_if_match(input_name, '.')
-                # input_name = input_name.replace(".", "-")
-                input_name = input_name.split('.')[-1]
-                referenced_messages.append(input_name)
-                input_ref = True
-
-            output_name = method.get('output_type')
-            output_ref = False
-            if not is_base_type(output_name):
-                output_name = remove_first_character_if_match(output_name, '.')
-                # output_name = output_name.replace(".", "-")
-                output_name = output_name.split('.')[-1]
-                referenced_messages.append(output_name)
-                output_ref = True
-
-            methods.append(
-                {
-                    'method': method.get('name', ''),
-                    'input': input_name,
-                    'input_ref': input_ref,
-                    'output': output_name,
-                    'output_ref': output_ref,
-                    'description': remove_unsupported_characters(method.get(
-                        '_description', '')),
-                    'server_streaming': method.get('server_streaming',
-                                                   False) == True
-                }
-            )
-        services.append(
-            {
-                'service': service.get('name', ''),
-                'methods': methods,
-                'description': remove_unsupported_characters(service.get(
-                    '_description', '')),
-            }
-        )
-    return services
-
-
-def rchop(thestring, ending):
-    if thestring.endswith(ending):
-        return thestring[:-len(ending)]
-    return thestring
-
-
-def traverse_desc(descriptor):
-    referenced_messages = []
-    name = rchop(descriptor.get('name', ''), '.proto')
-    package = descriptor.get('package', '')
-    description = descriptor.get('_description', '')
-    # imports=traverse_dependencies(descriptor)
-    messages = traverse_messages(descriptor.get('message_type', []),
-                                 package, referenced_messages)
-    enums = traverse_enums(descriptor.get('enum_type', []), package)
-    services = traverse_services(descriptor.get('service', []),
-                                 referenced_messages)
-    # extensions = _traverse_extensions(descriptors)
-    # options = _traverse_options(descriptors)
-    # set_messages_keys(messages)
-    # unique_referred_messages_with_keys = []
-    # for message_name in list(set(referenced_messages)):
-    #     unique_referred_messages_with_keys.append(
-    #         {
-    #             'name': message_name,
-    #             'key': get_message_key(message_name, messages)
-    #         }
-    #     )
-
-    # Get a list of type definitions (messages, enums) defined in this
-    # descriptor
-    defined_types = [m['name'].split('/')[-1] for m in messages] + \
-                    [e['name'].split('/')[-1]  for e in enums]
-
-    data = {
-        'name': name.split('/')[-1],
-        'package': package,
-        'description': description,
-        # 'imports' : imports,
-        'messages': messages,
-        'enums': enums,
-        'services': services,
-        'defined_types' : defined_types,
-        'referenced_messages': list(set(referenced_messages)),
-        # TODO:  simplify for easier jinja2 template use
-        # 'referred_messages_with_keys': unique_referred_messages_with_keys,
-        # 'extensions': extensions,
-        # 'options': options
-    }
-    return data
-
-
-def set_messages_keys(messages):
-    for message in messages:
-        message['key'] = _get_message_key(message, messages)
-        if message['messages']:
-            set_messages_keys(message['messages'])
-
-def _get_message_key(message, messages):
-    # assume key is first yang base type field
-    for field in message['fields']:
-        if not field['type_ref']:
-            return field['name']
-        else:
-            # if the field name is a message then loop for the key in that
-            # message
-            ref_message = _get_message(field['type'], messages)
-            if ref_message:
-                return _get_message_key(ref_message, messages)
-
-    # no key yet - search nested messaged
-    for m in message['messages']:
-        key = _get_message_key(m, messages)
-        if key is not None:
-            return key
-    else:
-        return None
-
-def _get_message(name, messages):
-    for m in messages:
-        if m['name'] == name:
-            return m
-    return None
-
-def get_message_key(message_name, messages):
-    for message in messages:
-        if message_name == message['name']:
-            return message['key']
-        if message['messages']:
-            return get_message_key(message_name, message['messages'])
-    return None
-
-
-def update_module_imports(module):
-    used_imports = []
-    for ref_msg in module['referenced_messages']:
-        for type_dict in module['data_types']:
-            if ref_msg == type_dict['type']:
-                if module['name'] != type_dict['module']:
-                    print '{}++++{}'.format(module['name'], type_dict[
-                        'module'])
-                    used_imports.append(
-                        {
-                            'name' : type_dict['module']
-                        }
-                    )
-                break
-    module['imports'] = used_imports
-
-def generate_code(request, response):
-    assert isinstance(request, plugin.CodeGeneratorRequest)
-
-    parser = DescriptorParser()
-
-    # First process the proto file with the imports
-    all_defined_types = []
-    all_proto_data = []
-    all_referred_messages = []
-    all_messages = []
-    for proto_file in request.proto_file:
-        native_data = parser.parse_file_descriptor(proto_file,
-                                                   type_tag_name='_type',
-                                                   fold_comments=True)
-
-        # Consolidate the defined types across imports
-        yang_data = traverse_desc(native_data)
-        for type in yang_data['defined_types']:
-            all_defined_types.append(
-                {
-                    'type' : type,
-                    'module' : yang_data['name']
-                }
-            )
-
-        all_proto_data.append(
-            {
-                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
-                    '/')[-1].replace('.proto','.yang')),
-                'module': yang_data
-            }
-        )
-
-        # Consolidate referred messages across imports
-        all_referred_messages = all_referred_messages + yang_data['referenced_messages']
-
-        # consolidate all messages
-        all_messages = all_messages + yang_data['messages']
-
-    # Set the message keys - required for List definitions (repeated label)
-    set_messages_keys(all_messages)
-    unique_referred_messages_with_keys = []
-    for m in all_messages:
-        unique_referred_messages_with_keys.append(
-                {
-                    'name': m['name'],
-                    'key': m['key']
-                }
-            )
-
-    # Create the files
-    for proto_data in all_proto_data:
-        f = response.file.add()
-        f.name = proto_data['file_name']
-        proto_data['module']['data_types'] = all_defined_types
-        proto_data['module']['referred_messages'] = all_referred_messages
-        proto_data['module']['referred_messages_with_keys'] = unique_referred_messages_with_keys
-        update_module_imports(proto_data['module'])
-        f.content = template_yang.render(module=proto_data['module'])
-
-
-def get_yang_type(field):
-    type = field['type']
-    if type in YANG_TYPE_MAP.keys():
-        _type, _ = YANG_TYPE_MAP[type]
-        if _type in ['enumeration', 'message', 'group']:
-            return field['type_name'].split('.')[-1]
-            # return remove_first_character_if_match(field['type_name'],
-            #                                        '.').replace('.', '-')
-        else:
-            return _type
-    else:
-        return type
-
-
-def is_base_type(type):
-    # check numeric value of the type first
-    if type in YANG_TYPE_MAP.keys():
-        _type, _ = YANG_TYPE_MAP[type]
-        return _type not in ['message', 'group']
-    else:
-        # proto name of the type
-        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
-                  _format == type and _format not in ['message', 'group']]
-        return len(result) > 0
-
-
-def remove_unsupported_characters(text):
-    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
-    return ''.join([i if i not in unsupported_characters else ' ' for i in
-                    text])
-
-
-def remove_first_character_if_match(str, char):
-    if str.startswith(char):
-        return str[1:]
-    return str
-
-
-YANG_TYPE_MAP = {
-    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
-    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
-    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
-    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
-    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
-    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
-    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
-    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_STRING: ('string', 'string'),
-    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
-    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
-    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
-    FieldDescriptor.TYPE_GROUP: ('group', 'group')
-}
-
-if __name__ == '__main__':
-    # Read request message from stdin
-    data = sys.stdin.read()
-
-    # Parse request
-    request = plugin.CodeGeneratorRequest()
-    request.ParseFromString(data)
-
-    # Create response
-    response = plugin.CodeGeneratorResponse()
-
-    # Generate code
-    generate_code(request, response)
-
-    # Serialise response message
-    output = response.SerializeToString()
-
-    # Write to stdout
-    sys.stdout.write(output)
-    # print is_base_type(9)
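For reference, the deleted plugin resolved field types through YANG_TYPE_MAP in
two ways: by the numeric FieldDescriptor type constant, or by the proto-format
name. A minimal standalone sketch of that lookup, using a trimmed copy of the
map rather than the full plugin:

    from google.protobuf.descriptor import FieldDescriptor

    # Trimmed copy of the plugin's (yang_type, proto_format) map.
    YANG_TYPE_MAP = {
        FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
        FieldDescriptor.TYPE_STRING: ('string', 'string'),
        FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
    }

    def is_base_type(type_):
        # Numeric descriptor constants are looked up directly; message and
        # group types are not yang base types (they become groupings or
        # containers rather than leafs).
        if type_ in YANG_TYPE_MAP:
            return YANG_TYPE_MAP[type_][0] not in ['message', 'group']
        # Otherwise treat the argument as a proto-format name, e.g. 'string'.
        return any(fmt == type_ and fmt not in ['message', 'group']
                   for (_, fmt) in YANG_TYPE_MAP.values())

    print(is_base_type(FieldDescriptor.TYPE_STRING))   # True  -> yang leaf
    print(is_base_type(FieldDescriptor.TYPE_MESSAGE))  # False -> grouping/container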
diff --git a/netconf/protoc_plugins/proto2yang_work_list.py b/netconf/protoc_plugins/proto2yang_work_list.py
deleted file mode 100755
index eba8924..0000000
--- a/netconf/protoc_plugins/proto2yang_work_list.py
+++ /dev/null
@@ -1,552 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2016 the original author or authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""protoc plugin to convert a protobuf schema to a yang schema
-
-   - basic support for message, fields. enumeration, service, method
-
-   - yang semantic rules needs to be implemented
-
-   - to run this plugin :
-
-   $ python -m grpc.tools.protoc -I.
-   --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto
-
-   - the above will produce a ietf-<proto file>.yang file formatted for yang
-
-   - two examples of proto that can be used in the same directory are
-   yang.proto and addressbook.proto
-
-"""
-
-import sys
-
-from jinja2 import Template
-from google.protobuf.compiler import plugin_pb2 as plugin
-from descriptor_parser import DescriptorParser
-
-from google.protobuf.descriptor import FieldDescriptor
-
-template_yang = Template("""
-module ietf-{{ module.name }} {
-
-    {% macro set_module_prefix(type) %}
-        {% for t in module.data_types %}
-            {% if t.type == type %}
-                {% if t.module != module.name %} {{ t.module }}:{{ type }};
-                {% else %} {{ type }};
-                {% endif %}
-                {% set found=True %}
-            {% endif %}
-        {% if loop.last %}
-            {% if not found %} {{ type }}; {% endif %}
-        {% endif %}
-        {% endfor %}
-    {% endmacro %}
-
-    yang-version 1.1;
-    namespace "urn:ietf:params:xml:ns:yang:ietf-{{ module.name }}";
-    prefix {{ module.name }};
-
-    {% for imp in module.imports %}
-    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
-    {% endfor %}
-
-    organization "CORD";
-    contact
-        " Any name";
-
-    description
-        "{{ module.description }}";
-
-    revision "2016-11-15" {
-        description "Initial revision.";
-        reference "reference";
-    }
-
-    {% for enum in module.enums %}
-    typedef {{ enum.name }} {
-        type enumeration {
-        {% for v in enum.value %}
-            enum {{ v.name }} {
-                description "{{ v.description }}";
-            }
-        {% endfor %}
-        }
-        description
-            "{{ enum.description }}";
-    }
-    {% endfor %}
-
-    {% for message in module.messages recursive %}
-    {% if message.name in module.referred_messages %}
-    grouping {{ message.name }} {
-    {% else %}
-    container {{ message.name }} {
-    {% endif %}
-        description
-            "{{ message.description }}";
-        {% for field in message.fields %}
-        {% if field.type_ref %}
-        {% for dict_item in module.referred_messages_with_keys %}
-                {% if dict_item.name == field.type %}
-        list {{ field.name }} {
-            key "{{ dict_item.key }}";
-            {% if not field.repeated %}
-            max-elements 1;
-            {% endif %}
-            uses {{ set_module_prefix(field.type) }}
-            description
-                "{{ field.description }}";
-        }
-                {% endif %}
-        {% endfor %}
-        {% elif field.repeated %}
-        list {{ field.name }} {
-            key "{{ field.name }}";
-            leaf {{ field.name }} {
-                {% if field.type == "decimal64" %}
-                type {{ field.type }} {
-                   fraction-digits 5;
-                }
-                {% else %}
-                type {{ set_module_prefix(field.type) }}
-                {% endif %}
-                description
-                    "{{ field.description }}";
-            }
-            description
-                "{{ field.description }}";
-        }
-        {% else %}
-        leaf {{ field.name }} {
-            {% if field.type == "decimal64" %}
-            type {{ field.type }} {
-               fraction-digits 5;
-            }
-            {% else %}
-            type {{ set_module_prefix(field.type) }}
-            {% endif %}
-            description
-                "{{ field.description }}";
-        }
-        {% endif %}
-
-        {% endfor %}
-        {% for enum_type in message.enums %}
-        typedef {{ enum_type.name }} {
-            type enumeration {
-            {% for v in enum_type.value %}
-                enum {{ v.name }} {
-                    description "{{ v.description }}";
-                }
-            {% endfor %}
-            }
-            description
-                "{{ enum_type.description }}";
-        }
-
-        {% endfor %}
-    {% if message.messages %}
-    {{ loop (message.messages)|indent(4, false) }}
-    {% endif %}
-    }
-
-    {% endfor %}
-    {% for service in module.services %}
-    {% if service.description %}
-    /*  {{ service.description }}" */
-    {% endif %}
-    {% for method in service.methods %}
-    rpc {{ service.service }}-{{ method.method }} {
-        description
-            "{{ method.description }}";
-        {% if method.input %}
-        input {
-            {% if method.input_ref %}
-            uses {{ set_module_prefix(method.input) }}
-            {% else %}
-            leaf {{ method.input }} {
-                type {{ set_module_prefix(method.input) }}
-            }
-            {% endif %}
-        }
-        {% endif %}
-        {% if method.output %}
-        output {
-            {% if method.output_ref %}
-            uses {{ set_module_prefix(method.output) }}
-            {% else %}
-            leaf {{ method.output }} {
-                type {{ set_module_prefix(method.output) }}
-            }
-            {% endif %}
-        }
-        {% endif %}
-    }
-
-    {% endfor %}
-
-    {% endfor %}
-}
-""", trim_blocks=True, lstrip_blocks=True)
-
-def traverse_dependencies(descriptor):
-    dependencies = []
-    proto_imports = descriptor.get('dependency', [])
-    for proto_import in proto_imports:
-        # If the import file has a directory path to it remove it as it is not
-        # allowed in Yang.  The proto extension should be removed as well
-        dependencies.append (
-            {
-            'name' : proto_import.split('/')[-1][:-len('.proto')]
-            }
-        )
-    return dependencies
-
-
-def traverse_messages(message_types, prefix, referenced_messages):
-    messages = []
-    for message_type in message_types:
-        assert message_type['_type'] == 'google.protobuf.DescriptorProto'
-
-        # full_name = prefix + '-' + message_type['name']
-        full_name = message_type['name']
-
-        # parse the fields
-        fields = traverse_fields(message_type.get('field', []), full_name,
-                                 referenced_messages)
-
-        # parse the enums
-        enums = traverse_enums(message_type.get('enum_type', []), full_name)
-
-        # parse nested messages
-        nested = message_type.get('nested_type', [])
-        nested_messages = traverse_messages(nested, full_name,
-                                            referenced_messages)
-        messages.append(
-            {
-                'name': full_name,
-                'fields': fields,
-                'enums': enums,
-                # 'extensions': extensions,
-                'messages': nested_messages,
-                'description': remove_unsupported_characters(
-                    message_type.get('_description', '')),
-                # 'extension_ranges': extension_ranges,
-                # 'oneof': oneof
-            }
-        )
-    return messages
-
-
-def traverse_fields(fields_desc, prefix, referenced_messages):
-    fields = []
-    for field in fields_desc:
-        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
-        yang_base_type = is_base_type(field['type'])
-        _type = get_yang_type(field)
-        if not yang_base_type:
-            referenced_messages.append(_type)
-
-        fields.append(
-            {
-                # 'name': prefix + '-' + field.get('name', ''),
-                'name': field.get('name', ''),
-                'label': field.get('label', ''),
-                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
-                'number': field.get('number', ''),
-                'options': field.get('options', ''),
-                'type_name': field.get('type_name', ''),
-                'type': _type,
-                'type_ref': not yang_base_type,
-                'description': remove_unsupported_characters(field.get(
-                    '_description', ''))
-            }
-        )
-    return fields
-
-
-def traverse_enums(enums_desc, prefix):
-    enums = []
-    for enum in enums_desc:
-        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
-        # full_name = prefix + '-' + enum.get('name', '')
-        full_name = enum.get('name', '')
-        enums.append(
-            {
-                'name': full_name,
-                'value': enum.get('value', ''),
-                'description': remove_unsupported_characters(enum.get(
-                    '_description', ''))
-            }
-        )
-    return enums
-
-
-def traverse_services(service_desc, referenced_messages):
-    services = []
-    for service in service_desc:
-        methods = []
-        for method in service.get('method', []):
-            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'
-
-            input_name = method.get('input_type')
-            input_ref = False
-            if not is_base_type(input_name):
-                input_name = remove_first_character_if_match(input_name, '.')
-                # input_name = input_name.replace(".", "-")
-                input_name = input_name.split('.')[-1]
-                referenced_messages.append(input_name)
-                input_ref = True
-
-            output_name = method.get('output_type')
-            output_ref = False
-            if not is_base_type(output_name):
-                output_name = remove_first_character_if_match(output_name, '.')
-                # output_name = output_name.replace(".", "-")
-                output_name = output_name.split('.')[-1]
-                referenced_messages.append(output_name)
-                output_ref = True
-
-            methods.append(
-                {
-                    'method': method.get('name', ''),
-                    'input': input_name,
-                    'input_ref': input_ref,
-                    'output': output_name,
-                    'output_ref': output_ref,
-                    'description': remove_unsupported_characters(method.get(
-                        '_description', '')),
-                    'server_streaming': method.get('server_streaming',
-                                                   False) == True
-                }
-            )
-        services.append(
-            {
-                'service': service.get('name', ''),
-                'methods': methods,
-                'description': remove_unsupported_characters(service.get(
-                    '_description', '')),
-            }
-        )
-    return services
-
-
-def rchop(thestring, ending):
-    if thestring.endswith(ending):
-        return thestring[:-len(ending)]
-    return thestring
-
-
-def traverse_desc(descriptor):
-    referenced_messages = []
-    name = rchop(descriptor.get('name', ''), '.proto')
-    package = descriptor.get('package', '')
-    description = descriptor.get('_description', '')
-    imports=traverse_dependencies(descriptor)
-    messages = traverse_messages(descriptor.get('message_type', []),
-                                 package, referenced_messages)
-    enums = traverse_enums(descriptor.get('enum_type', []), package)
-    services = traverse_services(descriptor.get('service', []),
-                                 referenced_messages)
-    # extensions = _traverse_extensions(descriptors)
-    # options = _traverse_options(descriptors)
-    set_messages_keys(messages)
-    unique_referred_messages_with_keys = []
-    for message_name in list(set(referenced_messages)):
-        unique_referred_messages_with_keys.append(
-            {
-                'name': message_name,
-                'key': get_message_key(message_name, messages)
-            }
-        )
-
-    # Get a list of type definitions (messages, enums) defined in this
-    # descriptor
-    defined_types = [m['name'].split('/')[-1] for m in messages] + \
-                    [e['name'].split('/')[-1]  for e in enums]
-
-    data = {
-        'name': name.split('/')[-1],
-        'package': package,
-        'description': description,
-        'imports' : imports,
-        'messages': messages,
-        'enums': enums,
-        'services': services,
-        'defined_types' : defined_types,
-        'referenced_messages': list(set(referenced_messages)),
-        # TODO:  simplify for easier jinja2 template use
-        'referred_messages_with_keys': unique_referred_messages_with_keys,
-        # 'extensions': extensions,
-        # 'options': options
-    }
-    return data
-
-
-def set_messages_keys(messages):
-    for message in messages:
-        message['key'] = _get_message_key(message)
-        if message['messages']:
-            set_messages_keys(message['messages'])
-
-
-def _get_message_key(message):
-    # assume key is first yang base type field
-    for field in message['fields']:
-        if not field['type_ref']:
-            return field['name']
-    # no key yet - search nested messaged
-    if message['messages']:
-        return get_message_key(message['name'], message['messages'])
-    else:
-        return None
-
-
-def get_message_key(message_name, messages):
-    for message in messages:
-        if message_name == message['name']:
-            return message['key']
-        if message['messages']:
-            return get_message_key(message_name, message['messages'])
-    return None
-
-
-def generate_code(request, response):
-    assert isinstance(request, plugin.CodeGeneratorRequest)
-
-    parser = DescriptorParser()
-
-    # First process the proto file with the imports
-    all_defined_types = []
-    all_proto_data = []
-    all_referred_messages = []
-    for proto_file in request.proto_file:
-        native_data = parser.parse_file_descriptor(proto_file,
-                                                   type_tag_name='_type',
-                                                   fold_comments=True)
-
-        # Consolidate the defined types across imports
-        yang_data = traverse_desc(native_data)
-        for type in yang_data['defined_types']:
-            all_defined_types.append(
-                {
-                    'type' : type,
-                    'module' : yang_data['name']
-                }
-            )
-
-        all_proto_data.append(
-            {
-                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
-                    '/')[-1].replace('.proto','.yang')),
-                'module': yang_data
-            }
-        )
-
-        # Consolidate referred messages across imports
-        all_referred_messages = all_referred_messages + yang_data['referenced_messages']
-
-    # Create the files
-    for proto_data in all_proto_data:
-        f = response.file.add()
-        f.name = proto_data['file_name']
-        proto_data['module']['data_types'] = all_defined_types
-        proto_data['module']['referred_messages'] = all_referred_messages
-        f.content = template_yang.render(module=proto_data['module'])
-
-
-def get_yang_type(field):
-    type = field['type']
-    if type in YANG_TYPE_MAP.keys():
-        _type, _ = YANG_TYPE_MAP[type]
-        if _type in ['enumeration', 'message', 'group']:
-            return field['type_name'].split('.')[-1]
-            # return remove_first_character_if_match(field['type_name'],
-            #                                        '.').replace('.', '-')
-        else:
-            return _type
-    else:
-        return type
-
-
-def is_base_type(type):
-    # check numeric value of the type first
-    if type in YANG_TYPE_MAP.keys():
-        _type, _ = YANG_TYPE_MAP[type]
-        return _type not in ['message', 'group']
-    else:
-        # proto name of the type
-        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
-                  _format == type and _format not in ['message', 'group']]
-        return len(result) > 0
-
-
-def remove_unsupported_characters(text):
-    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
-    return ''.join([i if i not in unsupported_characters else ' ' for i in
-                    text])
-
-
-def remove_first_character_if_match(str, char):
-    if str.startswith(char):
-        return str[1:]
-    return str
-
-
-YANG_TYPE_MAP = {
-    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
-    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
-    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
-    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
-    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
-    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
-    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
-    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_STRING: ('string', 'string'),
-    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
-    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
-    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
-    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
-    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
-    FieldDescriptor.TYPE_GROUP: ('group', 'group')
-}
-
-if __name__ == '__main__':
-    # Read request message from stdin
-    data = sys.stdin.read()
-
-    # Parse request
-    request = plugin.CodeGeneratorRequest()
-    request.ParseFromString(data)
-
-    # Create response
-    response = plugin.CodeGeneratorResponse()
-
-    # Generate code
-    generate_code(request, response)
-
-    # Serialise response message
-    output = response.SerializeToString()
-
-    # Write to stdout
-    sys.stdout.write(output)
-    # print is_base_type(9)
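Both deleted plugins follow the standard protoc plugin contract: a serialized
CodeGeneratorRequest arrives on stdin and a serialized CodeGeneratorResponse is
written to stdout. That makes them easy to exercise outside of protoc; a sketch
of replaying a captured request against such a plugin (the capture file and
plugin path below are placeholders):

    import subprocess
    from google.protobuf.compiler import plugin_pb2 as plugin

    # protoc normally builds the request from the .proto inputs; for a quick
    # test it can be captured once (e.g. by teeing the plugin's stdin) and
    # replayed from a file.
    with open('request.bin', 'rb') as f:  # placeholder capture file
        raw_request = f.read()

    proc = subprocess.Popen(['./proto2yang.py'],  # placeholder plugin path
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate(raw_request)

    response = plugin.CodeGeneratorResponse()
    response.ParseFromString(out)
    for f in response.file:
        print(f.name)  # e.g. ietf-voltha.yang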
diff --git a/netconf/protoc_plugins/yang.proto b/netconf/protoc_plugins/yang.proto
deleted file mode 100644
index 718951c..0000000
--- a/netconf/protoc_plugins/yang.proto
+++ /dev/null
@@ -1,64 +0,0 @@
-syntax = "proto3";
-
-package experiment;
-
-message AsyncEvent {
-    int32 seq = 1;
-    enum EventType {
-        BIG_BANG = 0;  // just a big bang
-        SMALL_BANG = 1;  // so small bang
-        NO_BANG = 2;
-    }
-    EventType type = 2;
-    string details = 3;
-}
-
-enum SimpleEnum {
-    APPLE = 0;
-    BANANA = 1;
-    ORANGE = 2;
-}
-
-message Packet {
-    int32 source = 1;
-    bytes content = 2;
-    message InnerPacket {
-        string url = 1;
-        string title = 2;
-        repeated string snippets = 3;
-        message InnerInnerPacket {
-            string input = 1;
-            string desc = 2;
-        }
-        repeated InnerInnerPacket inner_inner_packet = 4;
-    }
-    repeated InnerPacket inner_packets = 3;
-}
-
-message Echo {
-    string msg = 1;
-    float delay = 2;
-}
-
-message testMessage{
-    oneof oneOfTest {
-        string test2 = 1;
-        int32 test3 = 2;
-    }
-}
-
-service ExperimentalService {
-
-    rpc GetEcho(Echo) returns(Echo);
-
-    // For server to send async stream to client
-    rpc ReceiveStreamedEvents(Packet)
-        returns(stream AsyncEvent);
-
-    // For server to send async packets to client
-    rpc ReceivePackets(Echo) returns(stream Packet);
-
-    // For client to send async packets to server
-    rpc SendPackets(stream Packet) returns(Echo);
-
-}
\ No newline at end of file
diff --git a/netconf/protos/Makefile b/netconf/protos/Makefile
index 008a531..9bd1108 100644
--- a/netconf/protos/Makefile
+++ b/netconf/protos/Makefile
@@ -20,38 +20,15 @@
   $(error To get started, please source the env.sh file from Voltha top level directory)
 endif
 
-default: build
-
-PB2_FILES := \
-	voltha_pb2.py
+default: copyfiles
 
 TARGET_PROTO_DIR := $(VOLTHA_BASE)/netconf/protos
 SOURCE_PROTO_DIR := $(VOLTHA_BASE)/voltha/protos
 
-build: copyfiles
-
 copyfiles:
 	rsync -av --include '*/' --exclude='third_party/__init__.py' --include '*.py' --exclude='*' $(SOURCE_PROTO_DIR)/ $(TARGET_PROTO_DIR)
 
 
-PROTO_FILES := $(wildcard *.proto) $(wildcard third_party/google/api/*proto)
-PROTO_PB2_FILES := $(foreach f,$(PROTO_FILES),$(subst .proto,_pb2.py,$(f)))
-PROTO_DESC_FILES := $(foreach f,$(PROTO_FILES),$(subst .proto,.desc,$(f)))
-
-PROTOC_PREFIX := /usr/local
-PROTOC_LIBDIR := $(PROTOC_PREFIX)/lib
-
-build: $(PROTO_PB2_FILES)
-
-%_pb2.py: %.proto Makefile
-	@echo "Building protocol buffer artifacts from $<"
-	env LD_LIBRARY_PATH=$(PROTOC_LIBDIR) python -m grpc.tools.protoc \
-	    -I. \
-	    -I./third_party \
-	    --python_out=. \
-	    --grpc_python_out=. \
-	    $<
-
 clean:
 	rm -f $(PROTO_PB2_FILES) $(PROTO_DESC_FILES)
 
diff --git a/netconf/protos/yang_options.proto b/netconf/protos/yang_options.proto
new file mode 100644
index 0000000..5ff2ed6
--- /dev/null
+++ b/netconf/protos/yang_options.proto
@@ -0,0 +1,52 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file contains annotation definitions that can be used to describe
+// a configuration tree.
+
+syntax = "proto3";
+
+package voltha;
+
+import "google/protobuf/descriptor.proto";
+
+enum MessageParserOption {
+    // Move any enclosed child enum/message definition up to the same level
+    // as its parent (this message) in the generated yang file
+    MOVE_TO_PARENT_LEVEL = 0;
+
+    // Create both a grouping and a container for this message.  The container
+    // name will be the message name.  The grouping name will be the message
+    // name prefixed with "grouping_".
+    CREATE_BOTH_GROUPING_AND_CONTAINER = 1;
+}
+
+message InlineNode {
+    string id = 1;
+    string type = 2;
+}
+
+extend google.protobuf.MessageOptions {
+    // This annotation is used to indicate how a message is parsed when
+    // converting from proto to yang format.
+    MessageParserOption yang_child_rule = 7761774;
+
+    MessageParserOption yang_message_rule = 7761775;
+}
+
+extend google.protobuf.FieldOptions {
+    // If present, the field (a message reference) should be replaced by the
+    // message itself.  For now, this applies only to non-repeated fields.
+    InlineNode yang_inline_node = 7761776;
+}
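These annotations are custom options on MessageOptions and FieldOptions, so the
yang parser can read them straight off the descriptors at generation time. A
sketch of querying them, assuming yang_options.proto has been compiled by
protoc into a yang_options_pb2 module:

    from netconf.protos import yang_options_pb2  # assumed generated module

    def get_parser_rules(message_descriptor_proto):
        # Returns (child_rule, message_rule); each is a MessageParserOption
        # value or None when the extension is not set on the message.
        opts = message_descriptor_proto.options
        child_rule = (opts.Extensions[yang_options_pb2.yang_child_rule]
                      if opts.HasExtension(yang_options_pb2.yang_child_rule)
                      else None)
        message_rule = (opts.Extensions[yang_options_pb2.yang_message_rule]
                        if opts.HasExtension(yang_options_pb2.yang_message_rule)
                        else None)
        return child_rule, message_rule

    def get_inline_node(field_descriptor_proto):
        # yang_inline_node carries an InlineNode on a (non-repeated) field.
        opts = field_descriptor_proto.options
        if opts.HasExtension(yang_options_pb2.yang_inline_node):
            return opts.Extensions[yang_options_pb2.yang_inline_node]
        return None

A message annotated with (voltha.yang_message_rule) =
CREATE_BOTH_GROUPING_AND_CONTAINER would then be emitted both as a
grouping_<name> grouping and a <name> container, per the comments above.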
diff --git a/netconf/session/nc_protocol_handler.py b/netconf/session/nc_protocol_handler.py
index a08ea37..c64c2c1 100644
--- a/netconf/session/nc_protocol_handler.py
+++ b/netconf/session/nc_protocol_handler.py
@@ -180,7 +180,9 @@
                              rpc_handler=rpc_handler,
                              is_error=response.is_error,
                              response=response)
-                    self.send_rpc_reply(response.node, rpc)
+                    # WIP: send canned get_instance() data, not response.node
+                    self.send_rpc_reply(self.get_instance(), rpc)
+
                     if response.close_session:
                         log.info('response-closing-session', response=response)
                         self.close()
@@ -230,3 +232,372 @@
             self.session.session_opened = False
             self.connected.callback(None)
             log.info('closing-client')
+
+    # Example of a properly formatted Yang-XML message
+    def get_instance(self):
+        xml_string = """
+            <data>
+             <Voltha xmlns="urn:opencord:params:xml:ns:voltha:ietf-voltha">
+             <instances>
+              <log_level>INFO</log_level>
+                <device_types>
+                  <adapter>simulated_onu</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>simulated_onu</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <device_types>
+                  <adapter>tibit_onu</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>tibit_onu</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <device_types>
+                  <adapter>maple_olt</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>maple_olt</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <device_types>
+                  <adapter>tibit_olt</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>tibit_olt</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <device_types>
+                  <adapter>broadcom_onu</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>broadcom_onu</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <device_types>
+                  <adapter>simulated_olt</adapter>
+                  <accepts_bulk_flow_update>True</accepts_bulk_flow_update>
+                  <id>simulated_olt</id>
+                  <accepts_add_remove_flow_updates>False</accepts_add_remove_flow_updates>
+                </device_types>
+                <logical_devices>
+                  <datapath_id>1</datapath_id>
+                  <root_device_id>simulated_olt_1</root_device_id>
+                  <switch_features>
+                    <auxiliary_id>0</auxiliary_id>
+                    <n_tables>2</n_tables>
+                    <datapath_id>0</datapath_id>
+                    <capabilities>15</capabilities>
+                    <n_buffers>256</n_buffers>
+                  </switch_features>
+                  <flows/>
+                  <id>simulated1</id>
+                  <flow_groups/>
+                    <ports>
+                      <device_port_no>2</device_port_no>
+                      <root_port>False</root_port>
+                      <device_id>simulated_onu_1</device_id>
+                      <id>onu1</id>
+                      <ofp_port>
+                        <hw_addr>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>1</item>
+                        </hw_addr>
+                        <curr_speed>32</curr_speed>
+                        <curr>4128</curr>
+                        <name>onu1</name>
+                        <supported>0</supported>
+                        <state>4</state>
+                        <max_speed>32</max_speed>
+                        <advertised>4128</advertised>
+                        <peer>4128</peer>
+                        <config>0</config>
+                        <port_no>1</port_no>
+                      </ofp_port>
+                    </ports>
+                    <ports>
+                      <device_port_no>2</device_port_no>
+                      <root_port>False</root_port>
+                      <device_id>simulated_onu_2</device_id>
+                      <id>onu2</id>
+                      <ofp_port>
+                        <hw_addr>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>2</item>
+                        </hw_addr>
+                        <curr_speed>32</curr_speed>
+                        <curr>4128</curr>
+                        <name>onu2</name>
+                        <supported>0</supported>
+                        <state>4</state>
+                        <max_speed>32</max_speed>
+                        <advertised>4128</advertised>
+                        <peer>4128</peer>
+                        <config>0</config>
+                        <port_no>2</port_no>
+                      </ofp_port>
+                    </ports>
+                    <ports>
+                      <device_port_no>2</device_port_no>
+                      <root_port>True</root_port>
+                      <device_id>simulated_olt_1</device_id>
+                      <id>olt1</id>
+                      <ofp_port>
+                        <hw_addr>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>0</item>
+                          <item>129</item>
+                        </hw_addr>
+                        <curr_speed>32</curr_speed>
+                        <curr>4128</curr>
+                        <name>olt1</name>
+                        <supported>0</supported>
+                        <state>4</state>
+                        <max_speed>32</max_speed>
+                        <advertised>4128</advertised>
+                        <peer>4128</peer>
+                        <config>0</config>
+                        <port_no>129</port_no>
+                      </ofp_port>
+                    </ports>
+                  <desc>
+                    <dp_desc>n/a</dp_desc>
+                    <sw_desc>simulated pon</sw_desc>
+                    <hw_desc>simulated pon</hw_desc>
+                    <serial_num>985c4449d50a441ca843401e2f44e682</serial_num>
+                    <mfr_desc>cord project</mfr_desc>
+                  </desc>
+                </logical_devices>
+              <devices>
+                <item>
+                  <vendor>simulated</vendor>
+                  <parent_port_no>0</parent_port_no>
+                  <software_version>1.0</software_version>
+                  <connect_status>UNKNOWN</connect_status>
+                  <type>simulated_olt</type>
+                  <adapter>simulated_olt</adapter>
+                  <vlan>0</vlan>
+                  <hardware_version>n/a</hardware_version>
+                  <flows>
+                    <items/>
+                  </flows>
+                  <ports>
+                    <item>
+                      <peers>
+                        <item>
+                          <port_no>1</port_no>
+                          <device_id>simulated_onu_1</device_id>
+                        </item>
+                        <item>
+                          <port_no>1</port_no>
+                          <device_id>simulated_onu_2</device_id>
+                        </item>
+                      </peers>
+                      <label>pon</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>PON_OLT</type>
+                      <port_no>1</port_no>
+                      <device_id>simulated_olt_1</device_id>
+                    </item>
+                    <item>
+                      <peers/>
+                      <label>eth</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>ETHERNET_NNI</type>
+                      <port_no>2</port_no>
+                      <device_id>simulated_olt_1</device_id>
+                    </item>
+                  </ports>
+                  <parent_id/>
+                  <oper_status>DISCOVERED</oper_status>
+                  <flow_groups>
+                    <items/>
+                  </flow_groups>
+                  <admin_state>UNKNOWN</admin_state>
+                  <serial_number>19addcd7305d4d4fa90300cb8e4ab9a6</serial_number>
+                  <model>n/a</model>
+                  <root>True</root>
+                  <id>simulated_olt_1</id>
+                  <firmware_version>n/a</firmware_version>
+                </item>
+                <item>
+                  <vendor>simulated</vendor>
+                  <parent_port_no>1</parent_port_no>
+                  <software_version>1.0</software_version>
+                  <connect_status>UNKNOWN</connect_status>
+                  <root>False</root>
+                  <adapter>simulated_onu</adapter>
+                  <vlan>101</vlan>
+                  <hardware_version>n/a</hardware_version>
+                  <flows>
+                    <items/>
+                  </flows>
+                  <ports>
+                    <item>
+                      <peers/>
+                      <label>eth</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>ETHERNET_UNI</type>
+                      <port_no>2</port_no>
+                      <device_id>simulated_onu_1</device_id>
+                    </item>
+                    <item>
+                      <peers>
+                        <item>
+                          <port_no>1</port_no>
+                          <device_id>simulated_olt_1</device_id>
+                        </item>
+                      </peers>
+                      <label>pon</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>PON_ONU</type>
+                      <port_no>1</port_no>
+                      <device_id>simulated_onu_1</device_id>
+                    </item>
+                  </ports>
+                  <parent_id>simulated_olt_1</parent_id>
+                  <oper_status>DISCOVERED</oper_status>
+                  <flow_groups>
+                    <items/>
+                  </flow_groups>
+                  <admin_state>UNKNOWN</admin_state>
+                  <serial_number>8ce6514e1b324d349038d9a80af04772</serial_number>
+                  <model>n/a</model>
+                  <type>simulated_onu</type>
+                  <id>simulated_onu_1</id>
+                  <firmware_version>n/a</firmware_version>
+                </item>
+                <item>
+                  <vendor>simulated</vendor>
+                  <parent_port_no>1</parent_port_no>
+                  <software_version>1.0</software_version>
+                  <connect_status>UNKNOWN</connect_status>
+                  <root>False</root>
+                  <adapter>simulated_onu</adapter>
+                  <vlan>102</vlan>
+                  <hardware_version>n/a</hardware_version>
+                  <flows>
+                    <items/>
+                  </flows>
+                  <ports>
+                    <item>
+                      <peers/>
+                      <label>eth</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>ETHERNET_UNI</type>
+                      <port_no>2</port_no>
+                      <device_id>simulated_onu_2</device_id>
+                    </item>
+                    <item>
+                      <peers>
+                        <item>
+                          <port_no>1</port_no>
+                          <device_id>simulated_olt_1</device_id>
+                        </item>
+                      </peers>
+                      <label>pon</label>
+                      <oper_status>UNKNOWN</oper_status>
+                      <admin_state>UNKNOWN</admin_state>
+                      <type>PON_ONU</type>
+                      <port_no>1</port_no>
+                      <device_id>simulated_onu_2</device_id>
+                    </item>
+                  </ports>
+                  <parent_id>simulated_olt_1</parent_id>
+                  <oper_status>DISCOVERED</oper_status>
+                  <flow_groups>
+                    <items/>
+                  </flow_groups>
+                  <admin_state>UNKNOWN</admin_state>
+                  <serial_number>0dfbb5af422044639c0660b518c06519</serial_number>
+                  <model>n/a</model>
+                  <type>simulated_onu</type>
+                  <id>simulated_onu_2</id>
+                  <firmware_version>n/a</firmware_version>
+                </item>
+              </devices>
+              <instance_id>compose_voltha_1</instance_id>
+              <version>0.9.0</version>
+              <health>
+                <state>HEALTHY</state>
+              </health>
+              <device_groups>
+                <item>
+                  <logical_devices/>
+                  <id>1</id>
+                  <devices/>
+                </item>
+              </device_groups>
+              <adapters>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Voltha project</vendor>
+                  <id>simulated_onu</id>
+                  <logical_device_ids/>
+                </item>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Tibit Communications Inc.</vendor>
+                  <id>tibit_onu</id>
+                  <logical_device_ids/>
+                </item>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Voltha project</vendor>
+                  <id>maple_olt</id>
+                  <logical_device_ids/>
+                </item>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Tibit Communications Inc.</vendor>
+                  <id>tibit_olt</id>
+                  <logical_device_ids/>
+                </item>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Voltha project</vendor>
+                  <id>broadcom_onu</id>
+                  <logical_device_ids/>
+                </item>
+                <item>
+                  <config>
+                    <log_level>INFO</log_level>
+                  </config>
+                  <version>0.1</version>
+                  <vendor>Voltha project</vendor>
+                  <id>simulated_olt</id>
+                  <logical_device_ids/>
+                </item>
+              </adapters>
+             </instances>
+             </Voltha>
+            </data>
+        """
+        return etree.fromstring(xml_string)
\ No newline at end of file
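Since the canned reply above is hand-written XML, a quick standalone parse
check catches malformed markup early. A sketch using lxml (the same etree API
the handler relies on) against a trimmed excerpt; note the default namespace
declared on <Voltha> applies to all of its descendants:

    from lxml import etree

    # Trimmed excerpt of the canned <data> document above.
    xml_string = """
        <data>
         <Voltha xmlns="urn:opencord:params:xml:ns:voltha:ietf-voltha">
          <instances>
           <instance_id>compose_voltha_1</instance_id>
          </instances>
         </Voltha>
        </data>
    """
    root = etree.fromstring(xml_string)
    assert root.tag == 'data'
    ns = {'v': 'urn:opencord:params:xml:ns:voltha:ietf-voltha'}
    print(root.findtext('v:Voltha/v:instances/v:instance_id', namespaces=ns))
    # -> compose_voltha_1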