This commit consists of:
1) Dockerizing the netconf server
2) Updating proto2yang to support module imports
3) Providing a set of yang modules derived from the proto files in voltha.
   These files, as well as the slight modifications to the proto files, are
   provided in the experiments/netconf/proto2yang directory
4) Adding code to automatically pull proto files from voltha into the netconf
   server, compile them, and produce the equivalent yang files.
5) Adding a getvoltha netconf API to provide voltha state information (basic
   at this time; see the client sketch below).  There is potential to make
   this generic once we experiment with additional APIs

Change-Id: I94f3a1f871b8025ad675d5f9b9b626d1be8b8d36
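
Note: below is a minimal, hypothetical client-side sketch (not part of this
diff) of exercising the new getvoltha RPC, assuming the ncclient library and
illustrative host/credentials; the server side only needs the <getvoltha/>
element that rpc_factory.py registers in this commit.

    from ncclient import manager
    from ncclient.xml_ import to_ele

    # connect to the dockerized netconf server (port and credentials are
    # placeholders for this sketch)
    m = manager.connect(host='localhost', port=830, username='voltha',
                        password='voltha', hostkey_verify=False)

    # dispatch the custom RPC; the reply carries the XML rendering of the
    # GetVolthaInstance response
    reply = m.dispatch(to_ele('<getvoltha/>'))
    print reply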
diff --git a/netconf/capabilities.py b/netconf/capabilities.py
index 86873f8..f7c2c5a 100755
--- a/netconf/capabilities.py
+++ b/netconf/capabilities.py
@@ -19,9 +19,143 @@
 class Capabilities:
 
     def __init__(self):
-        self.server_caps = (C.NETCONF_BASE_10, C.NETCONF_BASE_11)
+        self.server_caps = self._get_server_capabilities()
         self.client_caps = set()
 
     def add_client_capability(self, cap):
         self.client_caps.add(cap)
 
+    #TODO:  This will be automatically generated from the voltha proto files
+    def _get_server_capabilities(self):
+        return (
+            C.NETCONF_BASE_10,
+            C.NETCONF_BASE_11,
+            "urn:opencord:params:xml:ns:voltha:ietf-voltha",
+            "urn:opencord:params:xml:ns:voltha:ietf-openflow_13",
+            "urn:opencord:params:xml:ns:voltha:ietf-meta",
+            "urn:opencord:params:xml:ns:voltha:ietf-logical_device",
+            "urn:opencord:params:xml:ns:voltha:ietf-health",
+            "urn:opencord:params:xml:ns:voltha:ietf-device",
+            "urn:opencord:params:xml:ns:voltha:ietf-empty",
+            "urn:opencord:params:xml:ns:voltha:ietf-common",
+            "urn:opencord:params:xml:ns:voltha:ietf-any",
+            "urn:opencord:params:xml:ns:voltha:ietf-adapter"
+        )
+
+    #TODO:  A schema exchange will also need to happen
+
+    description = """
+
+    Option 1:  The client already has the yang models for voltha and adapters:
+        <hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+            <capabilities>
+                <capability>
+                    urn:ietf:params:netconf:base:1.1
+                </capability>
+                <capability>
+                    urn:cord:voltha:1.0
+                </capability>
+                <capability>
+                    urn:cord:voltha:adapter_x:1.0
+                </capability>
+
+
+    Option 2: NETCONF-MONITORING - schema exchanges
+
+        server exposes capabilities
+
+            <hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+                <capabilities>
+                    <capability>
+                        urn:ietf:params:netconf:base:1.1
+                    </capability>
+                    <capability>
+                        urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring?module=ietf-netconf-monitoring&amp;revision=2010-10-04
+                    </capability>
+
+        client requests schemas
+
+            <rpc message-id="101"
+                 xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+                <get>
+                    <filter type="subtree">
+                        <netconf-state xmlns=
+                            "urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">
+                             <schemas/>
+                        </netconf-state>
+                    </filter>
+                </get>
+            </rpc>
+
+        server sends back schemas
+
+            <rpc-reply message-id="101"
+                       xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+                  <data>
+                        <netconf-state
+                            xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">
+                            <schemas>
+                                <schema>
+                                    <identifier>voltha</identifier>
+                                    <version>1.0</version>
+                                    <format>yang</format>
+                                    <namespace>urn:cord:voltha</namespace>
+                                    <location>NETCONF</location>
+                                </schema>
+                                <schema>
+                                    <identifier>adapter_x</identifier>
+                                    <version>x_release</version>
+                                    <format>yang</format>
+                                    <namespace>urn:cord:voltha:adapter_x</namespace>
+                                    <location>NETCONF</location>
+                                </schema>
+                            </schemas>
+                        </netconf-state>
+                  </data>
+            </rpc-reply>
+
+
+        client requests each schema instance
+
+            <rpc message-id="102"
+                xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+                <get-schema
+                    xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">
+                    <identifier>voltha</identifier>
+                    <version>1.0</version>
+                </get-schema>
+             </rpc>
+
+             <rpc-reply message-id="102"
+                xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
+                <data
+                    xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">
+                    module voltha {
+                        //default format (yang) returned
+                        //voltha version 1.0 yang module
+                        //contents here ...
+                    }
+                </data>
+             </rpc-reply>
+
+
+    GETTING DATA
+
+    Use filter:
+        1) namespace filter
+            <filter type="subtree">
+                <top xmlns="http://example.com/schema/1.2/config"/>
+            </filter>
+
+         2) <filter type="subtree">
+                <adapters xmlns="urn:cord:voltha:adapter_x">
+                    <adapter>
+                        <id>uuid</id>
+                        <config/>
+                    </adapter>
+                </adapters>
+            </filter>
+
+            /voltha/adapters/<adapter>/[<id>, <vendor>, <version>, <config>, <additional_desc>]
+
+    """
\ No newline at end of file
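
Note: per the TODO in capabilities.py, the capability list above is meant to
be generated rather than hard-coded.  A minimal sketch of one way to derive
it, assuming the generated yang files sit in a work directory and follow the
ietf-<module>.yang naming used in this commit:

    import os

    def yang_capabilities(work_dir):
        # map each generated ietf-<module>.yang file onto the URN pattern
        # used by _get_server_capabilities above
        urns = []
        for fname in sorted(os.listdir(work_dir)):
            if fname.startswith('ietf-') and fname.endswith('.yang'):
                module = fname[:-len('.yang')]        # e.g. 'ietf-voltha'
                urns.append('urn:opencord:params:xml:ns:voltha:' + module)
        return tuple(urns)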
diff --git a/netconf/grpc_client.py b/netconf/grpc_client.py
deleted file mode 100644
index ebb74eb..0000000
--- a/netconf/grpc_client.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2016 the original author or authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-The gRPC client layer for the Netconf agent
-"""
-from Queue import Queue, Empty
-from structlog import get_logger
-from twisted.internet.defer import inlineCallbacks, returnValue, DeferredQueue
-
-
-log = get_logger()
-
-
-class GrpcClient(object):
-
-    def __init__(self, connection_manager, channel):
-
-        self.connection_manager = connection_manager
-        self.channel = channel
-        self.logical_stub = None
-
-        self.stopped = False
-
-        self.packet_out_queue = Queue()  # queue to send out PacketOut msgs
-        self.packet_in_queue = DeferredQueue()  # queue to receive PacketIn
-
diff --git a/netconf/grpc_client/__init__.py b/netconf/grpc_client/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/netconf/grpc_client/__init__.py
diff --git a/netconf/grpc_client/grpc_client.py b/netconf/grpc_client/grpc_client.py
new file mode 100644
index 0000000..ae19067
--- /dev/null
+++ b/netconf/grpc_client/grpc_client.py
@@ -0,0 +1,288 @@
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+gRPC client meant to connect to a gRPC server endpoint, query the
+endpoint's schema by calling SchemaService.GetSchema(Empty), and derive
+all of its semantics from the recovered schema.
+"""
+
+import os
+import sys
+from random import randint
+from zlib import decompress
+
+import grpc
+from consul import Consul
+from grpc._channel import _Rendezvous
+from structlog import get_logger
+from twisted.internet import reactor
+from twisted.internet.defer import inlineCallbacks, returnValue
+from werkzeug.exceptions import ServiceUnavailable
+
+from common.utils.asleep import asleep
+from netconf.protos import third_party
+from netconf.protos.schema_pb2 import SchemaServiceStub
+from google.protobuf.empty_pb2 import Empty
+from common.utils.consulhelpers import get_endpoint_from_consul
+from netconf.protos.voltha_pb2 import VolthaLocalServiceStub
+from twisted.internet import threads
+from google.protobuf import empty_pb2
+from google.protobuf.json_format import MessageToDict, ParseDict
+from simplejson import dumps, load
+
+log = get_logger()
+
+
+class GrpcClient(object):
+    """
+    Connect to a gRPC server, fetch its schema, and process the downloaded
+    proto schema files.  The goal is to convert the proto schemas into yang
+    schemas, which are then exposed to the Netconf client.
+    """
+    RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]
+
+    def __init__(self, consul_endpoint, work_dir,
+                 grpc_endpoint='localhost:50055',
+                 reconnect_callback=None,
+                 on_start_callback=None):
+        self.consul_endpoint = consul_endpoint
+        self.grpc_endpoint = grpc_endpoint
+        self.work_dir = work_dir
+        self.reconnect_callback = reconnect_callback
+        self.on_start_callback = on_start_callback
+
+        self.plugin_dir = os.path.abspath(os.path.join(
+            os.path.dirname(__file__), '../protoc_plugins'))
+
+        self.channel = None
+        self.local_stub = None
+        self.schema = None
+        self.retries = 0
+        self.shutting_down = False
+        self.connected = False
+
+    def start(self):
+        log.debug('starting')
+        if not self.connected:
+            reactor.callLater(0, self.connect)
+        log.info('started')
+        return self
+
+    def stop(self):
+        log.debug('stopping')
+        if self.shutting_down:
+            return
+        self.shutting_down = True
+        log.info('stopped')
+
+
+    def set_on_start_callback(self, on_start_callback):
+        self.on_start_callback = on_start_callback
+        return self
+
+
+    def set_reconnect_callback(self, reconnect_callback):
+        self.reconnect_callback = reconnect_callback
+        return self
+
+
+    def resolve_endpoint(self, endpoint):
+        ip_port_endpoint = endpoint
+        if endpoint.startswith('@'):
+            try:
+                ip_port_endpoint = get_endpoint_from_consul(
+                    self.consul_endpoint, endpoint[1:])
+                log.info('endpoint-found',
+                         endpoint=endpoint, ip_port=ip_port_endpoint)
+            except Exception as e:
+                log.error('service-not-found-in-consul', endpoint=endpoint,
+                          exception=repr(e))
+                return None, None
+        if ip_port_endpoint:
+            host, port = ip_port_endpoint.split(':', 2)
+            return host, int(port)
+
+
+    @inlineCallbacks
+    def connect(self):
+        """
+        (Re-)Connect to end-point
+        """
+        if self.shutting_down or self.connected:
+            return
+
+        try:
+            host, port = self.resolve_endpoint(self.grpc_endpoint)
+
+            # If host and port are not set then we will retry
+            if host and port:
+                log.info('grpc-endpoint-connecting', host=host, port=port)
+                self.channel = grpc.insecure_channel('{}:{}'.format(host, port))
+
+                yang_from = self._retrieve_schema()
+                log.info('proto-to-yang-schema', file=yang_from)
+                self._compile_proto_files(yang_from)
+                self._clear_backoff()
+
+                if self.on_start_callback is not None:
+                    reactor.callLater(0, self.on_start_callback)
+
+                self.connected = True
+                if self.reconnect_callback is not None:
+                    reactor.callLater(0, self.reconnect_callback)
+
+                self.local_stub = VolthaLocalServiceStub(self.channel)
+
+                return
+
+        except _Rendezvous, e:
+            if e.code() == grpc.StatusCode.UNAVAILABLE:
+                log.info('grpc-endpoint-not-available')
+            else:
+                log.exception(e)
+            yield self._backoff('not-available')
+
+        except Exception, e:
+            if not self.shutting_down:
+                log.exception('cannot-connect', endpoint=self.grpc_endpoint)
+            yield self._backoff('unknown-error')
+
+        reactor.callLater(0, self.connect)
+
+    def _backoff(self, msg):
+        wait_time = self.RETRY_BACKOFF[min(self.retries,
+                                           len(self.RETRY_BACKOFF) - 1)]
+        self.retries += 1
+        log.error(msg, retry_in=wait_time)
+        return asleep(wait_time)
+
+    def _clear_backoff(self):
+        if self.retries:
+            log.info('reconnected', after_retries=self.retries)
+            self.retries = 0
+
+    def _retrieve_schema(self):
+        """
+        Retrieve schema from gRPC end-point, and save all *.proto files in
+        the work directory.
+        """
+        assert isinstance(self.channel, grpc.Channel)
+        stub = SchemaServiceStub(self.channel)
+        # try:
+        schemas = stub.GetSchema(Empty())
+        # except _Rendezvous, e:
+        #     if e.code == grpc.StatusCode.UNAVAILABLE:
+        #
+        #     else:
+        #         raise e
+
+        os.system('mkdir -p %s' % self.work_dir)
+        os.system('rm -fr /tmp/%s/*' %
+                  self.work_dir.replace('/tmp/', ''))  # safer
+
+        for proto_file in schemas.protos:
+            proto_fname = proto_file.file_name
+            # TODO: Do we need to process a set of files using a prefix
+            # instead of just one?
+            proto_content = proto_file.proto
+            log.info('saving-proto', fname=proto_fname, dir=self.work_dir,
+                      length=len(proto_content))
+            with open(os.path.join(self.work_dir, proto_fname), 'w') as f:
+                f.write(proto_content)
+
+            desc_content = decompress(proto_file.descriptor)
+            desc_fname = proto_fname.replace('.proto', '.desc')
+            log.info('saving-descriptor', fname=desc_fname, dir=self.work_dir,
+                      length=len(desc_content))
+            with open(os.path.join(self.work_dir, desc_fname), 'wb') as f:
+                f.write(desc_content)
+        return schemas.yang_from
+
+    def _compile_proto_files(self, yang_from):
+        """
+        For each *.proto file in the work directory, compile the proto
+        file into the respective *_pb2.py file as well as generate the
+        corresponding yang schema.
+        :return: None
+        """
+        log.info('start')
+        google_api_dir = os.path.abspath(os.path.join(
+            os.path.dirname(__file__), '../protos/third_party'
+        ))
+
+        log.info('google-api', api_dir=google_api_dir)
+
+        netconf_base_dir = os.path.abspath(os.path.join(
+            os.path.dirname(__file__), '../..'
+        ))
+        log.info('netconf-dir', dir=netconf_base_dir)
+
+
+        for fname in [f for f in os.listdir(self.work_dir)
+                      if f.endswith('.proto')]:
+            log.info('filename', file=fname)
+
+            need_yang = fname == yang_from
+            log.debug('compiling',
+                      file=fname,
+                      yang_schema_required=need_yang)
+            cmd = (
+                'cd %s && '
+                'env PATH=%s PYTHONPATH=%s '
+                'python -m grpc.tools.protoc '
+                '-I. '
+                '-I%s '
+                '--python_out=. '
+                '--grpc_python_out=. '
+                '--plugin=protoc-gen-custom=%s/proto2yang.py '
+                '%s'
+                '%s' % (
+                    self.work_dir,
+                    ':'.join([os.environ['PATH'], self.plugin_dir]),
+                    ':'.join([google_api_dir, netconf_base_dir]),
+                    google_api_dir,
+                    self.plugin_dir,
+                    '--custom_out=. ' if need_yang else '',
+                    fname)
+            )
+            log.debug('executing', cmd=cmd, file=fname)
+            os.system(cmd)
+            log.info('compiled', file=fname)
+
+        # # test-load each _pb2 file to see all is right
+        # if self.work_dir not in sys.path:
+        #     sys.path.insert(0, self.work_dir)
+        #
+        # for fname in [f for f in os.listdir(self.work_dir)
+        #               if f.endswith('_pb2.py')]:
+        #     modname = fname[:-len('.py')]
+        #     log.debug('test-import', modname=modname)
+        #     _ = __import__(modname)
+
+        #TODO: find a different way to test the generated yang files
+
+    @inlineCallbacks
+    def get_voltha_instance(self):
+        try:
+            res = yield threads.deferToThread(
+                    self.local_stub.GetVolthaInstance, empty_pb2.Empty())
+
+            out_data = MessageToDict(res, True, True)
+            returnValue(out_data)
+        except Exception, e:
+            log.error('failure', exception=repr(e))
+
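
Note: a standalone illustration of the retry policy implemented by
GrpcClient._backoff above; the wait time walks up RETRY_BACKOFF and then
stays pegged at the last entry, so persistent failures settle into a
5-second retry loop.

    RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]

    for retries in range(10):
        # same capped indexing as GrpcClient._backoff
        wait_time = RETRY_BACKOFF[min(retries, len(RETRY_BACKOFF) - 1)]
        print retries, wait_time    # 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 5, ...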
diff --git a/netconf/main.py b/netconf/main.py
index 06edfe4..e049433 100755
--- a/netconf/main.py
+++ b/netconf/main.py
@@ -16,15 +16,20 @@
 #
 import argparse
 import os
-
+import sys
 import yaml
 from twisted.internet import reactor
 from twisted.internet.defer import inlineCallbacks
 
+base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(base_dir)
+sys.path.append(os.path.join(base_dir, 'netconf/protos/third_party'))
+
 from common.structlog_setup import setup_logging
 from common.utils.dockerhelpers import get_my_containers_name
 from common.utils.nethelpers import get_my_primary_local_ipv4
-from connection_mgr import ConnectionManager
+from netconf.grpc_client.grpc_client import GrpcClient
+from netconf.nc_server import NCServer
 
 defs = dict(
     config=os.environ.get('CONFIG', './netconf.yml'),
@@ -247,7 +252,10 @@
                                  fluentd=args.fluentd)
 
         # components
-        self.connection_manager = None
+        self.nc_server = None
+        self.grpc_client = None  # single, shared gRPC client to Voltha
+
+        self.netconf_server_started = False
 
         self.exiting = False
 
@@ -261,25 +269,53 @@
 
     @inlineCallbacks
     def startup_components(self):
-        self.log.info('starting-netconf-server')
+        self.log.info('starting')
         args = self.args
-        self.connection_manager = yield ConnectionManager(
-            args.consul,
-            args.grpc_endpoint,
-            args.netconf_port,
-            args.server_private_key_file,
-            args.server_public_key_file,
-            args.client_public_keys_file,
-            args.client_passwords_file).start()
-        self.log.info('started-netconf-server')
+
+        self.grpc_client = yield \
+            GrpcClient(args.consul, args.work_dir, args.grpc_endpoint)
+
+        self.nc_server = yield \
+                NCServer(args.netconf_port,
+                         args.server_private_key_file,
+                         args.server_public_key_file,
+                         args.client_public_keys_file,
+                         args.client_passwords_file,
+                         self.grpc_client)
+
+        # set on start callback
+        self.grpc_client.set_on_start_callback(self._start_netconf_server)
+
+        # set the callback if there is a reconnect with voltha.
+        self.grpc_client.set_reconnect_callback(self.nc_server.reload_capabilities)
+
+        # start grpc client
+        self.grpc_client.start()
+
+        self.log.info('started')
+
+    @inlineCallbacks
+    def _start_netconf_server(self):
+        if not self.netconf_server_started:
+            self.log.info('starting')
+            yield self.nc_server.start()
+            self.netconf_server_started = True
+            self.log.info('started')
+        else:
+            self.log.info('server-already-started')
 
     @inlineCallbacks
     def shutdown_components(self):
         """Execute before the reactor is shut down"""
         self.log.info('exiting-on-keyboard-interrupt')
         self.exiting = True
-        if self.connection_manager is not None:
-            yield self.connection_manager.stop()
+
+        if self.grpc_client is not None:
+            yield self.grpc_client.stop()
+
+        if self.nc_server is not None:
+            yield self.nc_server.stop()
+
 
     def start_reactor(self):
         reactor.callWhenRunning(
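
Note: a toy model (invented names) of the start-once guard wired above; even
if the gRPC client's on_start_callback fires again after a reconnect, the
netconf_server_started flag ensures the NETCONF server is started only once.

    netconf_server_started = False

    def start_netconf_server():
        global netconf_server_started
        if not netconf_server_started:
            netconf_server_started = True
            print 'starting nc_server'      # first connect only
        else:
            print 'server-already-started'  # subsequent callbacks

    start_netconf_server()   # initial gRPC connect
    start_netconf_server()   # later reconnect: no-op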
diff --git a/netconf/nc_rpc/base/close_session.py b/netconf/nc_rpc/base/close_session.py
index 43babb8..ce187cd 100644
--- a/netconf/nc_rpc/base/close_session.py
+++ b/netconf/nc_rpc/base/close_session.py
@@ -24,8 +24,9 @@
 
 class CloseSession(Rpc):
 
-    def __init__(self, rpc_request, rpc_method, session):
-        super(CloseSession, self).__init__(rpc_request, rpc_method, session)
+    def __init__(self, rpc_request, rpc_method, grpc_client, session):
+        super(CloseSession, self).__init__(rpc_request, rpc_method,
+                                           grpc_client, session)
         self._validate_parameters()
 
     def execute(self):
diff --git a/netconf/nc_rpc/base/commit.py b/netconf/nc_rpc/base/commit.py
index 61b7604..8933dc3 100644
--- a/netconf/nc_rpc/base/commit.py
+++ b/netconf/nc_rpc/base/commit.py
@@ -24,8 +24,9 @@
 
 class Commit(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(Commit, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(Commit, self).__init__(rpc_request, rpc_method, grpc_client,
+									 session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/copy_config.py b/netconf/nc_rpc/base/copy_config.py
index cf2fc82..e287770 100644
--- a/netconf/nc_rpc/base/copy_config.py
+++ b/netconf/nc_rpc/base/copy_config.py
@@ -23,8 +23,9 @@
 
 class CopyConfig(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(CopyConfig, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(CopyConfig, self).__init__(rpc_request, rpc_method,
+										 grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/delete_config.py b/netconf/nc_rpc/base/delete_config.py
index 7163ee6..e267807 100644
--- a/netconf/nc_rpc/base/delete_config.py
+++ b/netconf/nc_rpc/base/delete_config.py
@@ -23,8 +23,9 @@
 
 class DeleteConfig(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(DeleteConfig, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(DeleteConfig, self).__init__(rpc_request, rpc_method,
+										   grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/discard_changes.py b/netconf/nc_rpc/base/discard_changes.py
index c41d32e..57bdfed 100644
--- a/netconf/nc_rpc/base/discard_changes.py
+++ b/netconf/nc_rpc/base/discard_changes.py
@@ -23,8 +23,9 @@
 
 class DiscardChanges(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(DiscardChanges, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(DiscardChanges, self).__init__(rpc_request, rpc_method,
+											 grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/edit_config.py b/netconf/nc_rpc/base/edit_config.py
index 5c7599a..469e957 100644
--- a/netconf/nc_rpc/base/edit_config.py
+++ b/netconf/nc_rpc/base/edit_config.py
@@ -23,8 +23,9 @@
 
 class EditConfig(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(EditConfig, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(EditConfig, self).__init__(rpc_request, rpc_method,
+										 grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/get.py b/netconf/nc_rpc/base/get.py
index c6cdfab..39e095c 100644
--- a/netconf/nc_rpc/base/get.py
+++ b/netconf/nc_rpc/base/get.py
@@ -25,8 +25,9 @@
 
 class Get(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(Get, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(Get, self).__init__(rpc_request, rpc_method, grpc_client,
+								  session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/get_config.py b/netconf/nc_rpc/base/get_config.py
index dffe0d6..09f90b4 100644
--- a/netconf/nc_rpc/base/get_config.py
+++ b/netconf/nc_rpc/base/get_config.py
@@ -26,8 +26,9 @@
 
 class GetConfig(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(GetConfig, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(GetConfig, self).__init__(rpc_request, rpc_method,
+										grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/kill_session.py b/netconf/nc_rpc/base/kill_session.py
index 08a2e7a..c9a3352 100644
--- a/netconf/nc_rpc/base/kill_session.py
+++ b/netconf/nc_rpc/base/kill_session.py
@@ -25,8 +25,9 @@
 
 class KillSession(Rpc):
 
-    def __init__(self, rpc_request, rpc_method, session):
-        super(KillSession, self).__init__(rpc_request, rpc_method, session)
+    def __init__(self, rpc_request, rpc_method, grpc_client, session):
+        super(KillSession, self).__init__(rpc_request, rpc_method,
+                                          grpc_client, session)
         self._validate_parameters()
 
     def execute(self):
diff --git a/netconf/nc_rpc/base/lock.py b/netconf/nc_rpc/base/lock.py
index fc74e83..2f0130d 100644
--- a/netconf/nc_rpc/base/lock.py
+++ b/netconf/nc_rpc/base/lock.py
@@ -23,8 +23,9 @@
 
 class Lock(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(Lock, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(Lock, self).__init__(rpc_request, rpc_method, grpc_client,
+								   session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/unlock.py b/netconf/nc_rpc/base/unlock.py
index 78c59f1..f9ef062 100644
--- a/netconf/nc_rpc/base/unlock.py
+++ b/netconf/nc_rpc/base/unlock.py
@@ -23,8 +23,9 @@
 
 class UnLock(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(UnLock, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(UnLock, self).__init__(rpc_request, rpc_method, grpc_client,
+									 session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/base/validate.py b/netconf/nc_rpc/base/validate.py
index 1cb84af..93faf60 100644
--- a/netconf/nc_rpc/base/validate.py
+++ b/netconf/nc_rpc/base/validate.py
@@ -23,8 +23,9 @@
 
 class Validate(Rpc):
 
-	def __init__(self, rpc_request, rpc_method, session):
-		super(Validate, self).__init__(rpc_request, rpc_method, session)
+	def __init__(self, rpc_request, rpc_method, grpc_client, session):
+		super(Validate, self).__init__(rpc_request, rpc_method,
+									   grpc_client, session)
 		self._validate_parameters()
 
 	def execute(self):
diff --git a/netconf/nc_rpc/ext/__init__.py b/netconf/nc_rpc/ext/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/netconf/nc_rpc/ext/__init__.py
diff --git a/netconf/nc_rpc/ext/get_voltha.py b/netconf/nc_rpc/ext/get_voltha.py
new file mode 100644
index 0000000..a619875
--- /dev/null
+++ b/netconf/nc_rpc/ext/get_voltha.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from lxml import etree
+import structlog
+from netconf.nc_rpc.rpc import Rpc
+import netconf.nc_common.error as ncerror
+from netconf.constants import Constants as C
+from netconf.utils import filter_tag_match
+from twisted.internet.defer import inlineCallbacks, returnValue
+import dicttoxml
+from simplejson import dumps, load
+
+log = structlog.get_logger()
+
+
+class GetVoltha(Rpc):
+    def __init__(self, rpc_request, rpc_method, grpc_client, session):
+        super(GetVoltha, self).__init__(rpc_request, rpc_method,
+                                        grpc_client, session)
+        self._validate_parameters()
+
+    @inlineCallbacks
+    def execute(self):
+        log.info('get-voltha-request', session=self.session.session_id)
+        if self.rpc_response.is_error:
+            returnValue(self.rpc_response)
+
+        # Invoke voltha via the grpc client
+        res_dict = yield self.grpc_client.get_voltha_instance()
+
+        # convert dict to xml
+        xml = dicttoxml.dicttoxml(res_dict)
+        log.info('voltha-info', res=res_dict, xml=xml)
+
+        self.rpc_response.node = self.get_root_element(xml)
+        self.rpc_response.is_error = False
+
+        returnValue(self.rpc_response)
+
+
+    def _validate_parameters(self):
+        log.info('validate-parameters', session=self.session.session_id)
+        self.params = self.rpc_method.getchildren()
+        if len(self.params) > 1:
+            self.rpc_response.is_error = True
+            self.rpc_response.node = ncerror.BadMsg(self.rpc_request)
+            return
+
+        if self.params and not filter_tag_match(self.params[0], C.NC_FILTER):
+            self.rpc_response.is_error = True
+            self.rpc_response.node = ncerror.UnknownElement(
+                self.rpc_request, self.params[0])
+            return
+
+        if not self.params:
+            self.params = [None]
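
Note: a self-contained sketch of the dict-to-XML step GetVoltha.execute
performs above; the sample dict stands in for the GetVolthaInstance
response.  dicttoxml wraps the payload in a <root> element, which
Rpc.get_root_element then parses back into an lxml node.

    import io
    import dicttoxml
    from lxml import etree

    res_dict = {'version': '0.9.0', 'log_level': 'INFO'}   # illustrative
    xml = dicttoxml.dicttoxml(res_dict)

    # equivalent to Rpc.get_root_element(xml)
    root = etree.parse(io.BytesIO(xml)).getroot()
    print etree.tostring(root, pretty_print=True)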
diff --git a/netconf/nc_rpc/rpc.py b/netconf/nc_rpc/rpc.py
index 3dd2f17..ff9b303 100644
--- a/netconf/nc_rpc/rpc.py
+++ b/netconf/nc_rpc/rpc.py
@@ -18,12 +18,15 @@
 #
 
 from  rpc_response import RpcResponse
+from lxml import etree
+import io
 
 class Rpc(object):
-    def __init__(self,rpc_request, rpc_method, session):
+    def __init__(self, rpc_request, rpc_method, grpc_client, session):
         self.rpc_request = rpc_request
         self.rpc_method = rpc_method
         self.rpc_response = RpcResponse()
+        self.grpc_client = grpc_client
         self.session = session
 
     def execute(self):
@@ -36,3 +39,8 @@
     def _validate_parameters(self, rpc_request):
         """Sets and validates the node as well"""
         pass
+
+    def get_root_element(self, xml_msg):
+        tree = etree.parse(io.BytesIO(xml_msg))
+        return tree.getroot()
+
diff --git a/netconf/nc_rpc/rpc_factory.py b/netconf/nc_rpc/rpc_factory.py
index f3ec0cc..eb13b8d 100644
--- a/netconf/nc_rpc/rpc_factory.py
+++ b/netconf/nc_rpc/rpc_factory.py
@@ -28,6 +28,7 @@
 from base.unlock import UnLock
 from base.close_session import CloseSession
 from base.kill_session import KillSession
+from ext.get_voltha import GetVoltha
 from netconf import NSMAP, qmap
 import netconf.nc_common.error as ncerror
 log = structlog.get_logger()
@@ -36,7 +37,7 @@
 
 	instance = None
 
-	def get_rpc_handler(self, rpc_node, msg, session):
+	def get_rpc_handler(self, rpc_node, msg, grpc_channel, session):
 		try:
 			msg_id = rpc_node.get('message-id')
 			log.info("Received-rpc-message-id", msg_id=msg_id)
@@ -59,12 +60,13 @@
 
 		class_handler = self.rpc_class_handlers.get(rpc_name, None)
 		if class_handler is not None:
-			return class_handler(rpc_node, rpc_method, session)
+			return class_handler(rpc_node, rpc_method, grpc_channel, session)
 
 		log.error("rpc-not-implemented", rpc=rpc_name)
 
 
 	rpc_class_handlers = {
+		'getvoltha' : GetVoltha,
 		'get-config': GetConfig,
 		'get': Get,
 		'edit-config': EditConfig,
@@ -83,3 +85,8 @@
 		RpcFactory.instance = RpcFactory()
 	return RpcFactory.instance
 
+
+if __name__ == '__main__':
+    fac = get_rpc_factory_instance()
+    rpc = fac.rpc_class_handlers.get('getvoltha', None)
+    print rpc  # the GetVoltha class; instantiating it needs a live session
\ No newline at end of file
diff --git a/netconf/nc_server.py b/netconf/nc_server.py
index 3b22290..6c43194 100644
--- a/netconf/nc_server.py
+++ b/netconf/nc_server.py
@@ -16,6 +16,7 @@
 
 import structlog
 import sys
+import os
 from twisted.conch import avatar
 from twisted.cred import portal
 from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
@@ -32,6 +33,7 @@
 from session.session_mgr import get_session_manager_instance
 from constants import Constants as C
 
+dir_path = os.path.dirname(os.path.realpath(__file__))
 
 # logp.startLogging(sys.stderr)
 
@@ -40,17 +42,17 @@
 
 # @implementer(conchinterfaces.ISession)
 class NetconfAvatar(avatar.ConchUser):
-    def __init__(self, username, nc_server, grpc_stub):
+    def __init__(self, username, nc_server, grpc_client):
         avatar.ConchUser.__init__(self)
         self.username = username
         self.nc_server = nc_server
-        self.grpc_stub = grpc_stub
+        self.grpc_client = grpc_client
         self.channelLookup.update({'session': session.SSHSession})
         self.subsystemLookup.update(
             {b"netconf": NetconfConnection})
 
-    def get_grpc_stub(self):
-        return self.grpc_stub
+    def get_grpc_client(self):
+        return self.grpc_client
 
     def get_nc_server(self):
         return self.nc_server
@@ -64,12 +66,12 @@
 
 @implementer(portal.IRealm)
 class NetconfRealm(object):
-    def __init__(self, nc_server, grpc_stub):
-        self.grpc_stub = grpc_stub
+    def __init__(self, nc_server, grpc_client):
+        self.grpc_client = grpc_client
         self.nc_server = nc_server
 
     def requestAvatar(self, avatarId, mind, *interfaces):
-        user = NetconfAvatar(avatarId, self.nc_server, self.grpc_stub)
+        user = NetconfAvatar(avatarId, self.nc_server, self.grpc_client)
         return interfaces[0], user, user.logout
 
 
@@ -86,7 +88,7 @@
                  server_public_key_file,
                  client_public_keys_file,
                  client_passwords_file,
-                 grpc_stub):
+                 grpc_client):
 
         self.netconf_port = netconf_port
         self.server_private_key_file = server_private_key_file
@@ -94,7 +96,7 @@
         self.client_public_keys_file = client_public_keys_file
         self.client_passwords_file = client_passwords_file
         self.session_mgr = get_session_manager_instance()
-        self.grpc_stub = grpc_stub
+        self.grpc_client = grpc_client
         self.connector = None
         self.nc_client_map = {}
         self.running = False
@@ -116,6 +118,12 @@
         self.d_stopped.callback(None)
         log.info('stopped')
 
+    def reload_capabilities(self):
+        # TODO: Called when there is a reconnect to voltha
+        # If there are new device types then the new
+        # capabilities will be exposed for subsequent client connections to use
+        pass
+
     def client_disconnected(self, result, handler, reason):
         assert isinstance(handler, NetconfProtocolHandler)
 
@@ -131,30 +139,33 @@
         #create a session
         session = self.session_mgr.create_session(client_conn.avatar.get_user())
         handler = NetconfProtocolHandler(self, client_conn,
-                                         session, self.grpc_stub)
+                                         session, self.grpc_client)
         client_conn.proto_handler = handler
         reactor.callLater(0, handler.start)
 
     def setup_secure_access(self):
         try:
             from twisted.cred import portal
-            portal = portal.Portal(NetconfRealm(self, self.grpc_stub))
+            portal = portal.Portal(NetconfRealm(self, self.grpc_client))
 
             # setup userid-password access
-            password_file = '{}/{}'.format(C.CLIENT_CRED_DIRECTORY,
-                                           self.client_passwords_file)
+            password_file = '{}/{}/{}'.format(dir_path,
+                                              C.CLIENT_CRED_DIRECTORY,
+                                              self.client_passwords_file)
             portal.registerChecker(FilePasswordDB(password_file))
 
             # setup access when client uses keys
-            keys_file = '{}/{}'.format(C.CLIENT_CRED_DIRECTORY,
-                                       self.client_public_keys_file)
+            keys_file = '{}/{}/{}'.format(dir_path,
+                                          C.CLIENT_CRED_DIRECTORY,
+                                          self.client_public_keys_file)
             with open(keys_file) as f:
                 users = [line.rstrip('\n') for line in f]
             users_dict = {}
             for user in users:
                 users_dict[user.split(':')[0]] = [
-                    keys.Key.fromFile('{}/{}'.format(C.CLIENT_CRED_DIRECTORY,
-                                                     user.split(':')[1]))]
+                    keys.Key.fromFile('{}/{}/{}'.format(dir_path,
+                                                        C.CLIENT_CRED_DIRECTORY,
+                                                        user.split(':')[1]))]
             sshDB = SSHPublicKeyChecker(InMemorySSHKeyDB(users_dict))
             portal.registerChecker(sshDB)
             return portal
@@ -182,8 +193,9 @@
         return SSHServerTransport()
 
     def getPublicKeys(self):
-        key_file_name = '{}/{}'.format(C.KEYS_DIRECTORY,
-                                       self.server_public_key_file)
+        key_file_name = '{}/{}/{}'.format(dir_path,
+                                          C.KEYS_DIRECTORY,
+                                          self.server_public_key_file)
         try:
             publicKeys = {
                 'ssh-rsa': keys.Key.fromFile(key_file_name)
@@ -194,8 +206,9 @@
                       filename=key_file_name, exception=repr(e))
 
     def getPrivateKeys(self):
-        key_file_name = '{}/{}'.format(C.KEYS_DIRECTORY,
-                                       self.server_private_key_file)
+        key_file_name = '{}/{}/{}'.format(dir_path,
+                                          C.KEYS_DIRECTORY,
+                                          self.server_private_key_file)
         try:
             privateKeys = {
                 'ssh-rsa': keys.Key.fromFile(key_file_name)
diff --git a/netconf/protoc_plugins/addressbook.proto b/netconf/protoc_plugins/addressbook.proto
new file mode 100644
index 0000000..fc1a10f
--- /dev/null
+++ b/netconf/protoc_plugins/addressbook.proto
@@ -0,0 +1,34 @@
+// See README.txt for information and build instructions.
+
+syntax = "proto3";
+
+package tutorial;
+
+option java_package = "com.example.tutorial";
+option java_outer_classname = "AddressBookProtos";
+option csharp_namespace = "Google.Protobuf.Examples.AddressBook";
+
+message Person {
+  string name = 1;
+  int32 id = 2;        // Unique ID number for this person.
+  string email = 3;
+
+  enum PhoneType {
+    MOBILE = 0;
+    HOME = 1;
+    WORK = 2;
+  }
+
+  message PhoneNumber {
+    string number = 1;
+    PhoneType type = 2;
+  }
+
+  repeated PhoneNumber phones = 4;
+  repeated string khen = 5;
+}
+
+// Our address book file is just one of these.
+message AddressBook {
+  repeated Person people = 1;
+}
diff --git a/netconf/protoc_plugins/descriptor.desc b/netconf/protoc_plugins/descriptor.desc
new file mode 100644
index 0000000..ac12c36
--- /dev/null
+++ b/netconf/protoc_plugins/descriptor.desc
Binary files differ
diff --git a/netconf/protoc_plugins/descriptor_parser.py b/netconf/protoc_plugins/descriptor_parser.py
new file mode 100644
index 0000000..c23f497
--- /dev/null
+++ b/netconf/protoc_plugins/descriptor_parser.py
@@ -0,0 +1,164 @@
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+from collections import OrderedDict
+
+from google.protobuf import descriptor_pb2
+from google.protobuf.descriptor import FieldDescriptor, Descriptor
+from google.protobuf.message import Message
+
+
+class InvalidDescriptorError(Exception): pass
+
+
+class DescriptorParser(object):
+    """
+    Used to parse protobuf FileDescriptor objects into native Python
+    data structures (nested dict/list/intrinsic values). Two of the typical
+    sources of FileDescriptor objects are:
+    1. CodeGeneratorRequest, used as binary input to any protoc plugin,
+       contains a list of these FileDescriptor objects (under the
+       proto_file attribute)
+    2. FileDescriptorSet, as saved by protoc when using the -o option.
+
+    An important feature of the parser is that it can process the source
+    code annotations and can fold comments into the relevant definitions
+    present in the proto file.
+
+    Usage (in a protoc plugin):
+    >>> request = plugin.CodeGeneratorRequest()
+    >>> request.ParseFromString(sys.stdin.read())
+    >>> parser = DescriptorParser()
+    >>> for proto_file in request.proto_file:
+    >>>     parsed_data = parser.parse_file_descriptor(proto_file)
+    >>>     print json.dumps(parsed_data, indent=4)
+    """
+
+    meta = None
+
+    def __init__(self):
+        if DescriptorParser.meta is None:
+            DescriptorParser.meta = self.load_meta_descriptor()
+
+    def load_meta_descriptor(self):
+        """
+        Load the protobuf version of descriptor.proto to use it in
+        decoding protobuf paths.
+        """
+        fpath = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             'descriptor.desc'))
+        with open(fpath, 'rb') as f:
+            blob = f.read()
+        proto = descriptor_pb2.FileDescriptorSet()
+        proto.ParseFromString(blob)
+        assert len(proto.file) == 1
+        return proto.file[0]
+
+    parser_table = {
+        unicode: lambda x: x,
+        int: lambda x: x,
+        bool: lambda x: x,
+    }
+
+    def parse(self, o, type_tag_name=None):
+        if isinstance(o, Message):
+            return self.parse_message(o, type_tag_name)
+        else:
+            return self.parser_table[type(o)](o)
+
+    def parse_message(self, m, type_tag_name=None):
+        assert isinstance(m, Message)
+        d = OrderedDict()
+        for field, value in m.ListFields():
+            assert isinstance(field, FieldDescriptor)
+            if field.label in (1, 2):
+                d[field.name] = self.parse(value, type_tag_name)
+            elif field.label == 3:
+                d[field.name] = [self.parse(x, type_tag_name) for x in
+                                 value]
+            else:
+                raise InvalidDescriptorError()
+
+        if type_tag_name is not None:
+            d[type_tag_name] = m.DESCRIPTOR.full_name.strip('.')
+
+        return d
+
+    def parse_file_descriptor(self, descriptor,
+                              type_tag_name=None,
+                              fold_comments=False):
+
+        d = self.parse(descriptor, type_tag_name=type_tag_name)
+
+        if fold_comments:
+            locations = d.get('source_code_info', {}).get('location', [])
+            for location in locations:
+                path = location.get('path', [])
+                comments = ''.join([
+                    location.get('leading_comments', '').strip(' '),
+                    location.get('trailing_comments', '').strip(' '),
+                    ''.join(block.strip(' ') for block
+                            in
+                            location.get('leading_detached_comments', ''))
+                ]).strip()
+
+                # ignore locations with no comments
+                if not comments:
+                    continue
+
+                # we ignore path with odd number of entries, since these do
+                # not address our schema nodes, but rather the meta schema
+                if (len(path) % 2 == 0):
+                    node = self.find_node_by_path(
+                        path, self.meta.DESCRIPTOR, d)
+                    assert isinstance(node, dict)
+                    node['_description'] = comments
+
+            # remove source_code_info
+            del d['source_code_info']
+
+        return d
+
+    def parse_file_descriptors(self, descriptors,
+                              type_tag_name=None,
+                              fold_comments=False):
+        return [self.parse_file_descriptor(descriptor,
+                                           type_tag_name=type_tag_name,
+                                           fold_comments=fold_comments)
+                for descriptor in descriptors]
+
+    def find_node_by_path(self, path, meta, o):
+        # stop recursion when path is empty
+        if not path:
+            return o
+
+        # sanity check
+        assert len(path) >= 2
+        assert isinstance(meta, Descriptor)
+        assert isinstance(o, dict)
+
+        # find field name, then actual field
+        field_number = path.pop(0)
+        field_def = meta.fields_by_number[field_number]
+        field = o[field_def.name]
+
+        # field must be a list, extract entry with given index
+        assert isinstance(field, list)  # expected to be a list field
+        index = path.pop(0)
+        child_o = field[index]
+
+        child_meta = field_def.message_type
+        return self.find_node_by_path(path, child_meta, child_o)
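
Note: a minimal sketch of regenerating a FileDescriptorSet and feeding it to
DescriptorParser; the protoc flags are an assumption based on the
docstring's mention of the -o option (source info must be included for
fold_comments to work).

    # protoc --include_source_info -o addressbook.desc addressbook.proto

    from google.protobuf import descriptor_pb2
    from descriptor_parser import DescriptorParser

    with open('addressbook.desc', 'rb') as f:
        fds = descriptor_pb2.FileDescriptorSet()
        fds.ParseFromString(f.read())

    parser = DescriptorParser()
    for parsed in parser.parse_file_descriptors(
            fds.file, type_tag_name='_type', fold_comments=True):
        print parsed['name']    # e.g. addressbook.proto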
diff --git a/netconf/protoc_plugins/proto2yang.py b/netconf/protoc_plugins/proto2yang.py
new file mode 100755
index 0000000..ae1999e
--- /dev/null
+++ b/netconf/protoc_plugins/proto2yang.py
@@ -0,0 +1,605 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""protoc plugin to convert a protobuf schema to a yang schema
+
+   - basic support for messages, fields, enumerations, services and methods
+
+   - yang semantic rules still need to be implemented
+
+   - to run this plugin :
+
+   $ python -m grpc.tools.protoc -I.
+   --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto
+
+   - the above will produce an ietf-<proto file>.yang file formatted for yang
+
+   - two examples of proto that can be used in the same directory are
+   yang.proto and addressbook.proto
+
+"""
+
+import sys
+
+from jinja2 import Template
+from google.protobuf.compiler import plugin_pb2 as plugin
+from descriptor_parser import DescriptorParser
+
+from google.protobuf.descriptor import FieldDescriptor
+
+template_yang = Template("""
+module ietf-{{ module.name }} {
+
+    {% macro set_module_prefix(type) %}
+        {% for t in module.data_types %}
+            {% if t.type == type %}
+                {% if t.module != module.name %} {{ t.module }}:{{ type }};
+                {% else %} {{ type }};
+                {% endif %}
+                {% set found=True %}
+            {% endif %}
+        {% if loop.last %}
+            {% if not found %} {{ type }}; {% endif %}
+        {% endif %}
+        {% endfor %}
+    {% endmacro %}
+
+    namespace "urn:opencord:params:xml:ns:voltha:ietf-{{ module.name }}";
+    prefix {{ module.name }};
+
+    {% for imp in module.imports %}
+    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
+    {% endfor %}
+
+    organization "CORD";
+    contact
+        " Any name";
+
+    description
+        "{{ module.description }}";
+
+    revision "2016-11-15" {
+        description "Initial revision.";
+        reference "reference";
+    }
+
+    {% for enum in module.enums %}
+    typedef {{ enum.name }} {
+        type enumeration {
+        {% for v in enum.value %}
+            enum {{ v.name }} {
+                description "{{ v.description }}";
+            }
+        {% endfor %}
+        }
+        description
+            "{{ enum.description }}";
+    }
+    {% endfor %}
+
+    {% for message in module.messages recursive %}
+    {% if message.name in module.referred_messages %}
+    grouping {{ message.name }} {
+    {% else %}
+    container {{ message.name }} {
+    {% endif %}
+        description
+            "{{ message.description }}";
+        {% for field in message.fields %}
+        {% if field.type_ref %}
+        {% for dict_item in module.referred_messages_with_keys %}
+            {% if dict_item.name == field.type %}
+                {% if not field.repeated %}
+        container {{ field.name }} {
+                {% else %}
+        list {{ field.name }} {
+            key "{{ dict_item.key }}";
+            {% if not field.repeated %}
+            max-elements 1;
+            {% endif %}
+            {% endif %}
+            uses {{ set_module_prefix(field.type) }}
+            description
+                "{{ field.description }}";
+        }
+            {% endif %}
+        {% endfor %}
+        {% elif field.repeated %}
+        list {{ field.name }} {
+            key "{{ field.name }}";
+            leaf {{ field.name }} {
+                {% if field.type == "decimal64" %}
+                type {{ field.type }} {
+                   fraction-digits 5;
+                }
+                {% else %}
+                type {{ set_module_prefix(field.type) }}
+                {% endif %}
+                description
+                    "{{ field.description }}";
+            }
+            description
+                "{{ field.description }}";
+        }
+        {% else %}
+        leaf {{ field.name }} {
+            {% if field.type == "decimal64" %}
+            type {{ field.type }} {
+               fraction-digits 5;
+            }
+            {% else %}
+            type {{ set_module_prefix(field.type) }}
+            {% endif %}
+            description
+                "{{ field.description }}";
+        }
+        {% endif %}
+
+        {% endfor %}
+        {% for enum_type in message.enums %}
+        typedef {{ enum_type.name }} {
+            type enumeration {
+            {% for v in enum_type.value %}
+                enum {{ v.name }} {
+                    description "{{ v.description }}";
+                }
+            {% endfor %}
+            }
+            description
+                "{{ enum_type.description }}";
+        }
+
+        {% endfor %}
+    {% if message.messages %}
+    {{ loop (message.messages)|indent(4, false) }}
+    {% endif %}
+    }
+
+    {% endfor %}
+    {% for service in module.services %}
+    {% if service.description %}
+    /*  {{ service.description }} */
+    {% endif %}
+    {% for method in service.methods %}
+    rpc {{ service.service }}-{{ method.method }} {
+        description
+            "{{ method.description }}";
+        {% if method.input %}
+        input {
+            {% if method.input_ref %}
+            uses {{ set_module_prefix(method.input) }}
+            {% else %}
+            leaf {{ method.input }} {
+                type {{ set_module_prefix(method.input) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+        {% if method.output %}
+        output {
+            {% if method.output_ref %}
+            uses {{ set_module_prefix(method.output) }}
+            {% else %}
+            leaf {{ method.output }} {
+                type {{ set_module_prefix(method.output) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+    }
+
+    {% endfor %}
+
+    {% endfor %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+# def traverse_dependencies(descriptor):
+#     dependencies = []
+#     proto_imports = descriptor.get('dependency', [])
+#     for proto_import in proto_imports:
+#         # If the import file has a directory path to it remove it as it is not
+#         # allowed in Yang.  The proto extension should be removed as well
+#         dependencies.append (
+#             {
+#             'name' : proto_import.split('/')[-1][:-len('.proto')]
+#             }
+#         )
+#     return dependencies
+
+
+def traverse_messages(message_types, prefix, referenced_messages):
+    messages = []
+    for message_type in message_types:
+        assert message_type['_type'] == 'google.protobuf.DescriptorProto'
+
+        # full_name = prefix + '-' + message_type['name']
+        full_name = message_type['name']
+
+        # parse the fields
+        fields = traverse_fields(message_type.get('field', []), full_name,
+                                 referenced_messages)
+
+        # parse the enums
+        enums = traverse_enums(message_type.get('enum_type', []), full_name)
+
+        # parse nested messages
+        nested = message_type.get('nested_type', [])
+        nested_messages = traverse_messages(nested, full_name,
+                                            referenced_messages)
+        messages.append(
+            {
+                'name': full_name,
+                'fields': fields,
+                'enums': enums,
+                # 'extensions': extensions,
+                'messages': nested_messages,
+                'description': remove_unsupported_characters(
+                    message_type.get('_description', '')),
+                # 'extension_ranges': extension_ranges,
+                # 'oneof': oneof
+            }
+        )
+    return messages
+
+
+def traverse_fields(fields_desc, prefix, referenced_messages):
+    fields = []
+    for field in fields_desc:
+        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
+        yang_base_type = is_base_type(field['type'])
+        _type = get_yang_type(field)
+        if not yang_base_type:
+            referenced_messages.append(_type)
+        # add to referred messages also if it is an enumeration type
+        if is_enumeration(field['type']):
+            referenced_messages.append(_type)
+
+        fields.append(
+            {
+                # 'name': prefix + '-' + field.get('name', ''),
+                'name': field.get('name', ''),
+                'label': field.get('label', ''),
+                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
+                'number': field.get('number', ''),
+                'options': field.get('options', ''),
+                'type_name': field.get('type_name', ''),
+                'type': _type,
+                'type_ref': not yang_base_type,
+                'description': remove_unsupported_characters(field.get(
+                    '_description', ''))
+            }
+        )
+    return fields
+
+
+def traverse_enums(enums_desc, prefix):
+    enums = []
+    for enum in enums_desc:
+        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
+        # full_name = prefix + '-' + enum.get('name', '')
+        full_name = enum.get('name', '')
+        enums.append(
+            {
+                'name': full_name,
+                'value': enum.get('value', ''),
+                'description': remove_unsupported_characters(enum.get(
+                    '_description', ''))
+            }
+        )
+    return enums
+
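+# e.g. SimpleEnum from the sample yang.proto yields {'name': 'SimpleEnum',
+# 'value': [...], 'description': ''} and is later rendered as a yang typedef
+# with "type enumeration".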
+
+def traverse_services(service_desc, referenced_messages):
+    services = []
+    for service in service_desc:
+        methods = []
+        for method in service.get('method', []):
+            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'
+
+            input_name = method.get('input_type')
+            input_ref = False
+            if not is_base_type(input_name):
+                input_name = remove_first_character_if_match(input_name, '.')
+                # input_name = input_name.replace(".", "-")
+                input_name = input_name.split('.')[-1]
+                referenced_messages.append(input_name)
+                input_ref = True
+
+            output_name = method.get('output_type')
+            output_ref = False
+            if not is_base_type(output_name):
+                output_name = remove_first_character_if_match(output_name, '.')
+                # output_name = output_name.replace(".", "-")
+                output_name = output_name.split('.')[-1]
+                referenced_messages.append(output_name)
+                output_ref = True
+
+            methods.append(
+                {
+                    'method': method.get('name', ''),
+                    'input': input_name,
+                    'input_ref': input_ref,
+                    'output': output_name,
+                    'output_ref': output_ref,
+                    'description': remove_unsupported_characters(method.get(
+                        '_description', '')),
+                    'server_streaming': bool(method.get('server_streaming',
+                                                        False))
+                }
+            )
+        services.append(
+            {
+                'service': service.get('name', ''),
+                'methods': methods,
+                'description': remove_unsupported_characters(service.get(
+                    '_description', '')),
+            }
+        )
+    return services
+
+
+def rchop(thestring, ending):
+    if thestring.endswith(ending):
+        return thestring[:-len(ending)]
+    return thestring
+
+
+def traverse_desc(descriptor):
+    referenced_messages = []
+    name = rchop(descriptor.get('name', ''), '.proto')
+    package = descriptor.get('package', '')
+    description = descriptor.get('_description', '')
+    # imports=traverse_dependencies(descriptor)
+    messages = traverse_messages(descriptor.get('message_type', []),
+                                 package, referenced_messages)
+    enums = traverse_enums(descriptor.get('enum_type', []), package)
+    services = traverse_services(descriptor.get('service', []),
+                                 referenced_messages)
+    # extensions = _traverse_extensions(descriptors)
+    # options = _traverse_options(descriptors)
+    # set_messages_keys(messages)
+    # unique_referred_messages_with_keys = []
+    # for message_name in list(set(referenced_messages)):
+    #     unique_referred_messages_with_keys.append(
+    #         {
+    #             'name': message_name,
+    #             'key': get_message_key(message_name, messages)
+    #         }
+    #     )
+
+    # Get a list of type definitions (messages, enums) defined in this
+    # descriptor
+    defined_types = [m['name'].split('/')[-1] for m in messages] + \
+                    [e['name'].split('/')[-1]  for e in enums]
+
+    data = {
+        'name': name.split('/')[-1],
+        'package': package,
+        'description': description,
+        # 'imports' : imports,
+        'messages': messages,
+        'enums': enums,
+        'services': services,
+        'defined_types' : defined_types,
+        'referenced_messages': list(set(referenced_messages)),
+        # TODO:  simplify for easier jinja2 template use
+        # 'referred_messages_with_keys': unique_referred_messages_with_keys,
+        # 'extensions': extensions,
+        # 'options': options
+    }
+    return data
+
+
+def set_messages_keys(messages):
+    for message in messages:
+        message['key'] = _get_message_key(message, messages)
+        if message['messages']:
+            set_messages_keys(message['messages'])
+
+def _get_message_key(message, messages):
+    # assume key is first yang base type field
+    for field in message['fields']:
+        if not field['type_ref']:
+            return field['name']
+        else:
+            # if the field name is a message then loop for the key in that
+            # message
+            ref_message = _get_message(field['type'], messages)
+            if ref_message:
+                return _get_message_key(ref_message, messages)
+
+    # no key yet - search nested messages
+    for m in message['messages']:
+        key = _get_message_key(m, messages)
+        if key is not None:
+            return key
+    return None
+
+def _get_message(name, messages):
+    for m in messages:
+        if m['name'] == name:
+            return m
+    return None
+
+def get_message_key(message_name, messages):
+    for message in messages:
+        if message_name == message['name']:
+            return message['key']
+        if message['messages']:
+            return get_message_key(message_name, message['messages'])
+    return None
+
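+# Key derivation example: for the Packet message in the sample yang.proto the
+# first yang base type field is "source", so a yang list generated for a
+# Packet-typed field uses key "source"; for the nested InnerPacket the key
+# would be "url".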
+
+def update_module_imports(module):
+    used_imports = set()
+    for ref_msg in module['referenced_messages']:
+        for type_dict in module['data_types']:
+            if ref_msg == type_dict['type']:
+                if module['name'] != type_dict['module']:
+                    used_imports.add(type_dict['module'])
+                break
+    module['imports'] = [{'name' : i} for i in used_imports]
+
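+# e.g. if this module references a type that all_defined_types attributes to a
+# module named "health", and this module is not itself "health", the template
+# emits: import ietf-health { prefix health ; }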
+
+def generate_code(request, response):
+    assert isinstance(request, plugin.CodeGeneratorRequest)
+
+    parser = DescriptorParser()
+
+    # First process the proto file with the imports
+    all_defined_types = []
+    all_proto_data = []
+    all_referred_messages = []
+    all_messages = []
+    for proto_file in request.proto_file:
+        native_data = parser.parse_file_descriptor(proto_file,
+                                                   type_tag_name='_type',
+                                                   fold_comments=True)
+
+        # Consolidate the defined types across imports
+        yang_data = traverse_desc(native_data)
+        for type in yang_data['defined_types']:
+            all_defined_types.append(
+                {
+                    'type' : type,
+                    'module' : yang_data['name']
+                }
+            )
+
+        all_proto_data.append(
+            {
+                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
+                    '/')[-1].replace('.proto','.yang')),
+                'module': yang_data
+            }
+        )
+
+        # Consolidate referred messages across imports
+        all_referred_messages = all_referred_messages + yang_data['referenced_messages']
+
+        # consolidate all messages
+        all_messages = all_messages + yang_data['messages']
+
+    # Set the message keys - required for List definitions (repeated label)
+    set_messages_keys(all_messages)
+    unique_referred_messages_with_keys = []
+    for m in all_messages:
+        unique_referred_messages_with_keys.append(
+                {
+                    'name': m['name'],
+                    'key': m['key']
+                }
+            )
+
+    # Create the files
+    for proto_data in all_proto_data:
+        f = response.file.add()
+        f.name = proto_data['file_name']
+        proto_data['module']['data_types'] = all_defined_types
+        proto_data['module']['referred_messages'] = all_referred_messages
+        proto_data['module']['referred_messages_with_keys'] = unique_referred_messages_with_keys
+        update_module_imports(proto_data['module'])
+        f.content = template_yang.render(module=proto_data['module'])
+
+
+def get_yang_type(field):
+    type = field['type']
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        if _type in ['enumeration', 'message', 'group']:
+            return field['type_name'].split('.')[-1]
+            # return remove_first_character_if_match(field['type_name'],
+            #                                        '.').replace('.', '-')
+        else:
+            return _type
+    else:
+        return type
+
+def is_enumeration(type):
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        return _type in ['enumeration']
+    return False
+
+def is_base_type(type):
+    # check numeric value of the type first
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        return _type not in ['message', 'group']
+    else:
+        # proto name of the type
+        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
+                  _format == type and _format not in ['message',
+                                                      'group']]
+        return len(result) > 0
+
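+# e.g. is_base_type(FieldDescriptor.TYPE_STRING) is True (it maps to
+# 'string'), while a message type name such as 'Echo' matches nothing in the
+# map, so it is False.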
+
+def remove_unsupported_characters(text):
+    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
+    return ''.join([i if i not in unsupported_characters else ' ' for i in
+                    text])
+
+
+def remove_first_character_if_match(str, char):
+    if str.startswith(char):
+        return str[1:]
+    return str
+
+
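+# Maps a protobuf field type to a (yang type, proto type name) pair: the
+# first element is emitted in the generated yang, the second is the textual
+# name matched against string type names in is_base_type().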
+YANG_TYPE_MAP = {
+    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
+    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
+    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
+    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
+    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
+    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
+    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
+    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_STRING: ('string', 'string'),
+    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
+    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
+    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
+    FieldDescriptor.TYPE_GROUP: ('group', 'group')
+}
+
+if __name__ == '__main__':
+    # Read request message from stdin
+    data = sys.stdin.read()
+
+    # Parse request
+    request = plugin.CodeGeneratorRequest()
+    request.ParseFromString(data)
+
+    # Create response
+    response = plugin.CodeGeneratorResponse()
+
+    # Generate code
+    generate_code(request, response)
+
+    # Serialise response message
+    output = response.SerializeToString()
+
+    # Write to stdout
+    sys.stdout.write(output)
+    # print is_base_type(9)
diff --git a/netconf/protoc_plugins/proto2yang_work_latest.py b/netconf/protoc_plugins/proto2yang_work_latest.py
new file mode 100755
index 0000000..6a2a3e7
--- /dev/null
+++ b/netconf/protoc_plugins/proto2yang_work_latest.py
@@ -0,0 +1,601 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""protoc plugin to convert a protobuf schema to a yang schema
+
+   - basic support for messages, fields, enumerations, services and methods
+
+   - yang semantic rules still need to be implemented
+
+   - to run this plugin:
+
+   $ python -m grpc.tools.protoc -I.
+   --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto
+
+   - the above will produce an ietf-<proto file>.yang file in yang format
+
+   - two examples of proto that can be used in the same directory are
+   yang.proto and addressbook.proto
+
+"""
+
+import sys
+
+from jinja2 import Template
+from google.protobuf.compiler import plugin_pb2 as plugin
+from descriptor_parser import DescriptorParser
+
+from google.protobuf.descriptor import FieldDescriptor
+
+template_yang = Template("""
+module ietf-{{ module.name }} {
+
+    {% macro set_module_prefix(type) %}
+        {% for t in module.data_types %}
+            {% if t.type == type %}
+                {% if t.module != module.name %} {{ t.module }}:{{ type }};
+                {% else %} {{ type }};
+                {% endif %}
+                {% set found=True %}
+            {% endif %}
+        {% if loop.last %}
+            {% if not found %} {{ type }}; {% endif %}
+        {% endif %}
+        {% endfor %}
+    {% endmacro %}
+
+    namespace "urn:opencord:params:xml:ns:voltha:ietf-{{ module.name }}";
+    prefix {{ module.name }};
+
+    {% for imp in module.imports %}
+    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
+    {% endfor %}
+
+    organization "CORD";
+    contact
+        " Any name";
+
+    description
+        "{{ module.description }}";
+
+    revision "2016-11-15" {
+        description "Initial revision.";
+        reference "reference";
+    }
+
+    {% for enum in module.enums %}
+    typedef {{ enum.name }} {
+        type enumeration {
+        {% for v in enum.value %}
+            enum {{ v.name }} {
+                description "{{ v.description }}";
+            }
+        {% endfor %}
+        }
+        description
+            "{{ enum.description }}";
+    }
+    {% endfor %}
+
+    {% for message in module.messages recursive %}
+    {% if message.name in module.referred_messages %}
+    grouping {{ message.name }} {
+    {% else %}
+    container {{ message.name }} {
+    {% endif %}
+        description
+            "{{ message.description }}";
+        {% for field in message.fields %}
+        {% if field.type_ref %}
+        {% for dict_item in module.referred_messages_with_keys %}
+            {% if dict_item.name == field.type %}
+                {% if not field.repeated %}
+        container {{ field.name }} {
+                {% else %}
+        list {{ field.name }} {
+            key "{{ dict_item.key }}";
+            {% if not field.repeated %}
+            max-elements 1;
+            {% endif %}
+            {% endif %}
+            uses {{ set_module_prefix(field.type) }}
+            description
+                "{{ field.description }}";
+        }
+            {% endif %}
+        {% endfor %}
+        {% elif field.repeated %}
+        list {{ field.name }} {
+            key "{{ field.name }}";
+            leaf {{ field.name }} {
+                {% if field.type == "decimal64" %}
+                type {{ field.type }} {
+                   fraction-digits 5;
+                }
+                {% else %}
+                type {{ set_module_prefix(field.type) }}
+                {% endif %}
+                description
+                    "{{ field.description }}";
+            }
+            description
+                "{{ field.description }}";
+        }
+        {% else %}
+        leaf {{ field.name }} {
+            {% if field.type == "decimal64" %}
+            type {{ field.type }} {
+               fraction-digits 5;
+            }
+            {% else %}
+            type {{ set_module_prefix(field.type) }}
+            {% endif %}
+            description
+                "{{ field.description }}";
+        }
+        {% endif %}
+
+        {% endfor %}
+        {% for enum_type in message.enums %}
+        typedef {{ enum_type.name }} {
+            type enumeration {
+            {% for v in enum_type.value %}
+                enum {{ v.name }} {
+                    description "{{ v.description }}";
+                }
+            {% endfor %}
+            }
+            description
+                "{{ enum_type.description }}";
+        }
+
+        {% endfor %}
+    {% if message.messages %}
+    {{ loop (message.messages)|indent(4, false) }}
+    {% endif %}
+    }
+
+    {% endfor %}
+    {% for service in module.services %}
+    {% if service.description %}
+    /*  {{ service.description }} */
+    {% endif %}
+    {% for method in service.methods %}
+    rpc {{ service.service }}-{{ method.method }} {
+        description
+            "{{ method.description }}";
+        {% if method.input %}
+        input {
+            {% if method.input_ref %}
+            uses {{ set_module_prefix(method.input) }}
+            {% else %}
+            leaf {{ method.input }} {
+                type {{ set_module_prefix(method.input) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+        {% if method.output %}
+        output {
+            {% if method.output_ref %}
+            uses {{ set_module_prefix(method.output) }}
+            {% else %}
+            leaf {{ method.output }} {
+                type {{ set_module_prefix(method.output) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+    }
+
+    {% endfor %}
+
+    {% endfor %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+def traverse_dependencies(descriptor):
+    dependencies = []
+    proto_imports = descriptor.get('dependency', [])
+    for proto_import in proto_imports:
+        # If the import file has a directory path to it remove it as it is not
+        # allowed in Yang.  The proto extension should be removed as well
+        dependencies.append (
+            {
+            'name' : proto_import.split('/')[-1][:-len('.proto')]
+            }
+        )
+    return dependencies
+
+
+def traverse_messages(message_types, prefix, referenced_messages):
+    messages = []
+    for message_type in message_types:
+        assert message_type['_type'] == 'google.protobuf.DescriptorProto'
+
+        # full_name = prefix + '-' + message_type['name']
+        full_name = message_type['name']
+
+        # parse the fields
+        fields = traverse_fields(message_type.get('field', []), full_name,
+                                 referenced_messages)
+
+        # parse the enums
+        enums = traverse_enums(message_type.get('enum_type', []), full_name)
+
+        # parse nested messages
+        nested = message_type.get('nested_type', [])
+        nested_messages = traverse_messages(nested, full_name,
+                                            referenced_messages)
+        messages.append(
+            {
+                'name': full_name,
+                'fields': fields,
+                'enums': enums,
+                # 'extensions': extensions,
+                'messages': nested_messages,
+                'description': remove_unsupported_characters(
+                    message_type.get('_description', '')),
+                # 'extension_ranges': extension_ranges,
+                # 'oneof': oneof
+            }
+        )
+    return messages
+
+
+def traverse_fields(fields_desc, prefix, referenced_messages):
+    fields = []
+    for field in fields_desc:
+        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
+        yang_base_type = is_base_type(field['type'])
+        _type = get_yang_type(field)
+        if not yang_base_type:
+            referenced_messages.append(_type)
+
+        fields.append(
+            {
+                # 'name': prefix + '-' + field.get('name', ''),
+                'name': field.get('name', ''),
+                'label': field.get('label', ''),
+                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
+                'number': field.get('number', ''),
+                'options': field.get('options', ''),
+                'type_name': field.get('type_name', ''),
+                'type': _type,
+                'type_ref': not yang_base_type,
+                'description': remove_unsupported_characters(field.get(
+                    '_description', ''))
+            }
+        )
+    return fields
+
+
+def traverse_enums(enums_desc, prefix):
+    enums = []
+    for enum in enums_desc:
+        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
+        # full_name = prefix + '-' + enum.get('name', '')
+        full_name = enum.get('name', '')
+        enums.append(
+            {
+                'name': full_name,
+                'value': enum.get('value', ''),
+                'description': remove_unsupported_characters(enum.get(
+                    '_description', ''))
+            }
+        )
+    return enums
+
+
+def traverse_services(service_desc, referenced_messages):
+    services = []
+    for service in service_desc:
+        methods = []
+        for method in service.get('method', []):
+            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'
+
+            input_name = method.get('input_type')
+            input_ref = False
+            if not is_base_type(input_name):
+                input_name = remove_first_character_if_match(input_name, '.')
+                # input_name = input_name.replace(".", "-")
+                input_name = input_name.split('.')[-1]
+                referenced_messages.append(input_name)
+                input_ref = True
+
+            output_name = method.get('output_type')
+            output_ref = False
+            if not is_base_type(output_name):
+                output_name = remove_first_character_if_match(output_name, '.')
+                # output_name = output_name.replace(".", "-")
+                output_name = output_name.split('.')[-1]
+                referenced_messages.append(output_name)
+                output_ref = True
+
+            methods.append(
+                {
+                    'method': method.get('name', ''),
+                    'input': input_name,
+                    'input_ref': input_ref,
+                    'output': output_name,
+                    'output_ref': output_ref,
+                    'description': remove_unsupported_characters(method.get(
+                        '_description', '')),
+                    'server_streaming': bool(method.get('server_streaming',
+                                                        False))
+                }
+            )
+        services.append(
+            {
+                'service': service.get('name', ''),
+                'methods': methods,
+                'description': remove_unsupported_characters(service.get(
+                    '_description', '')),
+            }
+        )
+    return services
+
+
+def rchop(thestring, ending):
+    if thestring.endswith(ending):
+        return thestring[:-len(ending)]
+    return thestring
+
+
+def traverse_desc(descriptor):
+    referenced_messages = []
+    name = rchop(descriptor.get('name', ''), '.proto')
+    package = descriptor.get('package', '')
+    description = descriptor.get('_description', '')
+    # imports=traverse_dependencies(descriptor)
+    messages = traverse_messages(descriptor.get('message_type', []),
+                                 package, referenced_messages)
+    enums = traverse_enums(descriptor.get('enum_type', []), package)
+    services = traverse_services(descriptor.get('service', []),
+                                 referenced_messages)
+    # extensions = _traverse_extensions(descriptors)
+    # options = _traverse_options(descriptors)
+    # set_messages_keys(messages)
+    # unique_referred_messages_with_keys = []
+    # for message_name in list(set(referenced_messages)):
+    #     unique_referred_messages_with_keys.append(
+    #         {
+    #             'name': message_name,
+    #             'key': get_message_key(message_name, messages)
+    #         }
+    #     )
+
+    # Get a list of type definitions (messages, enums) defined in this
+    # descriptor
+    defined_types = [m['name'].split('/')[-1] for m in messages] + \
+                    [e['name'].split('/')[-1]  for e in enums]
+
+    data = {
+        'name': name.split('/')[-1],
+        'package': package,
+        'description': description,
+        # 'imports' : imports,
+        'messages': messages,
+        'enums': enums,
+        'services': services,
+        'defined_types' : defined_types,
+        'referenced_messages': list(set(referenced_messages)),
+        # TODO:  simplify for easier jinja2 template use
+        # 'referred_messages_with_keys': unique_referred_messages_with_keys,
+        # 'extensions': extensions,
+        # 'options': options
+    }
+    return data
+
+
+def set_messages_keys(messages):
+    for message in messages:
+        message['key'] = _get_message_key(message, messages)
+        if message['messages']:
+            set_messages_keys(message['messages'])
+
+def _get_message_key(message, messages):
+    # assume key is first yang base type field
+    for field in message['fields']:
+        if not field['type_ref']:
+            return field['name']
+        else:
+            # if the field name is a message then loop for the key in that
+            # message
+            ref_message = _get_message(field['type'], messages)
+            if ref_message:
+                return _get_message_key(ref_message, messages)
+
+    # no key yet - search nested messages
+    for m in message['messages']:
+        key = _get_message_key(m, messages)
+        if key is not None:
+            return key
+    return None
+
+def _get_message(name, messages):
+    for m in messages:
+        if m['name'] == name:
+            return m
+    return None
+
+def get_message_key(message_name, messages):
+    for message in messages:
+        if message_name == message['name']:
+            return message['key']
+        if message['messages']:
+            return get_message_key(message_name, message['messages'])
+    return None
+
+
+def update_module_imports(module):
+    used_imports = []
+    for ref_msg in module['referenced_messages']:
+        for type_dict in module['data_types']:
+            if ref_msg == type_dict['type']:
+                if module['name'] != type_dict['module']:
+                    used_imports.append(
+                        {
+                            'name' : type_dict['module']
+                        }
+                    )
+                break
+    module['imports'] = used_imports
+
+def generate_code(request, response):
+    assert isinstance(request, plugin.CodeGeneratorRequest)
+
+    parser = DescriptorParser()
+
+    # First process the proto file with the imports
+    all_defined_types = []
+    all_proto_data = []
+    all_referred_messages = []
+    all_messages = []
+    for proto_file in request.proto_file:
+        native_data = parser.parse_file_descriptor(proto_file,
+                                                   type_tag_name='_type',
+                                                   fold_comments=True)
+
+        # Consolidate the defined types across imports
+        yang_data = traverse_desc(native_data)
+        for type in yang_data['defined_types']:
+            all_defined_types.append(
+                {
+                    'type' : type,
+                    'module' : yang_data['name']
+                }
+            )
+
+        all_proto_data.append(
+            {
+                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
+                    '/')[-1].replace('.proto','.yang')),
+                'module': yang_data
+            }
+        )
+
+        # Consolidate referred messages across imports
+        all_referred_messages = all_referred_messages + yang_data['referenced_messages']
+
+        # consolidate all messages
+        all_messages = all_messages + yang_data['messages']
+
+    # Set the message keys - required for List definitions (repeated label)
+    set_messages_keys(all_messages)
+    unique_referred_messages_with_keys = []
+    for m in all_messages:
+        unique_referred_messages_with_keys.append(
+                {
+                    'name': m['name'],
+                    'key': m['key']
+                }
+            )
+
+    # Create the files
+    for proto_data in all_proto_data:
+        f = response.file.add()
+        f.name = proto_data['file_name']
+        proto_data['module']['data_types'] = all_defined_types
+        proto_data['module']['referred_messages'] = all_referred_messages
+        proto_data['module']['referred_messages_with_keys'] = unique_referred_messages_with_keys
+        update_module_imports(proto_data['module'])
+        f.content = template_yang.render(module=proto_data['module'])
+
+
+def get_yang_type(field):
+    type = field['type']
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        if _type in ['enumeration', 'message', 'group']:
+            return field['type_name'].split('.')[-1]
+            # return remove_first_character_if_match(field['type_name'],
+            #                                        '.').replace('.', '-')
+        else:
+            return _type
+    else:
+        return type
+
+
+def is_base_type(type):
+    # check numeric value of the type first
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        return _type not in ['message', 'group']
+    else:
+        # proto name of the type
+        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
+                  _format == type and _format not in ['message', 'group']]
+        return len(result) > 0
+
+
+def remove_unsupported_characters(text):
+    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
+    return ''.join([i if i not in unsupported_characters else ' ' for i in
+                    text])
+
+
+def remove_first_character_if_match(str, char):
+    if str.startswith(char):
+        return str[1:]
+    return str
+
+
+YANG_TYPE_MAP = {
+    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
+    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
+    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
+    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
+    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
+    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
+    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
+    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_STRING: ('string', 'string'),
+    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
+    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
+    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
+    FieldDescriptor.TYPE_GROUP: ('group', 'group')
+}
+
+if __name__ == '__main__':
+    # Read request message from stdin
+    data = sys.stdin.read()
+
+    # Parse request
+    request = plugin.CodeGeneratorRequest()
+    request.ParseFromString(data)
+
+    # Create response
+    response = plugin.CodeGeneratorResponse()
+
+    # Generate code
+    generate_code(request, response)
+
+    # Serialise response message
+    output = response.SerializeToString()
+
+    # Write to stdout
+    sys.stdout.write(output)
+    # print is_base_type(9)
diff --git a/netconf/protoc_plugins/proto2yang_work_list.py b/netconf/protoc_plugins/proto2yang_work_list.py
new file mode 100755
index 0000000..eba8924
--- /dev/null
+++ b/netconf/protoc_plugins/proto2yang_work_list.py
@@ -0,0 +1,552 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""protoc plugin to convert a protobuf schema to a yang schema
+
+   - basic support for messages, fields, enumerations, services and methods
+
+   - yang semantic rules still need to be implemented
+
+   - to run this plugin:
+
+   $ python -m grpc.tools.protoc -I.
+   --plugin=protoc-gen-custom=./proto2yang.py --custom_out=. <proto file>.proto
+
+   - the above will produce an ietf-<proto file>.yang file in yang format
+
+   - two examples of proto that can be used in the same directory are
+   yang.proto and addressbook.proto
+
+"""
+
+import sys
+
+from jinja2 import Template
+from google.protobuf.compiler import plugin_pb2 as plugin
+from descriptor_parser import DescriptorParser
+
+from google.protobuf.descriptor import FieldDescriptor
+
+template_yang = Template("""
+module ietf-{{ module.name }} {
+
+    {% macro set_module_prefix(type) %}
+        {% for t in module.data_types %}
+            {% if t.type == type %}
+                {% if t.module != module.name %} {{ t.module }}:{{ type }};
+                {% else %} {{ type }};
+                {% endif %}
+                {% set found=True %}
+            {% endif %}
+        {% if loop.last %}
+            {% if not found %} {{ type }}; {% endif %}
+        {% endif %}
+        {% endfor %}
+    {% endmacro %}
+
+    yang-version 1.1;
+    namespace "urn:ietf:params:xml:ns:yang:ietf-{{ module.name }}";
+    prefix {{ module.name }};
+
+    {% for imp in module.imports %}
+    import ietf-{{ imp.name }} { prefix {{ imp.name }} ; }
+    {% endfor %}
+
+    organization "CORD";
+    contact
+        " Any name";
+
+    description
+        "{{ module.description }}";
+
+    revision "2016-11-15" {
+        description "Initial revision.";
+        reference "reference";
+    }
+
+    {% for enum in module.enums %}
+    typedef {{ enum.name }} {
+        type enumeration {
+        {% for v in enum.value %}
+            enum {{ v.name }} {
+                description "{{ v.description }}";
+            }
+        {% endfor %}
+        }
+        description
+            "{{ enum.description }}";
+    }
+    {% endfor %}
+
+    {% for message in module.messages recursive %}
+    {% if message.name in module.referred_messages %}
+    grouping {{ message.name }} {
+    {% else %}
+    container {{ message.name }} {
+    {% endif %}
+        description
+            "{{ message.description }}";
+        {% for field in message.fields %}
+        {% if field.type_ref %}
+        {% for dict_item in module.referred_messages_with_keys %}
+                {% if dict_item.name == field.type %}
+        list {{ field.name }} {
+            key "{{ dict_item.key }}";
+            {% if not field.repeated %}
+            max-elements 1;
+            {% endif %}
+            uses {{ set_module_prefix(field.type) }}
+            description
+                "{{ field.description }}";
+        }
+                {% endif %}
+        {% endfor %}
+        {% elif field.repeated %}
+        list {{ field.name }} {
+            key "{{ field.name }}";
+            leaf {{ field.name }} {
+                {% if field.type == "decimal64" %}
+                type {{ field.type }} {
+                   fraction-digits 5;
+                }
+                {% else %}
+                type {{ set_module_prefix(field.type) }}
+                {% endif %}
+                description
+                    "{{ field.description }}";
+            }
+            description
+                "{{ field.description }}";
+        }
+        {% else %}
+        leaf {{ field.name }} {
+            {% if field.type == "decimal64" %}
+            type {{ field.type }} {
+               fraction-digits 5;
+            }
+            {% else %}
+            type {{ set_module_prefix(field.type) }}
+            {% endif %}
+            description
+                "{{ field.description }}";
+        }
+        {% endif %}
+
+        {% endfor %}
+        {% for enum_type in message.enums %}
+        typedef {{ enum_type.name }} {
+            type enumeration {
+            {% for v in enum_type.value %}
+                enum {{ v.name }} {
+                    description "{{ v.description }}";
+                }
+            {% endfor %}
+            }
+            description
+                "{{ enum_type.description }}";
+        }
+
+        {% endfor %}
+    {% if message.messages %}
+    {{ loop (message.messages)|indent(4, false) }}
+    {% endif %}
+    }
+
+    {% endfor %}
+    {% for service in module.services %}
+    {% if service.description %}
+    /*  {{ service.description }} */
+    {% endif %}
+    {% for method in service.methods %}
+    rpc {{ service.service }}-{{ method.method }} {
+        description
+            "{{ method.description }}";
+        {% if method.input %}
+        input {
+            {% if method.input_ref %}
+            uses {{ set_module_prefix(method.input) }}
+            {% else %}
+            leaf {{ method.input }} {
+                type {{ set_module_prefix(method.input) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+        {% if method.output %}
+        output {
+            {% if method.output_ref %}
+            uses {{ set_module_prefix(method.output) }}
+            {% else %}
+            leaf {{ method.output }} {
+                type {{ set_module_prefix(method.output) }}
+            }
+            {% endif %}
+        }
+        {% endif %}
+    }
+
+    {% endfor %}
+
+    {% endfor %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+def traverse_dependencies(descriptor):
+    dependencies = []
+    proto_imports = descriptor.get('dependency', [])
+    for proto_import in proto_imports:
+        # If the import file has a directory path to it remove it as it is not
+        # allowed in Yang.  The proto extension should be removed as well
+        dependencies.append (
+            {
+            'name' : proto_import.split('/')[-1][:-len('.proto')]
+            }
+        )
+    return dependencies
+
+
+def traverse_messages(message_types, prefix, referenced_messages):
+    messages = []
+    for message_type in message_types:
+        assert message_type['_type'] == 'google.protobuf.DescriptorProto'
+
+        # full_name = prefix + '-' + message_type['name']
+        full_name = message_type['name']
+
+        # parse the fields
+        fields = traverse_fields(message_type.get('field', []), full_name,
+                                 referenced_messages)
+
+        # parse the enums
+        enums = traverse_enums(message_type.get('enum_type', []), full_name)
+
+        # parse nested messages
+        nested = message_type.get('nested_type', [])
+        nested_messages = traverse_messages(nested, full_name,
+                                            referenced_messages)
+        messages.append(
+            {
+                'name': full_name,
+                'fields': fields,
+                'enums': enums,
+                # 'extensions': extensions,
+                'messages': nested_messages,
+                'description': remove_unsupported_characters(
+                    message_type.get('_description', '')),
+                # 'extension_ranges': extension_ranges,
+                # 'oneof': oneof
+            }
+        )
+    return messages
+
+
+def traverse_fields(fields_desc, prefix, referenced_messages):
+    fields = []
+    for field in fields_desc:
+        assert field['_type'] == 'google.protobuf.FieldDescriptorProto'
+        yang_base_type = is_base_type(field['type'])
+        _type = get_yang_type(field)
+        if not yang_base_type:
+            referenced_messages.append(_type)
+
+        fields.append(
+            {
+                # 'name': prefix + '-' + field.get('name', ''),
+                'name': field.get('name', ''),
+                'label': field.get('label', ''),
+                'repeated': field['label'] == FieldDescriptor.LABEL_REPEATED,
+                'number': field.get('number', ''),
+                'options': field.get('options', ''),
+                'type_name': field.get('type_name', ''),
+                'type': _type,
+                'type_ref': not yang_base_type,
+                'description': remove_unsupported_characters(field.get(
+                    '_description', ''))
+            }
+        )
+    return fields
+
+
+def traverse_enums(enums_desc, prefix):
+    enums = []
+    for enum in enums_desc:
+        assert enum['_type'] == 'google.protobuf.EnumDescriptorProto'
+        # full_name = prefix + '-' + enum.get('name', '')
+        full_name = enum.get('name', '')
+        enums.append(
+            {
+                'name': full_name,
+                'value': enum.get('value', ''),
+                'description': remove_unsupported_characters(enum.get(
+                    '_description', ''))
+            }
+        )
+    return enums
+
+
+def traverse_services(service_desc, referenced_messages):
+    services = []
+    for service in service_desc:
+        methods = []
+        for method in service.get('method', []):
+            assert method['_type'] == 'google.protobuf.MethodDescriptorProto'
+
+            input_name = method.get('input_type')
+            input_ref = False
+            if not is_base_type(input_name):
+                input_name = remove_first_character_if_match(input_name, '.')
+                # input_name = input_name.replace(".", "-")
+                input_name = input_name.split('.')[-1]
+                referenced_messages.append(input_name)
+                input_ref = True
+
+            output_name = method.get('output_type')
+            output_ref = False
+            if not is_base_type(output_name):
+                output_name = remove_first_character_if_match(output_name, '.')
+                # output_name = output_name.replace(".", "-")
+                output_name = output_name.split('.')[-1]
+                referenced_messages.append(output_name)
+                output_ref = True
+
+            methods.append(
+                {
+                    'method': method.get('name', ''),
+                    'input': input_name,
+                    'input_ref': input_ref,
+                    'output': output_name,
+                    'output_ref': output_ref,
+                    'description': remove_unsupported_characters(method.get(
+                        '_description', '')),
+                    'server_streaming': bool(method.get('server_streaming',
+                                                        False))
+                }
+            )
+        services.append(
+            {
+                'service': service.get('name', ''),
+                'methods': methods,
+                'description': remove_unsupported_characters(service.get(
+                    '_description', '')),
+            }
+        )
+    return services
+
+
+def rchop(thestring, ending):
+    if thestring.endswith(ending):
+        return thestring[:-len(ending)]
+    return thestring
+
+
+def traverse_desc(descriptor):
+    referenced_messages = []
+    name = rchop(descriptor.get('name', ''), '.proto')
+    package = descriptor.get('package', '')
+    description = descriptor.get('_description', '')
+    imports = traverse_dependencies(descriptor)
+    messages = traverse_messages(descriptor.get('message_type', []),
+                                 package, referenced_messages)
+    enums = traverse_enums(descriptor.get('enum_type', []), package)
+    services = traverse_services(descriptor.get('service', []),
+                                 referenced_messages)
+    # extensions = _traverse_extensions(descriptors)
+    # options = _traverse_options(descriptors)
+    set_messages_keys(messages)
+    unique_referred_messages_with_keys = []
+    for message_name in list(set(referenced_messages)):
+        unique_referred_messages_with_keys.append(
+            {
+                'name': message_name,
+                'key': get_message_key(message_name, messages)
+            }
+        )
+
+    # Get a list of type definitions (messages, enums) defined in this
+    # descriptor
+    defined_types = [m['name'].split('/')[-1] for m in messages] + \
+                    [e['name'].split('/')[-1]  for e in enums]
+
+    data = {
+        'name': name.split('/')[-1],
+        'package': package,
+        'description': description,
+        'imports' : imports,
+        'messages': messages,
+        'enums': enums,
+        'services': services,
+        'defined_types' : defined_types,
+        'referenced_messages': list(set(referenced_messages)),
+        # TODO:  simplify for easier jinja2 template use
+        'referred_messages_with_keys': unique_referred_messages_with_keys,
+        # 'extensions': extensions,
+        # 'options': options
+    }
+    return data
+
+
+def set_messages_keys(messages):
+    for message in messages:
+        message['key'] = _get_message_key(message)
+        if message['messages']:
+            set_messages_keys(message['messages'])
+
+
+def _get_message_key(message):
+    # assume key is first yang base type field
+    for field in message['fields']:
+        if not field['type_ref']:
+            return field['name']
+    # no key yet - search nested messages
+    if message['messages']:
+        return get_message_key(message['name'], message['messages'])
+    else:
+        return None
+
+
+def get_message_key(message_name, messages):
+    for message in messages:
+        if message_name == message['name']:
+            return message['key']
+        if message['messages']:
+            return get_message_key(message_name, message['messages'])
+    return None
+
+
+def generate_code(request, response):
+    assert isinstance(request, plugin.CodeGeneratorRequest)
+
+    parser = DescriptorParser()
+
+    # First process the proto file with the imports
+    all_defined_types = []
+    all_proto_data = []
+    all_referred_messages = []
+    for proto_file in request.proto_file:
+        native_data = parser.parse_file_descriptor(proto_file,
+                                                   type_tag_name='_type',
+                                                   fold_comments=True)
+
+        # Consolidate the defined types across imports
+        yang_data = traverse_desc(native_data)
+        for type in yang_data['defined_types']:
+            all_defined_types.append(
+                {
+                    'type' : type,
+                    'module' : yang_data['name']
+                }
+            )
+
+        all_proto_data.append(
+            {
+                'file_name': '{}-{}'.format('ietf', proto_file.name.split(
+                    '/')[-1].replace('.proto','.yang')),
+                'module': yang_data
+            }
+        )
+
+        # Consolidate referred messages across imports
+        all_referred_messages = all_referred_messages + yang_data['referenced_messages']
+
+    # Create the files
+    for proto_data in all_proto_data:
+        f = response.file.add()
+        f.name = proto_data['file_name']
+        proto_data['module']['data_types'] = all_defined_types
+        proto_data['module']['referred_messages'] = all_referred_messages
+        f.content = template_yang.render(module=proto_data['module'])
+
+
+def get_yang_type(field):
+    type = field['type']
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        if _type in ['enumeration', 'message', 'group']:
+            return field['type_name'].split('.')[-1]
+            # return remove_first_character_if_match(field['type_name'],
+            #                                        '.').replace('.', '-')
+        else:
+            return _type
+    else:
+        return type
+
+
+def is_base_type(type):
+    # check numeric value of the type first
+    if type in YANG_TYPE_MAP.keys():
+        _type, _ = YANG_TYPE_MAP[type]
+        return _type not in ['message', 'group']
+    else:
+        # proto name of the type
+        result = [_format for (_, _format) in YANG_TYPE_MAP.values() if
+                  _format == type and _format not in ['message', 'group']]
+        return len(result) > 0
+
+
+def remove_unsupported_characters(text):
+    unsupported_characters = ["{", "}", "[", "]", "\"", "\\", "*", "/"]
+    return ''.join([i if i not in unsupported_characters else ' ' for i in
+                    text])
+
+
+def remove_first_character_if_match(str, char):
+    if str.startswith(char):
+        return str[1:]
+    return str
+
+
+YANG_TYPE_MAP = {
+    FieldDescriptor.TYPE_BOOL: ('boolean', 'boolean'),
+    FieldDescriptor.TYPE_BYTES: ('binary', 'byte'),
+    FieldDescriptor.TYPE_DOUBLE: ('decimal64', 'double'),
+    FieldDescriptor.TYPE_ENUM: ('enumeration', 'enum'),
+    FieldDescriptor.TYPE_FIXED32: ('int32', 'int64'),
+    FieldDescriptor.TYPE_FIXED64: ('int64', 'uint64'),
+    FieldDescriptor.TYPE_FLOAT: ('decimal64', 'float'),
+    FieldDescriptor.TYPE_INT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_INT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_SFIXED32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SFIXED64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_STRING: ('string', 'string'),
+    FieldDescriptor.TYPE_SINT32: ('int32', 'int32'),
+    FieldDescriptor.TYPE_SINT64: ('int64', 'int64'),
+    FieldDescriptor.TYPE_UINT32: ('uint32', 'int64'),
+    FieldDescriptor.TYPE_UINT64: ('uint64', 'uint64'),
+    FieldDescriptor.TYPE_MESSAGE: ('message', 'message'),
+    FieldDescriptor.TYPE_GROUP: ('group', 'group')
+}
+
+if __name__ == '__main__':
+    # Read request message from stdin
+    data = sys.stdin.read()
+
+    # Parse request
+    request = plugin.CodeGeneratorRequest()
+    request.ParseFromString(data)
+
+    # Create response
+    response = plugin.CodeGeneratorResponse()
+
+    # Generate code
+    generate_code(request, response)
+
+    # Serialise response message
+    output = response.SerializeToString()
+
+    # Write to stdout
+    sys.stdout.write(output)
+    # print is_base_type(9)
diff --git a/netconf/protoc_plugins/yang.proto b/netconf/protoc_plugins/yang.proto
new file mode 100644
index 0000000..718951c
--- /dev/null
+++ b/netconf/protoc_plugins/yang.proto
@@ -0,0 +1,64 @@
+syntax = "proto3";
+
+package experiment;
+
+message AsyncEvent {
+    int32 seq = 1;
+    enum EventType {
+        BIG_BANG = 0;  // just a big bang
+        SMALL_BANG = 1;  // so small bang
+        NO_BANG = 2;
+    }
+    EventType type = 2;
+    string details = 3;
+}
+
+enum SimpleEnum {
+    APPLE = 0;
+    BANANA = 1;
+    ORANGE = 2;
+}
+
+message Packet {
+    int32 source = 1;
+    bytes content = 2;
+    message InnerPacket {
+        string url = 1;
+        string title = 2;
+        repeated string snippets = 3;
+        message InnerInnerPacket {
+            string input = 1;
+            string desc = 2;
+        }
+        repeated InnerInnerPacket inner_inner_packet = 4;
+    }
+    repeated InnerPacket inner_packets = 3;
+}
+
+message Echo {
+    string msg = 1;
+    float delay = 2;
+}
+
+message testMessage {
+    oneof oneOfTest {
+        string test2 = 1;
+        int32 test3 = 2;
+    }
+}
+
+service ExperimentalService {
+
+    rpc GetEcho(Echo) returns(Echo);
+
+    // For server to send async stream to client
+    rpc ReceiveStreamedEvents(Packet)
+        returns(stream AsyncEvent);
+
+    // For server to send async packets to client
+    rpc ReceivePackets(Echo) returns(stream Packet);
+
+    // For client to send async packets to server
+    rpc SendPackets(stream Packet) returns(Echo);
+
+}
\ No newline at end of file
diff --git a/netconf/protos/Makefile b/netconf/protos/Makefile
index 2d9c069..008a531 100644
--- a/netconf/protos/Makefile
+++ b/netconf/protos/Makefile
@@ -20,14 +20,12 @@
   $(error To get started, please source the env.sh file from Voltha top level directory)
 endif
 
-# This makefile is used only to copy relevant *_pb2.py files from Voltha
-# to allow ofagent to function properly.
+default: build
 
 PB2_FILES := \
-    voltha_pb2.py \
-    openflow_13_pb2.py
+	voltha_pb2.py
 
-TARGET_PROTO_DIR := $(VOLTHA_BASE)/ofagent/protos
+TARGET_PROTO_DIR := $(VOLTHA_BASE)/netconf/protos
 SOURCE_PROTO_DIR := $(VOLTHA_BASE)/voltha/protos
 
 build: copyfiles
@@ -35,3 +33,25 @@
 copyfiles:
 	rsync -av --include '*/' --exclude='third_party/__init__.py' --include '*.py' --exclude='*' $(SOURCE_PROTO_DIR)/ $(TARGET_PROTO_DIR)
 
+
+PROTO_FILES := $(wildcard *.proto) $(wildcard third_party/google/api/*.proto)
+PROTO_PB2_FILES := $(foreach f,$(PROTO_FILES),$(subst .proto,_pb2.py,$(f)))
+PROTO_DESC_FILES := $(foreach f,$(PROTO_FILES),$(subst .proto,.desc,$(f)))
+
+PROTOC_PREFIX := /usr/local
+PROTOC_LIBDIR := $(PROTOC_PREFIX)/lib
+
+build: $(PROTO_PB2_FILES)
+
+%_pb2.py: %.proto Makefile
+	@echo "Building protocol buffer artifacts from $<"
+	env LD_LIBRARY_PATH=$(PROTOC_LIBDIR) python -m grpc.tools.protoc \
+	    -I. \
+	    -I./third_party \
+	    --python_out=. \
+	    --grpc_python_out=. \
+	    $<
+
+clean:
+	rm -f $(PROTO_PB2_FILES) $(PROTO_DESC_FILES)
+
diff --git a/netconf/protos/__init__.py b/netconf/protos/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/netconf/protos/__init__.py
diff --git a/netconf/protos/schema.proto b/netconf/protos/schema.proto
new file mode 100644
index 0000000..5114208
--- /dev/null
+++ b/netconf/protos/schema.proto
@@ -0,0 +1,34 @@
+syntax = "proto3";
+
+package schema;
+
+import "google/protobuf/empty.proto";
+
+// Contains the name and content of a *.proto file
+message ProtoFile {
+    string file_name = 1;  // name of proto file
+    string proto = 2;  // content of proto file
+    bytes descriptor = 3;  // compiled descriptor for proto (zlib compressed)
+}
+
+// Proto files and compiled descriptors for this interface
+message Schemas {
+
+    // Proto files
+    repeated ProtoFile protos = 1;
+
+    // Name of proto file to generae swagger.json from
+    string swagger_from = 2;
+
+    // Prefix of proto files for which yang files should be generated
+    string yang_from = 3;
+
+}
+
+// Schema services
+service SchemaService {
+
+    // Return active grpc schemas
+    rpc GetSchema(google.protobuf.Empty) returns (Schemas) {}
+
+}
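+
+// A rough consumer sketch (names of the generated stubs are assumed): a
+// netconf server can call SchemaService.GetSchema(google.protobuf.Empty)
+// and compile each returned ProtoFile whose file_name starts with
+// yang_from into a yang module.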
diff --git a/netconf/protos/third_party/__init__.py b/netconf/protos/third_party/__init__.py
index 6dab4e7..e53147b 100644
--- a/netconf/protos/third_party/__init__.py
+++ b/netconf/protos/third_party/__init__.py
@@ -38,7 +38,7 @@
     def load_module(self, name):
         if name in sys.modules:
             return sys.modules[name]
-        full_name = 'ofagent.protos.third_party.' + name
+        full_name = 'netconf.protos.third_party.' + name
         import_module(full_name)
         module = sys.modules[full_name]
         sys.modules[name] = module
diff --git a/netconf/protos/third_party/google/api/annotations.proto b/netconf/protos/third_party/google/api/annotations.proto
new file mode 100644
index 0000000..cbd18b8
--- /dev/null
+++ b/netconf/protos/third_party/google/api/annotations.proto
@@ -0,0 +1,29 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/http.proto";
+import "google/protobuf/descriptor.proto";
+
+option java_multiple_files = true;
+option java_outer_classname = "AnnotationsProto";
+option java_package = "com.google.api";
+
+extend google.protobuf.MethodOptions {
+  // See `HttpRule`.
+  HttpRule http = 72295728;
+}
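
With a service compiled against these annotations, the http option can be read
back through the standard protobuf extension API. A sketch; voltha_pb2 and its
service contents are assumptions here:

    from google.api import annotations_pb2
    import voltha_pb2  # assumed built with these vendored annotations on the path

    for service in voltha_pb2.DESCRIPTOR.services_by_name.values():
        for method in service.methods:
            opts = method.GetOptions()
            if opts.HasExtension(annotations_pb2.http):
                rule = opts.Extensions[annotations_pb2.http]
                print(method.full_name, rule.WhichOneof('pattern'))
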
diff --git a/netconf/protos/third_party/google/api/http.proto b/netconf/protos/third_party/google/api/http.proto
new file mode 100644
index 0000000..ce07aa1
--- /dev/null
+++ b/netconf/protos/third_party/google/api/http.proto
@@ -0,0 +1,127 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option java_multiple_files = true;
+option java_outer_classname = "HttpProto";
+option java_package = "com.google.api";
+
+
+// `HttpRule` defines the mapping of an RPC method to one or more HTTP REST API
+// methods. The mapping determines what portions of the request message are
+// populated from the path, query parameters, or body of the HTTP request.  The
+// mapping is typically specified as a `google.api.http` annotation; see
+// "google/api/annotations.proto" for details.
+//
+// The mapping consists of a mandatory field specifying a path template and an
+// optional `body` field specifying what data is represented in the HTTP request
+// body. The field name for the path indicates the HTTP method. Example:
+//
+// ```
+// package google.storage.v2;
+//
+// import "google/api/annotations.proto";
+//
+// service Storage {
+//   rpc CreateObject(CreateObjectRequest) returns (Object) {
+//     option (google.api.http) {
+//       post: "/v2/{bucket_name=buckets/*}/objects"
+//       body: "object"
+//     };
+//   };
+// }
+// ```
+//
+// Here `bucket_name` and `object` bind to fields of the request message
+// `CreateObjectRequest`.
+//
+// The rules for mapping HTTP path, query parameters, and body fields
+// to the request message are as follows:
+//
+// 1. The `body` field specifies either `*` or a field path, or is
+//    omitted. If omitted, it assumes there is no HTTP body.
+// 2. Leaf fields (recursive expansion of nested messages in the
+//    request) can be classified into three types:
+//     (a) Matched in the URL template.
+//     (b) Covered by body (if body is `*`, everything except (a) fields;
+//         else everything under the body field)
+//     (c) All other fields.
+// 3. URL query parameters found in the HTTP request are mapped to (c) fields.
+// 4. Any body sent with an HTTP request can contain only (b) fields.
+//
+// The syntax of the path template is as follows:
+//
+//     Template = "/" Segments [ Verb ] ;
+//     Segments = Segment { "/" Segment } ;
+//     Segment  = "*" | "**" | LITERAL | Variable ;
+//     Variable = "{" FieldPath [ "=" Segments ] "}" ;
+//     FieldPath = IDENT { "." IDENT } ;
+//     Verb     = ":" LITERAL ;
+//
+// `*` matches a single path component, `**` zero or more path components, and
+// `LITERAL` a constant.  A `Variable` can match an entire path as specified
+// again by a template; this nested template must not contain further variables.
+// If no template is given with a variable, it matches a single path component.
+// The notation `{var}` is henceforth equivalent to `{var=*}`.
+//
+// Use CustomHttpPattern to specify any HTTP method that is not included in the
+// pattern field, such as HEAD, or "*" to leave the HTTP method unspecified for
+// a given URL path rule. The wild-card rule is useful for services that provide
+// content to Web (HTML) clients.
+message HttpRule {
+
+  // Determines the URL pattern matched by this rule. This pattern can be
+  // used with any of the {get|put|post|delete|patch} methods. A custom method
+  // can be defined using the 'custom' field.
+  oneof pattern {
+    // Used for listing and getting information about resources.
+    string get = 2;
+
+    // Used for updating a resource.
+    string put = 3;
+
+    // Used for creating a resource.
+    string post = 4;
+
+    // Used for deleting a resource.
+    string delete = 5;
+
+    // Used for updating a resource.
+    string patch = 6;
+
+    // Custom pattern is used for defining custom verbs.
+    CustomHttpPattern custom = 8;
+  }
+
+  // The name of the request field whose value is mapped to the HTTP body, or
+  // `*` for mapping all fields not captured by the path pattern to the HTTP
+  // body.
+  string body = 7;
+
+  // Additional HTTP bindings for the selector. Nested bindings must not
+  // specify a selector and must not contain additional bindings.
+  repeated HttpRule additional_bindings = 11;
+}
+
+// A custom pattern is used for defining a custom HTTP verb.
+message CustomHttpPattern {
+  // The name of this custom HTTP verb.
+  string kind = 1;
+
+  // The path matched by this custom verb.
+  string path = 2;
+}
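
Mirroring the Storage example in the comment above, an HttpRule can be built
directly once this file is compiled (module produced by the protoc rule earlier
in this change):

    from google.api import http_pb2

    rule = http_pb2.HttpRule(
        post='/v2/{bucket_name=buckets/*}/objects',
        body='object',
    )
    assert rule.WhichOneof('pattern') == 'post'
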
diff --git a/netconf/session/nc_protocol_handler.py b/netconf/session/nc_protocol_handler.py
index e0a4baf..a08ea37 100644
--- a/netconf/session/nc_protocol_handler.py
+++ b/netconf/session/nc_protocol_handler.py
@@ -33,11 +33,11 @@
 
 
 class NetconfProtocolHandler:
-    def __init__(self, nc_server, nc_conn, session, grpc_stub):
+    def __init__(self, nc_server, nc_conn, session, grpc_client):
         self.started = True
         self.conn = nc_conn
         self.nc_server = nc_server
-        self.grpc_stub = grpc_stub
+        self.grpc_client = grpc_client
         self.new_framing = False
         self.capabilities = Capabilities()
         self.session = session
@@ -171,6 +171,7 @@
                 # Get a rpc handler
                 rpc_handler = rpc_factory.get_rpc_handler(rpc,
                                                           msg,
+                                                          self.grpc_client,
                                                           self.session)
                 if rpc_handler:
                     # set the parameters for this handler
@@ -206,21 +207,6 @@
                 error = ncerror.ServerException(rpc, ex)
                 self.send_message(error.get_reply_msg())
 
-    # @inlineCallbacks
-    # def invoke_method(self, rpcname, rpc, params):
-    #     try:
-    #         # Handle any namespaces or prefixes in the tag, other than
-    #         # "nc" which was removed above. Of course, this does not handle
-    #         # namespace collisions, but that seems reasonable for now.
-    #         rpcname = rpcname.rpartition("}")[-1]
-    #         method_name = "rpc_" + rpcname.replace('-', '_')
-    #         method = getattr(self.methods, method_name,
-    #                          self._rpc_not_implemented)
-    #         log.info("invoking-method", method=method_name)
-    #         reply = yield method(self, rpc, *params)
-    #         returnValue(reply)
-    #     except NotImplementedError:
-    #         raise ncerror.NotImpl(rpc)
 
     def stop(self, reason):
         if not self.exiting:
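
For illustration, a handler returned by rpc_factory could hold the injected
grpc_client and delegate to voltha. The class and the helper it calls are
hypothetical; only the constructor arguments match the factory call above:

    from twisted.internet.defer import inlineCallbacks, returnValue

    class GetVoltha(object):
        # Matches rpc_factory.get_rpc_handler(rpc, msg, grpc_client, session)
        def __init__(self, rpc, msg, grpc_client, session):
            self.rpc = rpc
            self.msg = msg
            self.grpc_client = grpc_client
            self.session = session

        @inlineCallbacks
        def execute(self):
            # Hypothetical helper on grpc_client: fetch voltha state over gRPC
            # and hand it back for XML encoding into the rpc-reply.
            res = yield self.grpc_client.get_voltha_instance()
            returnValue(res)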