Updated dashd to automatically create a "Voltha Stats" data source in
Grafana to simplify use of Grafana as a performance management graphing
tool. Users of dashd no longer need to add the data source manually.
Also added a compose file that uses fixed ports, intended mainly for
development.
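
For reference, the manual step this removes amounted to a single POST to
Grafana's data source API. A minimal sketch of the equivalent request,
mirroring the payload used below (the base URL is hypothetical; the real
one depends on how Grafana is exposed in the deployment):

    import requests

    # Hypothetical Grafana API endpoint; substitute your deployment's.
    grafana_url = "http://localhost:8882/api"
    r = requests.post(grafana_url + "/datasources",
                      data={"name": "Voltha Stats", "type": "graphite",
                            "access": "proxy", "url": "http://localhost:81"})
    print(r.status_code, r.text)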

Change-Id: I07927f27608c0b6601f266d2bac138844840e8f9
diff --git a/dashd/dashd_impl.py b/dashd/dashd_impl.py
index 2be471a..6d1ef2f 100755
--- a/dashd/dashd_impl.py
+++ b/dashd/dashd_impl.py
@@ -79,6 +79,7 @@
 import json
 import re
 import sys
+import time
 from dashd.dash_template import DashTemplate
 
 log = get_logger()
@@ -97,9 +98,17 @@
         self.topic = topic
         self.dash_template = DashTemplate(grafana_url)
         self.grafana_url = grafana_url
-        self.kafka_endpoint = get_endpoint_from_consul(consul_endpoint,
-                                                       'kafka')
-        # print('kafka endpoint: ', self.kafka_endpoint)
+        self.kafka_endpoint = None
+        self.consul_endpoint = consul_endpoint
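+        # Poll until consul is reachable; it may not be up when dashd starts.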
+        while True:
+            try:
+                self.kafka_endpoint = get_endpoint_from_consul(
+                    self.consul_endpoint, 'kafka')
+                break
+            except Exception:
+                log.error("unable-to-communicate-with-consul")
+            time.sleep(10)
         self.on_start_callback = None
 
         self._client = KafkaClient(self.kafka_endpoint)
@@ -118,12 +126,15 @@
         try:
             while not partitions:
                 yield self._client.load_metadata_for_topics(self.topic)
                 e = self._client.metadata_error_for_topic(self.topic)
                 if e:
                     log.warning('no-metadata-for-topic', error=e,
                                 topic=self.topic)
                 else:
                     partitions = self._client.topic_partitions[self.topic]
+                    break
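+                # Topic metadata not available yet; wait before retrying.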
+                time.sleep(20)
         except KafkaUnavailableError:
             log.error("unable-to-communicate-with-Kafka-brokers")
             self.stop()
@@ -147,7 +158,32 @@
         # they'll be deleted. If they are valid then they'll persist.
         #print("Starting main loop")
         try:
-            r = requests.get(self.grafana_url + "/search?")
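+            # Wait until Grafana's HTTP API is responsive before proceeding.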
+            while True:
+                r = requests.get(self.grafana_url + "/datasources")
+                if r.status_code == requests.codes.ok:
+                    break
+                else:
+                    time.sleep(10)
+            j = r.json()
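+            # Add the "Voltha Stats" data source only if it's not present.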
+            data_source = False
+            for i in j:
+                if i["name"] == "Voltha Stats":
+                    data_source = True
+                    break
+            if not data_source:
+                r = requests.post(self.grafana_url + "/datasources",
+                                  data={"name": "Voltha Stats", "type": "graphite",
+                                        "access": "proxy", "url": "http://localhost:81"})
+                log.info('data-source-added', status=r.status_code, text=r.text)
+
+            while True:
+                r = requests.get(self.grafana_url + "/search?")
+                if r.status_code == requests.codes.ok:
+                    break
+                else:
+                    time.sleep(10)
             j = r.json()
             for i in j:
                 # Look for dashboards that have a title of *olt.[[:hexidgit:]].