[VOL-901] As an operator, I should be able to view logs from all VOLTHA components in a single stream

Change-Id: Iaf99c360895e16cbf299418861f2990f06ab39cd
diff --git a/voltha b/voltha
index 8ced0a8..eb02a2a 100755
--- a/voltha
+++ b/voltha
@@ -72,6 +72,7 @@
 ADAPTER_NS=${ADAPTER_NS:-voltha}
 WITH_TIMINGS=${WITH_TIMINGS:-no}
 WITH_BBSIM=${WITH_BBSIM:-no}
+WITH_EFK=${WITH_EFK:-no}
 WITH_RADIUS=${WITH_RADIUS:-no}
 WITH_EAPOL=${WITH_EAPOL:-yes}
 WITH_DHCP=${WITH_DHCP:-yes}
@@ -106,6 +107,12 @@
 VOLTHA_CHART_VERSION=${VOLTHA_CHART_VERSION:-latest}
 VOLTHA_BBSIM_CHART=${VOLTHA_BBSIM_CHART:-onf/bbsim}
 VOLTHA_BBSIM_CHART_VERSION=${VOLTHA_BBSIM_CHART_VERSION:-latest}
+ELASTICSEARCH_CHART=${ELASTICSEARCH_CHART:-elastic/elasticsearch}
+ELASTICSEARCH_CHART_VERSION=${ELASTICSEARCH_CHART_VERSION:-latest}
+KIBANA_CHART=${KIBANA_CHART:-elastic/kibana}
+KIBANA_CHART_VERSION=${KIBANA_CHART_VERSION:-latest}
+FLUENTD_ELASTICSEARCH_CHART=${FLUENTD_ELASTICSEARCH_CHART:-kiwigrid/fluentd-elasticsearch}
+FLUENTD_ELASTICSEARCH_CHART_VERSION=${FLUENTD_ELASTICSEARCH_CHART_VERSION:-latest}
 VOLTHA_ADAPTER_SIM_CHART=${VOLTHA_ADAPTER_SIM_CHART:-onf/voltha-adapter-simulated}
 VOLTHA_ADAPTER_SIM_CHART_VERSION=${VOLTHA_ADAPTER_SIM_CHART_VERSION:-latest}
 VOLTHA_ADAPTER_OPEN_OLT_CHART=${VOLTHA_ADAPTER_OPEN_OLT_CHART:-onf/voltha-adapter-openolt}
@@ -259,6 +266,7 @@
     ENABLE_ONOS_EXTRANEOUS_RULES \
     WITH_TIMINGS \
     WITH_BBSIM \
+    WITH_EFK \
     WITH_EAPOL \
     WITH_DHCP \
     WITH_IGMP \
@@ -304,6 +312,12 @@
     BBSIM_SADIS_SERVER_CHART \
     BBSIM_SADIS_SERVER_CHART_VERSION \
     NUM_OF_BBSIM \
+    ELASTICSEARCH_CHART \
+    ELASTICSEARCH_CHART_VERSION \
+    KIBANA_CHART \
+    KIBANA_CHART_VERSION \
+    FLUENTD_ELASTICSEARCH_CHART \
+    FLUENTD_ELASTICSEARCH_CHART_VERSION \
     NUM_OF_OPENONU \
     VOLTHA_ADAPTER_SIM_CHART \
     VOLTHA_ADAPTER_SIM_CHART_VERSION \
@@ -327,6 +341,8 @@
     VOLTHA_API_PORT \
     VOLTHA_SSH_PORT \
     VOLTHA_ETCD_PORT \
+    ELASTICSEARCH_PORT \
+    KIBANA_PORT \
     VOLTHA_KAFKA_PORT \
     VOLTHA_PPROF_PORT \
     OPENOLT_PPROF_PORT \
@@ -441,6 +457,8 @@
     VOLTHA_PPROF_PORT=${VOLTHA_PPROF_PORT:-$((60 + DELTA))60}
     OPENOLT_PPROF_PORT=${OPENOLT_PPROF_PORT:-$((60 + DELTA))61}
     OFAGENT_PPROF_PORT=${OFAGENT_PPROF_PORT:-$((60 + DELTA))62}
+    ELASTICSEARCH_PORT=${ELASTICSEARCH_PORT:-$((92 + DELTA))00}
+    KIBANA_PORT=${KIBANA_PORT:-$((56 + DELTA))01}
 else
     VALUES="$(echo "$HAVE" | sed -e 's/\s//g' | cut -d= -f2)"
     ONOS_API_PORT=${ONOS_API_PORT:-$(echo "$VALUES" | cut -d, -f1)}
@@ -452,6 +470,8 @@
     VOLTHA_PPROF_PORT=${VOLTHA_PPROF_PORT:-$(echo "$VALUES" | cut -d, -f7)}
     OPENOLT_PPROF_PORT=${OPENOLT_PPROF_PORT:-$(echo "$VALUES" | cut -d, -f8)}
     OFAGENT_PPROF_PORT=${OFAGENT_PPROF_PORT:-$(echo "$VALUES" | cut -d, -f9)}
+    ELASTICSEARCH_PORT=${ELASTICSEARCH_PORT:-$(echo "$VALUES" | cut -d, -f10)}
+    KIBANA_PORT=${KIBANA_PORT:-$(echo "$VALUES" | cut -d, -f11)}
 
     # Some ports were added after the .voltha/ports files was created.
     # Calculate the original DELTA from the VOLTHA_SSH_PORT so that it can
@@ -463,19 +483,27 @@
     fi
 
     if [ -z "$VOLTHA_PPROF_PORT" ]; then
-      VOLTHA_PPROF_PORT=${VOLTHA_PPROF_PORT:-$((60 + DELTA))60}
+        VOLTHA_PPROF_PORT=${VOLTHA_PPROF_PORT:-$((60 + DELTA))60}
     fi
     if [ -z "$OPENOLT_PPROF_PORT" ]; then
-      OPENOLT_PPROF_PORT=${OPENOLT_PPROF_PORT:-$((60 + DELTA))61}
+        OPENOLT_PPROF_PORT=${OPENOLT_PPROF_PORT:-$((60 + DELTA))61}
     fi
     if [ -z "$OFAGENT_PPROF_PORT" ]; then
-      OFAGENT_PPROF_PORT=${OFAGENT_PPROF_PORT:-$((60 + DELTA))62}
+        OFAGENT_PPROF_PORT=${OFAGENT_PPROF_PORT:-$((60 + DELTA))62}
+    fi
+
+    if [ -z "$ELASTICSEARCH_PORT" ]; then
+        ELASTICSEARCH_PORT=${ELASTICSEARCH_PORT:-$((92 + DELTA))00}
+    fi
+
+    if [ -z "$KIBANA_PORT" ]; then
+        KIBANA_PORT=${KIBANA_PORT:-$((56 + DELTA))01}
     fi
 fi
 
 PORTTMP="$(mktemp -u)"
 grep -v "$NAME" .voltha/ports > "$PORTTMP"
-echo "$NAME=$ONOS_API_PORT,$ONOS_SSH_PORT,$VOLTHA_API_PORT,$VOLTHA_SSH_PORT,$VOLTHA_ETCD_PORT,$VOLTHA_KAFKA_PORT,$VOLTHA_PPROF_PORT,$OPENOLT_PPROF_PORT,$OFAGENT_PPROF_PORT" >> "$PORTTMP"
+echo "$NAME=$ONOS_API_PORT,$ONOS_SSH_PORT,$VOLTHA_API_PORT,$VOLTHA_SSH_PORT,$VOLTHA_ETCD_PORT,$VOLTHA_KAFKA_PORT,$VOLTHA_PPROF_PORT,$OPENOLT_PPROF_PORT,$OFAGENT_PPROF_PORT,$ELASTICSEARCH_PORT,$KIBANA_PORT" >> "$PORTTMP"
 cp "$PORTTMP" .voltha/ports
 rm -f "$PORTTMP"
 
@@ -615,12 +643,26 @@
     exit 1
 fi
 
-push_onos_config() {
-    local TYPE MSG RESOURCE DATA CMD_ECHO CMD_OUTPUT SC_OUTPUT WAIT_START NOW
-    TYPE=$1
-    MSG=$2
-    RESOURCE=$3
-    DATA=$4
+do_curl() {
+    local DATA_ARGS CREDS CMD_ECHO CMD_OUTPUT SC_OUTPUT WAIT_START INDENTA EXTRAA NOW
+    local INDENT=$1
+    local OP=$2
+    local USER=$3
+    local PASS=$4
+    local URL=$5
+    local DATA_TYPE=$6
+    local DATA=$7
+    local MSG=$8
+    local VALID_RESULTS=$9
+    local EXTRA=${10}
+    local ICON=${11}
+
+    # Turn into an array for passing
+    INDENTA=()
+    if [ -n "$INDENT" ]; then
+        INDENTA=("$INDENT")
+    fi
+    IFS=' ' read -r -a EXTRAA <<< "$EXTRA"
 
     # Thanks to the latest version of ONOS using the return code 207 this gets a
     # whole lot nastier. Can't thank them enough for doing this. So in order to
@@ -632,22 +674,30 @@
     SC_OUTPUT="$(mktemp -u)"
     WAIT_START="$(date +%s)"
 
-    bspin - "$MSG $GEAR"
+    CREDS=()
+    if [ -n "$USER" ]; then
+        CREDS=("--user" "$USER:$PASS")
+    fi
+
+    DATA_ARGS=()
+    if [ -n "$DATA" ]; then
+        if [ "$DATA_TYPE" == "file" ]; then
+            DATA_ARGS=("--data" "@$DATA")
+        elif [ "$DATA_TYPE" == "json" ]; then
+            DATA_ARGS=("--data" "$DATA")
+        fi
+    fi
+
+    bspin "${INDENTA[@]}" "$MSG" "$ICON"
     while true; do
         NOW="$(date +%s)"
         if [ "$((NOW - WAIT_START))" -gt "$TIMEOUT_SECONDS" ]; then
-            espin - "$THEX"
+            espin "${INDENTA[@]}" "$THEX"
             rm -f "$CMD_ECHO" "$CMD_OUTPUT" "$SC_OUTPUT"
-            doTimeout "waiting for ONOS config push on $RESOURCE"
+            doTimeout "$MSG"
         fi
-        if [ "$TYPE" == "file" ]; then
-            (set -x; curl --fail -sSL --user karaf:karaf -w "%{http_code}" -o "$CMD_OUTPUT" -X POST -H Content-Type:application/json "http://$_ONOS_API_EP/onos/v1/$RESOURCE" --data "@$DATA" >"$SC_OUTPUT" 2>/dev/null) >>"$CMD_ECHO" 2>&1
-            RESULT=$?
-        fi
-        if [ "$TYPE" == "json" ]; then
-            (set -x; curl --fail -sSL --user karaf:karaf -w "%{http_code}" -o "$CMD_OUTPUT" -X POST -H Content-Type:application/json "http://$_ONOS_API_EP/onos/v1/$RESOURCE" --data "$DATA" >"$SC_OUTPUT" 2>/dev/null) >>"$CMD_ECHO" 2>&1
-            RESULT=$?
-        fi
+        (set -x; curl -sSL "${CREDS[@]}" -w "%{http_code}" -o "$CMD_OUTPUT" -X "$OP" "${EXTRAA[@]}" "$URL" "${DATA_ARGS[@]}" >"$SC_OUTPUT" 2>/dev/null) >>"$CMD_ECHO" 2>&1
+        RESULT=$?
         # Dump everything to the log
         cat "$CMD_ECHO" >> "$LOG"
         test -r "$CMD_OUTPUT" && cat "$CMD_OUTPUT" >> "$LOG"
@@ -657,13 +707,22 @@
 
         # clean up temp files
         rm -f "$CMD_ECHO" "$CMD_OUTPUT" "$SC_OUTPUT"
-        if [ "$RESULT" -eq 0 ] && [ "$SC" == "200" ]; then
+        if [ "$RESULT" -eq 0 ] && is_in "$SC" "$VALID_RESULTS"; then
             break
         fi
         sleep 1
-        sspin -
+        sspin "${INDENTA[@]}"
     done
-    espin - "$VERIFIED"
+    espin "${INDENTA[@]}" "$VERIFIED"
+}
+
+push_onos_config() {
+    local DATA_TYPE=$1
+    local MSG=$2
+    local RESOURCE=$3
+    local DATA=$4
+
+    do_curl "-" "POST" "karaf" "karaf" "http://$_ONOS_API_EP/onos/v1/$RESOURCE" "$DATA_TYPE" "$DATA" "$MSG" "200" "--fail -H Content-Type:application/json" "$GEAR"
 }
 
 check_onos_app_active() {
@@ -957,6 +1016,9 @@
         if [ "$WITH_BBSIM" == "yes" ]; then
             EXPECT+=" bbsim"
         fi
+        if [ "$WITH_EFK" == "yes" ]; then
+            EXPECT+=" elasticsearch kibana fluentd"
+        fi
         if [ "$WITH_OPEN_ADAPTERS" == "yes" ]; then
             EXPECT+=" open-olt open-onu"
         fi
@@ -984,12 +1046,16 @@
             INFRA_PODS=
             ADAPT_PODS=
             SIM_PODS=
+            EFK_PODS=
             if [ "$WITH_RADIUS" == "yes" ]; then
                 PODS+=" radius.*"
             fi
             if [ "$WITH_BBSIM" == "yes" ]; then
                 SIM_PODS+=" bbsim.*"
             fi
+            if [ "$WITH_EFK" == "yes" ]; then
+                EFK_PODS+=" kibana-* elasticsearch-* fluentd-*"
+            fi
             if [ "$WITH_OPEN_ADAPTERS" ] || [ "$WITH_SIM_ADAPTERS" ]; then
                 ADAPT_PODS+=" adapter-*"
             fi
@@ -1011,6 +1077,9 @@
             if [ -n "$ADAPT_PODS" ]; then
                 wait_for_pods "$ADAPTER_NS" 0 "not" "Waiting for adapter  PODs to terminate" "$NO_LABEL" "$ADAPT_PODS"
             fi
+            if [ -n "$EFK_PODS" ]; then
+                wait_for_pods "$INFRA_NS" 0 "not" "Waiting for EFK PODs to terminate" "$NO_LABEL" "$EFK_PODS"
+            fi
             wait_for_pods "$VOLTHA_NS" 0 "not" "Waiting for VOLTHA PODs to terminate" "$NO_LABEL" "$PODS"
         fi
     fi
@@ -1084,7 +1153,7 @@
 LOG="install-$NAME.log"
 PFLOG="port-forward-$NAME.log"
 date > "$LOG"
-echo "PORTS=$ONOS_API_PORT,$ONOS_SSH_PORT,$VOLTHA_API_PORT,$VOLTHA_SSH_PORT,$VOLTHA_ETCD_PORT" >> "$LOG"
+echo "PORTS=$ONOS_API_PORT,$ONOS_SSH_PORT,$VOLTHA_API_PORT,$VOLTHA_SSH_PORT,$VOLTHA_ETCD_PORT,$ELASTICSEARCH_PORT,$KIBANA_PORT" >> "$LOG"
 
 # Output install options to log
 echo "OPTIONS" >> "$LOG"
@@ -1501,6 +1570,12 @@
     bspin - "Add Custom BBSIM SADIS Server repository to Helm"
     (set -x; helm repo add bbsim-sadis https://ciena.github.io/bbsim-sadis-server/charts >>"$LOG" 2>&1) >>"$LOG" 2>&1
     espin - "$VERIFIED"
+    bspin - "Add Elastic repository to Helm"
+    (set -x; helm repo add elastic https://helm.elastic.co  >>"$LOG" 2>&1) >>"$LOG" 2>&1
+    espin - "$VERIFIED"
+    bspin - "Add Fluentd-ElasticSearch repository to Helm"
+    (set -x; helm repo add kiwigrid https://kiwigrid.github.io >>"$LOG" 2>&1) >>"$LOG" 2>&1
+    espin - "$VERIFIED"
     bspin - "Update Helm repository cache"
     (set -x; helm repo update >>"$LOG" 2>&1) >>"$LOG" 2>&1
     espin - "$VERIFIED"
@@ -1548,6 +1623,9 @@
 RESOLVED_ONOS_CHART_VERSION=$(resolve_chart_version "$ONOS_CHART" "$ONOS_CHART_VERSION")
 RESOLVED_ONOS_CLASSIC_CHART_VERSION=$(resolve_chart_version "$ONOS_CLASSIC_CHART" "$ONOS_CLASSIC_CHART_VERSION")
 RESOLVED_KAFKA_CHART_VERSION=$(resolve_chart_version "$KAFKA_CHART" "$KAFKA_CHART_VERSION")
+RESOLVED_ELASTICSEARCH_CHART_VERSION=$(resolve_chart_version "$ELASTICSEARCH_CHART" "$ELASTICSEARCH_CHART_VERSION")
+RESOLVED_KIBANA_CHART_VERSION=$(resolve_chart_version "$KIBANA_CHART" "$KIBANA_CHART_VERSION")
+RESOLVED_FLUENTD_ELASTICSEARCH_CHART_VERSION=$(resolve_chart_version "$FLUENTD_ELASTICSEARCH_CHART" "$FLUENTD_ELASTICSEARCH_CHART_VERSION")
 cat <<EOV >>"$LOG"
 Resolved helm charts and versions:
     $VOLTHA_CHART:$RESOLVED_VOLTHA_CHART_VERSION
@@ -1558,6 +1636,9 @@
     $ONOS_CHART:$RESOLVED_ONOS_CHART_VERSION
     $ONOS_CLASSIC_CHART:$RESOLVED_ONOS_CLASSIC_CHART_VERSION
     $KAFKA_CHART:$RESOLVED_KAFKA_CHART_VERSION
+    $ELASTICSEARCH_CHART:$RESOLVED_ELASTICSEARCH_CHART_VERSION
+    $KIBANA_CHART:$RESOLVED_KIBANA_CHART_VERSION
+    $FLUENTD_ELASTICSEARCH_CHART:$RESOLVED_FLUENTD_ELASTICSEARCH_CHART_VERSION
 EOV
 
 STIME="$(date +%s)"
@@ -2142,6 +2223,41 @@
     fi
 fi
 
+if [ "$WITH_EFK" == "yes" ]; then
+    STIME="$(date +%s)"
+    echo -e "Verify EFK $PLUG"
+    bspin - "Verify EFK Installed"
+    if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^elasticsearch\$" | wc -l)" -ne 1 ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^kibana\$" | wc -l)" -ne 1 ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^fluentd\$" | wc -l)" -ne 1 ]; then
+          espin - "$NOT_VERIFIED"
+          if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^elasticsearch\$" | wc -l)" -ne 1 ]; then
+              helm_install - "$INFRA_NS" elasticsearch "$ELASTICSEARCH_CHART" "$ELASTICSEARCH_CHART_VERSION" elasticsearch "$_HELM_DESC elasticsearch"
+          fi
+          if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^kibana\$" | wc -l)" -ne 1 ]; then
+              helm_install - "$INFRA_NS" kibana  "$KIBANA_CHART" "$KIBANA_CHART_VERSION" kibana "$_HELM_DESC kibana"
+          fi
+          if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$INFRA_NS" "^fluentd\$" | wc -l)" -ne 1 ]; then
+              helm_install - "$INFRA_NS" fluentd  "$FLUENTD_ELASTICSEARCH_CHART" "$FLUENTD_ELASTICSEARCH_CHART_VERSION" fluentd-elasticsearch "$_HELM_DESC fluentd-elasticsearch"
+          fi
+    else
+            espin - "$VERIFIED"
+    fi
+    EXPECT=2 # for elastic and kibana
+    if [ "$TYPE" == "minimal" ]; then
+        EXPECT=$((EXPECT + 2)) # for fluentd on 2 worker nodes
+    else
+        EXPECT=$((EXPECT + 3)) # for fluentd on 3 worker nodes
+    fi
+    if [ "$SCHEDULE_ON_CONTROL_NODES" == "yes" ]; then
+        EXPECT=$((EXPECT + 1)) # for fluentd on the control plane node
+    fi
+
+    wait_for_pods - "$INFRA_NS" "$EXPECT" "includes" "Waiting for EFK to start" "$NO_LABEL" "fluentd-* elasticsearch-* kibana-*"
+    if [ "$WITH_TIMINGS" == "yes" ]; then
+        NOW="$(date +%s)"
+        printtime $((NOW - STIME))
+    fi
+fi
+
 if [ "$CONFIG_SADIS" == "external" ]; then
     STIME="$(date +%s)"
     echo -e "Verify BBSIM SADIS Service $PLUG"
@@ -2150,7 +2266,7 @@
     (set -x; kubectl -n "$BBSIM_NS" create configmap kube-config "--from-file=kube_config=$KUBECONFIG" >>"$LOG" 2>&1) >>"$LOG" 2>&1
     espin - "$VERIFIED"
     bspin - "Verify BBSIM SADIS Service Installed"
-    if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$BBSIM_NS" "^bbsim-sadis-servier\$" | wc -l)" -ne 1 ]; then
+    if [ "$HELM_USE_UPGRADE" == "yes" ] || [ "$(helm list --deployed --short --namespace "$BBSIM_NS" "^bbsim-sadis-server\$" | wc -l)" -ne 1 ]; then
         espin - "$NOT_VERIFIED"
         helm_install - "$BBSIM_NS" bbsim-sadis-server "$BBSIM_SADIS_SERVER_CHART" "$BBSIM_SADIS_SERVER_CHART_VERSION" "+sadis" "$_HELM_DESC BBSIM SADIS Server"
     else
@@ -2203,6 +2319,14 @@
         port_forward "$INFRA_NS" kafka "$VOLTHA_KAFKA_PORT:9092"
         espin - "$VERIFIED"
     fi
+    if [ "$WITH_EFK" == "yes" ]; then
+        bspin - "Forward EFK port $FORWARD"
+        kill_port_forward elasticsearch-master
+        kill_port_forward kibana-kibana
+        port_forward "$INFRA_NS" elasticsearch-master "$ELASTICSEARCH_PORT:9200"
+        port_forward "$INFRA_NS" kibana-kibana "$KIBANA_PORT:5601"
+        espin - "$VERIFIED"
+    fi
     if [ "$WITH_PPROF" == "yes" ]; then
         VOLTHA_OPENOLT_PREFIX=
         if kubectl get -n "$ADAPTER_NS" "svc/open-olt-adapter-open-olt-profiler" >/dev/null 2>&1; then
@@ -2240,6 +2364,11 @@
     espin "$VERIFIED"
 fi
 
+if [ "$WITH_EFK" == "yes" ]; then
+    do_curl "" "POST" "" "" "http://localhost:$KIBANA_PORT/api/saved_objects/index-pattern/logst*" "json" '{"attributes":{"title":"logst*","timeFieldName":"@timestamp"}}' "Verify logging index in EFK" "409,200" "-H Content-type:application/json -H kbn-xsrf:true" "$GEAR"
+
+fi
+
 if [ "$WITH_CHAOS" == "yes" ]; then
     STIME="$(date +%s)"
     echo -e "Verify kube-monkey $LOCK"