Fix logging to Kafka on ONOS 2.2

Logging to Kafka requires ONOS built with Karaf 4.2.8 or newer, which
supports the log4j2 KafkaAppender.
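
For illustration, with the chart's default values (kafka_logging.enabled,
brokers ['cord-kafka:9092']) the Kafka appender section of the rendered
org.ops4j.pax.logging.cfg comes out roughly as:

  log4j2.appender.kafka.type = Kafka
  log4j2.appender.kafka.property.name = bootstrap.servers
  log4j2.appender.kafka.property.value = cord-kafka:9092
  log4j2.appender.kafka.topic = onos.log

The broker list is taken from kafka_logging.brokers in values.yaml; setting
kafka_logging.enabled to false skips mounting the custom logging config
entirely.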

Change-Id: I6f53329dcf367c93f7913d363c3048e80c2c5392
diff --git a/onos/Chart.yaml b/onos/Chart.yaml
index f1521c5..cfa1d3f 100644
--- a/onos/Chart.yaml
+++ b/onos/Chart.yaml
@@ -16,6 +16,6 @@
 apiVersion: v1
 name: onos
 description: Open Network Operating System
-version: 2.0.1
+version: 3.0.0
 icon: https://guide.opencord.org/logos/onos.svg
 appVersion: 2.2.1
diff --git a/onos/templates/_helpers.tpl b/onos/templates/_helpers.tpl
index 6d82564..43b84a5 100644
--- a/onos/templates/_helpers.tpl
+++ b/onos/templates/_helpers.tpl
@@ -66,46 +66,97 @@
 #
 ################################################################################
 
-# Root logger
-log4j.rootLogger=INFO, out, json, osgi:*, stdout
-log4j.throwableRenderer=org.apache.log4j.OsgiThrowableRenderer
+# Colors for log level rendering
+color.fatal = bright red
+color.error = bright red
+color.warn = bright yellow
+color.info = bright green
+color.debug = cyan
+color.trace = cyan
 
-# CONSOLE appender not used by default
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
+# Common pattern layout for appenders
+log4j2.pattern = %d{ISO8601} | %-5p | %-16t | %-32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
+log4j2.out.pattern = \u001b[90m%d{HH:mm:ss\.SSS}\u001b[0m %highlight{%-5level}{FATAL=${color.fatal}, ERROR=${color.error}, WARN=${color.warn}, INFO=${color.info}, DEBUG=${color.debug}, TRACE=${color.trace}} \u001b[90m[%c{1}]\u001b[0m %msg%n%throwable
 
-# File appender
-log4j.appender.out=org.apache.log4j.RollingFileAppender
-log4j.appender.out.layout=org.apache.log4j.PatternLayout
-log4j.appender.out.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
-log4j.appender.out.file=${karaf.data}/log/karaf.log
-log4j.appender.out.append=true
-log4j.appender.out.maxFileSize=10MB
-log4j.appender.out.maxBackupIndex=10
+# Root logger configuration
+log4j2.rootLogger.level = INFO
+# uncomment to use asynchronous loggers, which require the mvn:com.lmax/disruptor/3.3.2 library
+#log4j2.rootLogger.type = asyncRoot
+#log4j2.rootLogger.includeLocation = false
+log4j2.rootLogger.appenderRef.RollingFile.ref = RollingFile
+log4j2.rootLogger.appenderRef.Kafka.ref = Kafka
+log4j2.rootLogger.appenderRef.PaxOsgi.ref = PaxOsgi
+log4j2.rootLogger.appenderRef.Console.ref = Console
+log4j2.rootLogger.appenderRef.Console.filter.regex.type = RegexFilter
+log4j2.rootLogger.appenderRef.Console.filter.regex.regex = .*Audit.*
+log4j2.rootLogger.appenderRef.Console.filter.regex.onMatch = DENY
+log4j2.rootLogger.appenderRef.Console.filter.regex.onMismatch = ACCEPT
+#log4j2.rootLogger.appenderRef.Console.filter.threshold.type = ThresholdFilter
+#log4j2.rootLogger.appenderRef.Console.filter.threshold.level = ${karaf.log.console:-OFF}
 
-# JSON-ish appender (doesn't handle quotes in fields correctly)
-# docs: https://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/PatternLayout.html
-log4j.appender.json=org.apache.log4j.RollingFileAppender
-log4j.appender.json.layout=org.apache.log4j.PatternLayout
-log4j.appender.json.layout.ConversionPattern={"@timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSS'Z'}","levelname":"%p","threadName":"%t","category":"%c{1}","bundle.id":"%X{bundle.id}","bundle.name":"%X{bundle.name}","bundle.version":"%X{bundle.version}","message":"%m"}%n
-log4j.appender.json.file=${karaf.data}/log/karaf_json.log
-log4j.appender.json.append=true
-log4j.appender.json.maxFileSize=10MB
-log4j.appender.json.maxBackupIndex=10
+# Specific Loggers configuration
 
-# Sift appender - one logfile per bundle ID
-log4j.appender.sift=org.apache.log4j.sift.MDCSiftingAppender
-log4j.appender.sift.key=bundle.name
-log4j.appender.sift.default=karaf
-log4j.appender.sift.appender=org.apache.log4j.FileAppender
-log4j.appender.sift.appender.layout=org.apache.log4j.PatternLayout
-log4j.appender.sift.appender.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %m%n
-log4j.appender.sift.appender.file=${karaf.data}/log/$\\{bundle.name\\}.log
-log4j.appender.sift.appender.append=true
+## SSHD logger
+log4j2.logger.sshd.name = org.apache.sshd
+log4j2.logger.sshd.level = INFO
+
+## Spifly logger
+log4j2.logger.spifly.name = org.apache.aries.spifly
+log4j2.logger.spifly.level = WARN
+
+## Kafka logger to avoid recursive logging
+log4j2.logger.apacheKafka.name = org.apache.kafka
+log4j2.logger.apacheKafka.level = INFO
+
+# Appenders configuration
+
+## Console appender not used by default (see log4j2.rootLogger.appenderRefs)
+log4j2.appender.console.type = Console
+log4j2.appender.console.name = Console
+log4j2.appender.console.layout.type = PatternLayout
+log4j2.appender.console.layout.pattern = ${log4j2.out.pattern}
+
+## Rolling file appender
+log4j2.appender.rolling.type = RollingRandomAccessFile
+log4j2.appender.rolling.name = RollingFile
+log4j2.appender.rolling.filter.regex.type = RegexFilter
+log4j2.appender.rolling.filter.regex.regex = .*AuditLog.*
+log4j2.appender.rolling.filter.regex.onMatch = DENY
+log4j2.appender.rolling.filter.regex.onMismatch = ACCEPT
+log4j2.appender.rolling.fileName = ${karaf.data}/log/karaf.log
+log4j2.appender.rolling.filePattern = ${karaf.data}/log/karaf.log.%i
+# uncomment to not force a disk flush
+#log4j2.appender.rolling.immediateFlush = false
+log4j2.appender.rolling.append = true
+log4j2.appender.rolling.layout.type = PatternLayout
+log4j2.appender.rolling.layout.pattern = ${log4j2.pattern}
+log4j2.appender.rolling.rolling.type = DefaultRolloverStrategy
+log4j2.appender.rolling.rolling.max = 10
+log4j2.appender.rolling.policies.type = Policies
+log4j2.appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
+log4j2.appender.rolling.policies.size.size = 10MB
+
+## OSGi appender
+log4j2.appender.osgi.type = PaxOsgi
+log4j2.appender.osgi.name = PaxOsgi
+log4j2.appender.osgi.filter = *
+
+## Kafka appender
+log4j2.appender.kafka.type = Kafka
+log4j2.appender.kafka.name = Kafka
+log4j2.appender.kafka.property.type = Property
+log4j2.appender.kafka.property.name = bootstrap.servers
+log4j2.appender.kafka.property.value = {{- join "," .Values.kafka_logging.brokers }}
+log4j2.appender.kafka.topic = onos.log
+# Async send, no need to wait for Kafka ack for each record
+log4j2.appender.kafka.syncSend = false
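+# JSON-ish record layout, same fields as the removed karaf_json.log appender (quotes in field values are not escaped)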
+log4j2.kafka.pattern = {"@timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSS'Z'}","levelname":"%p","threadName":"%t","category":"%c{1}","bundle.id":"%X{bundle.id}","bundle.name":"%X{bundle.name}","bundle.version":"%X{bundle.version}","message":"%m"}%n
+log4j2.appender.kafka.layout.type = PatternLayout
+log4j2.appender.kafka.layout.pattern = ${log4j2.kafka.pattern}
 
 # Application logs
 {{ .Values.application_logs }}
+
 {{- end -}}
 
 {{/*
diff --git a/onos/templates/deployment.yaml b/onos/templates/deployment.yaml
index fc6c014..85cd1ff 100644
--- a/onos/templates/deployment.yaml
+++ b/onos/templates/deployment.yaml
@@ -55,30 +55,10 @@
             - name: node-key
               mountPath: /root/vtn
               readOnly: true
+{{- if .Values.kafka_logging.enabled }}
             - name: onos-logs-cfg
-              mountPath: /root/onos/apache-karaf-3.0.8/etc/org.ops4j.pax.logging.cfg
+              mountPath: /root/onos/apache-karaf-4.2.8/etc/org.ops4j.pax.logging.cfg
               subPath: org.ops4j.pax.logging.cfg
-            - name: onos-logs
-              mountPath: /root/onos/apache-karaf-3.0.8/data/log
-{{- if .Values.log_agent.enabled }}
-        - name: {{ .Chart.Name }}-log-agent
-          image: "{{ .Values.global.registry }}{{ .Values.images.log_agent.repository }}:{{ .Values.images.log_agent.tag }}"
-          imagePullPolicy: {{ .Values.images.log_agent.pullPolicy }}
-          args: [ "-c", "/conf/filebeat.yml", "-e" ]
-          env:
-            - name: POD_NAMESPACE
-              valueFrom:
-                fieldRef:
-                  fieldPath: metadata.namespace
-            - name: NODE_NAME
-              valueFrom:
-                fieldRef:
-                  fieldPath: spec.nodeName
-          volumeMounts:
-            - name: onos-logs
-              mountPath: /onos_logs
-            - name: log-agent-configmap-volume
-              mountPath: /conf
 {{- end }}
       volumes:
         - name: node-key
@@ -86,22 +66,16 @@
             secretName: node-key
             defaultMode: 256
             optional: true
+{{- if .Values.kafka_logging.enabled }}
         - name: onos-logs-cfg
           configMap:
             name: {{ template "onos.fullname" . }}-onos
             items:
               - key: logCfg
                 path: org.ops4j.pax.logging.cfg
+{{- end}}
         - name: onos-logs
           emptyDir: {}
-{{- if .Values.log_agent.enabled }}
-        - name: log-agent-configmap-volume
-          configMap:
-            name: {{ template "onos.fullname" . }}-log-agent-configmap
-            items:
-              - key: config
-                path: filebeat.yml
-{{- end }}
 {{- with .Values.nodeSelector }}
       nodeSelector:
 {{ toYaml . | indent 8 }}
@@ -114,6 +88,3 @@
       tolerations:
 {{ toYaml . | indent 8 }}
     {{- end }}
-{{- if .Values.log_agent.enabled }}
-{{- include "onos.log-agent-configmap" . }}
-{{- end }}
diff --git a/onos/templates/log-agent-configmap.yaml b/onos/templates/log-agent-configmap.yaml
deleted file mode 100644
index 945e146..0000000
--- a/onos/templates/log-agent-configmap.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-{{- /*
-Copyright 2017-present Open Networking Foundation
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/ -}}
-{{- define "onos.log-agent-configmap" }}
----
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: {{ template "onos.fullname" . }}-log-agent-configmap
-data:
-  config: |
-    ---
-    # filebeat configuration for ONOS
-    filebeat.inputs:
-      - type: log
-        paths:
-          - "/onos_logs/karaf_json.log"
-
-        fields_under_root: true
-
-        json.keys_under_root: true
-        json.add_error_key: true
-        json.overwrite_keys: true
-        json.message_key: "message"
-
-        multiline.pattern: '^[[:space:]]'
-        multiline.negate: false
-        multiline.match: after
-
-    # remove unneeded fields
-    processors:
-      - drop_fields:
-          fields: ["host", "prospector", "input"]
-
-    output.kafka:
-      hosts: {{ .Values.log_agent.kafka_brokers | toJson }}
-      topic: 'onos.log'
-      key: '%{[bundle.name]}'
-
-{{- end }}
-
diff --git a/onos/values.yaml b/onos/values.yaml
index 5a2d46b..58a5d87 100644
--- a/onos/values.yaml
+++ b/onos/values.yaml
@@ -25,18 +25,12 @@
     tag: '{{ .Chart.AppVersion }}'
     pullPolicy: 'Always'
 
-  # keep in sync with: https://github.com/helm/charts/blob/master/stable/filebeat/values.yaml
-  log_agent:
-    repository: docker.elastic.co/beats/filebeat-oss
-    tag: 6.4.2
-    pullPolicy: IfNotPresent
-
 global:
   registry: ''
 
-log_agent:
+kafka_logging:
   enabled: True
-  kafka_brokers: ['cord-kafka:9092']
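+  # Kafka bootstrap servers the log4j2 KafkaAppender publishes the onos.log topic to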
+  brokers: ['cord-kafka:9092']
 
 nameOverride: ''
 fullnameOverride: ''