Fix logging to Kafka on ONOS 2.2

Logging to Kafka uses the KafkaAppender of log4j2, which needs ONOS
built on Karaf > 4.2.8. Replace the log4j 1.x properties in the ONOS
logging configuration template with an equivalent log4j2 configuration
and add a Kafka appender.
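
For illustration, a minimal sketch of the chart values this template
consumes. Only the key names (kafka_logging.brokers and
application_logs) come from the template; the broker addresses and the
extra logger shown here are placeholders:

    kafka_logging:
      brokers:                # joined with "," into bootstrap.servers
        - kafka-0.kafka:9092
        - kafka-1.kafka:9092
    application_logs: |       # rendered verbatim into the config
      log4j2.logger.onos.name = org.onosproject
      log4j2.logger.onos.level = INFO

With these values the Kafka appender's bootstrap.servers property
renders as kafka-0.kafka:9092,kafka-1.kafka:9092.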

Change-Id: I6f53329dcf367c93f7913d363c3048e80c2c5392
diff --git a/onos/templates/_helpers.tpl b/onos/templates/_helpers.tpl
index 6d82564..43b84a5 100644
--- a/onos/templates/_helpers.tpl
+++ b/onos/templates/_helpers.tpl
@@ -66,46 +66,100 @@
 #
 ################################################################################
 
-# Root logger
-log4j.rootLogger=INFO, out, json, osgi:*, stdout
-log4j.throwableRenderer=org.apache.log4j.OsgiThrowableRenderer
+# Colors for log level rendering
+color.fatal = bright red
+color.error = bright red
+color.warn = bright yellow
+color.info = bright green
+color.debug = cyan
+color.trace = cyan
 
-# CONSOLE appender not used by default
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
+# Common pattern layout for appenders
+log4j2.pattern = %d{ISO8601} | %-5p | %-16t | %-32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
+log4j2.out.pattern = \u001b[90m%d{HH:mm:ss\.SSS}\u001b[0m %highlight{%-5level}{FATAL=${color.fatal}, ERROR=${color.error}, WARN=${color.warn}, INFO=${color.info}, DEBUG=${color.debug}, TRACE=${color.trace}} \u001b[90m[%c{1}]\u001b[0m %msg%n%throwable
 
-# File appender
-log4j.appender.out=org.apache.log4j.RollingFileAppender
-log4j.appender.out.layout=org.apache.log4j.PatternLayout
-log4j.appender.out.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %X{bundle.id} - %X{bundle.name} - %X{bundle.version} | %m%n
-log4j.appender.out.file=${karaf.data}/log/karaf.log
-log4j.appender.out.append=true
-log4j.appender.out.maxFileSize=10MB
-log4j.appender.out.maxBackupIndex=10
+# Root logger configuration
+log4j2.rootLogger.level = INFO
+# uncomment to use asynchronous loggers, which require the mvn:com.lmax/disruptor/3.3.2 library
+#log4j2.rootLogger.type = asyncRoot
+#log4j2.rootLogger.includeLocation = false
+log4j2.rootLogger.appenderRef.RollingFile.ref = RollingFile
+log4j2.rootLogger.appenderRef.Kafka.ref = Kafka
+log4j2.rootLogger.appenderRef.PaxOsgi.ref = PaxOsgi
+log4j2.rootLogger.appenderRef.Console.ref = Console
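+# RegexFilter keeps Audit messages off the console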
+log4j2.rootLogger.appenderRef.Console.filter.regex.type = RegexFilter
+log4j2.rootLogger.appenderRef.Console.filter.regex.regex = .*Audit.*
+log4j2.rootLogger.appenderRef.Console.filter.regex.onMatch = DENY
+log4j2.rootLogger.appenderRef.Console.filter.regex.onMismatch = ACCEPT
+#log4j2.rootLogger.appenderRef.Console.filter.threshold.type = ThresholdFilter
+#log4j2.rootLogger.appenderRef.Console.filter.threshold.level = ${karaf.log.console:-OFF}
 
-# JSON-ish appender (doesn't handle quotes in fields correctly)
-# docs: https://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/PatternLayout.html
-log4j.appender.json=org.apache.log4j.RollingFileAppender
-log4j.appender.json.layout=org.apache.log4j.PatternLayout
-log4j.appender.json.layout.ConversionPattern={"@timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSS'Z'}","levelname":"%p","threadName":"%t","category":"%c{1}","bundle.id":"%X{bundle.id}","bundle.name":"%X{bundle.name}","bundle.version":"%X{bundle.version}","message":"%m"}%n
-log4j.appender.json.file=${karaf.data}/log/karaf_json.log
-log4j.appender.json.append=true
-log4j.appender.json.maxFileSize=10MB
-log4j.appender.json.maxBackupIndex=10
+# Specific loggers configuration
 
-# Sift appender - one logfile per bundle ID
-log4j.appender.sift=org.apache.log4j.sift.MDCSiftingAppender
-log4j.appender.sift.key=bundle.name
-log4j.appender.sift.default=karaf
-log4j.appender.sift.appender=org.apache.log4j.FileAppender
-log4j.appender.sift.appender.layout=org.apache.log4j.PatternLayout
-log4j.appender.sift.appender.layout.ConversionPattern=%d{ISO8601} | %-5.5p | %-16.16t | %-32.32c{1} | %m%n
-log4j.appender.sift.appender.file=${karaf.data}/log/$\\{bundle.name\\}.log
-log4j.appender.sift.appender.append=true
+## SSHD logger
+log4j2.logger.sshd.name = org.apache.sshd
+log4j2.logger.sshd.level = INFO
+
+## Spifly logger
+log4j2.logger.spifly.name = org.apache.aries.spifly
+log4j2.logger.spifly.level = WARN
+
+## Kafka logger to avoid recursive logging
+log4j2.logger.apacheKafka.name = org.apache.kafka
+log4j2.logger.apacheKafka.level = INFO
+
+# Appenders configuration
+
+## Console appender (referenced by the root logger; Audit messages are filtered out)
+log4j2.appender.console.type = Console
+log4j2.appender.console.name = Console
+log4j2.appender.console.layout.type = PatternLayout
+log4j2.appender.console.layout.pattern = ${log4j2.out.pattern}
+
+## Rolling file appender
+log4j2.appender.rolling.type = RollingRandomAccessFile
+log4j2.appender.rolling.name = RollingFile
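+# RegexFilter keeps AuditLog messages out of karaf.log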
+log4j2.appender.rolling.filter.regex.type = RegexFilter
+log4j2.appender.rolling.filter.regex.regex = .*AuditLog.*
+log4j2.appender.rolling.filter.regex.onMatch = DENY
+log4j2.appender.rolling.filter.regex.onMismatch = ACCEPT
+log4j2.appender.rolling.fileName = ${karaf.data}/log/karaf.log
+log4j2.appender.rolling.filePattern = ${karaf.data}/log/karaf.log.%i
+# uncomment to avoid forcing a disk flush on every log event
+#log4j2.appender.rolling.immediateFlush = false
+log4j2.appender.rolling.append = true
+log4j2.appender.rolling.layout.type = PatternLayout
+log4j2.appender.rolling.layout.pattern = ${log4j2.pattern}
+log4j2.appender.rolling.rolling.type = DefaultRolloverStrategy
+log4j2.appender.rolling.rolling.max = 10
+log4j2.appender.rolling.policies.type = Policies
+log4j2.appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
+log4j2.appender.rolling.policies.size.size = 10MB
+
+## OSGi appender
+log4j2.appender.osgi.type = PaxOsgi
+log4j2.appender.osgi.name = PaxOsgi
+log4j2.appender.osgi.filter = *
+
+## Kafka appender
+log4j2.appender.kafka.type = Kafka
+log4j2.appender.kafka.name = Kafka
+log4j2.appender.kafka.property.type = Property
+log4j2.appender.kafka.property.name = bootstrap.servers
+log4j2.appender.kafka.property.value = {{- join "," .Values.kafka_logging.brokers }}
+log4j2.appender.kafka.topic = onos.log
+# Send asynchronously; do not wait for a Kafka ack on each record
+log4j2.appender.kafka.syncSend = false
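+# JSON-ish layout; quotes inside field values are not escaped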
+log4j2.kafka.pattern = {"@timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSS'Z'}","levelname":"%p","threadName":"%t","category":"%c{1}","bundle.id":"%X{bundle.id}","bundle.name":"%X{bundle.name}","bundle.version":"%X{bundle.version}","message":"%m"}%n
+log4j2.appender.kafka.layout.type = PatternLayout
+log4j2.appender.kafka.layout.pattern = ${log4j2.kafka.pattern}
 
 # Application logs
 {{ .Values.application_logs }}
+
 {{- end -}}
 
 {{/*