Merge "[VOL-4237] Creating keyword to collect kubernetes components log removing log-collector.sh from voltha-dt-physical-functional-tests.groovy"
diff --git a/jjb/pipeline/voltha-dt-physical-functional-tests.groovy b/jjb/pipeline/voltha/master/voltha-dt-physical-functional-tests.groovy
similarity index 80%
rename from jjb/pipeline/voltha-dt-physical-functional-tests.groovy
rename to jjb/pipeline/voltha/master/voltha-dt-physical-functional-tests.groovy
index 0279f0a..6155a33 100644
--- a/jjb/pipeline/voltha-dt-physical-functional-tests.groovy
+++ b/jjb/pipeline/voltha/master/voltha-dt-physical-functional-tests.groovy
@@ -33,26 +33,9 @@
}
stages {
- stage('Clone kind-voltha') {
- steps {
- step([$class: 'WsCleanup'])
- checkout([
- $class: 'GitSCM',
- userRemoteConfigs: [[
- url: "https://gerrit.opencord.org/kind-voltha",
- refspec: "${kindVolthaChange}"
- ]],
- branches: [[ name: "master", ]],
- extensions: [
- [$class: 'WipeWorkspace'],
- [$class: 'RelativeTargetDirectory', relativeTargetDir: "kind-voltha"],
- [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
- ],
- ])
- }
- }
stage('Clone voltha-system-tests') {
steps {
+ step([$class: 'WsCleanup'])
checkout([
$class: 'GitSCM',
userRemoteConfigs: [[
@@ -76,23 +59,6 @@
}
}
}
- stage('Clone cord-tester') {
- steps {
- checkout([
- $class: 'GitSCM',
- userRemoteConfigs: [[
- url: "https://gerrit.opencord.org/cord-tester",
- refspec: "${cordTesterChange}"
- ]],
- branches: [[ name: "master", ]],
- extensions: [
- [$class: 'WipeWorkspace'],
- [$class: 'RelativeTargetDirectory', relativeTargetDir: "cord-tester"],
- [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
- ],
- ])
- }
- }
// This checkout allows us to show changes in Jenkins
// we only do this on master as we don't branch all the repos for all the releases
// (we should compute the difference by tracking the container version, not the code)
@@ -170,11 +136,8 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/FunctionalTests"
}
steps {
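+ // collect component logs with kail for the duration of this stage (startComponentsLog/stopComponentsLog keywords defined in vars/ by this change)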
+ startComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests")
sh """
- cd $WORKSPACE/kind-voltha/scripts
- ./log-collector.sh > /dev/null &
- ./log-combine.sh > /dev/null &
-
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
export ROBOT_MISC_ARGS="--removekeywords wuks -i PowerSwitch -i sanityDt -i functionalDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
@@ -183,6 +146,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests", compress: true)
}
}
@@ -193,6 +157,7 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/FailureScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/FailureScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
@@ -202,6 +167,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/FailureScenarios", compress: true)
}
}
@@ -212,11 +178,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/DataplaneTests"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/DataplaneTests")
sh """
mkdir -p $ROBOT_LOGS_DIR
export ROBOT_MISC_ARGS="--removekeywords wuks -i dataplaneDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/DataplaneTests", compress: true)
}
}
stage('HA Tests') {
@@ -226,11 +194,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/ONOSHAScenarios"
}
steps {
- sh """
- mkdir -p $ROBOT_LOGS_DIR
- export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
- make -C $WORKSPACE/voltha-system-tests voltha-test || true
- """
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios")
+ sh """
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
+ make -C $WORKSPACE/voltha-system-tests voltha-test || true
+ """
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios", compress: true)
}
}
@@ -241,6 +211,7 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/MultipleOLTScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
@@ -250,6 +221,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios", compress: true)
}
}
@@ -261,11 +233,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/ErrorScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ErrorScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -i functional -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
make -C $WORKSPACE/voltha-system-tests voltha-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ErrorScenarios", compress: true)
}
}
}
@@ -279,43 +253,6 @@
kubectl get pods -n voltha -o wide
kubectl get pods -o wide
- sleep 60 # Wait for log-collector and log-combine to complete
-
- # Clean up "announcer" pod used by the tests if present
- kubectl delete pod announcer || true
-
- ## Pull out errors from log files
- extract_errors_go() {
- echo
- echo "Error summary for $1:"
- grep '"level":"error"' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
- echo
- }
-
- extract_errors_python() {
- echo
- echo "Error summary for $1:"
- grep 'ERROR' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
- echo
- }
-
- extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
- extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
- extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
- extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log
- extract_errors_python onos >> $WORKSPACE/error-report.log
-
- gzip error-report.log || true
- rm error-report.log || true
-
- cd $WORKSPACE/kind-voltha/scripts/logger/combined/
- tar czf $WORKSPACE/container-logs.tgz *
- rm * || true
-
- cd $WORKSPACE
- gzip *-combined.log || true
- rm *-combined.log || true
-
# store information on running charts
helm ls > $WORKSPACE/helm-list.txt || true
@@ -323,10 +260,6 @@
kubectl get pods --all-namespaces -o wide > $WORKSPACE/pods.txt || true
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\n'}" | sort | uniq | tee $WORKSPACE/pod-images.txt || true
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.imageID}{'\\n'}" | sort | uniq | tee $WORKSPACE/pod-imagesId.txt || true
-
- # collect ETCD cluster logs
- mkdir -p $WORKSPACE/etcd
- printf '%s\n' $(kubectl get pods -l app=etcd -o=jsonpath="{.items[*]['metadata.name']}") | xargs -I% bash -c "kubectl logs % > $WORKSPACE/etcd/%.log"
'''
script {
deployment_config.olts.each { olt ->
@@ -355,7 +288,7 @@
unstableThreshold: 0,
onlyCritical: true
]);
- archiveArtifacts artifacts: '*.log,*.gz,*.tgz,etcd/*.log,*.txt'
+ archiveArtifacts artifacts: '**/*.log,**/*.tgz,*.txt'
}
}
}
diff --git a/jjb/pipeline/voltha/master/voltha-scale-test.groovy b/jjb/pipeline/voltha/master/voltha-scale-test.groovy
index 8090b35..6a8535b 100644
--- a/jjb/pipeline/voltha/master/voltha-scale-test.groovy
+++ b/jjb/pipeline/voltha/master/voltha-scale-test.groovy
@@ -50,10 +50,7 @@
NUM_OF_ONOS="${onosReplicas}"
NUM_OF_ATOMIX="${atomixReplicas}"
EXTRA_HELM_FLAGS=" "
-
- APPS_TO_LOG="etcd kafka onos-classic adapter-open-onu adapter-open-olt rw-core ofagent bbsim radius bbsim-sadis-server onos-config-loader"
LOG_FOLDER="$WORKSPACE/logs"
-
GERRIT_PROJECT="${GERRIT_PROJECT}"
}
@@ -127,18 +124,21 @@
steps {
timeout(time: 10, unit: 'MINUTES') {
script {
- sh returnStdout: false, script: '''
- # start logging with kail
-
- mkdir -p $LOG_FOLDER
-
- list=($APPS_TO_LOG)
- for app in "${list[@]}"
- do
- echo "Starting logs for: ${app}"
- _TAG=kail-$app kail -l app=$app --since 1h > $LOG_FOLDER/$app.log&
- done
- '''
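+ // start per-component kail logging; etcd and kafka pods are selected by their app.kubernetes.io/name label, the others by app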
+ startComponentsLog([
+ appsToLog: [
+ 'app.kubernetes.io/name=etcd',
+ 'app.kubernetes.io/name=kafka',
+ 'app=onos-classic',
+ 'app=adapter-open-onu',
+ 'app=adapter-open-olt',
+ 'app=rw-core',
+ 'app=ofagent',
+ 'app=bbsim',
+ 'app=radius',
+ 'app=bbsim-sadis-server',
+ 'app=onos-config-loader',
+ ]
+ ])
def returned_flags = sh (returnStdout: true, script: """
export EXTRA_HELM_FLAGS+=' '
@@ -527,25 +527,22 @@
}
post {
always {
+ // stop the kail log collection started at deploy time; the processes are found by their _TAG, so no label list is needed
+ stopComponentsLog()
// collect result, done in the "post" step so it's executed even in the
// event of a timeout in the tests
sh '''
-
- # stop the kail processes
- list=($APPS_TO_LOG)
- for app in "${list[@]}"
- do
- echo "Stopping logs for: ${app}"
- _TAG="kail-$app"
- P_IDS="$(ps e -ww -A | grep "_TAG=$_TAG" | grep -v grep | awk '{print $1}')"
- if [ -n "$P_IDS" ]; then
- echo $P_IDS
- for P_ID in $P_IDS; do
- kill -9 $P_ID
- done
- fi
- done
-
if [ ${withPcap} = true ] ; then
# stop ofAgent tcpdump
P_ID="\$(ps e -ww -A | grep "_TAG=ofagent-tcpdump" | grep -v grep | awk '{print \$1}')"
@@ -668,7 +665,7 @@
'''
script {
// first make sure the port-forward is still running,
- // sometimes Jenkins kills it relardless of the JENKINS_NODE_COOKIE=dontKillMe
+ // sometimes Jenkins kills it regardless of the JENKINS_NODE_COOKIE=dontKillMe
def running = sh (
script: 'ps aux | grep port-forw | grep -E "onos|voltha" | grep -v grep | wc -l',
returnStdout: true
diff --git a/jjb/pipeline/voltha-dt-physical-functional-tests.groovy b/jjb/pipeline/voltha/voltha-2.8/voltha-dt-physical-functional-tests.groovy
similarity index 75%
copy from jjb/pipeline/voltha-dt-physical-functional-tests.groovy
copy to jjb/pipeline/voltha/voltha-2.8/voltha-dt-physical-functional-tests.groovy
index 0279f0a..6744891 100644
--- a/jjb/pipeline/voltha-dt-physical-functional-tests.groovy
+++ b/jjb/pipeline/voltha/voltha-2.8/voltha-dt-physical-functional-tests.groovy
@@ -33,26 +33,9 @@
}
stages {
- stage('Clone kind-voltha') {
- steps {
- step([$class: 'WsCleanup'])
- checkout([
- $class: 'GitSCM',
- userRemoteConfigs: [[
- url: "https://gerrit.opencord.org/kind-voltha",
- refspec: "${kindVolthaChange}"
- ]],
- branches: [[ name: "master", ]],
- extensions: [
- [$class: 'WipeWorkspace'],
- [$class: 'RelativeTargetDirectory', relativeTargetDir: "kind-voltha"],
- [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
- ],
- ])
- }
- }
stage('Clone voltha-system-tests') {
steps {
+ step([$class: 'WsCleanup'])
checkout([
$class: 'GitSCM',
userRemoteConfigs: [[
@@ -76,48 +59,6 @@
}
}
}
- stage('Clone cord-tester') {
- steps {
- checkout([
- $class: 'GitSCM',
- userRemoteConfigs: [[
- url: "https://gerrit.opencord.org/cord-tester",
- refspec: "${cordTesterChange}"
- ]],
- branches: [[ name: "master", ]],
- extensions: [
- [$class: 'WipeWorkspace'],
- [$class: 'RelativeTargetDirectory', relativeTargetDir: "cord-tester"],
- [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
- ],
- ])
- }
- }
- // This checkout allows us to show changes in Jenkins
- // we only do this on master as we don't branch all the repos for all the releases
- // (we should compute the difference by tracking the container version, not the code)
- stage('Download All the VOLTHA repos') {
- when {
- expression {
- return "${branch}" == 'master';
- }
- }
- steps {
- checkout(changelog: true,
- poll: false,
- scm: [$class: 'RepoScm',
- manifestRepositoryUrl: "${params.manifestUrl}",
- manifestBranch: "${params.branch}",
- currentBranch: true,
- destinationDir: 'voltha',
- forceSync: true,
- resetFirst: true,
- quiet: true,
- jobs: 4,
- showAllChanges: true]
- )
- }
- }
stage ('Initialize') {
steps {
sh returnStdout: false, script: "git clone -b ${branch} ${cordRepoUrl}/${configBaseDir}"
@@ -170,11 +111,8 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/FunctionalTests"
}
steps {
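+ // collect component logs with kail for the duration of this stage (startComponentsLog/stopComponentsLog keywords defined in vars/ by this change)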
+ startComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests")
sh """
- cd $WORKSPACE/kind-voltha/scripts
- ./log-collector.sh > /dev/null &
- ./log-combine.sh > /dev/null &
-
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
export ROBOT_MISC_ARGS="--removekeywords wuks -i PowerSwitch -i sanityDt -i functionalDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
@@ -183,6 +121,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests", compress: true)
}
}
@@ -193,6 +132,7 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/FailureScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/FailureScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
@@ -202,6 +142,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/FailureScenarios", compress: true)
}
}
@@ -212,11 +153,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/DataplaneTests"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/DataplaneTests")
sh """
mkdir -p $ROBOT_LOGS_DIR
export ROBOT_MISC_ARGS="--removekeywords wuks -i dataplaneDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/DataplaneTests", compress: true)
}
}
stage('HA Tests') {
@@ -226,11 +169,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/ONOSHAScenarios"
}
steps {
- sh """
- mkdir -p $ROBOT_LOGS_DIR
- export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
- make -C $WORKSPACE/voltha-system-tests voltha-test || true
- """
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios")
+ sh """
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
+ make -C $WORKSPACE/voltha-system-tests voltha-test || true
+ """
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios", compress: true)
}
}
@@ -241,6 +186,7 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/MultipleOLTScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
if ( ${powerSwitch} ); then
@@ -250,6 +196,7 @@
fi
make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ONOSHAScenarios", compress: true)
}
}
@@ -261,11 +208,13 @@
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow/ErrorScenarios"
}
steps {
+ startComponentsLog(logsDir: "$WORKSPACE/logs/ErrorScenarios")
sh """
mkdir -p $ROBOT_LOGS_DIR
export ROBOT_MISC_ARGS="--removekeywords wuks -L TRACE -i functional -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v workflow:${params.workFlow} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE -v OLT_ADAPTER_APP_LABEL:${oltAdapterAppLabel}"
make -C $WORKSPACE/voltha-system-tests voltha-test || true
"""
+ stopComponentsLog(logsDir: "$WORKSPACE/logs/ErrorScenarios", compress: true)
}
}
}
@@ -279,43 +228,6 @@
kubectl get pods -n voltha -o wide
kubectl get pods -o wide
- sleep 60 # Wait for log-collector and log-combine to complete
-
- # Clean up "announcer" pod used by the tests if present
- kubectl delete pod announcer || true
-
- ## Pull out errors from log files
- extract_errors_go() {
- echo
- echo "Error summary for $1:"
- grep '"level":"error"' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
- echo
- }
-
- extract_errors_python() {
- echo
- echo "Error summary for $1:"
- grep 'ERROR' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
- echo
- }
-
- extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
- extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
- extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
- extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log
- extract_errors_python onos >> $WORKSPACE/error-report.log
-
- gzip error-report.log || true
- rm error-report.log || true
-
- cd $WORKSPACE/kind-voltha/scripts/logger/combined/
- tar czf $WORKSPACE/container-logs.tgz *
- rm * || true
-
- cd $WORKSPACE
- gzip *-combined.log || true
- rm *-combined.log || true
-
# store information on running charts
helm ls > $WORKSPACE/helm-list.txt || true
@@ -323,10 +235,6 @@
kubectl get pods --all-namespaces -o wide > $WORKSPACE/pods.txt || true
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\n'}" | sort | uniq | tee $WORKSPACE/pod-images.txt || true
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.imageID}{'\\n'}" | sort | uniq | tee $WORKSPACE/pod-imagesId.txt || true
-
- # collect ETCD cluster logs
- mkdir -p $WORKSPACE/etcd
- printf '%s\n' $(kubectl get pods -l app=etcd -o=jsonpath="{.items[*]['metadata.name']}") | xargs -I% bash -c "kubectl logs % > $WORKSPACE/etcd/%.log"
'''
script {
deployment_config.olts.each { olt ->
@@ -355,7 +263,7 @@
unstableThreshold: 0,
onlyCritical: true
]);
- archiveArtifacts artifacts: '*.log,*.gz,*.tgz,etcd/*.log,*.txt'
+ archiveArtifacts artifacts: '**/*.log,**/*.tgz,*.txt'
}
}
}
diff --git a/jjb/voltha-test/voltha.yaml b/jjb/voltha-test/voltha.yaml
index 1921987..5e2353c 100644
--- a/jjb/voltha-test/voltha.yaml
+++ b/jjb/voltha-test/voltha.yaml
@@ -225,7 +225,7 @@
work-flow: 'DT'
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/master/voltha-dt-physical-functional-tests.groovy'
power-switch: True
# Menlo pod with olt/onu - released branch, Default tech profile and timer based job
@@ -254,7 +254,7 @@
work-flow: 'DT'
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/voltha-2.8/voltha-dt-physical-functional-tests.groovy'
power-switch: True
# ONF Menlo Soak POD build job - voltha-master branch
@@ -407,7 +407,7 @@
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
power-switch: True
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/master/voltha-dt-physical-functional-tests.groovy'
# Certification (Radisys) pod with olt/onu - 2.8 version timer based job
- 'build_voltha_pod_release_timer':
@@ -511,7 +511,7 @@
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
power-switch: True
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/master/voltha-dt-physical-functional-tests.groovy'
# Berlin pod with gpon olt/onu - master 1T8GEM tech profile and timer based job
- 'build_voltha_pod_release_timer':
@@ -535,7 +535,7 @@
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
power-switch: True
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/master/voltha-dt-physical-functional-tests.groovy'
# Berlin pod with gpon olt/onu - master 1T8GEM tech profile and timer based job
- 'build_voltha_pod_release_timer':
@@ -562,7 +562,7 @@
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
power-switch: True
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/voltha-2.8/voltha-dt-physical-functional-tests.groovy'
# Berlin pod with gpon olt/onu - master 1T8GEM tech profile and openonu go and timer based job
- 'build_voltha_pod_release_timer':
@@ -590,4 +590,4 @@
test-repo: 'voltha-system-tests'
profile: '1T8GEM'
power-switch: True
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+ pipeline-script: 'voltha/master/voltha-dt-physical-functional-tests.groovy'
diff --git a/vars/startComponentsLog.groovy b/vars/startComponentsLog.groovy
new file mode 100644
index 0000000..954d0b1
--- /dev/null
+++ b/vars/startComponentsLog.groovy
@@ -0,0 +1,54 @@
+// checks whether kail is installed, installing it if missing,
+// and then uses it to collect logs from the specified containers
+
+// appsToLog is a list of kubernetes label selectors used by kail to pick the pods to log
+// each generated log file is named after the value to the right of the "="
+// for example app=bbsim will generate a file called bbsim.log
+
+// to archive the logs use: archiveArtifacts artifacts: '${logsDir}/*.log'
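+//
+// illustrative usage with custom labels (for example the etcd and kafka charts
+// label their pods with app.kubernetes.io/name rather than app):
+//
+//   startComponentsLog([
+//     appsToLog: ['app.kubernetes.io/name=etcd', 'app=rw-core'],
+//     logsDir: "$WORKSPACE/logs/FunctionalTests"
+//   ])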
+def call(Map config) {
+
+ def tagPrefix = "jenkins"
+
+ def defaultConfig = [
+ appsToLog: [
+ 'app=onos-classic',
+ 'app=adapter-open-onu',
+ 'app=adapter-open-olt',
+ 'app=rw-core',
+ 'app=ofagent',
+ 'app=bbsim',
+ 'app=radius',
+ 'app=bbsim-sadis-server',
+ 'app=onos-config-loader',
+ ],
+ logsDir: "$WORKSPACE/logs"
+ ]
+
+ if (!config) {
+ config = [:]
+ }
+
+ def cfg = defaultConfig + config
+
+ // install kail if it is not already available
+ sh """
+ if ! command -v kail &> /dev/null
+ then
+ bash <( curl -sfL https://raw.githubusercontent.com/boz/kail/master/godownloader.sh) -b "$WORKSPACE/bin"
+ fi
+ """
+
+ // if the logsDir does not exist, dir() will create it
+ dir(cfg.logsDir) {
+ for (int i = 0; i < cfg.appsToLog.size(); i++) {
+ def label = cfg.appsToLog[i]
+ def logFile = label.split('=')[1]
+ def tag = "${tagPrefix}-kail-${logFile}"
+ println "Starting logging process for label: ${label}"
+ sh """
+ _TAG=${tag} kail -l ${label} --since 1h > ${cfg.logsDir}/${logFile}.log &
+ """
+ }
+ }
+}
\ No newline at end of file
diff --git a/vars/stopComponentsLog.groovy b/vars/stopComponentsLog.groovy
new file mode 100644
index 0000000..fc0f0e9
--- /dev/null
+++ b/vars/stopComponentsLog.groovy
@@ -0,0 +1,27 @@
+// stops all the kail processes created by startComponentsLog
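+//
+// illustrative pairing with startComponentsLog inside a stage:
+//
+//   steps {
+//     startComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests")
+//     sh "make -C $WORKSPACE/voltha-system-tests voltha-dt-test || true"
+//     stopComponentsLog(logsDir: "$WORKSPACE/logs/FunctionalTests", compress: true)
+//   }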
+
+def call(Map config) {
+
+  def defaultConfig = [
+    logsDir: "$WORKSPACE/logs",
+    compress: false, // whether to compress the logs into a single tgz file
+  ]
+
+  if (!config) {
+    config = [:]
+  }
+
+  def cfg = defaultConfig + config
+
+  println "Stopping all kail logging processes"
+
+  // kill the kail processes started by startComponentsLog (they carry _TAG=jenkins-kail-<component>)
+  // the script is single-quoted so that $(), $P_IDS and awk's $1 are not treated as Groovy interpolation
+  sh '''
+  P_IDS="$(ps e -ww -A | grep "_TAG=jenkins-kail" | grep -v grep | awk '{print $1}')"
+  if [ -n "$P_IDS" ]; then
+    for P_ID in $P_IDS; do
+      kill -9 $P_ID
+    done
+  fi
+  '''
+
+  if (cfg.compress) {
+    dir(cfg.logsDir) {
+      sh '''
+      tar czf combined.tgz *.log || true
+      rm *.log || true
+      '''
+    }
+  }
+}
\ No newline at end of file