Merge "[VOL-3780] Using umbrella charts in voltha-scale pipeline"
diff --git a/jjb/cord-macros.yaml b/jjb/cord-macros.yaml
index e49239d..59cd72c 100644
--- a/jjb/cord-macros.yaml
+++ b/jjb/cord-macros.yaml
@@ -221,6 +221,8 @@
<permission>hudson.model.Run.Update:JenkinsPowerusers</permission>
<permission>hudson.model.Item.Discover:AetherAccess</permission>
<permission>hudson.model.Item.Discover:anonymous</permission>
+ <permission>hudson.model.Item.Discover:ONFStaff</permission>
<permission>hudson.model.Item.Read:AetherAccess</permission>
+ <permission>hudson.model.Item.Read:ONFStaff</permission>
<permission>hudson.model.Item.ViewStatus:anonymous</permission>
</hudson.security.AuthorizationMatrixProperty>
diff --git a/jjb/cord-test/voltha.yaml b/jjb/cord-test/voltha.yaml
index 4679e8c..1a9957e 100644
--- a/jjb/cord-test/voltha.yaml
+++ b/jjb/cord-test/voltha.yaml
@@ -234,36 +234,6 @@
profile: 'TP'
power-switch: True
- # Menlo pod with olt/onu - Default tech profile and timer based job
- - 'build_voltha_pod_release_timer':
- build-node: 'menlo-demo-pod'
- config-pod: 'onf-demo-pod-gpon'
- release: 'master'
- branch: 'master'
- num-of-openonu: '1'
- num-of-onos: '3'
- num-of-atomix: '3'
- name-extension: '_DT_GPON'
- work-flow: 'DT'
- test-repo: 'voltha-system-tests'
- Jenkinsfile: 'Jenkinsfile-voltha-build'
- configurePod: true
- profile: '1T8GEM'
- time: '22'
- VolthaEtcdPort: '9999'
-
- # Menlo pod test job - uses tech profile on voltha branch
- - 'build_voltha_pod_test':
- build-node: 'menlo-demo-pod'
- config-pod: 'onf-demo-pod-gpon'
- release: 'master'
- branch: 'master'
- name-extension: '_DT_GPON'
- work-flow: 'DT'
- test-repo: 'voltha-system-tests'
- profile: '1T8GEM'
- pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
-
# Menlo pod with olt/onu - 1T4GEM tech profile and timer based job
- 'build_voltha_pod_release_timer':
build-node: 'menlo-demo-pod'
@@ -639,7 +609,7 @@
configurePod: true
profile: '1T8GEM'
reinstall-olt: false
- openoltAdapterChart: '/home/community/adtran-2021-01-08/voltha-helm-chart-adapter-adtran-olt_v1.2.1/voltha-adapter-adtran-olt'
+ openoltAdapterChart: '/home/community/adtran-2021-01-19/voltha-adapter-adtran-olt'
time: '9'
diff --git a/jjb/fossa.yaml b/jjb/fossa.yaml
index 38fe47e..acfa347 100644
--- a/jjb/fossa.yaml
+++ b/jjb/fossa.yaml
@@ -22,35 +22,6 @@
Created by {id} job-template from ci-management/jjb/fossa.yaml<br/>
Copyright (c) 2018-present Open Networking Foundation (ONF)
-# # replace with cord-infra-gerrit-trigger-patchset when skip-vote is removed
-# triggers:
-# - gerrit:
-# server-name: '{gerrit-server-name}'
-# dependency-jobs: '{dependency-jobs}'
-# silent-start: true
-# trigger-on:
-# - patchset-created-event:
-# exclude-drafts: true
-# exclude-trivial-rebase: false
-# exclude-no-code-change: true
-# - draft-published-event
-# - comment-added-contains-event:
-# comment-contains-value: '(?i)^.*recheck$'
-# projects:
-# - project-compare-type: REG_EXP
-# project-pattern: '{project-regexp}'
-# branches:
-# - branch-compare-type: REG_EXP
-# branch-pattern: '{branch-regexp}'
-# file-paths:
-# - compare-type: REG_EXP
-# pattern: '{file-include-regexp}'
-# skip-vote:
-# success: true
-# failed: true
-# unstable: true
-# notbuilt: true
-
properties:
- cord-infra-properties:
build-days-to-keep: '{build-days-to-keep}'
diff --git a/jjb/omec-ci.yaml b/jjb/omec-ci.yaml
index d44e964..674ed2a 100644
--- a/jjb/omec-ci.yaml
+++ b/jjb/omec-ci.yaml
@@ -37,6 +37,9 @@
cp-context: 'acc-dev-gcp'
dp-context: 'ace-dev-menlo'
log-since: '1h'
+ - 'omec-post-results':
+ pod: 'dev'
+ build-node: 'omec-ci'
- 'omec-postmerge':
project: '{name}'
build-node: 'omec-ci'
@@ -45,6 +48,28 @@
nucleus-branch-name: 'master'
upf-branch-name: 'master'
pipeline-file: 'omec-postmerge.groovy'
+ - 'omec-nightly':
+ pod: 'dev'
+ build-node: 'omec-ci'
+ type: 'func'
+ build-timeout: 30
+ ntl-file: 'func.ntl'
+ time: '1'
+ desc: |
+          <img src="https://jenkins.opencord.org/job/omec_func_dev/lastCompletedBuild/artifact/plots/plot.png" alt="Failed to load plot. Either a build is currently running, or the plot was not generated from the last build." width="840" height="360"/>
+ - 'omec-nightly':
+ pod: 'dev'
+ build-node: 'omec-ci'
+ type: 'scale'
+ build-timeout: 180
+ ntl-file: 'scale-16k-rate10.ntl'
+ time: '2'
+ desc: |
+          <img src="https://jenkins.opencord.org/job/omec_scale_dev/lastCompletedBuild/artifact/plots/attach.png" alt="Failed to load plot. Either a build is currently running, or the plot was not generated from the last build." width="840" height="360"/><br />
+          ----------<br />
+          <img src="https://jenkins.opencord.org/job/omec_scale_dev/lastCompletedBuild/artifact/plots/detach.png" alt="Failed to load plot. Either a build is currently running, or the plot was not generated from the last build." width="840" height="360"/><br />
+          ----------<br />
+          <img src="https://jenkins.opencord.org/job/omec_scale_dev/lastCompletedBuild/artifact/plots/ping.png" alt="Failed to load plot. Either a build is currently running, or the plot was not generated from the last build." width="840" height="360"/><br />
# for ngic-rtc
- project:
@@ -1004,3 +1029,154 @@
url: 'https://github.com/{github-organization}/omec-project-ci'
branches:
- 'master'
+
+# OMEC nightly job
+- job-template:
+ id: 'omec-nightly'
+ name: 'omec_{type}_{pod}'
+ project-type: pipeline
+
+ description: |
+ Created from job-template {id} from ci-management/jjb/omec-ci.yaml <br />
+ {desc}
+
+ properties:
+ - cord-infra-properties:
+ build-days-to-keep: '{build-days-to-keep}'
+ artifact-num-to-keep: '{artifact-num-to-keep}'
+
+ wrappers:
+ - lf-infra-wrappers:
+ build-timeout: '{build-timeout}'
+ jenkins-ssh-credential: '{jenkins-ssh-credential}'
+
+ parameters:
+ - string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
+ name: testType
+ default: '{type}'
+ description: 'Type of test'
+
+ - string:
+ name: pod
+ default: '{pod}'
+ description: 'Name of OMEC POD'
+
+ - string:
+ name: timeout
+ default: '{build-timeout}'
+ description: 'Timeout period for this pipeline in minutes'
+
+ - string:
+ name: ntlFile
+ default: '{ntl-file}'
+ description: 'NG40 test automation file'
+
+ - bool:
+ name: redeploy
+ default: true
+ description: 'Redeploy OMEC before running tests if set to true'
+
+ - string:
+ name: hssdbImage
+ default: '{registry-proxy}/c3po-hssdb:master-latest'
+ description: 'Docker image for hssdb. Leave empty to use default helm-charts value'
+
+ - string:
+ name: hssImage
+ default: '{registry-proxy}/c3po-hss:master-latest'
+ description: 'Docker image for hss. Leave empty to use default helm-charts value'
+
+ - string:
+ name: mmeImage
+ default: '{registry-proxy}/nucleus:master-latest'
+ description: 'Docker image for mme. Leave empty to use default helm-charts value'
+
+ - string:
+ name: spgwcImage
+ default: '{registry-proxy}/spgw:master-latest'
+ description: 'Docker image for spgwc. Leave empty to use default helm-charts value'
+
+ - string:
+ name: bessImage
+ default: '{registry-proxy}/upf-epc-bess:master-latest'
+ description: 'Docker image for bess. Leave empty to use default helm-charts value'
+
+ - string:
+ name: zmqifaceImage
+ default: '{registry-proxy}/upf-epc-cpiface:master-latest'
+ description: 'Docker image for zmqiface. Leave empty to use default helm-charts value'
+
+ - string:
+ name: pfcpifaceImage
+ default: '{registry-proxy}/upf-epc-pfcpiface:master-latest'
+ description: 'Docker image for pfcpiface. Leave empty to use default helm-charts value'
+
+ concurrent: false
+
+ triggers:
+ - timed: |
+ TZ=America/Los_Angeles
+ H {time} * * *
+
+ pipeline-scm:
+ script-path: 'Jenkinsfile-omec-nightly.groovy'
+ scm:
+ - git:
+ url: 'https://github.com/{github-organization}/omec-project-ci'
+ branches:
+ - 'master'
+
+# OMEC post results job
+- job-template:
+ id: 'omec-post-results'
+ name: 'omec_post-results_{pod}'
+ project-type: pipeline
+
+ description: |
+ Created from job-template {id} from ci-management/jjb/omec-ci.yaml <br />
+
+ properties:
+ - cord-infra-properties:
+ build-days-to-keep: '{build-days-to-keep}'
+ artifact-num-to-keep: 2
+
+ wrappers:
+ - lf-infra-wrappers:
+ build-timeout: '{build-timeout}'
+ jenkins-ssh-credential: '{jenkins-ssh-credential}'
+
+ parameters:
+ - string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
+ name: pod
+ default: '{pod}'
+ description: 'Name of OMEC pod'
+
+ - string:
+ name: testType
+ default: 'scale'
+          description: 'Type of test, e.g. scale or func'
+
+ - string:
+ name: buildNumber
+ default: '0'
+ description: 'Build number of upstream job'
+
+ concurrent: false
+
+ pipeline-scm:
+ script-path: 'Jenkinsfile-omec-post-results.groovy'
+ scm:
+ - git:
+ url: 'https://github.com/{github-organization}/omec-project-ci'
+ branches:
+ - 'master'
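
Side note on the two templates above: the Jenkinsfiles they point at (Jenkinsfile-omec-nightly.groovy and Jenkinsfile-omec-post-results.groovy) live in omec-project-ci and are not part of this change; the templates only pass configuration through as parameters. A hypothetical sketch of how such a pipeline might consume the omec-nightly parameters (the NG40 step is a placeholder, not the real invocation):

    pipeline {
        // buildNode defaults to the template's {build-node} value ('omec-ci' above)
        agent { label "${params.buildNode}" }
        stages {
            stage('Run NG40 suite') {
                steps {
                    sh """
                        echo "pod=${params.pod} type=${params.testType} redeploy=${params.redeploy}"
                        # placeholder: the real pipeline drives the NG40 automation file ${params.ntlFile}
                    """
                }
            }
        }
    }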
diff --git a/jjb/pipeline/omec-postmerge.groovy b/jjb/pipeline/omec-postmerge.groovy
index d901418..aaac1b7 100644
--- a/jjb/pipeline/omec-postmerge.groovy
+++ b/jjb/pipeline/omec-postmerge.groovy
@@ -61,13 +61,13 @@
stage ("Get Image Tags"){
steps {
script {
- hssdb_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/c3po-hssdb/tags/' | jq '.results[] | select(.name | test("${c3poBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- hss_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/c3po-hss/tags/' | jq '.results[] | select(.name | test("${c3poBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- mme_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/nucleus/tags/' | jq '.results[] | select(.name | test("${nucleusBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- spgwc_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/spgw/tags/' | jq '.results[] | select(.name | test("${spgwBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- bess_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-bess/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- zmqiface_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-cpiface/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
- pfcpiface_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-pfcpiface/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="zdwonf") |.name' | head -1 | tr -d \\\""""
+ hssdb_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/c3po-hssdb/tags/' | jq '.results[] | select(.name | test("${c3poBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ hss_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/c3po-hss/tags/' | jq '.results[] | select(.name | test("${c3poBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ mme_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/nucleus/tags/' | jq '.results[] | select(.name | test("${nucleusBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ spgwc_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/spgw/tags/' | jq '.results[] | select(.name | test("${spgwBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ bess_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-bess/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ zmqiface_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-cpiface/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
+ pfcpiface_tag = sh returnStdout: true, script: """curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/upf-epc-pfcpiface/tags/' | jq '.results[] | select(.name | test("${upfBranchName}-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") |.name' | head -1 | tr -d \\\""""
hssdb_image = "${params.registryProxy}/c3po-hssdb:"+hssdb_tag
hss_image = "${params.registryProxy}/c3po-hss:"+hss_tag
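
The only change in the lookups above is the Docker Hub account they trust (zdwonf -> onfauto); the query itself is untouched. For reference, a standalone sketch of what one of these lookups resolves to, with the branch hard-coded to master and jq -r used in place of the trailing tr (illustrative only):

    node('omec-ci') {
        stage('Preview newest c3po-hss tag') {
            // prints the most recent "master-<7-char sha>" tag pushed by the onfauto account,
            // i.e. the value hss_tag receives above
            sh """
                curl -s 'https://registry.hub.docker.com/v2/repositories/omecproject/c3po-hss/tags/' \\
                  | jq -r '.results[] | select(.name | test("master-[0-9a-z]{7}\$")) | select(.last_updater_username=="onfauto") | .name' \\
                  | head -1
            """
        }
    }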
@@ -109,15 +109,6 @@
}
}
- stage ("Update aether-pod-configs"){
- steps {
- build job: "aether-member-only-jobs/aether-postmerge", parameters: [
- string(name: 'repoName', value: "${repoName}"),
- string(name: 'images', value: "${updatedImages}"),
- ]
- }
- }
-
stage ("Deploy and Test"){
options {
lock(resource: 'aether-dev-cluster')
diff --git a/jjb/pipeline/voltha-bbsim-tests.groovy b/jjb/pipeline/voltha-bbsim-tests.groovy
index 5ece060..65f1eac 100644
--- a/jjb/pipeline/voltha-bbsim-tests.groovy
+++ b/jjb/pipeline/voltha-bbsim-tests.groovy
@@ -33,7 +33,7 @@
NAME="test"
VOLTCONFIG="$HOME/.volt/config-$NAME"
KUBECONFIG="$HOME/.kube/kind-config-voltha-$NAME"
- EXTRA_HELM_FLAGS=" --set defaults.image_registry=mirror.registry.opennetworking.org/ "
+ EXTRA_HELM_FLAGS=" --set global.image_registry=mirror.registry.opennetworking.org/ "
}
stages {
@@ -215,7 +215,7 @@
fi
if [[ "${gerritProject}" == voltha-helm-charts ]]; then
- export EXTRA_HELM_FLAGS+="--set defaults.image_tag=null "
+ export EXTRA_HELM_FLAGS+="--set global.image_tag=null "
fi
# Workflow-specific flags
@@ -343,7 +343,7 @@
export BBSIM_CFG="configs/bbsim-sadis-dt.yaml"
if [[ "${gerritProject}" == voltha-helm-charts ]]; then
- export EXTRA_HELM_FLAGS+="--set defaults.image_tag=null "
+ export EXTRA_HELM_FLAGS+="--set global.image_tag=null "
fi
# start logging
@@ -405,7 +405,7 @@
export BBSIM_CFG="configs/bbsim-sadis-tt.yaml"
if [[ "${gerritProject}" == voltha-helm-charts ]]; then
- export EXTRA_HELM_FLAGS+="--set defaults.image_tag=null "
+ export EXTRA_HELM_FLAGS+="--set global.image_tag=null "
fi
# start logging
@@ -454,9 +454,9 @@
sh '''
# get pods information
- kubectl get pods -o wide
+ kubectl get pods -o wide --all-namespaces
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\n'}"
- helm ls
+ helm ls --all-namespaces
set +e
cp $WORKSPACE/kind-voltha/install-$NAME.log $WORKSPACE/
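
The defaults.* to global.* renames above track the switch from kind-voltha's per-chart values to the values exposed by the voltha umbrella helm charts, the same move this merge makes for the voltha-scale pipeline. A minimal sketch of the equivalent direct helm invocation, assuming the onf/voltha umbrella chart published at charts.opencord.org (neither is configured in this file):

    node {
        stage('Install voltha umbrella chart (sketch)') {
            sh '''
                # repo URL and chart name are assumptions for illustration
                helm repo add onf https://charts.opencord.org
                helm upgrade --install voltha onf/voltha --namespace voltha --create-namespace --set global.image_registry=mirror.registry.opennetworking.org/ --set global.image_tag=null
            '''
        }
    }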
diff --git a/jjb/pipeline/voltha-openonu-go-test-bbsim.groovy b/jjb/pipeline/voltha-openonu-go-test-bbsim.groovy
new file mode 100755
index 0000000..461500b
--- /dev/null
+++ b/jjb/pipeline/voltha-openonu-go-test-bbsim.groovy
@@ -0,0 +1,459 @@
+// Copyright 2017-present Open Networking Foundation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// voltha-2.x e2e tests
+// uses kind-voltha to deploy voltha-2.X
+// uses bbsim to simulate OLT/ONUs
+
+pipeline {
+
+ /* no label, executor is determined by JJB */
+ agent {
+ label "${params.buildNode}"
+ }
+ options {
+ timeout(time: 90, unit: 'MINUTES')
+ }
+ environment {
+ KUBECONFIG="$HOME/.kube/kind-config-voltha-minimal"
+ VOLTCONFIG="$HOME/.volt/config-minimal"
+ PATH="$PATH:$WORKSPACE/kind-voltha/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
+ NAME="minimal"
+ FANCY=0
+ WITH_SIM_ADAPTERS="no"
+ WITH_RADIUS="yes"
+ WITH_BBSIM="yes"
+ DEPLOY_K8S="yes"
+ VOLTHA_LOG_LEVEL="DEBUG"
+ CONFIG_SADIS="external"
+ BBSIM_CFG="configs/bbsim-sadis-att.yaml"
+ ROBOT_MISC_ARGS="-e PowerSwitch ${params.extraRobotArgs}"
+ KARAF_HOME="${params.karafHome}"
+ DIAGS_PROFILE="VOLTHA_PROFILE"
+ NUM_OF_BBSIM="${olts}"
+ }
+ stages {
+ stage('Clone kind-voltha') {
+ steps {
+ checkout([
+ $class: 'GitSCM',
+ userRemoteConfigs: [[
+ url: "https://gerrit.opencord.org/kind-voltha",
+ // refspec: "${kindVolthaChange}"
+ ]],
+ branches: [[ name: "master", ]],
+ extensions: [
+ [$class: 'WipeWorkspace'],
+ [$class: 'RelativeTargetDirectory', relativeTargetDir: "kind-voltha"],
+ [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
+ ],
+ ])
+ }
+ }
+ stage('Cleanup') {
+ steps {
+ sh """
+ cd $WORKSPACE/kind-voltha/
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down || ./voltha down
+ """
+ }
+ }
+ stage('Clone voltha-system-tests') {
+ steps {
+ checkout([
+ $class: 'GitSCM',
+ userRemoteConfigs: [[
+ url: "https://gerrit.opencord.org/voltha-system-tests",
+ // refspec: "${volthaSystemTestsChange}"
+ ]],
+ branches: [[ name: "${branch}", ]],
+ extensions: [
+ [$class: 'WipeWorkspace'],
+ [$class: 'RelativeTargetDirectory', relativeTargetDir: "voltha-system-tests"],
+ [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
+ ],
+ ])
+ }
+ }
+
+ stage('Deploy Voltha') {
+ steps {
+ sh """
+ export EXTRA_HELM_FLAGS=""
+ if [ "${branch}" != "master" ]; then
+ echo "on branch: ${branch}, sourcing kind-voltha/releases/${branch}"
+ source "$WORKSPACE/kind-voltha/releases/${branch}"
+ else
+ echo "on master, using default settings for kind-voltha"
+ fi
+
+ EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${params.extraHelmFlags} --set defaults.image_registry=mirror.registry.opennetworking.org/ "
+
+ cd $WORKSPACE/kind-voltha/
+ ./voltha up
+ """
+ }
+ }
+
+ stage('Run E2E Tests 1t1gem') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/1t1gem"
+ }
+ steps {
+ sh '''
+ # start logging
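+          # kail streams pod logs from the voltha and default namespaces in the background;
+          # the _TAG marker lets the "stop logging" block below find and kill this process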
+ mkdir -p $WORKSPACE/1t1gem
+ _TAG=kail-1t1gem kail -n voltha -n default > $WORKSPACE/1t1gem/onos-voltha-combined.log &
+
+ mkdir -p $ROBOT_LOGS_DIR/1t1gem
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
+ export TARGET_DEFAULT=openonu-go-adapter-test
+ export NAME=voltha_voltha
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET_DEFAULT || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-1t1gem" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/1t1gem/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Run E2E Tests 1t4gem') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/1t4gem"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ #source $NAME-env.sh
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ # start logging
+ mkdir -p $WORKSPACE/1t4gem
+ _TAG=kail-1t4gem kail -n voltha -n default > $WORKSPACE/1t4gem/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR/1t4gem
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
+ export TARGET_DEFAULT=1t4gem-openonu-go-adapter-test
+ export NAME=voltha_voltha
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET_DEFAULT || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-1t4gem" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/1t4gem/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Run E2E Tests 1t8gem') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/1t8gem"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ #source $NAME-env.sh
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ # start logging
+ mkdir -p $WORKSPACE/1t8gem
+ _TAG=kail-1t8gem kail -n voltha -n default > $WORKSPACE/1t8gem/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR/1t8gem
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
+ export TARGET_1T8GEM=1t8gem-openonu-go-adapter-test
+ export NAME=voltha_voltha
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET_1T8GEM || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-1t8gem" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/1t8gem/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Run MIB Upload Tests') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/openonu-go-MIB"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ #source $NAME-env.sh
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ export EXTRA_HELM_FLAGS+="--set pon=2,onu=2,controlledActivation=only-onu "
+
+ # start logging
+ mkdir -p $WORKSPACE/mib
+ _TAG=kail-mib kail -n voltha -n default > $WORKSPACE/mib/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
+ export TARGET_DEFAULT=mib-upload-templating-openonu-go-adapter-test
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET_DEFAULT || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-mib" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/mib/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Reconcile DT workflow') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/ReconcileDT"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ #source $NAME-env.sh
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ # Workflow-specific flags
+ export WITH_RADIUS=no
+ export WITH_EAPOL=no
+ export WITH_DHCP=no
+ export WITH_IGMP=no
+ export CONFIG_SADIS="external"
+ export BBSIM_CFG="configs/bbsim-sadis-dt.yaml"
+
+ # start logging
+          mkdir -p $WORKSPACE/reconciledt
+ _TAG=kail-reconcile-dt kail -n voltha -n default > $WORKSPACE/reconciledt/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR -e PowerSwitch"
+
+ export TARGET=reconcile-openonu-go-adapter-test-dt
+
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-reconcile-dt" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/reconciledt/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Reconcile ATT workflow') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/ReconcileATT"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ # Workflow-specific flags
+ export WITH_RADIUS=yes
+ export WITH_EAPOL=yes
+ export WITH_BBSIM=yes
+ export DEPLOY_K8S=yes
+ export CONFIG_SADIS="external"
+ export BBSIM_CFG="configs/bbsim-sadis-att.yaml"
+
+ if [ "${gerritProject}" = "voltctl" ]; then
+ export VOLTCTL_VERSION=$(cat $WORKSPACE/voltctl/VERSION)
+ cp $WORKSPACE/voltctl/voltctl $WORKSPACE/kind-voltha/bin/voltctl
+ md5sum $WORKSPACE/kind-voltha/bin/voltctl
+ fi
+
+ # start logging
+          mkdir -p $WORKSPACE/reconcileatt
+ _TAG=kail-reconcile-att kail -n voltha -n default > $WORKSPACE/reconcileatt/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR -e PowerSwitch"
+
+ export TARGET=reconcile-openonu-go-adapter-test
+
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-reconcile-att" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/reconcileatt/pods.txt || true
+ '''
+ }
+ }
+
+ stage('Reconcile TT workflow') {
+ environment {
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/ReconcileTT"
+ }
+ steps {
+ sh '''
+ cd $WORKSPACE/kind-voltha/
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
+
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
+
+ # Workflow-specific flags
+ export WITH_RADIUS=no
+ export WITH_EAPOL=no
+ export WITH_DHCP=yes
+ export WITH_IGMP=yes
+ export CONFIG_SADIS="external"
+ export BBSIM_CFG="configs/bbsim-sadis-tt.yaml"
+
+ # start logging
+          mkdir -p $WORKSPACE/reconcilett
+ _TAG=kail-reconcile-tt kail -n voltha -n default > $WORKSPACE/reconcilett/onos-voltha-combined.log &
+
+ DEPLOY_K8S=n ./voltha up
+
+ mkdir -p $ROBOT_LOGS_DIR
+ export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR -e PowerSwitch"
+
+ export TARGET=reconcile-openonu-go-adapter-test-tt
+
+ make -C $WORKSPACE/voltha-system-tests \$TARGET || true
+
+ # stop logging
+ P_IDS="$(ps e -ww -A | grep "_TAG=kail-reconcile-tt" | grep -v grep | awk '{print $1}')"
+ if [ -n "$P_IDS" ]; then
+ echo $P_IDS
+ for P_ID in $P_IDS; do
+ kill -9 $P_ID
+ done
+ fi
+
+ # get pods information
+ kubectl get pods -o wide --all-namespaces > $WORKSPACE/reconcilett/pods.txt || true
+ '''
+ }
+ }
+ }
+ post {
+ always {
+ sh '''
+ set +e
+ # get pods information
+      kubectl get pods -o wide --all-namespaces
+ kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\n'}"
+      helm ls --all-namespaces
+
+ sync
+ pkill kail || true
+ md5sum $WORKSPACE/kind-voltha/bin/voltctl
+
+ ## Pull out errors from log files
+ extract_errors_go() {
+ echo
+ echo "Error summary for $1:"
+        grep $1 $WORKSPACE/*/onos-voltha-combined.log | grep '"level":"error"' | cut -d ' ' -f 2- | jq -r '.msg'
+ echo
+ }
+
+ extract_errors_python() {
+ echo
+ echo "Error summary for $1:"
+        grep $1 $WORKSPACE/*/onos-voltha-combined.log | grep 'ERROR' | cut -d ' ' -f 2-
+ echo
+ }
+
+ extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log || true
+ extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log || true
+ extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log || true
+ extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log || true
+
+      gzip $WORKSPACE/*/onos-voltha-combined.log || true
+ '''
+ step([$class: 'RobotPublisher',
+ disableArchiveOutput: false,
+ logFileName: 'RobotLogs/*/log*.html',
+ otherFiles: '',
+ outputFileName: 'RobotLogs/*/output*.xml',
+ outputPath: '.',
+ passThreshold: 100,
+ reportFileName: 'RobotLogs/*/report*.html',
+ unstableThreshold: 0]);
+ archiveArtifacts artifacts: '**/*.log,**/*.gz,**/*.txt'
+ }
+ }
+}
diff --git a/jjb/pipeline/voltha-openonu-go-tests.groovy b/jjb/pipeline/voltha-openonu-go-tests.groovy
index cb9a0c2..9adf4fe 100755
--- a/jjb/pipeline/voltha-openonu-go-tests.groovy
+++ b/jjb/pipeline/voltha-openonu-go-tests.groovy
@@ -39,7 +39,7 @@
CONFIG_SADIS="external"
BBSIM_CFG="configs/bbsim-sadis-att.yaml"
ROBOT_MISC_ARGS="-d $WORKSPACE/RobotLogs"
- EXTRA_HELM_FLAGS=" --set defaults.image_registry=mirror.registry.opennetworking.org/ "
+ EXTRA_HELM_FLAGS=" --set global.image_registry=mirror.registry.opennetworking.org/ "
}
stages {
stage('Clone kind-voltha') {
@@ -163,7 +163,7 @@
stage('Deploy Voltha') {
steps {
sh '''
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
IMAGES="adapter_open_onu_go"
@@ -180,37 +180,6 @@
}
}
- stage('Run E2E Tests 1t1gem') {
- environment {
- ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/1t1gem"
- }
- steps {
- sh '''
- # start logging
- mkdir -p $WORKSPACE/1t1gem
- _TAG=kail-1t1gem kail -n voltha -n default > $WORKSPACE/1t1gem/onos-voltha-combined.log &
-
- mkdir -p $ROBOT_LOGS_DIR/1t1gem
- export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
- export TARGET_DEFAULT=openonu-go-adapter-test
-
- make -C $WORKSPACE/voltha-system-tests \$TARGET_DEFAULT || true
-
- # stop logging
- P_IDS="$(ps e -ww -A | grep "_TAG=kail-1t1gem" | grep -v grep | awk '{print $1}')"
- if [ -n "$P_IDS" ]; then
- echo $P_IDS
- for P_ID in $P_IDS; do
- kill -9 $P_ID
- done
- fi
-
- # get pods information
- kubectl get pods -o wide --all-namespaces > $WORKSPACE/1t1gem/pods.txt || true
- '''
- }
- }
-
stage('Run E2E Tests 1t8gem') {
environment {
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/1t8gem"
@@ -221,7 +190,7 @@
#source $NAME-env.sh
WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
IMAGES="adapter_open_onu_go"
@@ -238,6 +207,7 @@
mkdir -p $ROBOT_LOGS_DIR/1t8gem
export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
export TARGET_1T8GEM=1t8gem-openonu-go-adapter-test
+ export NAME=voltha_voltha
make -C $WORKSPACE/voltha-system-tests \$TARGET_1T8GEM || true
@@ -256,54 +226,6 @@
}
}
- stage('Run MIB Upload Tests') {
- environment {
- ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/openonu-go-MIB"
- }
- steps {
- sh '''
- cd $WORKSPACE/kind-voltha/
- #source $NAME-env.sh
- WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
-
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
-
- export EXTRA_HELM_FLAGS+="--set pon=2,onu=2,controlledActivation=only-onu "
-
- IMAGES="adapter_open_onu_go"
-
- for I in \$IMAGES
- do
- EXTRA_HELM_FLAGS+="--set images.\$I.tag=citest,images.\$I.pullPolicy=Never "
- done
-
- # start logging
- mkdir -p $WORKSPACE/mib
- _TAG=kail-mib kail -n voltha -n default > $WORKSPACE/mib/onos-voltha-combined.log &
-
- DEPLOY_K8S=n ./voltha up
-
- mkdir -p $ROBOT_LOGS_DIR
- export ROBOT_MISC_ARGS="-d $ROBOT_LOGS_DIR"
- export TARGET_DEFAULT=mib-upload-templating-openonu-go-adapter-test
-
- make -C $WORKSPACE/voltha-system-tests \$TARGET_DEFAULT || true
-
- # stop logging
- P_IDS="$(ps e -ww -A | grep "_TAG=kail-mib" | grep -v grep | awk '{print $1}')"
- if [ -n "$P_IDS" ]; then
- echo $P_IDS
- for P_ID in $P_IDS; do
- kill -9 $P_ID
- done
- fi
-
- # get pods information
- kubectl get pods -o wide --all-namespaces > $WORKSPACE/mib/pods.txt || true
- '''
- }
- }
-
stage('DT workflow') {
environment {
ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/DTWorkflow"
@@ -314,7 +236,7 @@
#source $NAME-env.sh
WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
IMAGES="adapter_open_onu_go"
@@ -378,7 +300,7 @@
cd $WORKSPACE/kind-voltha/
WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
IMAGES="adapter_open_onu_go"
@@ -448,7 +370,7 @@
cd $WORKSPACE/kind-voltha/
WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down
- export EXTRA_HELM_FLAGS+="--set use_openonu_adapter_go=true,log_agent.enabled=False ${extraHelmFlags} "
+ export EXTRA_HELM_FLAGS+="--set log_agent.enabled=False ${extraHelmFlags} "
IMAGES="adapter_open_onu_go"
diff --git a/jjb/verify/up4.yaml b/jjb/verify/up4.yaml
index 524fa39..3c3327f 100644
--- a/jjb/verify/up4.yaml
+++ b/jjb/verify/up4.yaml
@@ -1,74 +1,125 @@
---
# jobs for https://github.com/omec-project/up4
-- project:
- name: up4
- project: "{name}"
- github-organization: "omec-project"
- github_pr_auth_id: "64fe2b1a-b33a-4f13-8442-ad8360434003"
- github_pr_org_list:
- - "omec-project"
- jobs:
- - "up4-jobs":
- stream: "master"
- # As we run integration tests with 3 ONOS instances,
- # we need a beefy node.
- build-node: "ubuntu16.04-basebuild-8c-15g"
+- project:
+ name: up4
+ project: "{name}"
+ github-organization: "omec-project"
+ github_pr_auth_id: "64fe2b1a-b33a-4f13-8442-ad8360434003"
+ github_pr_org_list:
+ - "omec-project"
+ jobs:
+ - "up4-jobs":
+ stream: "master"
+ # As we run integration tests with 3 ONOS instances,
+ # we need a beefy node.
+ build-node: "ubuntu16.04-basebuild-8c-15g"
-- job-group:
- name: "up4-jobs"
- jobs:
- - "up4-pr-verify"
+- job-group:
+ name: "up4-jobs"
+ jobs:
+ - "up4-pr-verify"
-- job-template:
- id: "up4-pr-verify"
- name: "up4-pr-verify"
- description: |
- Created by {id} job-template from ci-management/up4.yaml<br/>
+- job-template:
+ id: "up4-pr-verify"
+ name: "up4-pr-verify"
+ description: |
+    Created by {id} job-template from ci-management/jjb/verify/up4.yaml<br/>
- project-type: pipeline
- concurrent: true
+ project-type: pipeline
+ concurrent: true
- properties:
- # Visibility restricted to Aether members only.
- - cord-infra-aether-private:
- - cord-infra-properties:
- build-days-to-keep: "15"
- artifact-num-to-keep: "-1"
- - github:
- url: "https://github.com/{github-organization}/{project}"
+ properties:
+ # Visibility restricted to Aether members only.
+ - cord-infra-aether-private:
+ - cord-infra-properties:
+ build-days-to-keep: "15"
+ artifact-num-to-keep: "-1"
+ - github:
+ url: "https://github.com/{github-organization}/{project}"
- parameters:
- - string:
- name: buildNode
- default: "{build-node}"
- description: "Name of the Jenkins executor node to run the job on."
- - string:
- name: sha1
- default: "origin/{stream}"
- description: "The actual commit or branch to build."
+ parameters:
+ - string:
+ name: buildNode
+ default: "{build-node}"
+ description: "Name of the Jenkins executor node to run the job on."
+ - string:
+ name: sha1
+ default: "origin/{stream}"
+          description: "SHA string of the pull request commit to build. Populated by GitHub Pull Request Builder."
- # Run pipeline that is checked-in in the same PR we want to verify.
- pipeline-scm:
- script-path: ".jenkins/Jenkinsfile"
- scm:
- - git:
- url: "git@github.com:{github-organization}/{project}.git"
- credentials-id: "github-onf-bot-ssh-key"
- refspec: "+refs/pull/*:refs/remotes/origin/pr/*"
- branches:
- - "$sha1"
- submodule:
- disable: true
- shallow-clone: true
+ # Run pipeline that is checked-in in the same PR we want to verify.
+ pipeline-scm:
+ script-path: ".jenkins/Jenkinsfile"
+ scm:
+ - git:
+ url: "git@github.com:{github-organization}/{project}.git"
+ credentials-id: "github-onf-bot-ssh-key"
+ refspec: "+refs/pull/*:refs/remotes/origin/pr/*"
+ branches:
+ - "$sha1"
+ submodule:
+ disable: true
+ shallow-clone: true
- triggers:
- - cord-infra-github-pr-trigger:
- github_pr_org_list: "{obj:github_pr_org_list}"
- github_pr_auth_id: "{github_pr_auth_id}"
- status_context: "up4-pr-verify"
+ triggers:
+ - cord-infra-github-pr-trigger:
+ github_pr_org_list: "{obj:github_pr_org_list}"
+ github_pr_auth_id: "{github_pr_auth_id}"
+ status_context: "up4-pr-verify"
- wrappers:
- - lf-infra-wrappers:
- build-timeout: "{build-timeout}"
- jenkins-ssh-credential: "{jenkins-ssh-credential}"
+ wrappers:
+ - lf-infra-wrappers:
+ build-timeout: "{build-timeout}"
+ jenkins-ssh-credential: "{jenkins-ssh-credential}"
+
+- job-template:
+ id: "up4-postmerge"
+ name: "up4-postmerge"
+ description: |
+    Created by {id} job-template from ci-management/jjb/verify/up4.yaml<br/>
+
+ project-type: pipeline
+ concurrent: true
+
+ properties:
+ # Visibility restricted to Aether members only.
+ - cord-infra-aether-private:
+ - cord-infra-properties:
+ build-days-to-keep: "15"
+ artifact-num-to-keep: "-1"
+ - github:
+ url: "https://github.com/{github-organization}/{project}"
+
+ parameters:
+ - string:
+ name: buildNode
+ default: "{build-node}"
+ description: "Name of the Jenkins executor node to run the job on."
+ - string:
+ name: commitHash
+ default: 'origin/{stream}'
+ description: 'SHA string of the merged commit to build. Populated by Generic Webhook Trigger.'
+
+ pipeline-scm:
+ script-path: ".jenkins/Jenkinsfile"
+ scm:
+ - git:
+ url: "git@github.com:{github-organization}/{project}.git"
+ credentials-id: "github-onf-bot-ssh-key"
+ refspec: "+refs/heads/*:refs/remotes/origin/*"
+ branches:
+ - "$commitHash"
+ submodule:
+ disable: true
+ shallow-clone: true
+
+ triggers:
+ - cord-infra-github-pr-trigger-merge:
+ project: '{project}'
+ - timed: "@midnight"
+
+ wrappers:
+ - lf-infra-wrappers:
+ build-timeout: "{build-timeout}"
+ jenkins-ssh-credential: "{jenkins-ssh-credential}"
diff --git a/jjb/voltha-e2e.yaml b/jjb/voltha-e2e.yaml
old mode 100644
new mode 100755
index 817e15f..5cb86b9
--- a/jjb/voltha-e2e.yaml
+++ b/jjb/voltha-e2e.yaml
@@ -65,6 +65,15 @@
time-trigger: "H H/12 * * *"
- 'voltha-periodic-test':
+ name: 'periodic-voltha-openonu-go-test-bbsim'
+ pipeline-script: 'voltha-openonu-go-test-bbsim.groovy'
+ build-node: 'ubuntu16.04-basebuild-4c-8g'
+ make-target: openonu-go-adapter-test
+ withAlarms: false
+ code-branch: 'master'
+ time-trigger: "H H/12 * * *"
+
+ - 'voltha-periodic-test':
name: 'periodic-voltha-test-DMI'
pipeline-script: 'voltha-DMI-bbsim-tests.groovy'
build-node: 'qct-pod4-node2'
@@ -157,6 +166,7 @@
oltDebVersionMaster: 'openolt_asfvolt16-3.3.2-f7feb4b828467ccc99104b56b29dc7a19aa2008b-40G-NNI.deb'
oltDebVersionVoltha23: 'openolt_asfvolt16-3.2.0-fc10f0d035181d3125ffc6e7a60bf5328fcf5bfa-40G-NNI.deb'
profile: 'Default'
+ default-test-args: '-i sanityORDeleteOLT -i PowerSwitch -X'
# Per-patchset Pod builds on Tucson pod
- 'verify_physical_voltha_patchset_manual':
@@ -167,7 +177,7 @@
oltDebVersionVoltha23: 'openolt_asfvolt16-3.2.0-fc10f0d035181d3125ffc6e7a60bf5328fcf5bfa-40G-NNI.deb'
profile: 'Default'
trigger-string: 'hardware test'
- default-test-args: '-i sanityORDeleteOLT -X'
+ default-test-args: '-i sanityORDeleteOLT -i PowerSwitch -X'
# Per-patchset Pod builds on Tucson pod
@@ -180,8 +190,7 @@
profile: 'Default'
pipeline-script: 'voltha-dt-physical-build-and-tests.groovy'
trigger-string: 'DT hardware test'
- default-test-args: '-i sanityDt -X'
-
+ default-test-args: '-i sanityDt -i PowerSwitch -X'
# Manual build job for Tucson pod
@@ -194,6 +203,7 @@
oltDebVersionMaster: 'openolt_asfvolt16-3.3.2-f7feb4b828467ccc99104b56b29dc7a19aa2008b-40G-NNI.deb'
oltDebVersionVoltha23: 'openolt_asfvolt16-3.2.0-fc10f0d035181d3125ffc6e7a60bf5328fcf5bfa-40G-NNI.deb'
profile: 'Default'
+ default-test-args: '-i sanityORDeleteOLT -i PowerSwitch -X'
- job-template:
id: 'voltha-periodic-test'
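
The default-test-args strings added above are Robot Framework command-line options: -i/--include limits the run to tests carrying the listed tags (so PowerSwitch-tagged cases are now picked up on the Tucson pod jobs) and -X/--exitonfailure stops the run at the first failing test. A hypothetical sketch of how a physical-pod pipeline might pass them through (the environment variable and make target are assumptions, not taken from this change):

    node("${params.buildNode}") {
        stage('Functional tests (sketch)') {
            // extraRobotArgs would carry the template's default-test-args value
            sh """
                export ROBOT_MISC_ARGS="${params.extraRobotArgs} -d $WORKSPACE/RobotLogs"
                make -C $WORKSPACE/voltha-system-tests sanity-kind || true   # target name illustrative
            """
        }
    }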