Merge "Dumps container logs for xos-core tests"
diff --git a/README.md b/README.md
index c6aa3ed..600674f 100644
--- a/README.md
+++ b/README.md
@@ -169,6 +169,52 @@
> The new AMI ID can be found near the end of the logs of the run of
> [ci-management-packer-merge-<ostype>-basebuild](https://jenkins.opencord.org/job/ci-management-packer-merge-ubuntu-16.04-basebuild/).
+### Finding source AMI images
+
+Source OS images published by upstream projects like Ubuntu and CentOS need to
+be precisely specified so that the correct images are used. Anyone can publish
+images to the AWS Marketplace, so take care to select only the official images.
+
+This is done in Packer using
+[source_ami_filter](https://packer.io/docs/builders/amazon-ebs.html#source_ami_filter)
+which is parameterized on the image `name`, `owner`, and `product-code` within
+the `packer/vars/<os_name>.json` files that define the source images.
+
+Upstream docs that specify AMIs:
+
+- [CentOS](https://wiki.centos.org/Cloud/AWS)
+- [Ubuntu](https://cloud-images.ubuntu.com/locator/ec2/)
+
+Unfortunately, these filter parameters can't always be combined - images from
+the official Ubuntu owner (`099720109477`) don't specify a `product-code` field.
+
+As an alternative, the `aws-marketplace` owner is used, which offers the same
+images. To find the `product-code`, go to the [AWS
+Marketplace](https://aws.amazon.com/marketplace), find the image you want, and
+click the button to launch it. The URL will contain a `productId` UUID
+parameter - find this, then use it to search for the product code with the
+[aws command line](https://docs.aws.amazon.com/cli/latest/index.html):
+
+ aws ec2 describe-images \
+ --owners aws-marketplace \
+ --filters "Name=name,Values=*d83d0782-cb94-46d7-8993-f4ce15d1a484*"
+
+In the output, look for the `ProductCodeId` - this value belongs in the
+`source_ami_filter_product_code` field of the OS JSON file.
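+
+For reference, the relevant keys in a `packer/vars/<os_name>.json` file might
+look like the sketch below - only `source_ami_filter_product_code` is named
+above, so treat the other key names as illustrative and check the existing
+vars files for the exact schema:
+
+    {
+      "source_ami_filter_name": "*ubuntu*16.04*",
+      "source_ami_filter_owner": "aws-marketplace",
+      "source_ami_filter_product_code": "csv6h7oyg29b7epjzg7qdr7no"
+    }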
+
+Once you've determined the correct settings, the Packer filter can be tested
+with this command:
+
+ aws ec2 describe-images \
+ --owners aws-marketplace \
+ --filters "Name=name,Values=*ubuntu*16.04*" \
+ "Name=product-code,Values=csv6h7oyg29b7epjzg7qdr7no" \
+ "Name=architecture,Values=x86_64" \
+ "Name=virtualization-type,Values=hvm" \
+ "Name=root-device-type,Values=ebs"
+
+
### Adding additional EC2 instance types
If you create a new cloud instance type, make sure to set both the `Security
diff --git a/jjb/charts.yaml b/jjb/charts.yaml
index 1c46a78..b359dd6 100644
--- a/jjb/charts.yaml
+++ b/jjb/charts.yaml
@@ -69,7 +69,7 @@
# Setup helm and external repos
helm init --client-only
helm repo add incubator https://kubernetes-charts-incubator.storage.googleapis.com
- helm repo add rook-beta https://charts.rook.io/beta
+ helm repo add rook-release https://charts.rook.io/release
helm repo add cord https://charts.opencord.org
# Update the repo
@@ -164,7 +164,7 @@
# Setup helm and external repos
helm init --client-only
helm repo add incubator https://kubernetes-charts-incubator.storage.googleapis.com
- helm repo add rook-beta https://charts.rook.io/beta
+ helm repo add rook-release https://charts.rook.io/release
helm repo add cord https://charts.opencord.org
git clone https://gerrit.opencord.org/helm-repo-tools
diff --git a/jjb/cord-macros.yaml b/jjb/cord-macros.yaml
index e58eab3..5a0f123 100644
--- a/jjb/cord-macros.yaml
+++ b/jjb/cord-macros.yaml
@@ -138,6 +138,24 @@
org-list: '{obj:github_pr_org_list}'
allow-whitelist-orgs-as-admins: true
+# Trigger on GitHub PR merge
+# docs: https://docs.openstack.org/infra/jenkins-job-builder/triggers.html#triggers.generic-webhook-trigger
+- trigger:
+ name: cord-infra-github-pr-trigger-merge
+ triggers:
+ - generic-webhook-trigger:
+ post-content-params:
+ - type: JSONPath
+ key: action
+ value: $.action
+ - type: JSONPath
+ key: merged
+ value: $.pull_request.merged
+ regex-filter-text: $action,$merged
+ regex-filter-expression: ^(closed,true)$
+ cause: Generic Cause
+ token: '{project}'
+
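+# For reference, the JSONPath keys above pull fields out of GitHub's
+# pull_request webhook payload, and the regex filter fires only when a PR is
+# closed by being merged, i.e. when the payload contains (sketch of the
+# relevant fields only):
+#
+#   {
+#     "action": "closed",
+#     "pull_request": { "merged": true }
+#   }
+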
# wrapper to provide pypi config file
- wrapper:
diff --git a/jjb/cord-test/att-workflow.yaml b/jjb/cord-test/att-workflow.yaml
index cca01c2..f4d304c 100644
--- a/jjb/cord-test/att-workflow.yaml
+++ b/jjb/cord-test/att-workflow.yaml
@@ -11,7 +11,7 @@
jobs:
# onlab pod1 build
- 'build_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1'
profile: 'att-workflow'
branch: 'master'
@@ -22,7 +22,7 @@
# onlab pod1 build - cord6.1(voltha 1.6) - seba1.0
- 'build_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1-voltha16'
profile: 'att-workflow'
branch: 'cord-6.1'
@@ -33,7 +33,7 @@
# flex pod1 test job - using voltha-master branch
- 'build_pod_test':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1-qa'
profile: 'att-workflow'
branch: 'master'
@@ -42,7 +42,7 @@
# onlab pod2 build
- 'build_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod2'
profile: 'att-workflow'
branch: 'master'
@@ -53,7 +53,7 @@
# flex OCP pod with olt/onu : using voltha latest(master)
# - 'build_pod_timer':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord-voltha-master'
# profile: 'att-workflow'
# branch: 'master'
@@ -65,7 +65,7 @@
# flex OCP pod with olt/onu - seba-1.0/cord-6.1 release
# - 'build_pod_timer':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord-voltha16'
# profile: 'att-workflow'
# branch: 'cord-6.1'
@@ -77,7 +77,7 @@
# flex OCP test job - using voltha-master branch
- 'build_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord-voltha-master'
profile: 'att-workflow'
branch: 'master'
@@ -86,7 +86,7 @@
# flex ocp pod test job - seba-1.0/cord-6.1 release
- 'build_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord-voltha16'
profile: 'att-workflow'
branch: 'cord-6.1'
diff --git a/jjb/cord-test/mcord.yaml b/jjb/cord-test/mcord.yaml
index 917d927..6e9fdd1 100644
--- a/jjb/cord-test/mcord.yaml
+++ b/jjb/cord-test/mcord.yaml
@@ -11,14 +11,14 @@
jobs:
# onlab mcord-pod1 build
- 'build_mcord_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onf-mcord-pod1'
profile: 'mcord'
branch: 'master'
Jenkinsfile: 'Jenkinsfile-mcord-local-build'
- 'build_mcord_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onf-mcord-pod2'
profile: 'mcord'
branch: 'master'
@@ -26,7 +26,7 @@
Jenkinsfile: 'Jenkinsfile-mcord-remote-build'
- 'build_mcord_pod_manual':
- testvm: 'mcord-dt'
+ build-node: 'mcord-dt'
config-pod: 'mcord-barcellona-remote-dt'
profile: 'mcord'
branch: 'master'
diff --git a/jjb/cord-test/nightly-build-pipeline.yaml b/jjb/cord-test/nightly-build-pipeline.yaml
index b8cff5d..eaf06bf 100644
--- a/jjb/cord-test/nightly-build-pipeline.yaml
+++ b/jjb/cord-test/nightly-build-pipeline.yaml
@@ -28,9 +28,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -136,9 +141,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -243,9 +253,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -315,9 +330,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -382,9 +402,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -474,9 +499,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -576,9 +606,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -649,9 +684,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -722,9 +762,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -791,7 +836,7 @@
- '{branch}'
- job-template:
- name: 'build_{config-pod}_{profile}_voltha_{release}'
+ name: 'build_{config-pod}_{profile}{name-extension}_voltha_{release}'
id: build_voltha_pod_release
description: |
<!-- Managed by Jenkins Job Builder -->
@@ -804,9 +849,14 @@
parameters:
- string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
name: TestNodeName
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -867,6 +917,10 @@
default: '{onos-version}'
description: 'ONOS version that needs to be configured'
+ - string:
+ name: workFlow
+ default: '{work-flow}'
+ description: 'Installs the specified work flow on the POD'
- bool:
name: released
@@ -904,7 +958,7 @@
H {time} * * *
- job-template:
- name: 'build_{config-pod}_{profile}_voltha_{release}_test'
+ name: 'build_{config-pod}_{profile}_voltha{name-extension}_{release}_test'
id: build_voltha_pod_test
description: |
<!-- Managed by Jenkins Job Builder -->
@@ -921,8 +975,13 @@
parameters:
- string:
name: buildNode
- default: '{testvm}'
- description: 'Jenkins node name of TestVM Node'
+ default: '{build-node}'
+ description: 'Name of the Jenkins executor node to run the job on'
+
+ - string:
+ name: TestNodeName
+ default: '{build-node}'
+ description: 'DEPRECATED - use buildNode instead'
- string:
name: cordRepoUrl
@@ -991,5 +1050,5 @@
triggers:
- reverse:
- jobs: 'build_{config-pod}_{profile}_voltha_{release}'
+ jobs: 'build_{config-pod}_{profile}{name-extension}_voltha_{release}'
result: 'success'
diff --git a/jjb/cord-test/seba-release.yaml b/jjb/cord-test/seba-release.yaml
index b572d6b..a5f6021 100644
--- a/jjb/cord-test/seba-release.yaml
+++ b/jjb/cord-test/seba-release.yaml
@@ -11,7 +11,7 @@
jobs:
# onlab pod1 build 1.0
- 'build_pod_manual_release':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1-voltha16'
release: '1.0'
branch: 'cord-6.1'
@@ -20,7 +20,7 @@
# onlab pod1 build 2.0
- 'build_pod_manual_release':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1'
release: '2.0'
branch: 'master'
@@ -29,7 +29,7 @@
# onlab pod1 test job - release 2.0 test job
- 'build_pod_release_test':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1'
release: '2.0'
branch: 'master'
@@ -37,7 +37,7 @@
# onlab pod2 build
- 'build_pod_manual_release':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod2'
release: '1.0'
branch: 'cord-6.1'
@@ -46,7 +46,7 @@
# onlab pod2 build 2.0
- 'build_pod_manual_release':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod2'
release: '2.0'
branch: 'master'
@@ -55,7 +55,7 @@
# Flex POD build 2.0
- 'build_pod_manual_release':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: '2.0'
branch: 'master'
@@ -64,7 +64,7 @@
# # flex OCP pod with olt/onu - release 2.0 build job
# - 'build_pod_release':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '1.0'
# branch: 'cord-6.1'
@@ -74,7 +74,7 @@
# # flex OCP pod with olt/onu - release 2.0 build job
# - 'build_pod_release':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '2.0'
# branch: 'master'
@@ -84,7 +84,7 @@
#
# # flex OCP POD with olt/onu - release 1.0 test job
# - 'build_pod_release_test':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '1.0'
# branch: 'cord-6.1'
@@ -92,7 +92,7 @@
#
# # flex OCP POD with olt/onu - release 2.0 test job
# - 'build_pod_release_test':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '2.0'
# branch: 'master'
@@ -100,7 +100,7 @@
#
# # flex OCP pod with olt/onu : using voltha latest(master)
# - 'build_pod_release':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '2.0-microcharts'
# branch: 'master'
@@ -110,7 +110,7 @@
#
# # flex OCP test job - release 2.0 test job
# - 'build_pod_release_test':
- # testvm: 'qa-testvm-pod'
+ # build-node: 'qa-testvm-pod'
# config-pod: 'flex-ocp-cord'
# release: '2.0-microcharts'
# branch: 'master'
diff --git a/jjb/cord-test/voltha.yaml b/jjb/cord-test/voltha.yaml
index d85577d..b0cde41 100644
--- a/jjb/cord-test/voltha.yaml
+++ b/jjb/cord-test/voltha.yaml
@@ -11,22 +11,23 @@
with-kind: false
onos-version: '2.2'
power-switch: False
+ work-flow: ''
jobs:
# flex OCP pod with olt/onu - manual test job, voltha master build job
- 'build_pod_manual':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'master'
branch: 'master'
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
profile: '1T4GEM-bal31'
# flex pod1 test job - using voltha branch
- 'build_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
profile: '1T4GEM-bal31'
branch: 'master'
@@ -35,18 +36,18 @@
# onlab pod1 OCP pod with olt/onu - Manual testing BAL3.1 release voltha master build job
- 'build_pod_manual':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1'
release: 'master'
branch: 'master'
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
profile: '1T4GEM-bal31'
# onlab pod1 test job - BAL3.1 tests using voltha branch
- 'build_pod_test':
- testvm: 'onf-build'
+ build-node: 'onf-build'
config-pod: 'onlab-pod1'
profile: '1T4GEM'
branch: 'master'
@@ -55,21 +56,21 @@
# flex OCP pod with olt/onu - Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'master'
branch: 'master'
released: false
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
profile: 'Default'
time: '1'
# flex pod1 test job - uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'master'
branch: 'master'
@@ -80,21 +81,21 @@
# flex OCP pod with olt/onu - uses 1TCONT/4GEMs tech profile on voltha - timer based job
- 'build_voltha_pod_release':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'master'
branch: 'master'
released: false
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
profile: '1T4GEM-bal31'
time: '3'
# flex pod1 test job - test job uses 1TCONT/4GEMs tech profile - using voltha branch
- 'build_voltha_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'master'
branch: 'master'
@@ -105,7 +106,7 @@
# flex OCP pod with olt/onu - Released versions Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'release'
branch: 'master'
@@ -119,7 +120,7 @@
# flex pod1 test job - released versions: uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'qa-testvm-pod'
+ build-node: 'qa-testvm-pod'
config-pod: 'flex-ocp-cord'
release: 'release'
branch: 'master'
@@ -129,21 +130,21 @@
# Menlo pod with olt/onu - Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
released: false
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
profile: 'Default'
time: '1'
# Menlo pod test job - uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
@@ -153,7 +154,7 @@
# Menlo pod with olt/onu - Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'release'
branch: 'master'
@@ -167,7 +168,7 @@
# Menlo pod test job - uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'release'
branch: 'master'
@@ -177,21 +178,21 @@
# Menlo pod with olt/onu - 1T4GEM tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
released: false
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
profile: '1T4GEM-bal31'
time: '4'
- # Menlo pod test job - uses tech profile on voltha branch
+ # Menlo pod test job - master test job uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
@@ -199,19 +200,47 @@
test-repo: 'voltha-system-tests'
profile: '1T4GEM-bal31'
+ # Menlo pod with olt/onu - Uses GO Version - Default tech profile and timer based job
+ - 'build_voltha_pod_release':
+ build-node: 'menlo-demo-pod'
+ config-pod: 'onf-demo-pod'
+ release: 'master'
+ branch: 'master'
+ name-extension: '_DT'
+ work-flow: 'DT'
+ released: false
+ test-repo: 'voltha-system-tests'
+ Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
+ oltDebVersion: 'openolt-2.3.0.deb'
+ configurePod: true
+ profile: '1T8GEM'
+ time: '6'
+
+ # Menlo pod test job - uses tech profile on voltha branch
+ - 'build_voltha_pod_test':
+ build-node: 'menlo-demo-pod'
+ config-pod: 'onf-demo-pod'
+ release: 'master'
+ branch: 'master'
+ name-extension: '_DT'
+ released: false
+ test-repo: 'voltha-system-tests'
+ profile: '1T8GEM'
+ pipeline-script: 'voltha-dt-physical-functional-tests.groovy'
+
# Menlo DEMO-POD - 1 1TCONT 4 4GEMs TechProfile - Manual build and test job
- 'build_pod_manual':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
profile: '1T4GEM-bal31'
- 'build_pod_test':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
branch: 'master'
test-repo: 'voltha-system-tests'
@@ -220,7 +249,7 @@
# Menlo DEMO-POD - Default TechProfile - manual build job
- 'build_pod_manual':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
release: 'master'
branch: 'master'
@@ -232,7 +261,7 @@
# ONF DEMO OCP test job - voltha-master branch
- 'build_pod_test':
- testvm: 'menlo-demo-pod'
+ build-node: 'menlo-demo-pod'
config-pod: 'onf-demo-pod'
profile: 'Default'
branch: 'master'
@@ -241,7 +270,7 @@
# Infosys pod with olt/onu - Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'infosys-test-pod'
+ build-node: 'infosys-test-pod'
config-pod: 'infosys-pod'
release: 'master'
branch: 'master'
@@ -255,7 +284,7 @@
# infosys test job - uses Default tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'infosys-test-pod'
+ build-node: 'infosys-test-pod'
config-pod: 'infosys-pod'
release: 'master'
branch: 'master'
@@ -264,29 +293,30 @@
profile: 'Default'
- # Berlin pod with olt/onu - Released versions Default tech profile and timer based job
+ # Berlin pod with olt/onu - master versions Default tech profile and timer based job
- 'build_voltha_pod_release':
- testvm: 'dt-berlin-community-pod'
+ build-node: 'dt-berlin-community-pod'
config-pod: 'dt-berlin-pod'
- release: 'release'
+ release: 'master'
branch: 'master'
onos-version: '2.2'
test-repo: 'voltha-system-tests'
Jenkinsfile: 'Jenkinsfile-voltha-bal31-build'
- oltDebVersion: 'openolt_asfvolt16_bal3.2.deb'
+ oltDebVersion: 'openolt-2.3.0.deb'
configurePod: true
- released: true
+ released: false
profile: 'Default'
with-kind: true
time: '9'
- # Berlin POD test job - released versions: uses tech profile on voltha branch
+ # Berlin POD test job - master versions: uses tech profile on voltha branch
- 'build_voltha_pod_test':
- testvm: 'dt-berlin-community-pod'
+ build-node: 'dt-berlin-community-pod'
config-pod: 'dt-berlin-pod'
- release: 'release'
+ release: 'master'
branch: 'master'
- released: true
+ released: false
test-repo: 'voltha-system-tests'
profile: 'Default'
+ power-switch: True
diff --git a/jjb/defaults.yaml b/jjb/defaults.yaml
index 9699b23..d2a34bd 100644
--- a/jjb/defaults.yaml
+++ b/jjb/defaults.yaml
@@ -145,9 +145,15 @@
siab-projects-regexp: '^(xos-core/.*|xos-profiles/seba-services/.*|xos-profiles/base-kubernetes/.*|xos-profiles/ponsim-pod/.*|workflows/att-workflow/.*|voltha/.*|onos/.*|mininet/.*|configs/seba-ponsim.yaml)$'
# Optionally allow JUnit results to be empty when test framework is set up,
- # but no tests exist. Default behavior is to fail when test results are empty.
+ # but no tests exist. Default behavior is to fail when test results are
+ # empty. Also will not vote if no test output exists - good for repos where
+ # tests or test output generation may not yet exist.
junit-allow-empty-results: false
+ # Allow xunit to not vote if no test files exist. Default is true because
+ # jUnit is the more commonly supported output format.
+ xunit-skip-if-no-test-files: true
+
# Unit test targets
# List of targets to run when testing a patchset, run with make or similar
# defaults to just 'test', multiple targets should be space separated
diff --git a/jjb/docker-publish-github.yaml b/jjb/docker-publish-github.yaml
index 1071aea..148c228 100644
--- a/jjb/docker-publish-github.yaml
+++ b/jjb/docker-publish-github.yaml
@@ -16,7 +16,8 @@
display-status: 'docker-publish'
triggers:
- - github
+ - cord-infra-github-pr-trigger-merge:
+ project: '{project}'
wrappers:
- lf-infra-wrappers:
diff --git a/jjb/docs.yaml b/jjb/docs.yaml
index 91676b0..c1b244d 100644
--- a/jjb/docs.yaml
+++ b/jjb/docs.yaml
@@ -97,7 +97,7 @@
mkdir -p docs/repos
# Find path to the repo, copy into docs
- PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"$GERRIT_PROJECT\"]/@path)" .repo/manifest.xml)
+ PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"$GERRIT_PROJECT\"]/@path)" .repo/manifests/default.xml)
cp -r "$PROJECT_PATH" "docs/repos/$GERRIT_PROJECT"
# build docs, don't touch checkout under test, capture test result
diff --git a/jjb/make-unit.yaml b/jjb/make-unit.yaml
index 9101df3..cceeb5f 100644
--- a/jjb/make-unit.yaml
+++ b/jjb/make-unit.yaml
@@ -54,6 +54,12 @@
- junit:
results: "**/*results.xml,**/*report.xml"
allow-empty-results: '{junit-allow-empty-results}'
+ - xunit:
+ types:
+ - gtest:
+ pattern: "**/*xunit.xml"
+ deleteoutput: false
+ skip-if-no-test-files: '{xunit-skip-if-no-test-files}'
- cobertura:
report-file: "**/*coverage.xml"
targets:
diff --git a/jjb/omec-ci.yaml b/jjb/omec-ci.yaml
index 585b12e..f2922c9 100644
--- a/jjb/omec-ci.yaml
+++ b/jjb/omec-ci.yaml
@@ -106,6 +106,48 @@
build-timeout: 30
docker-repo: 'omecproject'
+# for nucleus
+- project:
+ name: Nucleus
+ project: '{name}'
+
+ build-node: 'intel-102'
+
+ github-organization: 'omec-project'
+
+ github_pr_auth_id: '64fe2b1a-b33a-4f13-8442-ad8360434003'
+ github_pr_org_list:
+ - 'omec-project'
+
+ jobs:
+ - 'omec-fossa':
+ pipeline-file: 'omec-fossa-scan.groovy'
+ build-node: 'omec-qa'
+ - 'omec-reuse':
+ pipeline-file: 'omec-reuse-scan.groovy'
+ build-node: 'omec-qa'
+
+# for freediameter
+- project:
+ name: freediameter
+ project: '{name}'
+
+ build-node: 'intel-102'
+
+ github-organization: 'omec-project'
+
+ github_pr_auth_id: '64fe2b1a-b33a-4f13-8442-ad8360434003'
+ github_pr_org_list:
+ - 'omec-project'
+
+ jobs:
+ - 'omec-fossa':
+ pipeline-file: 'omec-fossa-scan.groovy'
+ build-node: 'omec-qa'
+ - 'omec-reuse':
+ pipeline-file: 'omec-reuse-scan.groovy'
+ build-node: 'omec-qa'
+
# Combined pipeline
- job-template:
id: 'omec-combined'
diff --git a/jjb/pipeline/all-xos-api-test-helm.groovy b/jjb/pipeline/all-xos-api-test-helm.groovy
index 0b9e46b..65c62a8 100644
--- a/jjb/pipeline/all-xos-api-test-helm.groovy
+++ b/jjb/pipeline/all-xos-api-test-helm.groovy
@@ -81,7 +81,7 @@
}
pushd cord
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifests/default.xml)
repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
pushd \$PROJECT_PATH
diff --git a/jjb/pipeline/all-xos-api-test.groovy b/jjb/pipeline/all-xos-api-test.groovy
index 3e59a9d..a6981c8 100644
--- a/jjb/pipeline/all-xos-api-test.groovy
+++ b/jjb/pipeline/all-xos-api-test.groovy
@@ -46,7 +46,7 @@
steps {
sh """
pushd cord
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifests/default.xml)
repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
popd
"""
diff --git a/jjb/pipeline/omec-fossa-scan.groovy b/jjb/pipeline/omec-fossa-scan.groovy
index f59f276..613b03e 100644
--- a/jjb/pipeline/omec-fossa-scan.groovy
+++ b/jjb/pipeline/omec-fossa-scan.groovy
@@ -40,7 +40,8 @@
steps {
checkout([
$class: 'GitSCM',
- userRemoteConfigs: [[ url: "https://github.com/${params.ghprbGhRepository}", refspec: "+refs/pull/${params.ghprbPullId}/merge" ]],
+ userRemoteConfigs: [[ url: "https://github.com/${params.ghprbGhRepository}", refspec: "pull/${params.ghprbPullId}/head" ]],
+ extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: "${params.project}"]],
],
)
}
@@ -52,6 +53,7 @@
sh """
#!/usr/bin/env bash
+ cd ${params.project}
git checkout FETCH_HEAD
git show
diff --git a/jjb/pipeline/omec-reuse-scan.groovy b/jjb/pipeline/omec-reuse-scan.groovy
index f4fcf51..6b471ab 100644
--- a/jjb/pipeline/omec-reuse-scan.groovy
+++ b/jjb/pipeline/omec-reuse-scan.groovy
@@ -40,7 +40,8 @@
steps {
checkout([
$class: 'GitSCM',
- userRemoteConfigs: [[ url: "https://github.com/${params.ghprbGhRepository}", refspec: "+refs/pull/${params.ghprbPullId}/merge" ]],
+ userRemoteConfigs: [[ url: "https://github.com/${params.ghprbGhRepository}", refspec: "pull/${params.ghprbPullId}/head" ]],
+ extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: "${params.project}"]],
],
)
}
@@ -51,9 +52,15 @@
sh """
#!/usr/bin/env bash
+ cd ${params.project}
git checkout FETCH_HEAD
git show
+ mkdir ../jenkins-license-scan
+                    cp --parents \$(git diff-tree --no-commit-id --name-only -r HEAD) ../jenkins-license-scan
+ cd ../jenkins-license-scan
+
+ reuse download --all
reuse lint
"""
}
diff --git a/jjb/pipeline/siab-test.groovy b/jjb/pipeline/siab-test.groovy
index b13ca11..1ab109e 100644
--- a/jjb/pipeline/siab-test.groovy
+++ b/jjb/pipeline/siab-test.groovy
@@ -47,18 +47,6 @@
}
}
- // FIXME: remove once Zack completes cord-tester refactoring, as master is
- // currently broken for siab tests.
- stage('Cord-tester fix') {
- steps {
- sh """
- pushd $WORKSPACE/cord/test/cord-tester
- git checkout -b foo 7b3f901659a22c09e4759e343ad693b80125e06b
- popd
- """
- }
- }
-
stage ('Reset Kubeadm') {
steps {
sh """
diff --git a/jjb/pipeline/voltha-bbsim-tests.groovy b/jjb/pipeline/voltha-bbsim-tests.groovy
index 1865276..a64d6ef 100644
--- a/jjb/pipeline/voltha-bbsim-tests.groovy
+++ b/jjb/pipeline/voltha-bbsim-tests.groovy
@@ -37,7 +37,7 @@
DEPLOY_K8S="y"
VOLTHA_LOG_LEVEL="DEBUG"
CONFIG_SADIS="n"
- ROBOT_MISC_ARGS="-d $WORKSPACE/RobotLogs -v teardown_device:False"
+ ROBOT_MISC_ARGS="-d $WORKSPACE/RobotLogs"
}
stages {
@@ -64,8 +64,12 @@
steps {
sh """
pushd voltha
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
- repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
+ if [ "${gerritProject}" != "" -a "${gerritChangeNumber}" != "" -a "${gerritPatchsetNumber}" != "" ]
+ then
+ repo download "${gerritProject}" "${gerritChangeNumber}/${gerritPatchsetNumber}"
+ else
+ echo "No patchset to download!"
+ fi
popd
"""
}
@@ -131,7 +135,10 @@
if [ "${gerritProject}" = "voltha-go" ]; then
IMAGES="rw_core ro_core "
elif [ "${gerritProject}" = "ofagent-py" ]; then
- IMAGES="ofagent "
+ IMAGES="ofagent_py "
+ EXTRA_HELM_FLAGS+="--set use_ofagent_go=false "
+ elif [ "${gerritProject}" = "ofagent-go" ]; then
+ IMAGES="ofagent_go "
elif [ "${gerritProject}" = "voltha-onos" ]; then
IMAGES="onos "
elif [ "${gerritProject}" = "voltha-openolt-adapter" ]; then
diff --git a/jjb/pipeline/voltha-dt-physical-functional-tests.groovy b/jjb/pipeline/voltha-dt-physical-functional-tests.groovy
new file mode 100644
index 0000000..98f4edb
--- /dev/null
+++ b/jjb/pipeline/voltha-dt-physical-functional-tests.groovy
@@ -0,0 +1,165 @@
+// Copyright 2017-present Open Networking Foundation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+node {
+ // Need this so that deployment_config has global scope when it's read later
+ deployment_config = null
+}
+
+pipeline {
+ /* no label, executor is determined by JJB */
+ agent {
+ label "${params.buildNode}"
+ }
+ options {
+ timeout(time: 90, unit: 'MINUTES')
+ }
+
+ environment {
+ KUBECONFIG="$WORKSPACE/${configBaseDir}/${configKubernetesDir}/${configFileName}.conf"
+ VOLTCONFIG="$HOME/.volt/config-minimal"
+ PATH="$WORKSPACE/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
+ }
+
+ stages {
+ stage ('Initialize') {
+ steps {
+ step([$class: 'WsCleanup'])
+ sh returnStdout: false, script: "git clone -b ${branch} ${cordRepoUrl}/${configBaseDir}"
+ script {
+ deployment_config = readYaml file: "${configBaseDir}/${configDeploymentDir}/${configFileName}-DT.yaml"
+ }
+ // This checkout is just so that we can show changes in Jenkins
+ checkout(changelog: true,
+ poll: false,
+ scm: [$class: 'RepoScm',
+ manifestRepositoryUrl: "${params.manifestUrl}",
+ manifestBranch: "${params.manifestBranch}",
+ currentBranch: true,
+ destinationDir: 'voltha',
+ forceSync: true,
+ resetFirst: true,
+ quiet: true,
+ jobs: 4,
+ showAllChanges: true]
+ )
+ sh returnStdout: false, script: """
+ cd voltha
+ git clone -b ${branch} ${cordRepoUrl}/cord-tester
+ mkdir -p $WORKSPACE/bin
+ bash <( curl -sfL https://raw.githubusercontent.com/boz/kail/master/godownloader.sh) -b "$WORKSPACE/bin"
+ cd $WORKSPACE
+ git clone https://github.com/ciena/kind-voltha.git
+
+ VC_VERSION=\$(curl -sSL https://api.github.com/repos/opencord/voltctl/releases/latest | jq -r .tag_name | sed -e 's/^v//g')
+ HOSTOS=\$(uname -s | tr "[:upper:]" "[:lower:"])
+ HOSTARCH=\$(uname -m | tr "[:upper:]" "[:lower:"])
+ if [ \$HOSTARCH == "x86_64" ]; then
+ HOSTARCH="amd64"
+ fi
+ curl -o $WORKSPACE/bin/voltctl -sSL https://github.com/opencord/voltctl/releases/download/v\${VC_VERSION}/voltctl-\${VC_VERSION}-\${HOSTOS}-\${HOSTARCH}
+ chmod 755 $WORKSPACE/bin/voltctl
+ voltctl version --clientonly
+ """
+ }
+ }
+
+ stage('Functional Tests') {
+ environment {
+ ROBOT_CONFIG_FILE="$WORKSPACE/${configBaseDir}/${configDeploymentDir}/${configFileName}-DT.yaml"
+ ROBOT_FILE="Voltha_DT_PODTests.robot"
+ ROBOT_LOGS_DIR="$WORKSPACE/RobotLogs/dt-workflow"
+ }
+ steps {
+ sh """
+ cd $WORKSPACE/kind-voltha/scripts
+ ./log-collector.sh > /dev/null &
+ ./log-combine.sh > /dev/null &
+
+ mkdir -p $ROBOT_LOGS_DIR
+ if ( ${released} ); then
+ export ROBOT_MISC_ARGS="--removekeywords wuks -i released -i sanityDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE"
+ else
+ export ROBOT_MISC_ARGS="--removekeywords wuks -i sanityDt -i functionalDt -e bbsim -e notready -d $ROBOT_LOGS_DIR -v POD_NAME:${configFileName} -v KUBERNETES_CONFIGS_DIR:$WORKSPACE/${configBaseDir}/${configKubernetesDir} -v container_log_dir:$WORKSPACE"
+ fi
+ make -C $WORKSPACE/voltha/voltha-system-tests voltha-dt-test || true
+ """
+ }
+ }
+ }
+ post {
+ always {
+ sh returnStdout: false, script: '''
+ set +e
+ kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\t'}{.imageID}{'\\n'}" | sort | uniq -c
+ kubectl get nodes -o wide
+ kubectl get pods -n voltha -o wide
+
+ sleep 60 # Wait for log-collector and log-combine to complete
+
+ # Clean up "announcer" pod used by the tests if present
+ kubectl delete pod announcer || true
+
+ ## Pull out errors from log files
+ extract_errors_go() {
+ echo
+ echo "Error summary for $1:"
+ grep '"level":"error"' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_python() {
+ echo
+ echo "Error summary for $1:"
+ grep 'ERROR' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
+ extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
+ extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
+ extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log
+ extract_errors_python onos >> $WORKSPACE/error-report.log
+
+ cd $WORKSPACE/kind-voltha/scripts/logger/combined/
+ tar czf $WORKSPACE/container-logs.tgz *
+
+ cd $WORKSPACE
+ gzip *-combined.log || true
+ '''
+ script {
+ deployment_config.olts.each { olt ->
+ sh returnStdout: false, script: """
+ sshpass -p ${olt.pass} scp ${olt.user}@${olt.ip}:/var/log/openolt.log $WORKSPACE/openolt-${olt.ip}.log || true
+ sed -i 's/\\x1b\\[[0-9;]*[a-zA-Z]//g' $WORKSPACE/openolt-${olt.ip}.log # Remove escape sequences
+ """
+ }
+ }
+ step([$class: 'RobotPublisher',
+ disableArchiveOutput: false,
+ logFileName: '**/log*.html',
+ otherFiles: '',
+ outputFileName: '**/output*.xml',
+ outputPath: 'RobotLogs',
+ passThreshold: 100,
+ reportFileName: '**/report*.html',
+ unstableThreshold: 0
+ ]);
+ archiveArtifacts artifacts: '*.log,*.gz,*.tgz'
+ }
+ unstable {
+ step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: "${notificationEmail}", sendToIndividuals: false])
+ }
+ }
+}
diff --git a/jjb/pipeline/voltha-go-multi-tests.groovy b/jjb/pipeline/voltha-go-multi-tests.groovy
index c8886ff..4d0fde1 100644
--- a/jjb/pipeline/voltha-go-multi-tests.groovy
+++ b/jjb/pipeline/voltha-go-multi-tests.groovy
@@ -28,7 +28,7 @@
environment {
KUBECONFIG="$HOME/.kube/kind-config-voltha-minimal"
VOLTCONFIG="$HOME/.volt/config-minimal"
- PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$WORKSPACE/kind-voltha/bin"
+ PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$HOME/kind-voltha/bin"
TYPE="minimal"
FANCY=0
WITH_SIM_ADAPTERS="n"
@@ -64,7 +64,11 @@
stage('Download kind-voltha') {
steps {
sh """
- git clone https://github.com/ciena/kind-voltha.git
+ cd $HOME
+ [ -d kind-voltha ] || git clone https://github.com/ciena/kind-voltha.git
+ rm -rf $HOME/kind-voltha/scripts/logger
+ cd $HOME/kind-voltha
+ git pull
"""
}
}
@@ -72,9 +76,8 @@
stage('Deploy Voltha') {
steps {
sh """
- cd kind-voltha/
- JUST_K8S=y ./voltha up
- kail -n voltha -n default > $WORKSPACE/onos-voltha-combined.log &
+ cd $HOME/kind-voltha/
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down || ./voltha down
./voltha up
"""
}
@@ -83,9 +86,14 @@
stage('Run E2E Tests') {
steps {
sh '''
+ set +e
mkdir -p $WORKSPACE/RobotLogs
git clone https://gerrit.opencord.org/voltha-system-tests
- cd kind-voltha
+
+ cd $HOME/kind-voltha/scripts
+ ./log-collector.sh > /dev/null &
+ ./log-combine.sh > /dev/null &
+
for i in \$(seq 1 ${testRuns})
do
make -C $WORKSPACE/voltha-system-tests ${makeTarget}
@@ -101,40 +109,44 @@
always {
sh '''
set +e
- cp $WORKSPACE/kind-voltha/install-minimal.log $WORKSPACE/
+ cp $HOME/kind-voltha/install-minimal.log $WORKSPACE/
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\t'}{.imageID}{'\\n'}" | sort | uniq -c
kubectl get nodes -o wide
kubectl get pods -o wide
kubectl get pods -n voltha -o wide
- sync
- pkill kail || true
+ sleep 60 # Wait for log-collector and log-combine to complete
## Pull out errors from log files
extract_errors_go() {
echo
echo "Error summary for $1:"
- grep $1 $WORKSPACE/onos-voltha-combined.log | grep '"level":"error"' | cut -d ' ' -f 2- | jq -r '.msg'
+ grep '"level":"error"' $HOME/kind-voltha/scripts/logger/combined/$1*
echo
}
extract_errors_python() {
echo
echo "Error summary for $1:"
- grep $1 $WORKSPACE/onos-voltha-combined.log | grep 'ERROR' | cut -d ' ' -f 2-
+ grep 'ERROR' $HOME/kind-voltha/scripts/logger/combined/$1*
echo
}
extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
- extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log
+ extract_errors_go voltha-ofagent >> $WORKSPACE/error-report.log
+ extract_errors_python onos >> $WORKSPACE/error-report.log
- gzip $WORKSPACE/onos-voltha-combined.log
+ cd $HOME/kind-voltha/scripts/logger/combined/
+ tar czf $WORKSPACE/container-logs.tgz *
- ## shut down voltha
- cd $WORKSPACE/kind-voltha/
- WAIT_ON_DOWN=y ./voltha down
+ cd $WORKSPACE
+ gzip *-combined.log || true
+
+ ## shut down voltha but leave kind-voltha cluster
+ cd $HOME/kind-voltha/
+ DEPLOY_K8S=n WAIT_ON_DOWN=y ./voltha down
'''
step([$class: 'RobotPublisher',
disableArchiveOutput: false,
@@ -145,7 +157,7 @@
passThreshold: 100,
reportFileName: 'RobotLogs/report*.html',
unstableThreshold: 0]);
- archiveArtifacts artifacts: '*.log,*.gz'
+ archiveArtifacts artifacts: '*.log,*.gz,*.tgz'
}
}
diff --git a/jjb/pipeline/voltha-go-tests.groovy b/jjb/pipeline/voltha-go-tests.groovy
index 33da214..38efb38 100644
--- a/jjb/pipeline/voltha-go-tests.groovy
+++ b/jjb/pipeline/voltha-go-tests.groovy
@@ -28,7 +28,7 @@
environment {
KUBECONFIG="$HOME/.kube/kind-config-voltha-minimal"
VOLTCONFIG="$HOME/.volt/config-minimal"
- PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$WORKSPACE/kind-voltha/bin"
+ PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$HOME/kind-voltha/bin"
TYPE="minimal"
FANCY=0
WITH_SIM_ADAPTERS="n"
@@ -64,7 +64,11 @@
stage('Download kind-voltha') {
steps {
sh """
- git clone https://github.com/ciena/kind-voltha.git
+ cd $HOME
+ [ -d kind-voltha ] || git clone https://github.com/ciena/kind-voltha.git
+ rm -rf $HOME/kind-voltha/scripts/logger
+ cd $HOME/kind-voltha
+ git pull
"""
}
}
@@ -72,8 +76,8 @@
stage('Deploy Voltha') {
steps {
sh """
- cd kind-voltha/
- JUST_K8S=y ./voltha up
+ cd $HOME/kind-voltha/
+ WAIT_ON_DOWN=y DEPLOY_K8S=n ./voltha down || ./voltha down
./voltha up
"""
}
@@ -86,7 +90,7 @@
mkdir -p $WORKSPACE/RobotLogs
git clone https://gerrit.opencord.org/voltha-system-tests
- cd $WORKSPACE/kind-voltha/scripts
+ cd $HOME/kind-voltha/scripts
./log-collector.sh > /dev/null &
./log-combine.sh > /dev/null &
@@ -100,7 +104,7 @@
always {
sh '''
set +e
- cp $WORKSPACE/kind-voltha/install-minimal.log $WORKSPACE/
+ cp $HOME/kind-voltha/install-minimal.log $WORKSPACE/
kubectl get pods --all-namespaces -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\t'}{.imageID}{'\\n'}" | sort | uniq -c
kubectl get nodes -o wide
kubectl get pods -o wide
@@ -108,15 +112,36 @@
sleep 60 # Wait for log-collector and log-combine to complete
- cd $WORKSPACE/kind-voltha/scripts/logger/combined/
+ ## Pull out errors from log files
+ extract_errors_go() {
+ echo
+ echo "Error summary for $1:"
+ grep '"level":"error"' $HOME/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_python() {
+ echo
+ echo "Error summary for $1:"
+ grep 'ERROR' $HOME/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
+ extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
+ extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
+ extract_errors_go voltha-ofagent >> $WORKSPACE/error-report.log
+ extract_errors_python onos >> $WORKSPACE/error-report.log
+
+ cd $HOME/kind-voltha/scripts/logger/combined/
tar czf $WORKSPACE/container-logs.tgz *
cd $WORKSPACE
gzip *-combined.log || true
- ## shut down voltha
- cd $WORKSPACE/kind-voltha/
- WAIT_ON_DOWN=y ./voltha down
+ ## shut down voltha but leave kind-voltha cluster
+ cd $HOME/kind-voltha/
+ DEPLOY_K8S=n WAIT_ON_DOWN=y ./voltha down
'''
step([$class: 'RobotPublisher',
disableArchiveOutput: false,
diff --git a/jjb/pipeline/voltha-physical-build-and-tests.groovy b/jjb/pipeline/voltha-physical-build-and-tests.groovy
index 5fe2922..ffb45b6 100644
--- a/jjb/pipeline/voltha-physical-build-and-tests.groovy
+++ b/jjb/pipeline/voltha-physical-build-and-tests.groovy
@@ -95,8 +95,7 @@
steps {
sh returnStdout: false, script: """
cd voltha
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
- repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
+ repo download "${gerritProject}" "${gerritChangeNumber}/${gerritPatchsetNumber}"
"""
}
}
diff --git a/jjb/pipeline/voltha-physical-functional-tests.groovy b/jjb/pipeline/voltha-physical-functional-tests.groovy
index 51eb413..3cf088b 100644
--- a/jjb/pipeline/voltha-physical-functional-tests.groovy
+++ b/jjb/pipeline/voltha-physical-functional-tests.groovy
@@ -23,13 +23,13 @@
label "${params.buildNode}"
}
options {
- timeout(time: 90, unit: 'MINUTES')
+ timeout(time: 180, unit: 'MINUTES')
}
environment {
KUBECONFIG="$WORKSPACE/${configBaseDir}/${configKubernetesDir}/${configFileName}.conf"
VOLTCONFIG="$HOME/.volt/config-minimal"
- PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$WORKSPACE/bin"
+ PATH="$WORKSPACE/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
}
stages {
@@ -61,9 +61,20 @@
bash <( curl -sfL https://raw.githubusercontent.com/boz/kail/master/godownloader.sh) -b "$WORKSPACE/bin"
cd $WORKSPACE
git clone https://github.com/ciena/kind-voltha.git
+
+ VC_VERSION=\$(curl -sSL https://api.github.com/repos/opencord/voltctl/releases/latest | jq -r .tag_name | sed -e 's/^v//g')
+ HOSTOS=\$(uname -s | tr "[:upper:]" "[:lower:"])
+ HOSTARCH=\$(uname -m | tr "[:upper:]" "[:lower:"])
+ if [ \$HOSTARCH == "x86_64" ]; then
+ HOSTARCH="amd64"
+ fi
+ curl -o $WORKSPACE/bin/voltctl -sSL https://github.com/opencord/voltctl/releases/download/v\${VC_VERSION}/voltctl-\${VC_VERSION}-\${HOSTOS}-\${HOSTARCH}
+ chmod 755 $WORKSPACE/bin/voltctl
+ voltctl version --clientonly
"""
}
}
+
stage('Functional Tests') {
environment {
ROBOT_CONFIG_FILE="$WORKSPACE/${configBaseDir}/${configDeploymentDir}/${configFileName}.yaml"
@@ -137,6 +148,30 @@
sleep 60 # Wait for log-collector and log-combine to complete
+ # Clean up "announcer" pod used by the tests if present
+ kubectl delete pod announcer || true
+
+ ## Pull out errors from log files
+ extract_errors_go() {
+ echo
+ echo "Error summary for $1:"
+ grep '"level":"error"' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_python() {
+ echo
+ echo "Error summary for $1:"
+ grep 'ERROR' $WORKSPACE/kind-voltha/scripts/logger/combined/$1*
+ echo
+ }
+
+ extract_errors_go voltha-rw-core > $WORKSPACE/error-report.log
+ extract_errors_go adapter-open-olt >> $WORKSPACE/error-report.log
+ extract_errors_python adapter-open-onu >> $WORKSPACE/error-report.log
+ extract_errors_python voltha-ofagent >> $WORKSPACE/error-report.log
+ extract_errors_python onos >> $WORKSPACE/error-report.log
+
cd $WORKSPACE/kind-voltha/scripts/logger/combined/
tar czf $WORKSPACE/container-logs.tgz *
diff --git a/jjb/pipeline/voltha-scale-measurements.groovy b/jjb/pipeline/voltha-scale-measurements.groovy
index 9b5930c..038c5b2 100644
--- a/jjb/pipeline/voltha-scale-measurements.groovy
+++ b/jjb/pipeline/voltha-scale-measurements.groovy
@@ -7,38 +7,31 @@
environment {
KUBECONFIG="$HOME/.kube/kind-config-voltha-minimal"
VOLTCONFIG="$HOME/.volt/config-minimal"
- PATH="$WORKSPACE/kind-voltha/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
- TYPE="minimal"
- FANCY=0
- SECONDS=0
- WITH_SIM_ADAPTERS="n"
- WITH_RADIUS="y"
- WITH_BBSIM="y"
- DEPLOY_K8S="y"
- VOLTHA_LOG_LEVEL="WARN"
- CONFIG_SADIS="n"
- ROBOT_MISC_ARGS="-d $WORKSPACE/RobotLogs -v teardown_device:False"
SSHPASS="karaf"
+ DEPLOY_K8S="n"
}
-
stages {
- stage('checkout') {
+ stage('set-description') {
steps {
- checkout([
- $class: 'GitSCM',
- userRemoteConfigs: [[ url: "https://github.com/ciena/kind-voltha.git", ]],
- branches: [[ name: "master", ]],
- extensions: [
- [$class: 'WipeWorkspace'],
- [$class: 'RelativeTargetDirectory', relativeTargetDir: "kind-voltha"],
- [$class: 'CloneOption', depth: 0, noTags: false, reference: '', shallow: false],
- ],
- ])
script {
- git_tags = sh(script:"cd kind-voltha; git tag -l --points-at HEAD", returnStdout: true).trim()
+ currentBuild.description = "$BUILD_TIMESTAMP"
}
}
}
+ stage('cleanup') {
+ steps {
+ sh '''
+ rm -rf voltha-devices-count.txt voltha-devices-time.txt onos-ports-count.txt onos-ports-time.txt onos-ports-list.txt voltha-devices-list.json onos-ports-time-num.txt voltha-devices-time-num.txt
+ for hchart in \$(helm list -q | grep -E -v 'docker-registry|cord-kafka|etcd-operator');
+ do
+ echo "Purging chart: \${hchart}"
+ helm delete --purge "\${hchart}"
+ done
+ bash /home/cord/voltha-scale/wait_for_pods.sh
+ bash /home/cord/voltha-scale/stop_port_forward.sh
+ '''
+ }
+ }
stage('start') {
steps {
sh '''
@@ -48,102 +41,198 @@
}
}
stage('deploy-voltha') {
+ options {
+ timeout(time:10)
+ }
steps {
sh '''
- cd kind-voltha
- EXTRA_HELM_FLAGS="--set onu=${onuPerPon},pon=${ponPorts},delay=${BBSIMdelay}" ./voltha up
+ helm repo update
+ helm install -n nem-monitoring cord/nem-monitoring --set kpi_exporter.enabled=false,dashboards.xos=false,dashboards.onos=false,dashboards.aaa=false,dashboards.voltha=false
+
+ IFS=: read -r onosRepo onosTag <<< ${onosImg}
+ helm install -n onos onf/onos --set images.onos.repository=${onosRepo} --set images.onos.tag=${onosTag} ${extraHelmFlags}
+
+ IFS=: read -r volthaRepo volthaTag <<< ${volthaImg}
+ IFS=: read -r ofAgentRepo ofAgentTag <<< ${ofAgentImg}
+ helm install -n voltha ${volthaChart} -f /home/cord/voltha-scale/voltha-values.yaml --set defaults.log_level=${logLevel},images.rw_core.repository=${volthaRepo},images.rw_core.tag=${volthaTag},images.ofagent_go.repository=${ofAgentRepo},images.ofagent_go.tag=${ofAgentTag} ${extraHelmFlags}
+
+ IFS=: read -r openoltAdapterRepo openoltAdapterTag <<< ${openoltAdapterImg}
+ helm install -n openolt ${openoltAdapterChart} -f /home/cord/voltha-scale/voltha-values.yaml --set defaults.log_level=${logLevel},images.adapter_open_olt.repository=${openoltAdapterRepo},images.adapter_open_olt.tag=${openoltAdapterTag} ${extraHelmFlags}
+
+ IFS=: read -r openonuAdapterRepo openonuAdapterTag <<< ${openonuAdapterImg}
+ helm install -n openonu ${openonuAdapterChart} -f /home/cord/voltha-scale/voltha-values.yaml --set defaults.log_level=${logLevel},images.adapter_open_onu.repository=${openonuAdapterRepo},images.adapter_open_onu.tag=${openonuAdapterTag} ${extraHelmFlags}
+
+ IFS=: read -r bbsimRepo bbsimTag <<< ${bbsimImg}
+ helm install -n bbsim ${bbsimChart} --set enablePerf=true,pon=${ponPorts},onu=${onuPerPon},auth=${bbsimAuth},dhcp=${bbsimDhcp},delay=${BBSIMdelay},images.bbsim.repository=${bbsimRepo},images.bbsim.tag=${bbsimTag} ${extraHelmFlags}
+
+ helm install -n radius onf/freeradius ${extraHelmFlags}
+
+ bash /home/cord/voltha-scale/wait_for_pods.sh
+ bash /home/cord/voltha-scale/start_port_forward.sh
'''
}
}
+ stage('wait for adapters to be registered') {
+ options {
+ timeout(time:5)
+ }
+ steps{
+ waitUntil {
+ script {
+ openolt_res = sh returnStdout: true, script: """
+ voltctl adapter list | grep openolt | wc -l
+ """
+
+ openonu_res = sh returnStdout: true, script: """
+ voltctl adapter list | grep brcm_openomci_onu | wc -l
+ """
+
+ return openolt_res.toInteger() == 1 && openonu_res.toInteger() == 1
+ }
+ }
+ }
+ }
stage('MIB-template') {
steps {
sh '''
if [ ${withMibTemplate} = true ] ; then
- git clone https://github.com/opencord/voltha-openonu-adapter.git
- cat voltha-openonu-adapter/templates/BBSM-12345123451234512345-00000000000001-v1.json | kubectl exec -it -n voltha $(kubectl get pods -n voltha | grep etcd-cluster | awk 'NR==1{print $1}') etcdctl put service/voltha/omci_mibs/templates/BBSM/12345123451234512345/00000000000001
- rm -rf voltha-openonu-adapter
+ rm -f BBSM-12345123451234512345-00000000000001-v1.json
+ wget https://raw.githubusercontent.com/opencord/voltha-openonu-adapter/master/templates/BBSM-12345123451234512345-00000000000001-v1.json
+ cat BBSM-12345123451234512345-00000000000001-v1.json | kubectl exec -it $(kubectl get pods | grep etcd-cluster | awk 'NR==1{print $1}') etcdctl put service/voltha/omci_mibs/templates/BBSM/12345123451234512345/00000000000001
fi
'''
}
}
stage('disable-ONOS-apps') {
steps {
- sh '''
+ sh '''
#Check withOnosApps and disable apps accordingly
if [ ${withOnosApps} = false ] ; then
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost app deactivate org.opencord.olt
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost app deactivate org.opencord.aaa
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost app deactivate org.opencord.dhcpl2relay
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost app deactivate org.opencord.olt
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost app deactivate org.opencord.aaa
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost app deactivate org.opencord.dhcpl2relay
fi
- '''
+ '''
}
}
stage('configuration') {
steps {
sh '''
- #Setting LOG level to WARN
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost log:set WARN
- kubectl exec -n voltha $(kubectl get pods -n voltha | grep bbsim | awk 'NR==1{print $1}') bbsimctl log warn false
+ #Setting LOG level to ${logLevel}
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost log:set ${logLevel}
+ kubectl exec $(kubectl get pods | grep bbsim | awk 'NR==1{print $1}') bbsimctl log warn false
#Setting link discovery
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost cfg set org.onosproject.provider.lldp.impl.LldpLinkProvider enabled ${setLinkDiscovery}
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost cfg set org.onosproject.provider.lldp.impl.LldpLinkProvider enabled ${setLinkDiscovery}
#Setting the flow stats collection interval
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost cfg set org.onosproject.provider.of.flow.impl.OpenFlowRuleProvider flowPollFrequency ${flowStatInterval}
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost cfg set org.onosproject.provider.of.flow.impl.OpenFlowRuleProvider flowPollFrequency ${flowStatInterval}
#Setting the ports stats collection interval
- sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost cfg set org.onosproject.provider.of.device.impl.OpenFlowDeviceProvider portStatsPollFrequency ${portsStatInterval}
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost cfg set org.onosproject.provider.of.device.impl.OpenFlowDeviceProvider portStatsPollFrequency ${portsStatInterval}
+ # extending voltctl timeout
+ sed -i 's/timeout: 10s/timeout: 5m/g' /home/cord/.volt/config
'''
}
}
- stage('activate-ONUs') {
- steps {
- sh '''
- if [ -z ${expectedOnus} ]
- then
- echo -e "You need to set the target ONU number\n"
- exit 1
- fi
-
- voltctl device create -t openolt -H bbsim:50060
- voltctl device enable $(voltctl device list --filter Type~openolt -q)
-
- # check ONUs reached Active State in VOLTHA
- i=$(voltctl device list | grep -v OLT | grep ACTIVE | wc -l)
- until [ $i -eq ${expectedOnus} ]
- do
- echo "$i ONUs ACTIVE of ${expectedOnus} expected (time: $SECONDS)"
- sleep ${pollInterval}
- i=$(voltctl device list | grep -v OLT | grep ACTIVE | wc -l)
- done
- echo "${expectedOnus} ONUs Activated in $SECONDS seconds (time: $SECONDS)"
- '''
+ stage('execute') {
+ options {
+ timeout(time:10)
}
- }
- stage('ONOS-ports') {
- steps {
- sh '''
- # Check ports showed up in ONOS
- z=$(sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost ports -e | grep BBSM | wc -l)
- until [ $z -eq ${expectedOnus} ]
- do
- echo "${z} enabled ports of ${expectedOnus} expected (time: $SECONDS)"
- sleep ${pollInterval}
- z=$(sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 8101 karaf@localhost ports -e | grep BBSM | wc -l)
- done
- echo "${expectedOnus} ports enabled in $SECONDS seconds (time: $SECONDS)"
- '''
+ stages {
+ stage('ONUs-enabled') {
+ steps {
+ sh '''
+ if [ -z ${expectedOnus} ]
+ then
+ echo -e "You need to set the target ONU number\n"
+ exit 1
+ fi
+
+ voltctl device create -t openolt -H bbsim:50060
+ voltctl device enable $(voltctl device list --filter Type~openolt -q)
+ # check ONUs reached Active State in VOLTHA
+ i=$(voltctl device list | grep -v OLT | grep ACTIVE | wc -l)
+ until [ $i -eq ${expectedOnus} ]
+ do
+ echo "$i ONUs ACTIVE of ${expectedOnus} expected (time: $SECONDS)"
+ sleep ${pollInterval}
+ i=$(voltctl device list | grep -v OLT | grep ACTIVE | wc -l)
+ done
+ echo "${expectedOnus} ONUs Activated in $SECONDS seconds (time: $SECONDS)"
+ echo $SECONDS > voltha-devices-time-num.txt
+ '''
+ }
+ }
+ stage('ONOS-ports') {
+ steps {
+ sh '''
+ # Check ports showed up in ONOS
+ z=$(sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost ports -e | grep BBSM | wc -l)
+ until [ $z -eq ${expectedOnus} ]
+ do
+ echo "${z} enabled ports of ${expectedOnus} expected (time: $SECONDS)"
+ sleep ${pollInterval}
+ z=$(sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost ports -e | grep BBSM | wc -l)
+ done
+ echo "${expectedOnus} ports enabled in $SECONDS seconds (time: $SECONDS)"
+ echo $SECONDS > temp.txt
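+ # the ONOS figure is cumulative: VOLTHA activation time plus the time for ports to appear in ONOS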
+ paste voltha-devices-time-num.txt temp.txt | awk '{print ($1 + $2)}' > onos-ports-time-num.txt
+ echo "ONOS-Duration(s)" > onos-ports-time.txt
+ echo "VOLTHA-Duration(s)" > voltha-devices-time.txt
+ cat voltha-devices-time-num.txt >> voltha-devices-time.txt
+ cat onos-ports-time-num.txt >> onos-ports-time.txt
+ '''
+ }
+ }
}
}
}
post {
- cleanup {
+ success {
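+ // on success, plot the measured durations with the Jenkins Plot plugin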
+ plot([
+ csvFileName: 'plot-onu-activation.csv',
+ csvSeries: [[displayTableFlag: false, exclusionValues: '', file: 'voltha-devices-time.txt', inclusionFlag: 'OFF', url: ''], [displayTableFlag: false, exclusionValues: '', file: 'onos-ports-time.txt', inclusionFlag: 'OFF', url: '']],
+ group: 'Voltha-Scale-Numbers', numBuilds: '100', style: 'line', title: "Time (${BBSIMdelay}ms Delay)", yaxis: 'Time (s)', useDescr: true
+ ])
+ }
+ always {
sh '''
- #!/usr/bin/env bash
- set -euo pipefail
- cd $WORKSPACE/kind-voltha
-
- WAIT_ON_DOWN=y ./voltha down
- cd $WORKSPACE/
- rm -rf kind-voltha/ || true
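+ # record how many ONUs ended up ACTIVE in VOLTHA, for the count plot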
+ echo $(voltctl device list | grep -v OLT | grep ACTIVE | wc -l) > onus.txt
+ echo "#-of-ONUs" > voltha-devices-count.txt
+ cat onus.txt >> voltha-devices-count.txt
'''
+ sh '''
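+ # record how many BBSM ports ONOS reports as enabled, for the count plot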
+ echo $(sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost ports -e | grep BBSM | wc -l) > ports.txt
+ echo "#-of-ports" > onos-ports-count.txt
+ cat ports.txt >> onos-ports-count.txt
+ '''
+ sh '''
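+ # log each unique container image (and image ID) used by the running pods, with a count of each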
+ kubectl get pods -o jsonpath="{range .items[*].status.containerStatuses[*]}{.image}{'\\t'}{.imageID}{'\\n'}" | sort | uniq -c
+ '''
+ sh '''
+ voltctl device list -o json > device-list.json
+ python -m json.tool device-list.json > voltha-devices-list.json
+ '''
+ sh '''
+ sshpass -e ssh -q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@localhost ports > onos-ports-list.txt
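+ # query Prometheus (assumed to be exposed on NodePort 31301) for average per-pod CPU usage over the last 10 minutes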
+ curl -s -X GET -G http://127.0.0.1:31301/api/v1/query --data-urlencode 'query=avg(rate(container_cpu_usage_seconds_total[10m])*100) by (pod_name)' | jq . > cpu-usage.json
+ '''
+ sh '''
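+ # dump logs from the main VOLTHA deployments so they are archived with the build artifacts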
+ kubectl logs deployment/adapter-open-olt > open-olt-logs.txt
+ kubectl logs deployment/adapter-open-onu > open-onu-logs.txt
+ kubectl logs deployment/voltha-rw-core > voltha-rw-core-logs.txt
+ kubectl logs deployment/voltha-ofagent > voltha-ofagent-logs.txt
+ kubectl logs deployment/bbsim > bbsim-logs.txt
+ '''
+ sh '''
+ rm -rf BBSM-12345123451234512345-00000000000001-v1.json device-list.json onus.txt ports.txt temp.txt
+ '''
+ plot([
+ csvFileName: 'plot-numbers.csv',
+ csvSeries: [[displayTableFlag: false, exclusionValues: '', file: 'voltha-devices-count.txt', inclusionFlag: 'OFF', url: ''], [displayTableFlag: false, exclusionValues: '', file: 'onos-ports-count.txt', inclusionFlag: 'OFF', url: '']],
+ group: 'Voltha-Scale-Numbers', numBuilds: '100', style: 'line', title: "Activated ONUs and Recognized Ports", yaxis: 'Number of Ports/ONUs', useDescr: true
+ ])
+
+ archiveArtifacts artifacts: '*.log,*.json,*txt'
+
}
}
-}
\ No newline at end of file
+}
diff --git a/jjb/pipeline/xos-core.groovy b/jjb/pipeline/xos-core.groovy
index 39a2ea3..6405553 100644
--- a/jjb/pipeline/xos-core.groovy
+++ b/jjb/pipeline/xos-core.groovy
@@ -46,7 +46,7 @@
steps {
sh '''
pushd cord
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifests/default.xml)
repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
popd
'''
diff --git a/jjb/pipeline/xos-service-upgrade.groovy b/jjb/pipeline/xos-service-upgrade.groovy
index 6ddd7ef..12fb69b 100644
--- a/jjb/pipeline/xos-service-upgrade.groovy
+++ b/jjb/pipeline/xos-service-upgrade.groovy
@@ -16,6 +16,7 @@
// Checks functionality of the helm-chart, without overriding the version/tag used
def serviceName = "${gerritProject}"
+def xosCoreVersionMismatch = false
pipeline {
@@ -44,17 +45,6 @@
}
}
- stage('patch') {
- steps {
- sh '''
- pushd cord
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
- repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
- popd
- '''
- }
- }
-
stage('minikube') {
steps {
/* see https://github.com/kubernetes/minikube/#linux-continuous-integration-without-vm-support */
@@ -72,7 +62,7 @@
timeout(3) {
waitUntil {
sleep 5
- def kc_ret = sh script: "kubectl get po", returnStatus: true
+ def kc_ret = sh script: "kubectl get pods", returnStatus: true
return (kc_ret == 0);
}
}
@@ -91,9 +81,86 @@
}
}
+ stage('Verify xos-core version requirements') {
+ steps {
+ script {
+ if (serviceName == "olt-service") {
+ serviceName = "volt"
+ }
+ else if (serviceName == "onos-service") {
+ serviceName = "onos"
+ }
+ else if (serviceName == "kubernetes-service") {
+ serviceName = "kubernetes"
+ }
+ }
+ result = sh returnStdout: true, script: """
+ #!/usr/bin/env bash
+ set -eu -o pipefail
+
+ # Obtain the git tag of the service corresponding to the docker image
+ # used in the latest released version of the helm chart (i.e., HEAD
+ # of the cord/helm-charts master branch, which should already be
+ # checked out by repo).
+ pushd cord/helm-charts
+ export RELEASED_GIT_TAG=\$(echo -e "import yaml\\nwith open('xos-services/${serviceName}/Chart.yaml', 'r') as f: print yaml.safe_load(f)['appVersion']" | python)
+ popd
+
+ # Obtain the xos-core version requirement from the config.yaml of the
+ # released service.
+ pushd cord
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ pushd \${PROJECT_PATH}
+ git fetch --all --tags
+ git checkout tags/\${RELEASED_GIT_TAG} -b foobar
+ export RELEASED_CORE_VER_REQ=\$(echo -e "import yaml\\nwith open('xos/synchronizer/config.yaml', 'r') as f: print yaml.safe_load(f)['core_version']" | python)
+ popd
+ popd
+
+ # Do the same for the patchset we want to verify.
+ pushd cord
+ repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
+ pushd \${PROJECT_PATH}
+ export PATCHSET_CORE_VER_REQ=\$(echo -e "import yaml\\nwith open('xos/synchronizer/config.yaml', 'r') as f: print yaml.safe_load(f)['core_version']" | python)
+ popd
+ popd
+
+ echo "RELEASED_CORE_VER_REQ: \${RELEASED_CORE_VER_REQ}"
+ echo "PATCHSET_CORE_VER_REQ: \${PATCHSET_CORE_VER_REQ}"
+
+ if [ "\${PATCHSET_CORE_VER_REQ}" == "\${RELEASED_CORE_VER_REQ}" ]; then
+ echo 0
+ else
+ # xosCoreVersionMismatch is true
+ echo 1
+ fi
+ """
+ // result contains everything the script printed (pushd/popd output, diagnostic echoes),
+ // so toBoolean() on the whole string would never be true; the script's last line is the
+ // 0/1 flag, so check the trailing character instead.
+ xosCoreVersionMismatch = result.trim().endsWith("1")
+ }
+ }
+
+ if( xosCoreVersionMismatch ) {
+ echo "Detected xos-core version requirements mismatch. Will skip the rest of the pipeline and return SUCCESS"
+ currentBuild.result = 'SUCCESS'
+ return
+ }
+
+ // The patchset should already be checked out at this point, but for consistency
+ // with the other pipeline jobs we repeat the checkout here.
+ stage('patch') {
+ steps {
+ sh '''
+ pushd cord
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
+ popd
+ '''
+ }
+ }
+
stage('Install XOS w/Service') {
steps {
- script {
+ script {
if (serviceName == "olt-service") {
serviceName = "volt"
}
@@ -136,7 +203,7 @@
## get pod logs
for pod in \$(kubectl get pods --no-headers | awk '{print \$1}');
do
- kubectl logs \$pod> $WORKSPACE/\$pod.log;
+ kubectl logs \${pod} > $WORKSPACE/\${pod}_pre.log;
done
"""
}
@@ -166,7 +233,13 @@
stage('Test Pre-Upgrade') {
steps {
sh """
- pushd cord/test/cord-tester/src/test/cord-api/Tests
+ #!/usr/bin/env bash
+ set -ex -o pipefail
+
+ pushd cord/test/cord-tester
+ make venv_cord
+ source venv_cord/bin/activate
+ cd src/test/cord-api/Tests
CORE_CONTAINER=\$(docker ps | grep k8s_xos-core | awk '{print \$1}')
CHAM_CONTAINER=\$(docker ps | grep k8s_xos-chameleon | awk '{print \$1}')
@@ -197,6 +270,7 @@
sh """
#!/usr/bin/env bash
set -eu -o pipefail
+
pushd $WORKSPACE/cord/orchestration/xos-services/${gerritProject}
export DOCKER_TAG=\$(cat VERSION)-test
export DOCKER_REPOSITORY=xosproject/
@@ -233,20 +307,21 @@
stage('Test Post-Upgrade') {
steps {
sh """
+ #!/usr/bin/env bash
+ set -ex -o pipefail
+
+ cd cord/test/cord-tester
+ make venv_cord
+ source venv_cord/bin/activate
+ cd src/test/cord-api/Tests
+
CORE_CONTAINER=\$(docker ps | grep k8s_xos-core | awk '{print \$1}')
export testname=_service_api.robot
export library=_library.robot
SERVICES=\$(docker exec -i \$CORE_CONTAINER /bin/bash -c "cd /opt/xos/dynamic_services/;find -name '*.xproto'" | awk -F[//] '{print \$2}')
echo \$SERVICES
- cd $WORKSPACE/cord/test/cord-tester/src/test/cord-api/Tests
for i in \$SERVICES; do bash -c "robot -v SETUP_FLAG:Setup -i get -d Log -T -v TESTLIBRARY:${serviceName}_library.robot \$i\$testname"; sleep 2; done || true
-
- ## get pod logs
- for pod in \$(kubectl get pods --no-headers | awk '{print \$1}');
- do
- kubectl logs \$pod> $WORKSPACE/\$pod.log;
- done || true
"""
}
}
@@ -270,13 +345,18 @@
timeout 300 bash -c "until http -a admin@opencord.org:letmein GET http://127.0.0.1:30001/xosapi/v1/dynamicload/load_status | jq '.services[] | select(.name==\\"core\\").state'| grep -q present; do echo 'Waiting for Core to be loaded'; sleep 5; done"
timeout 300 bash -c "until http -a admin@opencord.org:letmein GET http://127.0.0.1:30001/xosapi/v1/dynamicload/load_status | jq '.services[] | select(.name==\\"${gerritProject}\\").state'| grep -q present; do echo 'Waiting for Service to be loaded'; sleep 5; done"
sleep 120
- cd $WORKSPACE/cord/test/cord-tester/src/test/cord-api/Tests
+
+ cd $WORKSPACE/cord/test/cord-tester
+ make venv_cord
+ source venv_cord/bin/activate
+ cd src/test/cord-api/Tests
CORE_CONTAINER=\$(docker ps | grep k8s_xos-core | awk '{print \$1}')
export testname=_service_api.robot
export library=_library.robot
SERVICES=\$(docker exec -i \$CORE_CONTAINER /bin/bash -c "cd /opt/xos/dynamic_services/;find -name '*.xproto'" | awk -F[//] '{print \$2}')
+
echo \$SERVICES
for i in \$SERVICES; do bash -c "robot -v SETUP_FLAG:Setup -i get -d Log -T -v TESTLIBRARY:${gerritProject}_library.robot \$i\$testname"; sleep 2; done || true
@@ -297,6 +377,12 @@
post {
always {
sh """
+ ## get pod logs
+ for pod in \$(kubectl get pods --no-headers | awk '{print \$1}');
+ do
+ kubectl logs \${pod} > $WORKSPACE/\${pod}_post.log;
+ done
+
# copy robot logs
if [ -d RobotLogs ]; then rm -r RobotLogs; fi; mkdir RobotLogs
cp -r $WORKSPACE/cord/test/cord-tester/src/test/cord-api/Tests/Log/*ml ./RobotLogs
@@ -325,7 +411,7 @@
reportFileName: 'RobotLogs/report*.html',
unstableThreshold: 0]);
archiveArtifacts artifacts: '*.log'
- step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: "kailash@opennetworking.org, scottb@opennetworking.org", sendToIndividuals: false])
+ step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: "scottb@opennetworking.org", sendToIndividuals: false])
}
}
}
diff --git a/jjb/pipeline/xos-synchronizer-update.groovy b/jjb/pipeline/xos-synchronizer-update.groovy
index 77045e7..d9607ea 100644
--- a/jjb/pipeline/xos-synchronizer-update.groovy
+++ b/jjb/pipeline/xos-synchronizer-update.groovy
@@ -82,7 +82,7 @@
}
pushd cord
- PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifest.xml)
+ PROJECT_PATH=\$(xmllint --xpath "string(//project[@name=\\\"${gerritProject}\\\"]/@path)" .repo/manifests/default.xml)
repo download "\$PROJECT_PATH" "${gerritChangeNumber}/${gerritPatchsetNumber}"
pushd \$PROJECT_PATH
diff --git a/jjb/shell/ansiblelint.sh b/jjb/shell/ansiblelint.sh
index ebb5328..f9eca6f 100755
--- a/jjb/shell/ansiblelint.sh
+++ b/jjb/shell/ansiblelint.sh
@@ -16,7 +16,7 @@
# ansiblelint.sh - check all yaml files that they pass the ansible-lint tool
-set +e -u -o pipefail
+set +e -o pipefail
fail_ansible=0
@@ -30,16 +30,16 @@
# allow directories to be skipped
# space separated directory list expected in SKIP_DIRS
-
-SKIP_DIRS=""
SKIP_REGEX=""
-if [[ -n $SKIP_DIRS ]]; then
+if [ -n "$SKIP_DIRS" ]; then
echo "=> Skipping files matching these directories: $SKIP_DIRS"
# prefix with ./ as find generates, swap spaces for pipes
- SKIP_REGEX=$(echo $SKIP_DIRS | sed 's/[^ ]*/.\/&\//g' | sed 's/ /|/g')
+ SKIP_REGEX=$(echo "$SKIP_DIRS" | sed 's/[^ ]*/.\/&\//g' | sed 's/ /|/g')
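+ # e.g. SKIP_DIRS="roles docs" (illustrative) becomes SKIP_REGEX="./roles/|./docs/"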
fi
+set -u
+
while IFS= read -r -d '' yf
do
if [[ $yf =~ $SKIP_REGEX ]]; then
diff --git a/jjb/shell/repopatch.sh b/jjb/shell/repopatch.sh
index 6102b49..1ce911e 100755
--- a/jjb/shell/repopatch.sh
+++ b/jjb/shell/repopatch.sh
@@ -31,7 +31,7 @@
echo "Checking out a patchset with repo, using repo version:"
repo version
-PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"${GERRIT_PROJECT}\"]/@path)" .repo/manifest.xml)
+PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"${GERRIT_PROJECT}\"]/@path)" .repo/manifests/default.xml)
if [ -z "$PROJECT_PATH" ]
then
diff --git a/jjb/shell/xos-unit.sh b/jjb/shell/xos-unit.sh
index 26b28a2..a0d5442 100644
--- a/jjb/shell/xos-unit.sh
+++ b/jjb/shell/xos-unit.sh
@@ -22,7 +22,7 @@
GERRIT_PROJECT=${GERRIT_PROJECT:-xos}
# find the path to the project that is checked out
-PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"$GERRIT_PROJECT\"]/@path)" cord/.repo/manifest.xml)
+PROJECT_PATH=$(xmllint --xpath "string(//project[@name=\"$GERRIT_PROJECT\"]/@path)" cord/.repo/manifests/default.xml)
if [ -f "$WORKSPACE/cord/$PROJECT_PATH/Makefile" ]; then
# assume newer testing method which uses Makefile
diff --git a/jjb/siab.yaml b/jjb/siab.yaml
index 3d12c2c..0dc5d9d 100644
--- a/jjb/siab.yaml
+++ b/jjb/siab.yaml
@@ -18,7 +18,7 @@
description: |
<!-- Managed by Jenkins Job Builder -->
- Created by {id} job-template from ci-management/jjb/siba.yaml <br /><br />
+ Created by {id} job-template from ci-management/jjb/siab.yaml <br /><br />
E2E Validation for Seba-in-a-Box
properties:
@@ -68,7 +68,7 @@
description: |
<!-- Managed by Jenkins Job Builder -->
- Created by {id} job-template from ci-management/jjb/siba.yaml <br /><br />
+ Created by {id} job-template from ci-management/jjb/siab.yaml, with pipeline: siab.groovy <br /><br />
E2E Validation for Seba-in-a-Box
properties:
@@ -118,7 +118,7 @@
description: |
<!-- Managed by Jenkins Job Builder -->
- Created by {id} job-template from ci-management/jjb/siba.yaml <br /><br />
+ Created by {id} job-template from ci-management/jjb/siab.yaml, with pipeline: siab.groovy <br /><br />
E2E Validation for Seba-in-a-Box
properties:
@@ -169,7 +169,7 @@
description: |
<!-- Managed by Jenkins Job Builder -->
- Created by {id} job-template from ci-management/jjb/siba.yaml <br /><br />
+ Created by {id} job-template from ci-management/jjb/siab.yaml, with pipeline: siab-test.groovy <br /><br />
E2E Validation for Seba-in-a-Box
properties:
diff --git a/jjb/verify/ofagent-go.yaml b/jjb/verify/ofagent-go.yaml
index 48f6269..023544a 100644
--- a/jjb/verify/ofagent-go.yaml
+++ b/jjb/verify/ofagent-go.yaml
@@ -23,6 +23,8 @@
unit-test-targets: 'lint sca test'
unit-test-keep-going: 'true'
junit-allow-empty-results: true
+ - 'voltha-patch-test':
+ pipeline-script: 'voltha-bbsim-tests.groovy'
- job-group:
name: 'publish-ofagent-go-jobs'
diff --git a/jjb/verify/openolt.yaml b/jjb/verify/openolt.yaml
index 7b4eac8..91159f6 100644
--- a/jjb/verify/openolt.yaml
+++ b/jjb/verify/openolt.yaml
@@ -15,7 +15,11 @@
- 'verify-licensed'
- 'tag-collision-reject'
- 'make-unit-test':
+ build-node: 'ubuntu16.04-basebuild-2c-4g'
unit-test-targets: 'test'
+ # openolt uses gtest, which outputs results in xunit format
+ junit-allow-empty-results: true
+ xunit-skip-if-no-test-files: false
# This and the job-template below were written in October 2018
#
diff --git a/jjb/verify/pyvoltha.yaml b/jjb/verify/pyvoltha.yaml
index 4100715..b8564cb 100644
--- a/jjb/verify/pyvoltha.yaml
+++ b/jjb/verify/pyvoltha.yaml
@@ -11,6 +11,17 @@
- 'publish-pyvoltha-jobs':
branch-regexp: '{all-branches-regexp}'
pypi-index: 'pypi'
+ - 'voltha-periodic-test':
+ name: 'nightly-pyvoltha-compatibility-test'
+ pipeline-script: 'voltha-bbsim-tests.groovy'
+ build-node: 'ubuntu16.04-basebuild-4c-8g'
+ default-image-tag: 'master'
+ code-branch: 'master'
+ make-target: sanity-single-kind
+ gerrit-project: pyvoltha
+ onus: 1
+ pons: 1
+ time-trigger: "@daily"
- job-group:
name: 'verify-pyvoltha-jobs'
@@ -20,9 +31,7 @@
dependency-jobs: 'verify_pyvoltha_licensed'
- 'python-unit-test':
dependency-jobs: 'verify_pyvoltha_tag-collision'
- - 'voltha-patch-test':
- pipeline-script: 'voltha-bbsim-tests.groovy'
- skip-vote: true
+
- job-group:
name: 'publish-pyvoltha-jobs'
diff --git a/jjb/verify/voltctl.yaml b/jjb/verify/voltctl.yaml
index ead09ef..9b586ab 100644
--- a/jjb/verify/voltctl.yaml
+++ b/jjb/verify/voltctl.yaml
@@ -20,6 +20,7 @@
- 'tag-collision-reject':
dependency-jobs: 'verify_voltctl_licensed'
- 'make-unit-test':
+ build-node: 'ubuntu16.04-basebuild-2c-4g'
unit-test-targets: 'lint sca test'
unit-test-keep-going: 'true'
dependency-jobs: 'verify_voltctl_tag-collision'
diff --git a/jjb/verify/voltha-docs.yaml b/jjb/verify/voltha-docs.yaml
index b1ab32c..c10d081 100644
--- a/jjb/verify/voltha-docs.yaml
+++ b/jjb/verify/voltha-docs.yaml
@@ -24,7 +24,10 @@
name: 'post-submit-voltha-docs-jobs'
jobs:
- 'sync-dir':
- build-command: 'make versioned'
- build-output-path: '_build/'
+ build-command: 'make html'
+ build-output-path: '_build/html/'
+ # When versioning is working properly, use:
+ # build-command: 'make versioned'
+ # build-output-path: '_build/html/'
sync-target-server: 'guide.opencord.org'
sync-target-path: '/var/www/voltha-docs/'
diff --git a/jjb/verify/voltha-lib-go.yaml b/jjb/verify/voltha-lib-go.yaml
index fee00c3..9f148ef 100644
--- a/jjb/verify/voltha-lib-go.yaml
+++ b/jjb/verify/voltha-lib-go.yaml
@@ -8,6 +8,17 @@
jobs:
- 'verify-voltha-lib-go-jobs':
branch-regexp: '{all-branches-regexp}'
+ - 'voltha-periodic-test':
+ name: 'nightly-voltha-lib-go-compatibility-test'
+ pipeline-script: 'voltha-bbsim-tests.groovy'
+ build-node: 'ubuntu16.04-basebuild-4c-8g'
+ default-image-tag: 'master'
+ code-branch: 'master'
+ make-target: sanity-single-kind
+ gerrit-project: voltha-lib-go
+ onus: 1
+ pons: 1
+ time-trigger: "@daily"
- job-group:
name: 'verify-voltha-lib-go-jobs'
@@ -21,6 +32,3 @@
dest-gopath: "github.com/opencord"
unit-test-targets: 'lint sca test'
unit-test-keep-going: 'true'
- - 'voltha-patch-test':
- pipeline-script: 'voltha-bbsim-tests.groovy'
- skip-vote: true
diff --git a/jjb/verify/voltha-protos.yaml b/jjb/verify/voltha-protos.yaml
index ef7b6ab..98afdb0 100644
--- a/jjb/verify/voltha-protos.yaml
+++ b/jjb/verify/voltha-protos.yaml
@@ -12,6 +12,17 @@
branch-regexp: '{all-branches-regexp}'
pypi-index: 'pypi'
pypi-prep-commands: 'make python-protos'
+ - 'voltha-periodic-test':
+ name: 'nightly-voltha-protos-compatibility-test'
+ pipeline-script: 'voltha-bbsim-tests.groovy'
+ build-node: 'ubuntu16.04-basebuild-4c-8g'
+ default-image-tag: 'master'
+ code-branch: 'master'
+ make-target: sanity-single-kind
+ gerrit-project: voltha-protos
+ onus: 1
+ pons: 1
+ time-trigger: "@daily"
- job-group:
name: 'verify-voltha-protos-jobs'
@@ -23,9 +34,6 @@
build-timeout: 20
unit-test-targets: 'test'
unit-test-keep-going: 'true'
- - 'voltha-patch-test':
- pipeline-script: 'voltha-bbsim-tests.groovy'
- skip-vote: true
- job-group:
name: 'post-merge-voltha-protos-jobs'
diff --git a/jjb/voltha-e2e.yaml b/jjb/voltha-e2e.yaml
index f898a90..3aeb284 100644
--- a/jjb/voltha-e2e.yaml
+++ b/jjb/voltha-e2e.yaml
@@ -28,6 +28,16 @@
time-trigger: "H H/12 * * *"
- 'voltha-periodic-test':
+ name: 'periodic-voltha-errorscenarios-test'
+ build-node: 'qct-pod4-node2'
+ default-image-tag: 'master'
+ code-branch: 'master'
+ make-target: bbsim-errorscenarios
+ onus: 1
+ pons: 1
+ time-trigger: "H H/6 * * *"
+
+ - 'voltha-periodic-test':
name: 'periodic-voltha-system-test'
pipeline-script: 'voltha-system-test-bbsim.groovy'
build-node: 'ubuntu16.04-basebuild-4c-8g'
@@ -60,10 +70,20 @@
pons: 8
time-trigger: "H H/3 * * *"
+ - 'voltha-periodic-test':
+ name: 'periodic-voltha-alarm-test'
+ build-node: 'qct-pod4-node2'
+ default-image-tag: 'master'
+ code-branch: 'master'
+ make-target: bbsim-alarms-kind
+ onus: 1
+ pons: 1
+ time-trigger: "H H/3 * * *"
+
# Per-patchset Pod builds on Tucson pod
- 'verify_physical_voltha_patchset_auto':
name: 'verify_physical_voltha_patchset_auto'
- testvm: 'tucson-pod'
+ build-node: 'tucson-pod'
config-pod: 'tucson-pod'
branch: 'master'
oltDebVersion: 'openolt_asfvolt16.deb'
@@ -73,7 +93,7 @@
# Per-patchset Pod builds on Tucson pod
- 'verify_physical_voltha_patchset_manual':
name: 'verify_physical_voltha_patchset_manual'
- testvm: 'tucson-pod'
+ build-node: 'tucson-pod'
config-pod: 'tucson-pod'
branch: 'master'
oltDebVersion: 'openolt_asfvolt16.deb'
@@ -84,7 +104,7 @@
# Allow local testing without disrupting above job
- 'build_physical_voltha_manual':
name: 'build_tucson-pod_manual'
- testvm: 'tucson-pod'
+ build-node: 'tucson-pod'
config-pod: 'tucson-pod'
branch: 'master'
oltDebVersion: 'openolt_asfvolt16.deb'
@@ -97,6 +117,7 @@
pipeline-script: 'voltha-go-tests.groovy'
test-runs: 1
robot-args: ''
+ gerrit-project: ''
description: |
<!-- Managed by Jenkins Job Builder -->
@@ -121,7 +142,7 @@
- string:
name: extraHelmFlags
- default: '--set defaults.image_tag={default-image-tag},onu={onus},pon={pons},use_ofagent_go=True,images.ofagent.repository=voltha/ofagent-go,images.ofagent.tag=master'
+ default: '--set defaults.image_tag={default-image-tag},onu={onus},pon={pons}'
description: 'Helm flags to pass to ./voltha up'
- string:
@@ -140,6 +161,21 @@
description: 'Repo manifest branch for code checkout (so we can display changes in Jenkins)'
- string:
+ name: gerritProject
+ default: '{gerrit-project}'
+ description: 'Name of the Gerrit project'
+
+ - string:
+ name: gerritChangeNumber
+ default: ''
+ description: 'Changeset number in Gerrit'
+
+ - string:
+ name: gerritPatchsetNumber
+ default: ''
+ description: 'PatchSet number in Gerrit'
+
+ - string:
name: testRuns
default: '{test-runs}'
description: 'How many times to repeat the tests'
@@ -272,7 +308,7 @@
parameters:
- string:
name: buildNode
- default: '{testvm}'
+ default: '{build-node}'
description: 'Pod management node'
- string:
@@ -365,7 +401,7 @@
- string:
name: extraRobotArgs
- default: '-i sanity'
+ default: '-i sanityORDeleteOLT'
description: 'Arguments to pass to robot'
project-type: pipeline
@@ -387,7 +423,7 @@
approval-value: '+2'
projects:
- project-compare-type: REG_EXP
- project-pattern: '^(voltha-openolt-adapter|voltha-openonu-adapter)$'
+ project-pattern: '^(voltha-openolt-adapter|voltha-openonu-adapter|voltha-go)$'
branches:
- branch-compare-type: PLAIN
branch-pattern: 'master'
@@ -413,7 +449,7 @@
parameters:
- string:
name: buildNode
- default: '{testvm}'
+ default: '{build-node}'
description: 'Pod management node'
- string:
@@ -506,7 +542,7 @@
- string:
name: extraRobotArgs
- default: '-i sanity'
+ default: '-i sanityORDeleteOLT'
description: 'Arguments to pass to robot'
project-type: pipeline
@@ -555,7 +591,7 @@
parameters:
- string:
name: buildNode
- default: '{testvm}'
+ default: '{build-node}'
description: 'Pod management node'
- string:
diff --git a/jjb/voltha-scale.yaml b/jjb/voltha-scale.yaml
index 33bdd49..6b43359 100644
--- a/jjb/voltha-scale.yaml
+++ b/jjb/voltha-scale.yaml
@@ -7,9 +7,359 @@
project-name: '{name}'
jobs:
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-8-16-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 16
+ ponPorts: 8
+ expectedOnus: 128
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-8-16-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 16
+ ponPorts: 8
+ expectedOnus: 128
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-8-32-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 32
+ ponPorts: 8
+ expectedOnus: 256
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-8-32-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 32
+ ponPorts: 8
+ expectedOnus: 256
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-10-20-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 20
+ ponPorts: 10
+ expectedOnus: 200
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-10-20-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 20
+ ponPorts: 10
+ expectedOnus: 200
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-32-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 32
+ ponPorts: 4
+ expectedOnus: 128
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-32-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 32
+ ponPorts: 4
+ expectedOnus: 128
+ BBSIMdelay: 1000
+
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-16-32-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 32
+ ponPorts: 16
+ expectedOnus: 512
+ BBSIMdelay: 200
+ # multi-adapter-tmp-changes
+ volthaImg: "matteoscandolo/voltha-rw-core:partition"
+ ofAgentImg: "voltha/voltha-ofagent-go:master"
+ openoltAdapterImg: "matteoscandolo/voltha-openolt-adapter:partition"
+ openonuAdapterImg: "matteoscandolo/voltha-openonu-adapter:partition"
+ openonuAdapterChart: "/home/cord/voltha-helm-charts/voltha-adapter-openonu"
+ extraHelmFlags: "--set use_ofagent_go=true -f /home/cord/partition-values.yaml"
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-16-64-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 64
+ ponPorts: 16
+ expectedOnus: 1024
+ BBSIMdelay: 200
+ # multi-adapter-tmp-changes
+ volthaImg: "matteoscandolo/voltha-rw-core:partition"
+ ofAgentImg: "voltha/voltha-ofagent-go:master"
+ openoltAdapterImg: "matteoscandolo/voltha-openolt-adapter:partition"
+ openonuAdapterImg: "matteoscandolo/voltha-openonu-adapter:partition"
+ openonuAdapterChart: "/home/cord/voltha-helm-charts/voltha-adapter-openonu"
+ extraHelmFlags: "--set use_ofagent_go=true -f /home/cord/partition-values.yaml"
+
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-64-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 64
+ ponPorts: 4
+ expectedOnus: 256
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-64-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 64
+ ponPorts: 4
+ expectedOnus: 256
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-128-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 128
+ ponPorts: 4
+ expectedOnus: 512
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-4-128-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 128
+ ponPorts: 4
+ expectedOnus: 512
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-128-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 128
+ ponPorts: 1
+ expectedOnus: 128
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-128-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 128
+ ponPorts: 1
+ expectedOnus: 128
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-64-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 64
+ ponPorts: 1
+ expectedOnus: 64
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-64-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 64
+ ponPorts: 1
+ expectedOnus: 64
+ BBSIMdelay: 1000
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-1-200ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 1
+ ponPorts: 1
+ expectedOnus: 1
+ BBSIMdelay: 200
+ - 'voltha-scale-measurements-periodic':
+ name: 'voltha-scale-measurements-periodic-1-1-1000ms'
+ build-node: 'onf-pod1-head-node'
+ time-trigger: "H H/4 * * *"
+ onuPerPon: 1
+ ponPorts: 1
+ expectedOnus: 1
+ BBSIMdelay: 1000
- 'voltha-scale-measurements':
- name: 'voltha-scale-measurements-manual'
+ name: 'voltha-scale-measurements-dev'
build-node: 'voltha-scale'
+ onuPerPon: 16
+ ponPorts: 1
+ expectedOnus: 16
+ BBSIMdelay: 200
+
+- job-template:
+ id: 'voltha-scale-measurements-periodic'
+ name: '{name}'
+ pipeline-script: 'voltha-scale-measurements.groovy'
+
+ description: |
+ <!-- Managed by Jenkins Job Builder -->
+ Created by {id} job-template from ci-management/jjb/voltha-scale.yaml <br /><br />
+ Using pipeline {pipeline-script} <br/><br/>
+ Scale measurements for VOLTHA 2.x
+
+ properties:
+ - cord-infra-properties:
+ build-days-to-keep: '{build-days-to-keep}'
+ artifact-num-to-keep: '{artifact-num-to-keep}'
+
+ wrappers:
+ - lf-infra-wrappers:
+ build-timeout: '{build-timeout}'
+ jenkins-ssh-credential: '{jenkins-ssh-credential}'
+
+ # default parameters
+ bbsimImg: voltha/bbsim:master
+ bbsimChart: onf/bbsim
+ volthaImg: voltha/voltha-rw-core:master
+ ofAgentImg: voltha/voltha-ofagent-go:master
+ volthaChart: onf/voltha
+ openoltAdapterImg: voltha/voltha-openolt-adapter:master
+ openoltAdapterChart: onf/voltha-adapter-openolt
+ openonuAdapterImg: voltha/voltha-openonu-adapter:master
+ openonuAdapterChart: onf/voltha-adapter-openonu
+ onosImg: voltha/voltha-onos:4.0.1
+ extraHelmFlags: ''
+
+ parameters:
+ - string:
+ name: buildNode
+ default: '{build-node}'
+ description: 'Name of the Jenkins node to run the job on'
+
+ - string:
+ name: logLevel
+ default: 'WARN'
+ description: 'Log level for all the components'
+
+ - string:
+ name: onuPerPon
+ default: '{onuPerPon}'
+ description: 'Number of ONUs to provision'
+
+ - string:
+ name: ponPorts
+ default: '{ponPorts}'
+ description: 'Number of PONs to provision'
+
+ - string:
+ name: expectedOnus
+ default: '{expectedOnus}'
+ description: 'Expected number of activated ONUs'
+
+ - string:
+ name: pollInterval
+ default: 5
+ description: 'Sleep time between ONU activation checks'
+
+ - bool:
+ name: withOnosApps
+ default: false
+ description: 'Option to deactivate certain ONOS apps'
+
+ - bool:
+ name: withMibTemplate
+ default: true
+ description: 'Option to trigger MIB template command'
+
+ - bool:
+ name: setLinkDiscovery
+ default: false
+ description: 'Option to toggle Link Discovery'
+
+ - string:
+ name: BBSIMdelay
+ default: '{BBSIMdelay}'
+ description: 'BBSIM Delay, milliseconds'
+
+ - string:
+ name: flowStatInterval
+ default: 600
+ description: 'Flow Stats Collection Interval, milliseconds'
+
+ - string:
+ name: portsStatInterval
+ default: 600
+ description: 'Ports Stats Collection Interval, milliseconds'
+
+ - bool:
+ name: bbsimAuth
+ default: true
+ description: 'Option to toggle BBSIM EAPOL true/false'
+
+ - bool:
+ name: bbsimDhcp
+ default: true
+ description: 'Option to toggle BBSIM DHCP true/false'
+
+ - string:
+ name: bbsimImg
+ default: '{bbsimImg}'
+ description: 'Custom image selection for BBSIM (repo:tag)'
+
+ - string:
+ name: bbsimChart
+ default: '{bbsimChart}'
+ description: 'BBSim chart name (or location on file system)'
+
+ - string:
+ name: volthaImg
+ default: '{volthaImg}'
+ description: 'Custom image selection for VOLTHA (repo:tag)'
+
+ - string:
+ name: ofAgentImg
+ default: '{ofAgentImg}'
+ description: 'Custom image selection for OfAgent (repo:tag), only supports the go version'
+
+ - string:
+ name: volthaChart
+ default: '{volthaChart}'
+ description: 'VOLTHA chart name (or location on file system)'
+
+ - string:
+ name: openoltAdapterImg
+ default: '{openoltAdapterImg}'
+ description: 'Custom image selection for Openolt Adapter (repo:tag)'
+
+ - string:
+ name: openoltAdapterChart
+ default: '{openoltAdapterChart}'
+ description: 'OpenOLT chart name (or location on file system)'
+
+ - string:
+ name: openonuAdapterImg
+ default: '{openonuAdapterImg}'
+ description: 'Custom image selection for Openonu Adapter (repo:tag)'
+
+ - string:
+ name: openonuAdapterChart
+ default: '{openonuAdapterChart}'
+ description: 'OpenONU chart name (or location on file system)'
+
+ - string:
+ name: onosImg
+ default: '{onosImg}'
+ description: 'Custom image selection for ONOS (repo:tag)'
+
+ - string:
+ name: extraHelmFlags
+ default: '{extraHelmFlags}'
+ description: 'Any extra helm parameters you want (passed to every helm install command)'
+
+ project-type: pipeline
+ concurrent: false
+
+ dsl: !include-raw-escape: pipeline/{pipeline-script}
+
+ triggers:
+ - timed: |
+ TZ=America/Los_Angeles
+ {time-trigger}
- job-template:
id: 'voltha-scale-measurements'
@@ -32,6 +382,19 @@
build-timeout: '{build-timeout}'
jenkins-ssh-credential: '{jenkins-ssh-credential}'
+ # default parameters
+ bbsimImg: "voltha/bbsim:master"
+ bbsimChart: "onf/bbsim"
+ volthaImg: "voltha/voltha-rw-core:master"
+ ofAgentImg: "voltha/voltha-ofagent-go:master"
+ volthaChart: "onf/voltha"
+ openoltAdapterImg: "voltha/voltha-openolt-adapter:master"
+ openoltAdapterChart: "onf/voltha-adapter-openolt"
+ openonuAdapterImg: "voltha/voltha-openonu-adapter:master"
+ openonuAdapterChart: "onf/voltha-adapter-openonu"
+ onosImg: "voltha/voltha-onos:4.0.1"
+ extraHelmFlags: ''
+
parameters:
- string:
name: buildNode
@@ -39,6 +402,11 @@
description: 'Name of the Jenkins node to run the job on'
- string:
+ name: logLevel
+ default: 'WARN'
+ description: 'Log level for all the components'
+
+ - string:
name: onuPerPon
default: 1
description: 'Number of ONUs to provision'
@@ -57,7 +425,7 @@
name: pollInterval
default: 5
description: 'Sleep time between ONU activation checks'
-
+
- bool:
name: withOnosApps
default: false
@@ -88,7 +456,72 @@
default: 600
description: 'Ports Stats Collection Interval, milliseconds'
+ - bool:
+ name: bbsimAuth
+ default: true
+ description: 'Option to toggle BBSIM EAPOL true/false'
+
+ - bool:
+ name: bbsimDhcp
+ default: true
+ description: 'Option to toggle BBSIM DHCP true/false'
+
+ - string:
+ name: bbsimImg
+ default: '{bbsimImg}'
+ description: 'Custom image selection for BBSIM (repo:tag)'
+
+ - string:
+ name: bbsimChart
+ default: '{bbsimChart}'
+ description: 'BBSim chart name (or location on file system)'
+
+ - string:
+ name: volthaImg
+ default: '{volthaImg}'
+ description: 'Custom image selection for VOLTHA (repo:tag)'
+
+ - string:
+ name: ofAgentImg
+ default: '{ofAgentImg}'
+ description: 'Custom image selection for OfAgent (repo:tag), only supports the go version'
+
+ - string:
+ name: volthaChart
+ default: '{volthaChart}'
+ description: 'VOLTHA chart name (or location on file system)'
+
+ - string:
+ name: openoltAdapterImg
+ default: '{openoltAdapterImg}'
+ description: 'Custom image selection for Openolt Adapter (repo:tag)'
+
+ - string:
+ name: openoltAdapterChart
+ default: '{openoltAdapterChart}'
+ description: 'OpenOLT chart name (or location on file system)'
+
+ - string:
+ name: openonuAdapterImg
+ default: '{openonuAdapterImg}'
+ description: 'Custom image selection for Openonu Adapter (repo:tag)'
+
+ - string:
+ name: openonuAdapterChart
+ default: '{openonuAdapterChart}'
+ description: 'OpenONU chart name (or location on file system)'
+
+ - string:
+ name: onosImg
+ default: '{onosImg}'
+ description: 'Custom image selection for ONOS (repo:tag)'
+
+ - string:
+ name: extraHelmFlags
+ default: '{extraHelmFlags}'
+ description: 'Any extra helm parameters you want (passed to every helm install command)'
+
project-type: pipeline
- concurrent: true
+ concurrent: false
dsl: !include-raw-escape: pipeline/{pipeline-script}
diff --git a/packer/provision/basebuild.sh b/packer/provision/basebuild.sh
index c433d2d..41c358c 100644
--- a/packer/provision/basebuild.sh
+++ b/packer/provision/basebuild.sh
@@ -146,7 +146,7 @@
grpcio-tools \
httpie==1.0.3 \
isort \
- linkchecker \
+ git+https://github.com/linkchecker/linkchecker.git@v9.4.0 \
more-itertools==5.0.0 \
mock \
netaddr \
diff --git a/packer/templates/basebuild.json b/packer/templates/basebuild.json
index 92f5090..e19bf61 100644
--- a/packer/templates/basebuild.json
+++ b/packer/templates/basebuild.json
@@ -1,10 +1,12 @@
{
"variables": {
+ "instance_type": "t2.medium",
"aws_access_key": null,
"aws_security_key": null,
"security_group_id": null,
"source_ami_filter_name": null,
"source_ami_filter_owner": null,
+ "source_ami_filter_product_code": "",
"subnet_id": null,
"ssh_user": null,
"distro": null,
@@ -15,13 +17,14 @@
{
"access_key": "{{user `aws_access_key`}}",
"ami_name": "{{user `distro`}} - basebuild - {{user `arch`}} - {{isotime \"20060102-1504\"}}",
- "instance_type": "t2.micro",
+ "instance_type": "{{user `instance_type`}}",
"region": "us-west-2",
"secret_key": "{{user `aws_security_key`}}",
"security_group_id": "{{user `security_group_id`}}",
"source_ami_filter": {
"filters": {
"name": "{{user `source_ami_filter_name`}}",
+ "product-code": "{{user `source_ami_filter_product_code`}}",
"architecture": "{{user `arch`}}",
"root-device-type": "ebs",
"virtualization-type": "hvm"
diff --git a/packer/vars/centos-7.json b/packer/vars/centos-7.json
index aa629a9..340f545 100644
--- a/packer/vars/centos-7.json
+++ b/packer/vars/centos-7.json
@@ -1,6 +1,8 @@
{
"source_ami_filter_name": "*CentOS Linux 7*HVM*",
- "source_ami_filter_owner": "679593333241",
+ "source_ami_filter_owner": "aws-marketplace",
+ "source_ami_filter_product_code": "aw0evgkw8e5c1q413zgy5pjce",
+
"ssh_user": "centos",
"distro": "CentOS 7",
diff --git a/packer/vars/ubuntu-16.04.json b/packer/vars/ubuntu-16.04.json
index 972ff75..4f7d473 100644
--- a/packer/vars/ubuntu-16.04.json
+++ b/packer/vars/ubuntu-16.04.json
@@ -1,6 +1,7 @@
{
"source_ami_filter_name": "*ubuntu*16.04*",
- "source_ami_filter_owner": "099720109477",
+ "source_ami_filter_owner": "aws-marketplace",
+ "source_ami_filter_product_code": "csv6h7oyg29b7epjzg7qdr7no",
"ssh_user": "ubuntu",