// Copyright 2017-2023 Open Networking Foundation (ONF) and the ONF Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// used to deploy VOLTHA and configure ONOS physical PODs

// NOTE we are importing the library even if it's global so that it's
// easier to change the keywords during a replay

library identifier: 'cord-jenkins-libraries@master',
    retriever: modernSCM([
      $class: 'GitSCMSource',
      remote: 'https://gerrit.opencord.org/ci-management.git'
])

def infraNamespace = "infra"
def volthaNamespace = "voltha"
def clusterName = "kind-ci"

pipeline {
  /* no label, executor is determined by JJB */
  agent {
    label "${params.buildNode}"
  }
  options {
    timeout(time: 120, unit: 'MINUTES')
  }
  environment {
    PATH="$PATH:$WORKSPACE/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/snap/bin"
    KUBECONFIG="$HOME/.kube/kind-${clusterName}"
    VOLTCONFIG="$HOME/.volt/config"
    LOG_FOLDER="$WORKSPACE/${workflow}/"
    APPS_TO_LOG="etcd kafka onos-classic adapter-open-onu adapter-open-olt rw-core ofagent bbsim radius bbsim-sadis-server onos-config-loader"
  }
  stages {
    stage('Download Code') {
      steps {
        getVolthaCode([
          branch: "${branch}",
          gerritProject: "${gerritProject}",
          gerritRefspec: "${gerritRefspec}",
          volthaSystemTestsChange: "${volthaSystemTestsChange}",
          volthaHelmChartsChange: "${volthaHelmChartsChange}",
        ])
      }
    }
    stage('Parse deployment configuration file') {
      steps {
        sh returnStdout: true, script: "rm -rf ${configBaseDir}"
        sh returnStdout: true, script: "git clone -b ${branch} ${cordRepoUrl}/${configBaseDir}"
        script {
          if (params.workflow.toUpperCase() == "TT") {
            error("The Tucson POD does not support TT workflow at the moment")
          }
          if ( params.workflow.toUpperCase() == "DT" ) {
            deployment_config = readYaml file: "${configBaseDir}/${configDeploymentDir}/${configFileName}-DT.yaml"
          }
          else if ( params.workflow.toUpperCase() == "TT" ) {
            deployment_config = readYaml file: "${configBaseDir}/${configDeploymentDir}/${configFileName}-TT.yaml"
          }
          else {
            deployment_config = readYaml file: "${configBaseDir}/${configDeploymentDir}/${configFileName}.yaml"
          }
        }
      }
    }
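    // Clean up leftovers from previous runs: tear down the helm releases in the default,
    // infra and voltha namespaces and kill any orphaned kubectl port-forward processes
    // still running on this node.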
    stage('Clean up') {
      steps {
        timeout(15) {
          script {
            helmTeardown(["default", infraNamespace, volthaNamespace])
          }
          timeout(1) {
            sh returnStdout: false, script: '''
            # remove orphaned port-forward from different namespaces
            ps aux | grep port-forw | grep -v grep | awk '{print $2}' | xargs --no-run-if-empty kill -9 || true
            '''
          }
        }
      }
    }
    stage('Build patch') {
      steps {
        // NOTE that the correct patch has already been checked out
        // during the getVolthaCode step
        buildVolthaComponent("${gerritProject}")
      }
    }
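    // Create the kind cluster only if one with the expected name does not already exist
    // on this node; an existing cluster is reused across runs.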
    stage('Create K8s Cluster') {
      steps {
        script {
          def clusterExists = sh returnStdout: true, script: """
          kind get clusters | grep ${clusterName} | wc -l
          """
          if (clusterExists.trim() == "0") {
            createKubernetesCluster([nodes: 3, name: clusterName])
          }
        }
      }
    }
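    // loadToKind comes from the shared cord-jenkins-libraries; it is expected to load the
    // locally built container images into the kind nodes so the charts can pick them up.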
    stage('Load image in kind nodes') {
      steps {
        loadToKind()
      }
    }
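    // Deploy the VOLTHA stack with the workflow-specific values file. Charts are taken from
    // a local checkout when a voltha-helm-charts patch is under test, otherwise from the
    // published chart repository.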
    stage('Install Voltha') {
      steps {
        timeout(20) {
          script {
            imageFlags = getVolthaImageFlags(gerritProject)
            // if we're downloading a voltha-helm-charts patch, then install from a local copy of the charts
            def localCharts = false
            if (volthaHelmChartsChange != "" || gerritProject == "voltha-helm-charts") {
              localCharts = true
            }
            def flags = "-f $WORKSPACE/${configBaseDir}/${configKubernetesDir}/voltha/${configFileName}.yml ${imageFlags} "
            // NOTE temporary workaround to expose the ONOS node ports (the pod-config needs to be updated to contain these values)
            flags = flags + "--set onos-classic.onosSshPort=30115 " +
              "--set onos-classic.onosApiPort=30120 " +
              "--set onos-classic.onosOfPort=31653 " +
              "--set onos-classic.individualOpenFlowNodePorts=true " + extraHelmFlags
            volthaDeploy([
              workflow: workFlow.toLowerCase(),
              extraHelmFlags: flags,
              localCharts: localCharts,
              kubeconfig: "$WORKSPACE/${configBaseDir}/${configKubernetesDir}/${configFileName}.conf",
              onosReplica: 3,
              atomixReplica: 3,
              kafkaReplica: 3,
              etcdReplica: 3,
            ])
          }
          // start logging
          sh """
          rm -rf $WORKSPACE/${workFlow}/
          mkdir -p $WORKSPACE/${workFlow}
          _TAG=kail-${workFlow} kail -n infra -n voltha > $WORKSPACE/${workFlow}/onos-voltha-combined.log &
          """
          sh returnStdout: false, script: '''
          # start logging with kail
          mkdir -p $LOG_FOLDER
          list=($APPS_TO_LOG)
          for app in "${list[@]}"
          do
            echo "Starting logs for: ${app}"
            _TAG=kail-$app kail -l app=$app --since 1h > $LOG_FOLDER/$app.log&
          done
          '''
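          // JENKINS_NODE_COOKIE=dontKillMe prevents the Jenkins process tree killer from
          // terminating the kafka port-forward when this sh step returns; the while loop
          // restarts the forward if it drops.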
          sh """
          JENKINS_NODE_COOKIE="dontKillMe" bash -c "while true; do kubectl port-forward --address 0.0.0.0 -n ${infraNamespace} svc/voltha-infra-kafka 9092:9092; done"&
          ps aux | grep port-forward
          """
          getPodsInfo("$WORKSPACE")
        }
      }
    }
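    // Install the voltha-kafka-dump chart from the cord repository. helm v2 takes the
    // release name via -n, while helm v3 takes it as a positional argument, hence the
    // version check below.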
    stage('Deploy Kafka Dump Chart') {
      steps {
        script {
          sh returnStdout: false, script: """
          helm repo add cord https://charts.opencord.org
          helm repo update
          if helm version -c --short|grep v2 -q; then
            helm install -n voltha-kafka-dump cord/voltha-kafka-dump
          else
            helm install voltha-kafka-dump cord/voltha-kafka-dump
          fi
          """
        }
      }
    }
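    // When a non-default technology profile is selected, copy it into the etcd pod and store
    // it under service/voltha/technology_profiles/XGS-PON/64 (tech profile ID 64 for XGS-PON).
    // To check the stored profile manually (illustrative command, not run by the pipeline):
    //   ETCDCTL_API=3 etcdctl get service/voltha/technology_profiles/XGS-PON/64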
    stage('Push Tech-Profile') {
      when {
        expression { params.profile != "Default" }
      }
      steps {
        sh returnStdout: false, script: """
        etcd_container=\$(kubectl get pods -n voltha | grep voltha-etcd-cluster | awk 'NR==1{print \$1}')
        kubectl cp $WORKSPACE/voltha-system-tests/tests/data/TechProfile-${profile}.json voltha/\$etcd_container:/tmp/flexpod.json
        kubectl exec -it \$etcd_container -n voltha -- /bin/sh -c 'cat /tmp/flexpod.json | ETCDCTL_API=3 etcdctl put service/voltha/technology_profiles/XGS-PON/64'
        """
      }
    }

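    // Raise the karaf log levels for the ONOS apps exercised by the tests, then push the
    // workflow-specific SADIS subscriber configuration to the ONOS network configuration
    // REST API (API port 30120, SSH port 30115, both exposed by the helm flags above).
    // The pushed config can be read back with an illustrative command such as:
    //   curl --user karaf:karaf http://<onos-node-ip>:30120/onos/v1/network/configuration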
    stage('Push Sadis-config') {
      steps {
        sh returnStdout: false, script: """
        ssh-keygen -R [${deployment_config.nodes[0].ip}]:30115
        ssh-keyscan -p 30115 -H ${deployment_config.nodes[0].ip} >> ~/.ssh/known_hosts
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set TRACE org.opencord.dhcpl2relay"
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set TRACE org.opencord.aaa"
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set TRACE org.opencord.olt"
        # TRACE in the pipeliner is too chatty, moving to DEBUG
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set DEBUG org.opencord.olt.driver"
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set DEBUG org.onosproject.net.flowobjective.impl.FlowObjectiveManager"
        sshpass -p karaf ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 30115 karaf@${deployment_config.nodes[0].ip} "log:set DEBUG org.onosproject.net.flowobjective.impl.InOrderFlowObjectiveManager"

        if [[ "${workFlow.toUpperCase()}" == "DT" ]]; then
          curl -sSL --user karaf:karaf -X POST -H Content-Type:application/json http://${deployment_config.nodes[0].ip}:30120/onos/v1/network/configuration --data @$WORKSPACE/voltha-system-tests/tests/data/${configFileName}-sadis-DT.json
        elif [[ "${workFlow.toUpperCase()}" == "TT" ]]; then
          curl -sSL --user karaf:karaf -X POST -H Content-Type:application/json http://${deployment_config.nodes[0].ip}:30120/onos/v1/network/configuration --data @$WORKSPACE/voltha-system-tests/tests/data/${configFileName}-sadis-TT.json
        else
          # this is the ATT case; renaming the file to *-sadis-ATT.json would avoid the special case and let us just load the file
          curl -sSL --user karaf:karaf -X POST -H Content-Type:application/json http://${deployment_config.nodes[0].ip}:30120/onos/v1/network/configuration --data @$WORKSPACE/voltha-system-tests/tests/data/${configFileName}-sadis.json
        fi
        """
      }
    }
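    // Optionally reinstall the OLT software: remove and purge the asfvolt16 package, wait
    // until dpkg no longer lists it, then install the branch-appropriate .deb and wait for
    // the package to show up again.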
    stage('Reinstall OLT software') {
      when {
        expression { params.reinstallOlt }
      }
      steps {
        script {
          deployment_config.olts.each { olt ->
            sh returnStdout: false, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'dpkg --remove asfvolt16 && dpkg --purge asfvolt16'"
            waitUntil {
              olt_sw_present = sh returnStdout: true, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'dpkg --list | grep asfvolt16 | wc -l'"
              return olt_sw_present.toInteger() == 0
            }
            if ( params.branch == 'voltha-2.3' ) {
              oltDebVersion = oltDebVersionVoltha23
            } else {
              oltDebVersion = oltDebVersionMaster
            }
            sh returnStdout: false, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'dpkg --install ${oltDebVersion}'"
            waitUntil {
              olt_sw_present = sh returnStdout: true, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'dpkg --list | grep asfvolt16 | wc -l'"
              return olt_sw_present.toInteger() == 1
            }
            if ( olt.fortygig ) {
              // If the OLT is connected to a 40G switch interface, set the NNI port to be downgraded
              sh returnStdout: false, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'echo port ce128 sp=40000 >> /broadcom/qax.soc ; /opt/bcm68620/svk_init.sh'"
            }
          }
        }
      }
    }

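    // Reboot each OLT with clean logs and block until the openolt agent reports at least
    // one "onu discover indication", i.e. the OLT is back up and seeing its ONUs.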
    stage('Restart OLT processes') {
      steps {
        script {
          deployment_config.olts.each { olt ->
            sh returnStdout: false, script: """
            ssh-keyscan -H ${olt.ip} >> ~/.ssh/known_hosts
            sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'rm -f /var/log/openolt.log; rm -f /var/log/dev_mgmt_daemon.log; reboot'
            sleep 120
            """
            waitUntil {
              onu_discovered = sh returnStdout: true, script: "sshpass -p ${olt.pass} ssh -l ${olt.user} ${olt.ip} 'grep \"onu discover indication\" /var/log/openolt.log | wc -l'"
              return onu_discovered.toInteger() > 0
            }
          }
        }
      }
    }
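    // Select the robot file, make target and tag filters for the chosen workflow, then run
    // the voltha-system-tests suite. The functional/dataplane tags are added only when the
    // triggering Gerrit comment explicitly asks for them.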
    stage('Run E2E Tests') {
      steps {
        script {
          // different workflows need different make targets and different robot files
          if ( params.workflow.toUpperCase() == "DT" ) {
            robotConfigFile = "${configBaseDir}/${configDeploymentDir}/${configFileName}-DT.yaml"
            robotFile = "Voltha_DT_PODTests.robot"
            makeTarget = "voltha-dt-test"
            robotFunctionalKeyword = "-i functionalDt"
            robotDataplaneKeyword = "-i dataplaneDt"
          }
          else if ( params.workflow.toUpperCase() == "TT" ) {
            // TODO the TT tests have different tags, address once/if TT is supported on the Tucson POD
            robotConfigFile = "${configBaseDir}/${configDeploymentDir}/${configFileName}-TT.yaml"
            robotFile = "Voltha_TT_PODTests.robot"
            makeTarget = "voltha-tt-test"
            robotFunctionalKeyword = "-i functionalTt"
            robotDataplaneKeyword = "-i dataplaneTt"
          }
          else {
            robotConfigFile = "${configBaseDir}/${configDeploymentDir}/${configFileName}.yaml"
            robotFile = "Voltha_PODTests.robot"
            makeTarget = "voltha-test"
            robotFunctionalKeyword = "-i functional"
            robotDataplaneKeyword = "-i dataplane"
          }
        }
        sh returnStdout: false, script: """
        mkdir -p $WORKSPACE/RobotLogs

        export ROBOT_CONFIG_FILE="$WORKSPACE/${robotConfigFile}"
        export ROBOT_MISC_ARGS="${params.extraRobotArgs} --removekeywords wuks -d $WORKSPACE/RobotLogs -v container_log_dir:$WORKSPACE "
        export ROBOT_FILE="${robotFile}"

        # If the Gerrit comment contains a line with "functional tests" then run the full
        # functional test suite. This covers tests tagged either 'sanity' or 'functional'.
        # Note: Gerrit comment text will be prefixed by "Patch set n:" and a blank line
        REGEX="functional tests"
        if [[ "${gerritComment}" =~ \$REGEX ]]; then
          ROBOT_MISC_ARGS+="${robotFunctionalKeyword} "
        fi
        # Likewise for dataplane tests
        REGEX="dataplane tests"
        if [[ "${gerritComment}" =~ \$REGEX ]]; then
          ROBOT_MISC_ARGS+="${robotDataplaneKeyword}"
        fi

        ROBOT_MISC_ARGS+=" -v NAMESPACE:${volthaNamespace} -v INFRA_NAMESPACE:${infraNamespace}"
        make -C $WORKSPACE/voltha-system-tests ${makeTarget} || true
        """
      }
    }
  }
  post {
    always {
      // stop logging
      sh """
      P_IDS="\$(ps e -ww -A | grep "_TAG=kail-${workFlow}" | grep -v grep | awk '{print \$1}')"
      if [ -n "\$P_IDS" ]; then
        echo \$P_IDS
        for P_ID in \$P_IDS; do
          kill -9 \$P_ID
        done
      fi
      gzip $WORKSPACE/${workFlow}/onos-voltha-combined.log || true
      """
      sh '''
      # stop the kail processes
      list=($APPS_TO_LOG)
      for app in "${list[@]}"
      do
        echo "Stopping logs for: ${app}"
        _TAG="kail-$app"
        P_IDS="$(ps e -ww -A | grep "_TAG=$_TAG" | grep -v grep | awk '{print $1}')"
        if [ -n "$P_IDS" ]; then
          echo $P_IDS
          for P_ID in $P_IDS; do
            kill -9 $P_ID
          done
        fi
      done
      '''
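      // Publish the Robot Framework results (the thresholds below decide whether the build
      // is marked passed or unstable) and archive all collected logs as build artifacts.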
      step([$class: 'RobotPublisher',
        disableArchiveOutput: false,
        logFileName: 'RobotLogs/log*.html',
        otherFiles: '',
        outputFileName: 'RobotLogs/output*.xml',
        outputPath: '.',
        passThreshold: 100,
        reportFileName: 'RobotLogs/report*.html',
        unstableThreshold: 0,
        onlyCritical: true]);
      archiveArtifacts artifacts: '**/*.txt,**/*.gz,*.gz,**/*.log'
    }
  }
}

// refs/changes/06/24206/5