/*
 * This pipeline is the "template" for the Asterisk Periodic Tests multi-branch
 * parent job. Jenkins will automatically scan the branches in the "asterisk"
 * or "Security-asterisk" projects in Gerrit and automatically create a branch-
 * specific job for each branch it finds this file in.
 *
 * This file starts as a declarative pipeline because with a declarative
 * pipeline, you can define the trigger in the pipeline file. This keeps
 * everything in one place. We transition to a scripted pipeline later on
 * because we need to dynamically determine which docker image we're going to
 * use, and you can't do that in a declarative pipeline.
 */
def timeoutTime = 3
def timeoutUnits = 'HOURS'
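
/*
 * TIMEOUT_DAILIES, if set in the environment, is assumed to hold a value like
 * "6 HOURS": the first token overrides the timeout length and the second the
 * unit passed to the timeout() option below.
 */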
if (env.TIMEOUT_DAILIES) {
    def _timeout = env.TIMEOUT_DAILIES.split()
    timeoutTime = _timeout[0].toInteger()
    timeoutUnits = _timeout[1]
}

pipeline {
    options {
        timestamps()
        timeout(time: timeoutTime, unit: timeoutUnits)
    }
    triggers {
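        /*
         * "H H(0-4) * * *" runs the job once a day at a Jenkins-hashed minute
         * and hour between midnight and 4 AM, so periodic jobs are spread out
         * rather than all starting at the same time.
         */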
        cron 'H H(0-4) * * *'
    }
    agent {
        /* All of the stages need to be performed on a docker host */
        label "swdev-docker"
    }

    stages {
        stage ("->") {
            steps {
                /* Here's where we switch to scripted pipeline */
                script {
                    manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)

                    stage ("Checkout") {
                        sh "sudo chown -R jenkins:users ."
                        sh "printenv | sort"
                        sh "sudo tests/CI/setupJenkinsEnvironment.sh"
                    }
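
                    /*
                     * DOCKER_IMAGES is a space-separated list of image names.
                     * Taking the build start time modulo the number of images
                     * is a cheap way to rotate the daily runs across the
                     * available images.
                     */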
                    def images = env.DOCKER_IMAGES.split(' ')
                    def r = currentBuild.startTimeInMillis % images.length
                    def ri = images[(int)r]
                    def randomImage = env.DOCKER_REGISTRY + "/" + ri
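
                    /*
                     * The container runs privileged, presumably for the network
                     * setup the testsuite needs; /tmp is a 1 GB tmpfs, and
                     * /srv/jenkins and /srv/cache are bind-mounted from the
                     * host so caches and job data persist across containers.
                     */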
                    def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
                        " --mount type=tmpfs,tmpfs-size=1g,dst=/tmp -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
                        " --entrypoint=''"
                    def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
                    def outputdir = "tests/CI/output/Testsuite"

                    manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
                    def img = docker.image(randomImage)
                    img.pull()

                    img.inside(dockerOptions + " --name ${bt}-build") {
                        stage ("Build") {
                            echo 'Building..'
                            env.CCACHE_DIR = "/srv/cache/ccache"
                            sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
                            archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
                                artifacts: "${outputdir}/*"
                        }

                        stage ("Docs") {
                            sh "sudo ./tests/CI/installAsterisk.sh --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
                            def docUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/publish-docs")
                            checkout scm: [$class: 'GitSCM',
                                branches: [[name: "master"]],
                                extensions: [
                                    [$class: 'RelativeTargetDirectory', relativeTargetDir: "tests/CI/output/publish-docs"],
                                    [$class: 'CloneOption',
                                        noTags: true,
                                        depth: 10,
                                        honorRefspec: true,
                                        shallow: true
                                    ],
                                ],
                                userRemoteConfigs: [[url: docUrl]]
                            ]

                            sh "./tests/CI/publishAsteriskDocs.sh --user-group=jenkins:users --branch-name=${BRANCH_NAME} --wiki-doc-branch-regex=\"${WIKI_DOC_BRANCH_REGEX}\""
                        }
                    }
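
                    /*
                     * periodic-dailyTestGroups.json is expected to be an array
                     * of objects with "name", "dir", "runTestsuiteOptions" and
                     * "testcmd" fields; each entry becomes one parallel stage.
                     */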
                    def testGroups = readJSON file: "tests/CI/periodic-dailyTestGroups.json"
                    def parallelTasks = [ : ]

                    for (def testGroup in testGroups) {
                        /*
                         * Because each task is a Groovy closure, we need to
                         * keep local references to some variables.
                         */
                        def groupName = testGroup.name
                        def groupDir = testGroup.dir
                        def groupTestcmd = testGroup.testcmd
                        def groupRunTestsuiteOptions = testGroup.runTestsuiteOptions
                        def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/testsuite")

                        parallelTasks[groupName] = {
                            stage (groupName) {
                                img.inside("${dockerOptions} --name ${bt}-${groupName}") {
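                                    /*
                                     * The install step is serialized across the
                                     * parallel groups with a per-job, per-node
                                     * lock, presumably because installAsterisk.sh
                                     * works out of the shared workspace checkout.
                                     */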
                                    lock("${JOB_NAME}.${NODE_NAME}.installer") {
                                        sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
                                    }

                                    sh "sudo rm -rf ${groupDir} || : "
                                    checkout scm: [$class: 'GitSCM',
                                        branches: [[name: "${BRANCH_NAME}"]],
                                        extensions: [
                                            [$class: 'RelativeTargetDirectory', relativeTargetDir: groupDir],
                                            [$class: 'CloneOption',
                                                noTags: true,
                                                depth: 10,
                                                honorRefspec: true,
                                                shallow: true
                                            ],
                                        ],
                                        userRemoteConfigs: [[url: testsuiteUrl]]
                                    ]

                                    sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"

                                    archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
                                        artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"

                                    junit testResults: "${groupDir}/asterisk-test-suite-report.xml",
                                        healthScaleFactor: 1.0,
                                        keepLongStdio: true
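
                                    /*
                                     * Log the group result both inside the
                                     * container ("d") and again after leaving
                                     * it ("s"); comparing the two makes it
                                     * easier to see where a group went unstable.
                                     */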
- echo "Group result d: ${currentBuild.currentResult}"
- }
- echo "Group result s: ${currentBuild.currentResult}"
- }
- }
- }
- parallel parallelTasks
- }
- }
- }
- }
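
    /*
     * The post conditions below only report the result in the console log;
     * cleanup always removes the build output so the workspace starts clean
     * on the next run.
     */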
    post {
        cleanup {
            sh "sudo make distclean >/dev/null 2>&1 || : "
            sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
        }
        success {
            echo "Reporting ${currentBuild.currentResult} Passed"
        }
        failure {
            echo "Reporting ${currentBuild.currentResult}: Failed: Fatal Error"
        }
        unstable {
            echo "Reporting ${currentBuild.currentResult}: Failed: Tests Failed"
        }
    }
}