// vim: ts=2:sw=2:expandtab:syntax=groovy
/* https://build.libelektra.org/job/elektra-jenkinsfile/
 * This file describes how the elektra-jenkinsfile buildjob should be
 * executed.
 *
 * 1. libraries and global variables are set
 * 2. define the main stages of the pipeline
 * 3. describe sub stages. This is where you will want to add new builds
 * 4. helper section to help write build scripts
 *
 * General Information about Jenkinsfiles can be found at
 * https://jenkins.io/doc/book/pipeline/jenkinsfile/.
 *
 * A Snippet generator is available to the public at
 * https://qa.nuxeo.org/jenkins/pipeline-syntax/.
 * A list of available commands on the build server can be found after a login at
 * https://build.libelektra.org/job/elektra-jenkinsfile/pipeline-syntax/.
 */

// TODO have a per plugin/binding deps in Dockerfile for easier maintenance
// TODO add warnings plugins to scan for compiler warnings
//      Does appear to be not working for multiple runs in one job

// Libraries
@Library('libelektra-shared') _

// Buildjob properties
properties([
  buildDiscarder(
    logRotator(
      artifactDaysToKeepStr: '31',  // Keep artifacts for max 31 days
      artifactNumToKeepStr: '5',    // Keep artifacts for last 5 builds
      daysToKeepStr: '90',          // Keep build info for 90 days
      numToKeepStr: '60'            // Keep a max of 60 builds
    )
  )
])

// If previous run is still running, cancel it unless we are building
// code from the `master` branch.
abortPreviousRunUnlessMaster()

// Configuration consumed by the libelektra-shared pipeline library
// (loaded via @Library above).
pipelineConfig {
// uncomment if you want to overwrite these values
// dockerNodeLabel = 'docker'
// registry = 'hub.libelektra.org'
  now = new Date()
}

/* Define reusable cmake Flag globals
 *
 * They can be passed to many of the test helper functions and the cmake
 * function and represent flags usually passed to cmake.
 */
CMAKE_FLAGS_BASE = [
  'SITE': '${STAGE_NAME}',
  'KDB_DB_SYSTEM': '${WORKSPACE}/config/kdb/system',
  'KDB_DB_SPEC': '${WORKSPACE}/config/kdb/spec',
  'KDB_DB_HOME': '${WORKSPACE}/config/kdb/home',
  'CMAKE_INSTALL_PREFIX': '${WORKSPACE}/system',
  'BUILD_DOCUMENTATION': 'OFF',
  'CMAKE_RULE_MESSAGES': 'OFF',
  'COMMON_FLAGS': '-Werror'
]

// Enable all bindings, plugins and tools
CMAKE_FLAGS_BUILD_ALL = [
  'BINDINGS': 'ALL',
  'PLUGINS': 'ALL',
  'TOOLS': 'ALL'
]

// Collect test coverage information
CMAKE_FLAGS_COVERAGE = ['ENABLE_COVERAGE': 'ON']

// Compile with Clang instead of the default compiler
CMAKE_FLAGS_CLANG = [
  'CMAKE_C_COMPILER': 'clang-11',
  'CMAKE_CXX_COMPILER': 'clang++-11'
]

// Use mmapstorage as the default storage backend
CMAKE_FLAGS_MMAP = [
  'KDB_DB_FILE': 'default.mmap',
  'KDB_DB_INIT': 'elektra.mmap',
  'KDB_DEFAULT_STORAGE': 'mmapstorage'
]

// Compile for 32-bit (i386)
CMAKE_FLAGS_I386 = [
  'CMAKE_C_FLAGS': '-m32',
  'CMAKE_CXX_FLAGS': '-m32'
]

CMAKE_FLAGS_ASAN = ['ENABLE_ASAN': 'ON']
CMAKE_FLAGS_DEBUG = ['ENABLE_DEBUG': 'ON']
CMAKE_FLAGS_LOGGER = ['ENABLE_LOGGER': 'ON']
CMAKE_FLAGS_OPTIMIZATIONS_OFF = ['ENABLE_OPTIMIZATIONS': 'OFF']

// CMAKE_FLAGS_BUILD_SHARED = ['BUILD_SHARED': 'ON']  is ON per default
CMAKE_FLAGS_BUILD_FULL = ['BUILD_FULL': 'ON']
CMAKE_FLAGS_BUILD_STATIC = ['BUILD_STATIC': 'ON']

// Define TEST enum used in buildAndTest helper
enum TEST {
  MEM,        // Test for memory leaks via valgrind
  NOKDB,      // Only run tests that do not write to disk
  ALL,        // Run all tests
  INSTALL,    // Run all tests on an installed version of Elektra
  CRYPTOS     // Test crypto, fcrypt and gpgme for memory leaks (quite unstable)
  public TEST() {}  // WORKAROUND https://issues.jenkins-ci.org/browse/JENKINS-33023
}

DOCKER_IMAGES = [:]  // Contains docker image descriptions, populated during
                     // dockerInit()

/*****************************************************************************
 * Main Stages
 *
 * Serial stages that contain parallelized logic. Only proceeds to the next
 * if previous stage did not fail.
 *****************************************************************************/

/* main function wrapping around all stages
 *
 * Added to improve readability.
 * The stages run serially; a stage is only entered if the previous
 * stage did not fail.
 */
def main() {
  // Populate DOCKER_IMAGES with image descriptions (needs an scm checkout)
  stage("Init docker images") {
    dockerInit()
  }

  // Pull prebuilt images in parallel; marks images that do not exist yet
  stage("Pull docker images") {
    parallel dockerUtils.generateDockerPullStages(DOCKER_IMAGES)
  }

  // Only build images that could not be pulled. The lock prevents
  // concurrent jobs from building the same images at the same time.
  maybeStage("Build docker images", DOCKER_IMAGES.any {img -> !img.value.exists}) {
    lock('docker-images') {
        parallel dockerUtils.generateDockerBuildStages(DOCKER_IMAGES)
    }
  }

  // Quick-feedback builds that catch obvious errors early
  stage("Main builds") {
    milestone label: "Main builds"
    parallel generateMainBuildStages()
  }

  // Extensive builds for full test coverage
  stage("Full builds") {
    milestone label: "Full builds"
    parallel generateFullBuildStages()
  }

  // Build, test and publish packages and other artifacts
  stage("Build artifacts") {
    milestone label: "Build artifacts"
    parallel generateArtifactStages()
  }

  // Website deployment only happens for builds of the master branch
  maybeStage("Deploy Website", isMaster()) {
    milestone label: "Deploy Website"
    deployWebsite()
  }

  // maybeStage("Deploy Web UI", isMaster()) {
  //   deployWebUI()
  // }
}

// Entry point: run the pipeline; on master failures notify via mail.
try {
  detectInterruption {
    main()
  }
} catch(UserInterruptedException uie) {
  // The build was aborted deliberately — log it, do not treat as failure
  println "Caught ${uie}"
} catch(Exception e) {
  if (isMaster()) {
    // If master is failing we want to know ASAP so send a mail.

    // collect changes since last build
    def changes = currentBuild.changeSets.collect() {
      it.collect() {
        "* ${it.getCommitId().take(7)} - ${it.getAuthor()} - ${it.getMsg().take(40)}"
      }.join('\n')
    }.join('\n')
    if (!changes) {
      changes = "* No new changes since last build"
    }

    def message = """\
Build ${JOB_NAME}:${BUILD_NUMBER} failed.
Url: ${RUN_DISPLAY_URL}
Reason: ${e}

Changes: ${RUN_CHANGES_DISPLAY_URL}
${changes}

Logs: ${currentBuild.rawBuild.getLog(20).join('\n')}
"""
    mail subject: "Build ${JOB_NAME} failed",
         body: message,
         replyTo: "noreply@libelektra.org",
         to: "build@libelektra.org"
  }
  // rethrow so the build is still marked as failed
  throw e
}

/*****************************************************************************
 * Stage Generators
 *****************************************************************************/

/* Populate DOCKER_IMAGES with data
 *
 * For this we need a checkout of the scm to generate the hash for the
 * Dockerfiles which indicates if a rebuild of the images is needed
 */
def dockerInit() {
  node("master") {
    echo "Processing DOCKER_IMAGES"
    checkout scm

    /* We use the sid image for testing if we are compatible with Debian
     * unstable.
     * Additionally we use it for tests with a recent Clang (see
     * CMAKE_FLAGS_CLANG) and for source formatting checks.
     */
    DOCKER_IMAGES.sid = dockerUtils.createDockerImageDesc(
      "debian-sid", dockerUtils.&idTesting,
      "./scripts/docker/debian/sid",
      "./scripts/docker/debian/sid/Dockerfile"
    )

    /* Build Elektra's documentation with this image.
     * Also contains latex for pdf creation.
     */
    DOCKER_IMAGES.sid_doc = dockerUtils.createDockerImageDesc(
      "debian-sid-doc", dockerUtils.&idTesting,
      "./scripts/docker/debian/sid",
      "./scripts/docker/debian/sid/doc.Dockerfile"
    )

    /* Our main target for compatibility is Debian stable
     * (currently buster).
     * Hence we try to build all parts of Elektra via this image.
     * Most of the tests defined below use this image.
     * Especially noteworthy is that this image is also used for Debian
     * package building
     */
    DOCKER_IMAGES.buster = dockerUtils.createDockerImageDesc(
      "debian-buster", dockerUtils.&idTesting,
      "./scripts/docker/debian/buster",
      "./scripts/docker/debian/buster/Dockerfile"
    )

    /* A minimal Debian buster image used to test Elektra without any
     * additional requirements introduced by optional plugins, bindings,
     * ...
     */
    DOCKER_IMAGES.buster_minimal = dockerUtils.createDockerImageDesc(
      "debian-buster-minimal", dockerUtils.&idTesting,
      "./scripts/docker/debian/buster",
      "./scripts/docker/debian/buster/minimal.Dockerfile"
    )

    /* A Docker image for crossbuilding i386
     */
    DOCKER_IMAGES.buster_i386 = dockerUtils.createDockerImageDesc(
      "debian-buster-i386", dockerUtils.&idTesting,
      "./scripts/docker/debian/buster",
      "./scripts/docker/debian/buster/i386.Dockerfile"
    )

    /* Build Elektra's documentation with this image.
     * Also contains latex for pdf creation.
     */
    DOCKER_IMAGES.buster_doc = dockerUtils.createDockerImageDesc(
      "debian-buster-doc", dockerUtils.&idTesting,
      "./scripts/docker/debian/buster",
      "./scripts/docker/debian/buster/doc.Dockerfile"
    )

    /* A Debian oldstable image used for testing backwards compatibility.
     */
    DOCKER_IMAGES.stretch = dockerUtils.createDockerImageDesc(
      "debian-stretch", dockerUtils.&idTesting,
      "./scripts/docker/debian/stretch",
      "./scripts/docker/debian/stretch/Dockerfile"
    )

    /* A minimal Debian stretch image used to test Elektra without any
     * additional requirements introduced by optional plugins, bindings,
     * ...
     */
    DOCKER_IMAGES.stretch_minimal = dockerUtils.createDockerImageDesc(
      "debian-stretch-minimal", dockerUtils.&idTesting,
      "./scripts/docker/debian/stretch",
      "./scripts/docker/debian/stretch/minimal.Dockerfile"
    )

    /* A Docker image for crossbuilding i386
     */
    DOCKER_IMAGES.stretch_i386 = dockerUtils.createDockerImageDesc(
      "debian-stretch-i386", dockerUtils.&idTesting,
      "./scripts/docker/debian/stretch",
      "./scripts/docker/debian/stretch/i386.Dockerfile"
    )

    /* Ubuntu xenial image used to test compatibility with Ubuntu.
     */
    DOCKER_IMAGES.xenial = dockerUtils.createDockerImageDesc(
      "ubuntu-xenial", dockerUtils.&idTesting,
      "./scripts/docker/ubuntu/xenial",
      "./scripts/docker/ubuntu/xenial/Dockerfile"
    )

    /* Alpine: used to compile against musl and uses ash as shell
     */
    DOCKER_IMAGES.alpine = dockerUtils.createDockerImageDesc(
      "alpine", dockerUtils.&idTesting,
      "./scripts/docker/alpine/3.12",
      "./scripts/docker/alpine/3.12/Dockerfile"
    )

    /* Ubuntu bionic image */
    DOCKER_IMAGES.bionic = dockerUtils.createDockerImageDesc(
      "bionic", dockerUtils.&idTesting,
      "./scripts/docker/ubuntu/bionic",
      "./scripts/docker/ubuntu/bionic/Dockerfile"
    )

    /* Ubuntu focal image */
    DOCKER_IMAGES.focal = dockerUtils.createDockerImageDesc(
      "focal", dockerUtils.&idTesting,
      "./scripts/docker/ubuntu/focal",
      "./scripts/docker/ubuntu/focal/Dockerfile"
    )

    /* Fedora 32 image */
    DOCKER_IMAGES.fedora_32 = dockerUtils.createDockerImageDesc(
      "fedora-32", dockerUtils.&idTesting,
      "./scripts/docker/fedora/32",
      "./scripts/docker/fedora/32/Dockerfile"
    )

    /* Fedora 33 image */
    DOCKER_IMAGES.fedora_33 = dockerUtils.createDockerImageDesc(
      "fedora-33", dockerUtils.&idTesting,
      "./scripts/docker/fedora/33",
      "./scripts/docker/fedora/33/Dockerfile"
    )

    /* Image building the libelektra.org website */
    DOCKER_IMAGES.website = dockerUtils.createDockerImageDesc(
      "website", dockerUtils.&idArtifact,
      ".",
      "./scripts/docker/website/Dockerfile",
      false
    )

    /* Image building elektra web base image */
    DOCKER_IMAGES.webui_base = dockerUtils.createDockerImageDesc(
      "web-base", dockerUtils.&idArtifact,
      ".",
      "./scripts/docker/webui/base/Dockerfile",
      false
    )

    /* Image building elektrad */
    DOCKER_IMAGES.webui_elektrad = dockerUtils.createDockerImageDesc(
      "elektrad-demo", dockerUtils.&idArtifact,
      "./scripts/docker/webui/elektrad-demo/",
      "./scripts/docker/webui/elektrad-demo/Dockerfile",
      false
    )

    /* Image building webd */
    DOCKER_IMAGES.webui_webd = dockerUtils.createDockerImageDesc(
      "webd-demo", dockerUtils.&idArtifact,
      "./scripts/docker/webui/webd-demo/",
      "./scripts/docker/webui/webd-demo/Dockerfile",
      false
    )

    /* Debian buster image used to install previously built packages */
    DOCKER_IMAGES.buster_installed = dockerUtils.createDockerImageDesc(
      "debian-buster-installed", dockerUtils.&idArtifact,
      "./build/package",
      "./scripts/docker/debian/buster/release.Dockerfile",
      false
    )

    /* Ubuntu focal image used to install previously built packages */
    DOCKER_IMAGES.focal_installed = dockerUtils.createDockerImageDesc(
      "ubuntu-focal-installed", dockerUtils.&idArtifact,
      "./build/package",
      "./scripts/docker/ubuntu/focal/release.Dockerfile",
      false
    )

    /* Ubuntu bionic image used to install previously built packages */
    DOCKER_IMAGES.bionic_installed = dockerUtils.createDockerImageDesc(
      "ubuntu-bionic-installed", dockerUtils.&idArtifact,
      "./build/package",
      "./scripts/docker/ubuntu/bionic/release.Dockerfile",
      false
    )

    /* Fedora 33 image used to install previously built packages */
    DOCKER_IMAGES.fedora_33_installed = dockerUtils.createDockerImageDesc(
      "fedora-33-installed", dockerUtils.&idArtifact,
      "./build/package",
      "./scripts/docker/fedora/33/release.Dockerfile",
      false
    )
  }
}

/* Generate Main stages
 *
 * Should be used to give quick feedback to developer and check for obvious
 * errors before the intensive tasks start
 */
def generateMainBuildStages() {
  def tasks = [:]
  // We want to fail fast (i.e. abort parallel stages if one fails)
  tasks.failFast = true

  tasks << buildAndTest(
    "fedora-33-full",
    DOCKER_IMAGES.fedora_33,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_BUILD_FULL +
      CMAKE_FLAGS_BUILD_STATIC,
    [TEST.ALL, TEST.MEM, TEST.NOKDB, TEST.INSTALL]
  )

  // Add a task that should build the whole project to catch all test errors
  // in a standard environment
  tasks << buildAndTest(
    "debian-buster-full",
    DOCKER_IMAGES.buster,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_BUILD_FULL +
      CMAKE_FLAGS_BUILD_STATIC +
      CMAKE_FLAGS_COVERAGE
    ,
    [TEST.ALL, TEST.MEM, TEST.NOKDB, TEST.INSTALL]
  )

  // Check the ABI and API compatibility of Elektra
  tasks << buildIcheck()

  // Check if release notes have been updated
  tasks << buildCheckReleaseNotes()

  // Check formatting of c and CMake files
  tasks << buildFormatChecks()

  return tasks
}

/* Generate Test stages for full test coverage
 *
 * These stages are more expensive than the main builds and only run
 * after the main builds succeeded.
 */
def generateFullBuildStages() {
  def tasks = [:]

  // Run the open tasks plugin and sloccount
  tasks << buildTodo()

  // Build doc and upload
  tasks << buildDoc()

  tasks << buildAndTest(
    "fedora-32-full",
    DOCKER_IMAGES.fedora_32,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_BUILD_FULL +
      CMAKE_FLAGS_BUILD_STATIC,
    [TEST.ALL, TEST.MEM, TEST.NOKDB, TEST.INSTALL]
  )

  // Build Elektra with clang and ASAN
  // Detects memory leaks via ASAN
  tasks << buildAndTestAsan(
    "fedora-33-asan",
    DOCKER_IMAGES.fedora_33,
    CMAKE_FLAGS_BUILD_ALL
  )

  // Add a task that should build the whole project to catch all test errors
  // in a standard environment
  tasks << buildAndTest(
    "debian-stretch-full",
    DOCKER_IMAGES.stretch,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_BUILD_FULL +
      CMAKE_FLAGS_BUILD_STATIC +
      CMAKE_FLAGS_COVERAGE
    ,
    [TEST.ALL, TEST.MEM, TEST.NOKDB, TEST.INSTALL]
  )

  // Build Elektra with ASAN enabled
  // Detects memory leaks via ASAN
  tasks << buildAndTestAsan(
    "debian-stretch-asan",
    DOCKER_IMAGES.stretch,
    CMAKE_FLAGS_BUILD_ALL
  )

  // Build Elektra with clang and ASAN
  // Detects memory leaks via ASAN
  tasks << buildAndTestAsan(
    "debian-unstable-clang-asan",
    DOCKER_IMAGES.sid,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_CLANG
  )

  // Build Elektra on debian-unstable for compatibility tests
  tasks << buildAndTest(
    "debian-unstable-full",
    DOCKER_IMAGES.sid,
    CMAKE_FLAGS_BUILD_ALL+
      CMAKE_FLAGS_DEBUG,
    [TEST.ALL, TEST.MEM, TEST.INSTALL]
  )

  tasks << buildAndTest(
    "debian-buster-full-i386",
    DOCKER_IMAGES.buster_i386,
    CMAKE_FLAGS_I386 + [
      'PLUGINS': 'NODEP'
    ] + CMAKE_FLAGS_BUILD_ALL,
    [TEST.ALL]
  )

  tasks << buildAndTest(
    "debian-stretch-full-i386",
    DOCKER_IMAGES.stretch_i386,
    CMAKE_FLAGS_I386 + [
      'PLUGINS': 'NODEP'
    ] + CMAKE_FLAGS_BUILD_ALL,
    [TEST.ALL]
  )

  // Build Elektra with clang. We use unstable for easy access to a recent
  // clang (see CMAKE_FLAGS_CLANG).
  // Detects problems when using an alternative compiler (clang)
  tasks << buildAndTest(
    "debian-unstable-full-clang",
    DOCKER_IMAGES.sid,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_CLANG,
    [TEST.ALL, TEST.MEM, TEST.INSTALL]
  )

  // Build Elektra via mingw for Windows
  tasks << buildAndTestMingwW64()

  // Build Elektra on alpine
  tasks << buildAndTest(
    "alpine",
    DOCKER_IMAGES.alpine,
    CMAKE_FLAGS_BUILD_ALL +
    CMAKE_FLAGS_BUILD_STATIC + [
      'PLUGINS': 'ALL;-date',
    ],
    [TEST.ALL]
  )

  // Build Elektra on ubuntu-xenial
  tasks << buildAndTest(
    "ubuntu-xenial",
    DOCKER_IMAGES.xenial,
    CMAKE_FLAGS_BUILD_ALL,
    [TEST.ALL]
  )

  // Build Elektra on a minimal Debian stretch Docker image
  tasks << buildAndTest(
    "debian-stretch-minimal",
    DOCKER_IMAGES.stretch_minimal,
    [:],
    [TEST.ALL]
  )

  // Build Elektra on a minimal Debian buster Docker image
  tasks << buildAndTest(
    "debian-buster-minimal",
    DOCKER_IMAGES.buster_minimal,
    [:],
    [TEST.ALL]
  )

  // Build Elektra on a minimal Debian buster Docker image
  // specify NODEP and see if plugins are properly included/excluded
  tasks << buildAndTest(
    "debian-buster-minimal-nodep",
    DOCKER_IMAGES.buster_minimal,
    ['PLUGINS': 'NODEP'],
    [TEST.ALL]
  )

  // Build Elektra without the cache plugin
  tasks << buildAndTest(
    "debian-buster-full-nocache",
    DOCKER_IMAGES.buster,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_COVERAGE + [
      'PLUGINS': 'ALL;-DEPRECATED;-cache',
      ],
    [TEST.ALL, TEST.MEM]
  )

  tasks << buildAndTestAsan(
    "debian-buster-full-mmap-asan",
    DOCKER_IMAGES.buster,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_MMAP
  )

  // Build Elektra with xdg resolver to see if we are compliant
  def xdgResolver = 'resolver_mf_xp_x'
  tasks << buildAndTest(
    "debian-buster-full-xdg",
    DOCKER_IMAGES.buster,
    [
      'PLUGINS': "${xdgResolver};dump;sync;base64;spec;error;list;timeofday;profile;mathcheck;tracer;hosts;network;glob",
      'KDB_DEFAULT_RESOLVER': xdgResolver
    ],
    [TEST.ALL, TEST.MEM]
  )

  // Build Elektra without optimizations
  tasks << buildAndTest(
    "debian-stretch-full-optimizations-off",
    DOCKER_IMAGES.stretch,
    CMAKE_FLAGS_BUILD_ALL +
      CMAKE_FLAGS_OPTIMIZATIONS_OFF +
      CMAKE_FLAGS_DEBUG +
      CMAKE_FLAGS_LOGGER
    ,
    [TEST.ALL, TEST.MEM]
  )

  // Run memory analysis of the crypto plugins in separate environment as these tests are quite unstable
  tasks << buildAndTest(
    "debian-buster-cryptoplugins",
    DOCKER_IMAGES.buster,
    CMAKE_FLAGS_BUILD_ALL+
      CMAKE_FLAGS_DEBUG + [
      'PLUGINS': 'dump;resolver_fm_hpu_b;list;spec;sync;crypto;fcrypt;gpgme;base64',
      'TOOLS': 'kdb;gen-gpg-testkey',
      'BINDINGS': '',
      ],
    [TEST.CRYPTOS]
  )

  tasks << buildAndTest(
    "debian-stretch-cryptoplugins",
    DOCKER_IMAGES.stretch,
    CMAKE_FLAGS_BUILD_ALL+
      CMAKE_FLAGS_DEBUG + [
      'PLUGINS': 'dump;resolver_fm_hpu_b;list;spec;sync;crypto;fcrypt;gpgme;base64',
      'TOOLS': 'kdb;gen-gpg-testkey',
      'BINDINGS': '',
      ],
    [TEST.CRYPTOS]
  )

  // We need the webui_base image to build webui images later
  tasks << dockerUtils.buildImageStage(DOCKER_IMAGES.webui_base)

  return tasks
}

/* Stage for analysing open Tasks and running sloccount
 *
 * Publishes a sloccount report of the workspace and scans the sources
 * for TODO markers via the open tasks plugin.
 */
def buildTodo() {
  def name = "todo"
  def taskPatterns = '''\
**/*.c, **/*.h, **/*.hpp, **/*.cpp,\
**/CMakeLists.txt, **/Dockerfile*, Jenkinsfile*
'''
  def runAnalysis = {
    withDockerEnv(DOCKER_IMAGES.buster_doc) {
      sh "sloccount --duplicates --wide --details ${WORKSPACE} > sloccount.sc"
      step([$class: 'SloccountPublisher', ignoreBuildFailure: true])
      openTasks pattern: taskPatterns,
                low: 'TODO'
      utils.archive(["sloccount.sc"])
      deleteDir()
    }
  }
  return [(name): { stage(name) { runAnalysis() } }]
}

/* Stage checking if release notes have been updated
 *
 * Only runs for non-master builds (i.e. pull requests/branches),
 * since master itself does not need the check.
 */
def buildCheckReleaseNotes() {
  def name = "check-release-notes"
  def runCheck = {
    withDockerEnv(DOCKER_IMAGES.stretch, [DockerOpts.MOUNT_MIRROR]) {
      sh "scripts/build/run_check_release_notes"
      deleteDir()
    }
  }
  return [(name): { maybeStage(name, !isMaster()) { runCheck() } }]
}

/* Stage running Icheck to see if the API has been modified
 *
 * Executes the run_icheck build script inside the Debian stretch image.
 */
def buildIcheck() {
  def name = "icheck"
  def runCheck = {
    withDockerEnv(DOCKER_IMAGES.stretch, [DockerOpts.MOUNT_MIRROR]) {
      sh "scripts/build/run_icheck"
      deleteDir()
    }
  }
  return [(name): { stage(name) { runCheck() } }]
}

/* Stage verifying the formatting of the sources
 *
 * Configures a plain build and runs the testscr_check_formatting test.
 */
def buildFormatChecks() {
  def name = "formatting-check"
  def runCheck = {
    withDockerEnv(DOCKER_IMAGES.sid, [DockerOpts.MOUNT_MIRROR]) {
      dir('build') {
        deleteDir()
        utils.cmake(env.WORKSPACE, [:])
        ctest("Test -R testscr_check_formatting")
      }
    }
  }
  return [(name): { stage(name) { runCheck() } }]
}

/* Stage building and uploading the documentation
 *
 * Builds the html, latex, man and pdf documentation, renders the API
 * blueprint, publishes Doxygen warnings and uploads everything to
 * doc.libelektra.org. Master builds are uploaded below api/master,
 * all other builds (pull requests) below api/pr.
 */
def buildDoc() {
  def stageName = "doc"
  // `def` keeps the flags local to this function instead of leaking
  // into the global script binding.
  def cmakeFlags = [
    'BUILD_PDF': 'ON',
    'BUILD_FULL': 'OFF',
    'BUILD_SHARED': 'OFF',
    'BUILD_STATIC': 'OFF',
    'BUILD_TESTING': 'OFF'
  ]
  return [(stageName): {
    stage(stageName) {
      withDockerEnv(DOCKER_IMAGES.sid_doc) {
        dir('build') {
          deleteDir()
          utils.cmake(env.WORKSPACE, cmakeFlags)
          sh "make html latex man pdf"
        }

        // Render the API blueprint and keep a versioned copy plus a
        // `current` symlink pointing at the latest version.
        def apib = "./doc/api_blueprints/snippet-sharing.apib"
        def apiDocDir = "./build/API_DOC/restapi"
        sh "mkdir -p ${apiDocDir}/${VERSION}"
        sh "cp ${apib} ${apiDocDir}/${VERSION}/"
        apiary(apib, "${apiDocDir}/${VERSION}/snippet-sharing.html")
        dir(apiDocDir) {
          sh "ln -s ${VERSION} current"
        }

        warnings parserConfigurations: [
          [parserName: 'Doxygen', pattern: 'build/doc/doxygen.log']
        ]

        // Master builds publish to api/master, everything else to
        // api/pr. Apart from the target prefix the transfers are
        // identical, so compute the prefix once instead of
        // duplicating the whole publisher configuration.
        def remoteBase = isMaster() ? 'api/master' : 'api/pr'
        sshPublisher(
          publishers: [
            sshPublisherDesc(
              verbose: true,
              configName: 'doc.libelektra.org',
              transfers: [
                sshTransfer(
                  sourceFiles: 'build/doc/latex/*',
                  removePrefix: 'build/doc/',
                  remoteDirectory: remoteBase
                ),
                sshTransfer(
                  sourceFiles: 'build/doc/html/*',
                  removePrefix: 'build/doc/',
                  remoteDirectory: remoteBase
                ),
                sshTransfer(
                  sourceFiles: 'build/doc/man/man3elektra/*',
                  removePrefix: 'build/doc/man/man3elektra/',
                  remoteDirectory: "${remoteBase}/man"
                )
              ]
            )
          ]
        )
        deleteDir()
      }
    }
  }]
}

/* Helper to generate an ASAN enabled test
 *
 * Builds Elektra with AddressSanitizer enabled and runs the test suite
 * with llvm-symbolizer resolved so sanitizer reports are symbolized.
 *
 * testName: identifies the test and names the stage
 * image: docker image the build runs in
 * extraCmakeFlags: flags merged on top of the base + ASAN flags
 */
def buildAndTestAsan(testName, image, extraCmakeFlags = [:]) {
  def flags = CMAKE_FLAGS_BASE + CMAKE_FLAGS_ASAN + extraCmakeFlags
  // PTRACE is required in addition to the mirror mount
  def opts = [DockerOpts.MOUNT_MIRROR, DockerOpts.PTRACE]
  def runBuild = {
    withDockerEnv(image, opts) {
      dir('build') {
        deleteDir()
        utils.cmake(env.WORKSPACE, flags)
        sh "make"
        def symbolizer = sh(returnStdout: true,
                            script: 'which llvm-symbolizer').trim()
        withEnv(["ASAN_OPTIONS='symbolize=1'",
                 "ASAN_SYMBOLIZER_PATH=${symbolizer}"]){
          ctest()
        }
      }
    }
  }
  return [(testName): { stage(testName) { runBuild() } }]
}

/* Helper to generate the mingw-w64 cross compilation test
 *
 * Cross compiles Elektra for Windows on the Debian buster image,
 * installs it into a staging directory and archives the result
 * as elektra.zip.
 */
def buildAndTestMingwW64() {
  def name = "debian-buster-mingw-w64"
  def runBuild = {
    withDockerEnv(DOCKER_IMAGES.buster) {
      dir('build') {
        deleteDir()
        sh '../scripts/dev/configure-mingw-w64 ..'
        sh 'make'
        def stagingDir = 'elektra'
        withEnv(["DESTDIR=${stagingDir}"]) {
          sh 'make install'
        }
        sh "zip -r elektra.zip ${stagingDir}"
        utils.archive(['elektra.zip'])
      }
    }
  }
  return [(name): { stage(name) { runBuild() } }]
}

/* Helper to generate a typical Elektra test environment
 *   Builds Elektra, depending on the contents of 'tests' it runs the
 *   corresponding test suites.
 * testName: used to identify the test and name the stage
 * image: which docker image should be used
 * extraCmakeFlags: which flags should be passed to cmake
 * tests: list of tests (see TEST enum) which should be run
 * extraArtifacts: which files should be additionally saved from the build
 */
def buildAndTest(testName, image, extraCmakeFlags = [:],
                 tests = [], extraArtifacts = []) {
  def cmakeFlags = CMAKE_FLAGS_BASE + extraCmakeFlags

  // Test result xml files are added below when tests are requested
  def artifacts = []

  // Coverage is only tracked if all coverage flags are part of the
  // effective cmake flags
  def testCoverage = cmakeFlags.intersect(CMAKE_FLAGS_COVERAGE)
                                .equals(CMAKE_FLAGS_COVERAGE)
  // Coveralls is only updated once, by the debian-buster-full build
  def updateCoveralls = testName == 'debian-buster-full'
  def testMem = tests.contains(TEST.MEM)
  def testNokdb = tests.contains(TEST.NOKDB)
  def testAll = tests.contains(TEST.ALL)
  def testCryptos = tests.contains(TEST.CRYPTOS)
  def install = tests.contains(TEST.INSTALL)
  def dockerOpts = [DockerOpts.MOUNT_MIRROR]
  return [(testName): {
    stage(testName) {
      withDockerEnv(image, dockerOpts) {
        // we use a space in the directory to test if paths are
        // properly escaped
        def buildDir='build directory'

        if(tests) {
          artifacts.add("\"${buildDir}\"/Testing/*/*.xml")
        }

        try {
          ensureDirsExist(getElektraWritableFiles())

          dir(buildDir) {
            deleteDir()
            utils.cmake(env.WORKSPACE, cmakeFlags)
            sh "make"
            trackCoverage(testCoverage) {
              if(testAll) {
                ctest()
                if(testMem) {
                  cmemcheck()
                }
              }
              // NOKDB tests run with the kdb files made unwritable to
              // verify that no test writes to disk
              if(testNokdb) {
                withPermissions(getElektraWritableFiles(), "000") {
                  cnokdbtest()
                  if(testMem && !testAll) {
                    cmemcheck(testNokdb)
                  }
                }
              }
              if(testCryptos) {
                ctestcryptoplugins()
              }
            }
            if(install) {
              sh 'make install'
            }
          }
          // Smoke test the installed Elektra by running `kdb run_all`
          // against the install prefix
          if(install) {
            sh '''\
export LD_LIBRARY_PATH=${WORKSPACE}/system/lib:$LD_LIBRARY_PATH
export PATH=${WORKSPACE}/system/bin:$PATH
export DBUS_SESSION_BUS_ADDRESS=`dbus-daemon --session --fork --print-address`
export LUA_CPATH="${WORKSPACE}/system/lib/lua/5.2/?.so;"

env

kdb run_all
kill `pidof dbus-daemon` || echo "No dbus-daemon to kill."
'''
          }
        } catch(e) {
          println "Caught the following exception: ${e.message}"
          // rethrow to mark as failed
          throw e
        } finally {
          /* Warnings plugin overwrites each other, disable for now
          warnings canRunOnFailed: true, consoleParsers: [
            [parserName: 'GNU Make + GNU C Compiler (gcc)']
          ]
          */
          // Archive test results and coverage even if the build failed
          utils.archive(artifacts)
          if(testCoverage) {
            publishCoverage("${buildDir}/coverage")
          }
          if(updateCoveralls) {
            withCredentials([string(credentialsId: 'coveralls-repo-token', variable: 'REPO_TOKEN')]) {
              withEnv(["TRAVIS_JOB_ID=$BUILD_NUMBER"]) {
                sh("""\
coveralls -b '${buildDir}' \
          -t '${REPO_TOKEN}' \
          -e .idea -e benchmarks -e doc -e examples -e install -e libelektra -e system -e tests \
          -E '.*${buildDir}/(CMakeFiles|include)/.*' \
          -E '.*${buildDir}/src/bindings/(glib|intercept)/.*' \
          -E '.*${buildDir}/src/include/elektra/.*' \
          -E '.*${buildDir}/src/include/(kdb|kdbconfig|kdbversion)\\.h' \
          -E '.*${buildDir}/src/(libs|tools)/.*' \
          -E '.*${buildDir}/src/plugins/\
(?!\
constants/constants\\.c|\
(lua|ruby)/runtime\\.h|\
yanlr/YAML(((Base)?Listener|ImprovedSymbolNames)|.h)|\
).*' \
          -E '.*/src/error/.*' \
""")
              }
            }
          }
          if(testMem || testNokdb || testAll) {
            xunitUpload("${buildDir}/Testing/**/*.xml")
          }
          deleteDir()
        }
      }
    }
  }]
}

/**
 *  Generate Stages that build, test and deploy artifacts
 */
def generateArtifactStages() {
  def tasks = [:]

  // Debian buster packages
  tasks << buildPackage(
    "buildPackage/debian/buster",
    DOCKER_IMAGES.buster,
    "buster-unstable",
    "buster",
    DOCKER_IMAGES.buster_installed,
    release.&publishDebPackages,
    this.&updateDebianChangelog
  )
  // Ubuntu bionic packages
  tasks << buildPackage(
    "buildPackage/ubuntu/bionic",
    DOCKER_IMAGES.bionic,
    "bionic-unstable",
    "bionic",
    DOCKER_IMAGES.bionic_installed,
    release.&publishDebPackages,
    this.&updateDebianChangelog
  )
  // Ubuntu focal packages
  tasks << buildPackage(
    "buildPackage/ubuntu/focal",
    DOCKER_IMAGES.focal,
    "focal-unstable",
    "focal",
    DOCKER_IMAGES.focal_installed,
    release.&publishDebPackages,
    this.&updateDebianChangelog
  )
  // Fedora 33 packages; the trailing `true` requests placeholder dirs
  // needed for RPM debuginfo generation (see buildPackage)
  tasks << buildPackage(
    "buildPackage/fedora/33",
    DOCKER_IMAGES.fedora_33,
    "fedora-33-unstable",
    "fedora-33-unstable",
    DOCKER_IMAGES.fedora_33_installed,
    release.&publishRpmPackages,
    this.&updateFedoraChangelog,
    true
  )

  tasks << buildWebsite()
  // tasks << buildWebUI()

  return tasks
}


/**
 *  Runs the packaging script, tests packages and publishes to unstable repo.
 *
 * `withDockerEnv` accepts a second closure that builds the test image,
 * tests packages and publishes them. This is necessary to ensure that
 * these steps are run on the same node with the same working directory.
 *
 * @param stageName e.g. buildPackage/debian/<codename>
 * @param image must be one of DOCKER_IMAGES
 * @param repoName name of the package repository
 * @param repoPrefix prefix of the package repository
 * @param installImage image which should be build containing the built packages
 * @param publishPackagesFun Closure that publishes packages to a repository
 * @param updateChangelogFun Closure that updates the changelog
 * @param placeholderDir Bool that describes if placeholder dirs need to be created
 */
def buildPackage(stageName, image, repoName, repoPrefix, installImage,
                 publishPackagesFun, updateChangelogFun,
                 placeholderDir=false) {
  return [(stageName): {
    stage(stageName) {
      // Set by updateChangelogFun inside the main closure below and later
      // read by postCl when publishing; must be declared in this outer scope.
      def packageRevision = ''
      def targetDir=""
      if (placeholderDir) {
        // necessary because libelektra path must be longer than
        // CPACK_RPM_<component>_BUILD_SOURCE_DIRS_PREFIX for debuginfo generation
        // of RPM packages.
        targetDir = "./placeholder/placeholder/placeholder/placeholder/libelektra"
      } else {
        targetDir = "./libelektra"
      }
      // Closure that builds the image with the previously generated packages,
      // tests these packages and publishes them.
      def postCl = {
        dir(targetDir) {
          dockerUtils.buildImage(installImage)
          // must not be run in targetDir because `withDockerEnv` checks out git
          // repo in current directory which would overwrite build/package dir
          dir("../packageTesting") {
          // Do not call withDockerEnv(..) {} directly because this step would be
          // executed in a nested node, causing the current node to wait for the
          // stage on the other node to complete.
            withDockerEnv.withDockerEnvWithoutNode(installImage) {
              sh "kdb run_all"
            }
          }
          dir("build/package") {
            // Only builds of the master branch publish to the unstable repos.
            if (isMaster()) {
              publishPackagesFun(
                'doc.libelektra.org',
                "/packaging/incoming/${repoName}/",
                repoName,
                repoPrefix,
                "$VERSION",
                packageRevision,
                targetDir
              )
            }
          }
        }
      }
      // Main build: import signing keys, check out sources into targetDir,
      // update the changelog, build and sign the packages. postCl then runs
      // on the same node/workspace (see function doc above).
      withDockerEnv(image, [DockerOpts.MOUNT_MIRROR], postCl) {
        withCredentials([file(credentialsId: 'jenkins-key', variable: 'KEY'),
                         file(credentialsId: 'jenkins-secret-key', variable: 'SKEY')]) {
          sh "gpg --import $KEY"
          sh "gpg --import $SKEY"
          // Start from a clean workspace before the manual checkout below.
          sh "rm -R ./*"
          checkout scm: [
            $class: 'GitSCM',
            branches: scm.branches,
            extensions: scm.extensions + [
              [$class: 'PerBuildTag'],
              [$class: 'RelativeTargetDirectory',
               relativeTargetDir: targetDir]
            ],
            userRemoteConfigs: scm.userRemoteConfigs
          ]
          dir(targetDir) {
            // The changelog update commits, so git identity must be configured.
            sh "git config --global user.name jenkins-builder"
            sh "git config --global user.email jenkins@libelektra.org"

            dir('scripts/packaging') {
              packageRevision = updateChangelogFun()
            }

            dir("build") {
              sh "../scripts/packaging/package.sh ${packageRevision}"
              sh "../scripts/release/sign-packages.sh ./package"
            }
          }
        }
      }
    }
  }]
}

/**
 * Updates the fedora changelog for the unstable build
 * @return revision number of the package
 */
def updateFedoraChangelog() {
  dir ("fedora") {
    // Append a build-number suffix to the release and commit the change.
    sh "./update-rpm-changelog.sh -l '.$BUILD_NUMBER' -m 'auto build'"
    sh "git commit -am 'auto build $VERSION'"
    // The package revision is the part after the dash of the full version.
    def revision = sh(
      script: './update-rpm-changelog.sh -p | cut -d "-" -f2',
      returnStdout: true
    )
    return revision.trim()
  }
}

/**
 * Updates the ubuntu changelog for the unstable build
 * @return revision number of the package
 */
def updateDebianChangelog() {
  // Append a build-number suffix to the version and commit the change.
  sh "dch -l '.$BUILD_NUMBER' 'auto build'"
  sh "git commit -am 'auto build $VERSION'"
  // The Debian revision is the part after the dash of the Version field.
  def revision = sh(
    script: 'dpkg-parsechangelog --show-field Version -l ./debian/changelog | cut -d "-" -f2',
    returnStdout: true
  )
  return revision.trim()
}


/**
 * Replaces a running Docker container on the frontend node with the
 * latest version of its image.
 *
 * Stops and removes any existing container of the same name, then starts
 * the freshly pulled image with the nginx-proxy/letsencrypt environment
 * variables set for the given host name.
 *
 * @param name name of the container to (re)start
 * @param imageDesc image description, must be one of DOCKER_IMAGES
 * @param hostName public host name served by this container
 */
def deployDockerContainer(name, imageDesc, hostName) {
  node("frontend") {
    docker.withRegistry("https://${PipelineConfig.instance.registry}",
                        'docker-hub-elektra-jenkins') {
      def img = docker.image(imageDesc.id)
      img.pull()

      // `|| /bin/true` keeps the step green on first deployment,
      // when no container of this name exists yet.
      sh "docker stop -t 5 ${name} || /bin/true"
      sh "docker rm ${name} || /bin/true"
      img.run("""\
        -e VIRTUAL_HOST=${hostName} \
        -e LETSENCRYPT_HOST=${hostName} \
        -e LETSENCRYPT_EMAIL=jenkins@hub.libelektra.org \
        --name ${name} \
        --network=frontend_default \
        --restart=always"""
      )
    }
  }
}

/**
 * Deploys the website built into the website Docker image.
 *
 * Copies the generated site out of the image, backs up the currently
 * deployed site on doc.libelektra.org and then uploads the new one.
 */
def deployWebsite() {
  withDockerEnv(DOCKER_IMAGES.website) {
        sh "ls -al /usr/local/share/elektra/tool_data/website/public/"
        sh "cp -Rf /usr/local/share/elektra/tool_data/website ."
        // Step 1: keep a backup of the currently deployed site in public_old.
        sshPublisher(
            publishers: [
              sshPublisherDesc(
                verbose: true,
                configName: 'doc.libelektra.org',
                transfers: [
                  sshTransfer(
                    execCommand: 'rm -Rf /srv/libelektra/website/public_old; cp -Rfpv /srv/libelektra/website/public /srv/libelektra/website/public_old'
                  )
                ]
              )
            ]
          )
        // Step 2: upload the freshly built site.
        sshPublisher(
            publishers: [
              sshPublisherDesc(
                verbose: true,
                configName: 'doc.libelektra.org',
                transfers: [
                  sshTransfer(
                    sourceFiles: 'website/public/**/*',
                    removePrefix: 'website',
                    remoteDirectory: 'website'
                  )
                ]
              )
            ]
          )
      }
}

/**
 * Returns the stage that builds the website Docker image.
 */
def buildWebsite() {
  return [:] << dockerUtils.buildImageStage(DOCKER_IMAGES.website)
}

/**
 * Deploys both containers of the web UI demo (elektrad backend and
 * webd frontend).
 */
def deployWebUI() {
  // Each entry: container name, image description, public host name.
  [
    ["elektrad", DOCKER_IMAGES.webui_elektrad, "elektrad-demo.libelektra.org"],
    ["webd", DOCKER_IMAGES.webui_webd, "webdemo.libelektra.org"]
  ].each { name, imageDesc, hostName ->
    deployDockerContainer(name, imageDesc, hostName)
  }
}

/**
 * Returns the stages that build the two web UI Docker images.
 */
def buildWebUI() {
  def tasks = [:]
  [DOCKER_IMAGES.webui_elektrad, DOCKER_IMAGES.webui_webd].each {
    tasks << dockerUtils.buildImageStage(it)
  }
  return tasks
}

/*****************************************************************************
 * Define helper functions
 *****************************************************************************/

/* Publishes coverage reports
 *
 * Moves the reports into a branch/stage specific directory, packs them
 * into a tarball and unpacks it on doc.libelektra.org.
 * @param source dir where coverage reports are located
 */
def publishCoverage(source = 'build/coverage') {
  echo "Start publication of coverage data"
  def destination = "coverage/${env.BRANCH_NAME}/${env.STAGE_NAME}"
  def tarball = "cov_${env.BRANCH_NAME}_${env.STAGE_NAME}.tar.gz"

  sh "mkdir -p ${destination}"
  // `|| /bin/true` tolerates a missing source dir (no coverage generated)
  sh "mv -v -T '${source}' ${destination} || /bin/true"
  sh "tar -czvf ${tarball} ${destination}"

  def unpackRemotely = "cd /srv/libelektra && tar -zxvf ${tarball} && rm ${tarball}"
  sshPublisher(
    publishers: [
      sshPublisherDesc(
        verbose: true,
        configName: 'doc.libelektra.org',
        transfers: [
          sshTransfer(
            sourceFiles: tarball,
            execCommand: unpackRemotely
          )
        ]
      )
    ]
  )
  echo "Finish publication of coverage data"
}

/* Track coverage
 *
 * Wraps the passed closure with coverage collection commands when
 * doTrack evaluates to true; otherwise the closure runs unchanged.
 * Note: the stop/genhtml commands are intentionally not in a finally
 * block — if the closure fails, no coverage is generated.
 * @param doTrack If true track coverage
 * @param cl A closure that this function wraps around
 */
def trackCoverage(doTrack, cl) {
  if (doTrack) {
    sh 'make coverage-start'
  }
  cl()
  if (doTrack) {
    ['make coverage-stop', 'make coverage-genhtml'].each { cmd ->
      sh cmd
    }
  }
}

/* Run ctest with appropriate env variables
 * @param target What target to pass to ctest
 */
def ctest(target = "Test") {
  /*
     We disable the tests:

     - `testmod_dbus`,
     - `testmod_dbusrecv`,and
     - `testmod_zeromqsend`

     , since they are known to fail in scenarios with high load.
     See also: https://issues.libelektra.org/2439
  */
  def command = "ctest -j ${env.CTEST_PARALLEL_LEVEL} --force-new-ctest-process " +
                "--output-on-failure --no-compress-output -T ${target} " +
                "-E 'testmod_(dbus(recv)?|zeromqsend)'"
  sh command
}

/* Helper for ctest to run MemCheck for the crypto, fcrypt and gpgme plugin
 * tests, which cause trouble when run in parallel or under high server load.
 * @param target What target to pass to ctest
 */
def ctestcryptoplugins(target = "MemCheck") {
  // note: deliberately not parallel (no -j) for these plugins
  def command = "ctest --force-new-ctest-process " +
                "--output-on-failure --no-compress-output -T ${target} " +
                "-R 'testmod_(crypto|fcrypt|gpgme)'"
  sh command
}

/* Helper for ctest to run MemCheck without memleak tagged tests
 * @param kdbtests If true run tests tagged as kdbtests
 */
def cmemcheck(kdbtests=true) {
  // kdbtests-tagged tests are additionally excluded when kdbtests is false
  ctest(kdbtests ? "MemCheck -LE memleak" : "MemCheck -LE memleak||kdbtests")
}

/* Helper for ctest to run tests without tests tagged as kdbtests.
 *
 * kdbtests access the key database and therefore cannot run in
 * environments without a writable kdb setup.
 */
def cnokdbtest() {
  ctest("Test -LE kdbtests")
}

/* Uploads ctest results
 *
 * Any skipped or failed test marks the build as failed.
 * @param p Pattern to scan for
 */
def xunitUpload(p = 'build/Testing/**/*.xml') {
  def limits = [
    [$class: 'SkippedThreshold', failureThreshold: '0'],
    [$class: 'FailedThreshold', failureThreshold: '0']
  ]
  step([$class: 'XUnitPublisher',
        thresholds: limits,
        tools: [[$class: 'CTestType', pattern: p]]])
}

/* Returns True if we are on the master branch
 */
def isMaster() {
  // Groovy `==` is null-safe, so a missing BRANCH_NAME yields false
  return "master" == env.BRANCH_NAME
}

/* Run apiary
 * @param input Input file (.apib)
 * @param output Output file (.html)
 */
def apiary(input, output) {
  def command = "apiary preview --path=${input} --output=${output}"
  sh command
}

/* Aborts a still-running previous build of the same job, unless we are
 * building the `master` branch.
 *
 * NOTE(review): accessing `rawBuild` requires a trusted (non-sandboxed)
 * pipeline library or script approval — confirm this runs from the
 * shared library context.
 */
def abortPreviousRunUnlessMaster() {
  // We never want to abort builds of the master branch
  if (isMaster()) return;
  // Safe-navigation chain: each step may be null if there is no
  // previous in-progress build or it has no executor yet.
  def exec = currentBuild
             ?.rawBuild
             ?.getPreviousBuildInProgress()
             ?.getExecutor()
  if(exec) {
    exec.interrupt(
      Result.ABORTED,
      new CauseOfInterruption.UserInterruption(
        "Aborted by Build#${currentBuild.number}"
      )
    )
  }
}

/* Helper that modifies file permissions before executing the passed closure.
 * Restores the permissions after the closure has run.
 * @param listOfFiles List representing directories and files that should be
 *                    made unwritable.
 *                    Files must exist or it will fail.
 * @param perm target permissions in a form that can be passed to chmod
 * @param cl The closure that should be run
 */
def withPermissions(listOfFiles, perm, cl) {
  echo "Entering withPermissions"
  // `def` keeps the map method-local. Without it the variable leaks into
  // the script binding as a global, so concurrent stages calling this
  // helper would overwrite each other's saved permissions.
  def permissionsMap = [:]
  listOfFiles.each {
    // remember the original mode before switching to the target one
    permissionsMap[it] = getPermissions(it)
    setPermissions(it, perm)
  }
  try {
    cl()
  } finally {
    // always restore the original permissions, even if cl() threw
    permissionsMap.each {
      setPermissions(it.key, it.value)
    }
    echo "Leaving withPermissions"
  }
}

/* Returns the permissions of a file via stat
 *
 * The octal mode (e.g. "755") is returned without trailing newline.
 * @param file The file to get the permissions for
 */
def getPermissions(file) {
  def mode = sh(script: "stat -c %a $file", returnStdout: true)
  return mode.trim()
}

/* Set permissions of a file
 * @param file The file to set the permissions for
 * @param mode permissions to set for the file
 */
def setPermissions(file, mode) {
  return sh(script: "chmod $mode $file", returnStdout: true)
}

/* Helper that returns files and directories elektra tests could write to
 *
 * Looks the paths up in the global CMAKE_FLAGS_BASE map.
 */
def getElektraWritableFiles() {
  return ['KDB_DB_SYSTEM', 'KDB_DB_SPEC', 'KDB_DB_HOME'].collect {
    CMAKE_FLAGS_BASE.get(it)
  }
}

/* Create directories in listOfDirs
 *
 * `|| /bin/true` keeps the step green even if mkdir fails
 * (e.g. insufficient permissions).
 * @param listOfDirs a List of directory paths to be created
 */
def ensureDirsExist(listOfDirs) {
  for (dirPath in listOfDirs) {
    sh "mkdir -p $dirPath || /bin/true"
  }
}


/* Detect if pipeline was aborted
 *
 * Depending on which part of the pipeline was interrupted a different Exception
 * is thrown. This wrapper makes sure a UserInterruptedException is thrown
 * regardless of pipeline state.
 *
 * see: https://issues.jenkins-ci.org/browse/JENKINS-34376
 *
 * @param c the closure to run under interruption detection
 */
def detectInterruption(Closure c) {
  try {
    c()
  } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException fie) {
    // this ambiguous condition means a user probably aborted
    if (!fie.message) {
        throw new UserInterruptedException(fie)
    } else {
        throw fie
    }
  } catch (hudson.AbortException ae) {
    // this ambiguous condition means during a shell step, user probably aborted
    // (exit code 143 = 128 + SIGTERM, sent by Jenkins on abort)
    if (ae.getMessage().contains('script returned exit code 143') ||
        ae.getMessage().contains('Queue task was cancelled')) {
        throw new UserInterruptedException(ae)
    } else {
        throw ae
    }
  }
}

/* Marker exception thrown by detectInterruption to signal a user-initiated
 * abort; wraps the original exception as its cause.
 */
class UserInterruptedException extends Exception {
    UserInterruptedException(e) {
        super(e)
    }
}
