// Jenkins CI/CD pipeline for the "mina-sidor-fa" application:
// install npm dependencies (with a tarball cache keyed on package-lock.json),
// build the app, bake an image in the UTV OpenShift project, then roll the
// image out to the "auto" and "sys" projects.
def cicdproject = "${CICD_NAMESPACE}"
def appname = "${APP_NAME}"
def utv_project = "${UTV_NAMESPACE}"
def auto_project = "${AUTO_NAMESPACE}"
def sys_project = "${SYS_NAMESPACE}"
// Label of the Jenkins agent pod used for every stage.
def jenkinsslave = "nodejs15-agent"
// Build output directory; also the --from-dir context for the image bake.
def dist_path = "dist/apps/mina-sidor-fa"

pipeline {

  agent {
      node {label "${jenkinsslave}"}
  }

  environment {
      // Short hash of the commit under build; becomes part of the image tag.
      CURRENT_COMMIT = getShortCommitHash()
      // NOTE(review): intentionally shadows Jenkins' built-in BUILD_TAG.
      // The real value is computed in the "Initialize" stage below.
      BUILD_TAG = ""
  }

  stages {

    stage("Initialize") {
      steps {
        echo '### Generating build tag... ###'
        script {
          // Tag format: dev_v<package.json version>_<build number>_<short commit>
          def packageJson = readJSON file: 'package.json'
          BUILD_TAG = "dev_v${packageJson.version}_${env.BUILD_NUMBER}_${CURRENT_COMMIT}"
          echo '### Build tag ###'
          echo "This is the build tag: ${BUILD_TAG}"
        }
        echo '### Build tag generated! ###'
      }
    }

    stage("Install dependencies") {
      environment {
        // user:password pair for the Nexus npm registry; base64-encoded below
        // into the _auth entry of .npmrc.
        NEXUS_CREDS = "${env.NEXUS_USERNAME}:${env.NEXUS_PASSWORD}"
      }
      steps {
        echo '### Installing dependencies... ###'
        sh '''
          # Jenkins runs sh steps with -x by default; disable tracing so the
          # credential-handling lines below are not echoed into the build log.
          set +x
          # printf instead of "echo -n": POSIX sh leaves "echo -n" behaviour
          # implementation-defined, and a shell that prints a literal "-n "
          # would corrupt the encoded credentials.
          ENCODED=$(printf '%s' "${NEXUS_CREDS}" | openssl base64)
          CACHE_DIRECTORY=/home/jenkins/.npm/cache
          mkdir -p ${CACHE_DIRECTORY}
          echo "_auth=${ENCODED}" >> .npmrc

          # Do NOT cat .npmrc here: it contains the (merely base64-encoded)
          # registry credentials and printing it would leak them into the log.

          # Pull from cache if it exists
          (
            # Fail if any step fail
            set -e
            # Remove line 3 from package-lock, which contain the package.json version. Store backup.
            # We only care about dependencies, not the version
            sed -i.bak -e '3d' package-lock.json
            # Hash the package-lock.json file
            sha1sum package-lock.json | tr -s " " | awk '{print $1}' > hashed.pkg-lock
            # Restore package-lock.json with version number intact
            mv package-lock.json.bak package-lock.json
            # Try to get the file from cache
            cp ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock) node_modules.tar.gz 2> /dev/null
            # Check if we found the cached node_modules
            # If we found the cached node_modules, extract the files to node_modules
            (test -f node_modules.tar.gz && tar -zxf node_modules.tar.gz && echo "Using cached node_modules from ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)") || echo "No cached node_modules.tar.gz found"
            # Echo to the logs stating that we are using cache
          ) || true

          # If we did not find the cached node_modules, install from the lock
            test -f node_modules.tar.gz || npm ci;

          # Store cache
          (
            # Fail if any step fail
            set -e
            # Only update the cache if we found no previous cache
            test ! -f node_modules.tar.gz
            # Tar the cache
            tar -zcf node_modules.tar.gz node_modules
            # Clean old cache
            rm -rf ${CACHE_DIRECTORY}/*
            # Store the cache
            cp node_modules.tar.gz ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)
          ) || true
        '''
        echo '### Dependencies installed! ###'
      }
    }

    stage("Build application") {
      environment {
        // Destination inside the build output where the nginx config is staged.
        NGINX_PATH = "${dist_path}/."
        // Re-export the script-level BUILD_TAG so the sh step below sees it.
        BUILD_TAG = "${BUILD_TAG}"
      }
      steps {
        echo '### Building application... ###'

         sh '''
          npm run build-os -- --config api --version ${BUILD_TAG}
          cp -r nginx/* ${NGINX_PATH}
        '''

        echo '### Application built! ###'
      }
    }
    stage('App bake') {
      steps {
        echo '### Creating image... ###'
        script {
          openshift.withCluster() {
            openshift.withProject(utv_project) {
              // Binary build from the dist directory; tag the result with the
              // generated BUILD_TAG so deploy stages can reference it.
              openshift.selector("bc", "${ appname }").startBuild("--from-dir=${dist_path}", "--wait=true")
              openshift.tag("${ appname }:latest", "${ appname }:${BUILD_TAG}")
            }
          }
        }
        echo '### Image created! ###'
      }
    }

    stage('Deploy to "auto"') {
      steps {
        echo '### Deploying to "auto"... ###'
        script {
          openshift.withCluster() {
            openshift.withProject(auto_project) {
              // Point the dc at the freshly baked image in the UTV registry,
              // record the version as an annotation, and wait for the rollout.
              openshift.raw("set image dc/${ appname } ${ appname }=docker-registry.default.svc:5000/${utv_project}/${ appname }:${BUILD_TAG} --record=true --source=docker")
              openshift.raw("annotate dc ${ appname }  version=${BUILD_TAG} --overwrite=true")
              openshift.selector("dc", "${ appname }").rollout().status();
            }
          }
        }
        echo '### Deployed to "auto"! ###'
      }
    }

    stage('Deploy "sys"') {
      steps {
        echo '### Deploying to "sys"... ###'
        script {
          openshift.withCluster() {
            openshift.withProject(sys_project) {
              // Same promotion as the "auto" stage, targeting the sys project.
              openshift.raw("set image dc/${ appname }  ${ appname }=docker-registry.default.svc:5000/${utv_project}/${ appname }:${BUILD_TAG} --record=true --source=docker")
              openshift.raw("annotate dc ${ appname }  version=${BUILD_TAG} --overwrite=true")
              openshift.selector("dc", "${ appname }").rollout().status();
            }
          }
        }
        echo '### Deployed to "sys"! ###'
      }
    }
  }
}

// Returns the abbreviated hash of the checked-out HEAD commit,
// with surrounding whitespace stripped.
def getShortCommitHash() {
  def shortHash = sh(script: "git log -n 1 --pretty=format:'%h'", returnStdout: true)
  return shortHash.trim()
}
