chore(openshift): Added Jenkinsfiles and Nginx files (TV-82)
committed by Erik Tiekstra
parent 184853b10c
commit f483ae77f2
nginx/nginx.template (new file, 86 lines)
@@ -0,0 +1,86 @@
# For more information on configuration, see:
# * Official English Documentation: http://nginx.org/en/docs/
# * Official Russian Documentation: http://nginx.org/ru/docs/

worker_processes auto;
error_log /var/opt/rh/rh-nginx112/log/nginx/error.log;
pid /var/opt/rh/rh-nginx112/run/nginx/nginx.pid;

# Load dynamic modules. See /opt/rh/rh-nginx112/root/usr/share/doc/README.dynamic.
include /opt/rh/rh-nginx112/root/usr/share/nginx/modules/*.conf;

events {
    worker_connections 1024;
}

http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/opt/rh/rh-nginx112/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    gzip on;
    gzip_types application/xml application/json text/css text/javascript application/javascript;
    gzip_vary on;
    gzip_comp_level 6;
    gzip_min_length 500;

    include /etc/opt/rh/rh-nginx112/nginx/mime.types;
    default_type application/octet-stream;

    # Load modular configuration files from the /etc/nginx/conf.d directory.
    # See http://nginx.org/en/docs/ngx_core_module.html#include
    # for more information.
    include /opt/app-root/etc/nginx.d/*.conf;

    server {
        listen 8080 default_server;
        listen [::]:8080 default_server;
        server_name _;
        root /opt/app-root/src;

        # Load configuration files for the default server block.
        include /opt/app-root/etc/nginx.default.d/*.conf;

        location / {
            try_files $uri $uri/ /index.html;
            expires -1;
        }

        location ~* \.(?:jpg|jpeg|gif|png|ico|woff2)$ {
            expires 1M;
            add_header Cache-Control "public";
        }

        location ~* \.(?:js|json|css)$ {
            add_header Cache-Control "no-cache, public, must-revalidate, proxy-revalidate";
        }

        underscores_in_headers on;
        $RESOLVER

        # JWT TOKEN
        location /token/jwt/rest/idp/v0/dafa {
            proxy_pass $JWT_URL;
        }

        error_page 404 /404.html;
        location = /40x.html {
        }

        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
        }
    }
}
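A quick way to sanity-check the cache headers these location blocks set once the image is running; the port mapping and asset paths below are examples only, not part of the commit:

    curl -sI http://localhost:8080/index.html | grep -iE 'cache-control|expires'        # "location /": expires -1, so the SPA entry point is never cached
    curl -sI http://localhost:8080/assets/logo.png | grep -iE 'cache-control|expires'   # image location: one-month public cache
    curl -sI http://localhost:8080/main.js | grep -i cache-control                      # js/json/css location: no-cache, must-revalidate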
nginx/template.sh (new file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash

export JWT_URL
export RESOLVER

envsubst '${JWT_URL} ${RESOLVER} ' < /usr/share/container-scripts/nginx/nginx-start/nginx.template > /etc/nginx/nginx.conf
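template.sh is what turns the $RESOLVER and $JWT_URL placeholders in nginx.template into real directives before nginx starts. A minimal local sketch of that substitution, with made-up values (the real ones come from the container environment):

    export RESOLVER='resolver 172.30.0.10 valid=30s;'    # example value only
    export JWT_URL='http://jwt-idp.example.svc:8080/'    # example value only
    envsubst '${JWT_URL} ${RESOLVER} ' < nginx/nginx.template > /tmp/nginx.conf
    # /tmp/nginx.conf now has the resolver directive where $RESOLVER stood, and
    # "proxy_pass http://jwt-idp.example.svc:8080/;" inside the /token/jwt/... location.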
openshift/dev/Jenkinsfile (new file, vendored, 177 lines)
@@ -0,0 +1,177 @@
def cicdproject = "${CI_CD_NAMESPACE}"
def appname = "${APP_NAME}"
def devproject = "${DEV_NAMESPACE}"
def i1project = "${I1_NAMESPACE}"
def u1project = "${U1_NAMESPACE}"
def t1project = "${T1_NAMESPACE}"
def jenkinsslave = "nodejs12-agent"

pipeline {

    agent {
        node { label "${jenkinsslave}" }
    }

    environment {
        CURRENT_COMMIT = getShortCommitHash()
        BUILD_TAG = ""
    }

    stages {

        stage("Initialize") {
            steps {
                echo '### Generating build tag... ###'
                script {
                    def packageJson = readJSON file: 'package.json'
                    BUILD_TAG = "dev_v${packageJson.version}_${env.BUILD_NUMBER}_${CURRENT_COMMIT}"
                    echo '### Build tag ###'
                    echo "${BUILD_TAG}"
                }
                echo '### Build tag generated! ###'
            }
        }

        stage("Install dependencies") {
            environment {
                NEXUS_CREDS = "${env.NEXUS_USERNAME}:${env.NEXUS_PASSWORD}"
            }
            steps {
                echo '### Installing dependencies... ###'
                sh '''
                    ENCODED=$(echo -n "${NEXUS_CREDS}" | openssl base64)
                    CACHE_DIRECTORY=/home/jenkins/.npm/cache
                    mkdir -p ${CACHE_DIRECTORY}
                    echo "_auth=${ENCODED}" >> .npmrc
                    # set -x
                    cat .npmrc

                    # Pull from the cache if it exists
                    (
                        # Fail if any step fails
                        set -e
                        # Remove line 3 from package-lock.json, which contains the package.json version. Store a backup.
                        # We only care about the dependencies, not the version.
                        sed -i.bak -e '3d' package-lock.json
                        # Hash the package-lock.json file
                        sha1sum package-lock.json | tr -s " " | awk '{print $1}' > hashed.pkg-lock
                        # Restore package-lock.json with the version number intact
                        mv package-lock.json.bak package-lock.json
                        # Try to get the file from the cache
                        cp ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock) node_modules.tar.gz 2> /dev/null
                        # Check whether we found the cached node_modules
                        test -f node_modules.tar.gz
                        # If we found the cached node_modules, extract the files to node_modules
                        tar -zxf node_modules.tar.gz
                        # Log that we are using the cache
                        echo "Using cached node_modules from ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)"
                    ) || true

                    # If we did not find the cached node_modules, install from the lock file
                    test -f node_modules.tar.gz || npm ci;

                    # Store the cache
                    (
                        # Fail if any step fails
                        set -e
                        # Only update the cache if we found no previous cache
                        test ! -f node_modules.tar.gz
                        # Tar the cache
                        tar -zcf node_modules.tar.gz node_modules
                        # Clean the old cache
                        rm -rf ${CACHE_DIRECTORY}/*
                        # Store the cache
                        cp node_modules.tar.gz ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)
                    ) || true
                '''
                echo '### Dependencies installed! ###'
            }
        }

        stage("Build application") {
            steps {
                echo '### Building application... ###'

                sh '''
                    npm run build -- --output-path=dist
                    cp -r nginx/* dist/.
                '''

                // Used when testing OpenShift, so that we don't need to wait for the build. Also put a # before npm ci above.
                // sh '''
                //     mkdir dist
                //     echo hello > dist/index.html
                // '''

                echo '### Application built! ###'
            }
        }

        stage('App bake') {
            steps {
                echo '### Creating image... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(devproject) {
                            openshift.selector("bc", "${ appname }").startBuild("--from-dir=./dist", "--wait=true")
                            openshift.tag("${ appname }:latest", "${ appname }:${BUILD_TAG}")
                        }
                    }
                }
                echo '### Image created! ###'
            }
        }

        stage('Deploy u1') {
            steps {
                echo '### Deploying to U1... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(u1project) {
                            openshift.raw("set image dc/${ appname } ${ appname }=docker-registry.default.svc:5000/${devproject}/${ appname }:${BUILD_TAG} --record=true --source=docker")
                            openshift.raw("annotate dc ${ appname } version=${BUILD_TAG} --overwrite=true")
                            openshift.selector("dc", "${ appname }").rollout().status();
                        }
                    }
                }
                echo '### Deployed to U1! ###'
            }
        }

        stage('Deploy i1') {
            steps {
                echo '### Deploying to I1... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(i1project) {
                            openshift.raw("set image dc/${ appname } ${ appname }=docker-registry.default.svc:5000/${devproject}/${ appname }:${BUILD_TAG} --record=true --source=docker")
                            openshift.raw("annotate dc ${ appname } version=${BUILD_TAG} --overwrite=true")
                            openshift.selector("dc", "${ appname }").rollout().status();
                        }
                    }
                }
                echo '### Deployed to I1! ###'
            }
        }

        stage('Deploy t1') {
            steps {
                echo '### Deploying to T1... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(t1project) {
                            openshift.raw("set image dc/${ appname } ${ appname }=docker-registry.default.svc:5000/${devproject}/${ appname }:${BUILD_TAG} --record=true --source=docker")
                            openshift.raw("annotate dc ${ appname } version=${BUILD_TAG} --overwrite=true")
                            openshift.selector("dc", "${ appname }").rollout().status();
                        }
                    }
                }
                echo '### Deployed to T1! ###'
            }
        }

    }
}

def getShortCommitHash() {
    return sh(returnStdout: true, script: "git log -n 1 --pretty=format:'%h'").trim()
}
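The three deploy stages above wrap the same OpenShift client plugin calls. Roughly the plain oc equivalent of one of them, with placeholder namespace, app name and tag (the pipeline fills these in from its parameters and the generated BUILD_TAG):

    APP=my-app; DEV_NS=my-dev; TARGET_NS=my-u1; TAG=dev_v1.2.3_42_ab12cd3   # placeholders only
    oc -n "$TARGET_NS" set image "dc/$APP" "$APP=docker-registry.default.svc:5000/$DEV_NS/$APP:$TAG" --record=true --source=docker
    oc -n "$TARGET_NS" annotate dc "$APP" "version=$TAG" --overwrite=true
    oc -n "$TARGET_NS" rollout status "dc/$APP"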
openshift/prod/Jenkinsfile (new file, vendored, 190 lines)
@@ -0,0 +1,190 @@
def cicdproject = "${CI_CD_NAMESPACE}"
def appname = "${APP_NAME}"
def devproject = "${DEV_NAMESPACE}"
def t2project = "${T2_NAMESPACE}"
def prodproject = "${PROD_NAMESPACE}"
def jenkinsslave = "nodejs12-agent"

pipeline {

    agent {
        node { label "${jenkinsslave}" }
    }

    environment {
        CURRENT_COMMIT = getShortCommitHash()
        BUILD_TAG = ""
    }

    stages {

        stage("Initialize") {
            steps {
                echo '### Generating build tag... ###'
                script {
                    def packageJson = readJSON file: 'package.json'
                    BUILD_TAG = "prod_v${packageJson.version}_${env.BUILD_NUMBER}_${CURRENT_COMMIT}"
                    echo '### Build tag ###'
                    echo "${BUILD_TAG}"
                }
                echo '### Build tag generated! ###'
            }
        }

        stage("Install dependencies") {
            environment {
                NEXUS_CREDS = "${env.NEXUS_USERNAME}:${env.NEXUS_PASSWORD}"
            }
            steps {
                echo '### Installing dependencies... ###'
                sh '''
                    ENCODED=$(echo -n "${NEXUS_CREDS}" | openssl base64)
                    CACHE_DIRECTORY=/home/jenkins/.npm/cache
                    mkdir -p ${CACHE_DIRECTORY}
                    echo "_auth=${ENCODED}" >> .npmrc
                    # set -x
                    cat .npmrc

                    # Pull from the cache if it exists
                    (
                        # Fail if any step fails
                        set -e
                        # Remove line 3 from package-lock.json, which contains the package.json version. Store a backup.
                        # We only care about the dependencies, not the version.
                        sed -i.bak -e '3d' package-lock.json
                        # Hash the package-lock.json file
                        sha1sum package-lock.json | tr -s " " | awk '{print $1}' > hashed.pkg-lock
                        # Restore package-lock.json with the version number intact
                        mv package-lock.json.bak package-lock.json
                        # Try to get the file from the cache
                        cp ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock) node_modules.tar.gz 2> /dev/null
                        # Check whether we found the cached node_modules
                        test -f node_modules.tar.gz
                        # If we found the cached node_modules, extract the files to node_modules
                        tar -zxf node_modules.tar.gz
                        # Log that we are using the cache
                        echo "Using cached node_modules from ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)"
                    ) || true

                    # If we did not find the cached node_modules, install from the lock file
                    test -f node_modules.tar.gz || npm ci;

                    # Store the cache
                    (
                        # Fail if any step fails
                        set -e
                        # Only update the cache if we found no previous cache
                        test ! -f node_modules.tar.gz
                        # Tar the cache
                        tar -zcf node_modules.tar.gz node_modules
                        # Clean the old cache
                        rm -rf ${CACHE_DIRECTORY}/*
                        # Store the cache
                        cp node_modules.tar.gz ${CACHE_DIRECTORY}/$(cat hashed.pkg-lock)
                    ) || true
                '''
                echo '### Dependencies installed! ###'
            }
        }

        stage("Build application") {
            steps {
                echo '### Building application... ###'
                sh '''
                    npm run build:prod -- --output-path=dist
                    cp -r nginx/* dist/.
                    ls -la dist
                '''
                echo '### Application built! ###'
            }
        }

        stage('App bake') {
            steps {
                echo '### Creating image... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(devproject) {
                            openshift.selector("bc", "${ appname }").startBuild("--from-dir=./dist", "--wait=true")
                            openshift.tag("${ appname }:latest", "${ appname }:${BUILD_TAG}")
                        }
                    }
                }

                echo '### Image created! ###'
            }
        }

        stage('Deploy T2') {
            steps {
                echo '### Deploying to T2... ###'
                script {
                    openshift.withCluster() {
                        openshift.withProject(t2project) {
                            openshift.raw("set image dc/${ appname } ${ appname }=docker-registry.default.svc:5000/${devproject}/${ appname }:${BUILD_TAG} --record=true --source=docker")
                            openshift.raw("annotate dc ${ appname } version=${BUILD_TAG} --overwrite=true")
                            openshift.selector("dc", "${ appname }").rollout().status();
                        }
                    }
                }
                echo '### Deployed to T2! ###'
            }
        }

        // A test stage for application testing will go here.

        stage('Push image to External registry') {
            agent {
                label 'skopeo-agent'
            }
            steps {
                echo '### Publishing image to external registry... ###'
                script {
                    def branchName = GIT_BRANCH.split('/')[1]
                    openshift.withCluster() {
                        openshift.withProject(cicdproject) {
                            def registry = "nexus.arbetsformedlingen.se:5555"
                            withCredentials([usernamePassword(credentialsId: "${openshift.project()}-nexus-secret", usernameVariable: "REG_USER", passwordVariable: "REG_PWD")]) {
                                sh "skopeo copy docker://docker-registry.default.svc:5000/${devproject}/${appname}:latest docker://${registry}/app-af-nexus/${appname}:${BUILD_TAG} --src-creds jenkins:\$(oc whoami -t) --dest-creds \"$REG_USER:$REG_PWD\" --src-tls-verify=false --dest-tls-verify=false --format v2s2"
                            }
                        }
                    }
                }
                echo '### Image published to external registry! ###'
            }
        }

        stage('Deploy to production') {
            steps {
                echo '### Trying to deploy to prod... ###'
                script {
                    openshift.withCluster() {
                        def api = "https://pocpconsole.arbetsformedlingen.se:443"
                        env.API = sh(script:"set +x; echo ${api}", returnStdout: true).replaceAll(/https?/, 'insecure')
                        def encodedToken = openshift.selector('secret/imagepromote-token').object().data.tokenbase64
                        env.TOKEN = sh(script:"set +x; echo ${encodedToken} | base64 --decode", returnStdout: true)
                    }

                    openshift.withCluster( env.API, env.TOKEN ) {
                        openshift.withProject(prodproject) {
                            // Wait for approval
                            timeout(time:1440, unit:'MINUTES') {
                                input message: "Go Live with ${ appname } in Production?", ok: "Confirm"
                                openshift.raw("tag nexus.arbetsformedlingen.se:5555/app-af-nexus/${ appname }:${BUILD_TAG} ${appname}:${BUILD_TAG} --insecure")
                                openshift.raw("tag ${ appname }:latest ${ appname }:${BUILD_TAG}")
                                openshift.raw("set image dc/${ appname } ${ appname }=nexus.arbetsformedlingen.se:5555/app-af-nexus/${ appname }:${BUILD_TAG} --record=true --source=docker")
                                openshift.selector("dc", "${ appname }").rollout().status();
                                echo '### Deployed to prod! ###'
                            }
                        }
                    }
                }
            }
        }

    }
}

def getShortCommitHash() {
    return sh(returnStdout: true, script: "git log -n 1 --pretty=format:'%h'").trim()
}
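Before confirming the production input step, a hedged manual check that the 'Push image to External registry' stage actually landed the tag in Nexus (app name, tag and credentials below are placeholders, not values from the commit):

    TAG=prod_v1.2.3_42_ab12cd3   # placeholder build tag
    skopeo inspect --creds "$REG_USER:$REG_PWD" --tls-verify=false \
        "docker://nexus.arbetsformedlingen.se:5555/app-af-nexus/my-app:$TAG"
    # Prints the manifest metadata (digest, created date, labels, layers) if the copy succeeded.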