This repository was archived by the owner on Oct 8, 2025. It is now read-only.
Merged
24 changes: 12 additions & 12 deletions extras/jenkins/AWS/Jenkinsfile
@@ -78,7 +78,7 @@ pipeline {
DEBIAN_FRONTEND=noninteractive apt -y upgrade
# Make sure our deps are installed
DEBIAN_FRONTEND=noninteractive apt -y install figlet openjdk-11-jdk make docker.io
'''
}
}

@@ -95,7 +95,7 @@ pipeline {
sh '''
# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkaws${BUILD_NUMBER} --force --yes \\;
'''
}
}

@@ -108,7 +108,7 @@ pipeline {

sh '''
$WORKSPACE/bin/setup_venv.sh
'''

}
}
@@ -122,7 +122,7 @@ pipeline {

sh '''
$WORKSPACE/bin/aws_write_creds.sh
'''

}
}
@@ -156,7 +156,7 @@ pipeline {
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenkaws${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set aws:profile "${AWS_PROFILE}" -C pulumi/python/config -s marajenkaws${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set aws:region "${AWS_DEFAULT_REGION}" -C pulumi/python/config -s marajenkaws${BUILD_NUMBER}
'''
}
}

@@ -171,7 +171,7 @@ pipeline {
sh '''
echo "${NGINX_JWT}" > $WORKSPACE/extras/jwt.token
$WORKSPACE/bin/start_aws.sh
'''
}
}

@@ -188,8 +188,8 @@ pipeline {

sh '''
$WORKSPACE/bin/destroy.sh
-find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkaws${BUILD_NUMBER} --force --yes \;
+find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkaws${BUILD_NUMBER} --force --yes \\;
'''
}
}

@@ -204,10 +204,10 @@ pipeline {
*/

sh '''
# Destroy our partial build...
$WORKSPACE/bin/destroy.sh || true
# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkaws${BUILD_NUMBER} --force --yes \\;
'''
}
}
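A note on the Pulumi CLI flags repeated throughout these stages: -C (--cwd) runs the command from the named project directory and -s (--stack) targets a specific stack, which is why every project gets the run-specific stack selected and configured separately. A minimal sketch, with an illustrative stack name:

```sh
# Select (or create) the run-specific stack in one Pulumi project...
pulumi stack select --create mystack42 -C pulumi/python/config

# ...then write a config value into that same project/stack pair.
pulumi config set kic-helm:helm_timeout "600" -C pulumi/python/config -s mystack42
```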
24 changes: 9 additions & 15 deletions extras/jenkins/DigitalOcean/Jenkinsfile
@@ -76,7 +76,7 @@ pipeline {
doctl auth init -t $DIGITALOCEAN_TOKEN
# Fix perms for the snap....
snap connect doctl:kube-config
'''
}
}

@@ -140,7 +140,7 @@ pipeline {
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenkdo${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set digitalocean:token "${DO_TOKEN}" --plaintext -C pulumi/python/config -s marajenkdo${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set domk8s:k8s_version "latest" -C pulumi/python/config -s marajenkdo${BUILD_NUMBER}
'''

}
}
@@ -176,15 +176,9 @@ pipeline {
find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir pulumi stack rm marajenkdo${BUILD_NUMBER} --force --yes \\;
'''
}
-/*
- * Clean up the environment; this includes running the destroy script to remove our pulumi resources and
- * destroy the deployed infrastructure in Digital Ocean.
- *
- * After that completes, we remove the pulumi stack from the project with the find command; this is because
- * we need to delete the stack in each project it's been instantiated in.
- */
}
}

post {
failure {

@@ -194,11 +188,11 @@ pipeline {
*/

sh '''
-# Destroy our partial build...
-$WORKSPACE/bin/destroy.sh || true
-# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \;
+# Destroy our partial build...
+$WORKSPACE/bin/destroy.sh || true
+# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
+find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \\;
'''

Contributor:
Didn't we remove the double backslash in a previous PR? Just want to make sure that this is what we want. Also curious what the double backslash means?

Contributor Author:
This was an interesting journey. If you pick up that line with \\ and run it directly (as in the UDF scripts), the command fails because find wants to see \; at the end of the line. \\; escapes the \ and leaves you with ;, which the shell interprets as the end of the command, so the -execdir never runs.

So what I noticed happening was stacks not being cleaned up; I saw the \\, tested it locally, and thought "Aha! That is the issue! I shall fix that!"

Well, the fix worked for the scripting but bombed out in Jenkins, because \ is a reserved character in Groovy, the scripting language Jenkins uses. That led me to the actual problem: I was calling pulumi without giving a path, which in Jenkins meant "command not found", but the || true meant the failure was never reported.

So this should fix all the issues above, and if my past PRs are any indication, I'm sure this will introduce another bug...
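A minimal sketch of the escaping behavior described in this thread (the stage name and find arguments are illustrative):

```groovy
pipeline {
    agent any
    stages {
        stage('escape-demo') {
            steps {
                // Groovy '''...''' strings still process backslash escapes,
                // so the \\; below reaches the generated shell script as \;,
                // which is the terminator find -execdir expects. A hand-run
                // shell script would write \; directly, but in a Jenkinsfile
                // the backslash must itself be escaped for Groovy.
                sh '''
                    find . -maxdepth 2 -type f -name Pulumi.yaml -execdir pwd \\;
                '''
            }
        }
    }
}
```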
}
}
}
}
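The other failure mode described above, an unqualified pulumi call being silently swallowed, can be sketched as follows (stack name illustrative; the venv path is the one used throughout this PR):

```sh
# pulumi is installed only in the project venv, so a bare "pulumi" is not
# on PATH in a fresh Jenkins agent shell; each -execdir invocation fails
# with "command not found" and the stack quietly survives.
find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir pulumi stack rm mystack42 --force --yes \;

# Fully qualifying the binary, as these Jenkinsfiles now do, avoids that:
find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm mystack42 --force --yes \;
```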
2 changes: 1 addition & 1 deletion extras/jenkins/K3S/Jenkinsfile
@@ -228,7 +228,7 @@ pipeline {
/usr/local/bin/k3s-killall.sh || true
/usr/local/bin/k3s-uninstall.sh || true
# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \;
+find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \\;
'''
}
}
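These cleanup lines rely on a property of -execdir worth spelling out: unlike -exec, it runs the command from the directory containing each matched file, so pulumi stack rm executes inside every project the stack was instantiated in. A small illustration:

```sh
# For matches like ./pulumi/python/config/Pulumi.yaml and
# ./pulumi/python/kubernetes/applications/sirius/Pulumi.yaml,
# the command below runs once inside each project directory:
find . -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir pwd \;
# Replacing "pwd" with "pulumi stack rm ..." removes the stack per project.
```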
24 changes: 12 additions & 12 deletions extras/jenkins/Linode/Jenkinsfile
@@ -69,23 +69,23 @@ pipeline {
# Create the directory for the kubeconfig
mkdir -p $HOME/.kube || true
chmod 777 $HOME/.kube || true
'''
}
}

stage('Cleaning Up') {
steps {

/*
- * Run a find and check for any stacks that currently exist with our generated stack name; this should not
- * happen in normal operation, but could potentially happen if things break so better safe than sorry.
+ * This is currently empty since we are building a new executor for each run. However, we keep this stage
+ * here for anyone who wants to add cleanup steps for their environment.
*
* Other cleanup related functions can be placed here as well.
*/

sh '''
-# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenklke${BUILD_NUMBER} --force --yes \\;
+# Just return...
+true
'''

Contributor:
nit: I think this comment isn't adding anything the better comment above doesn't.

Contributor Author:
Fixed in the last push. I also made a note to go through the Jenkinsfiles and do a cleanup, with the idea of writing it up either as an in-repo note or as a blog post...
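As a hedged example of the kind of environment-specific step the comment above invites (nothing like this exists in the PR):

```sh
# e.g. clear artifacts a previous run may have left on a reused agent
rm -f $HOME/.kube/config || true
rm -rf $WORKSPACE/pulumi/python/venv || true
```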

}
@@ -132,7 +132,7 @@ pipeline {
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:adminpass "password" -C pulumi/python/config -s marajenklke${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenklke${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set linode:token "${LINODE_TOKEN}" --plaintext -C pulumi/python/config -s marajenklke${BUILD_NUMBER}
'''

}
}
@@ -165,7 +165,7 @@ pipeline {

sh '''
PATH=$WORKSPACE/pulumi/python/venv/bin:$PATH $WORKSPACE/bin/destroy.sh
-find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir pulumi stack rm marajenklke${BUILD_NUMBER} --force --yes \\;
+find $WORKSPACE -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir pulumi stack rm marajenklke${BUILD_NUMBER} --force --yes \\;
'''
}
}
@@ -180,11 +180,11 @@ pipeline {
*/

sh '''
-# Destroy our partial build...
-$WORKSPACE/bin/destroy.sh || true
-# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \;
+# Destroy our partial build...
+$WORKSPACE/bin/destroy.sh || true
+# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
+find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenklke${BUILD_NUMBER} --force --yes \\;
'''
}
}
}
91 changes: 38 additions & 53 deletions extras/jenkins/MicroK8s/Jenkinsfile
@@ -71,34 +71,25 @@ pipeline {
DEBIAN_FRONTEND=noninteractive apt -y install figlet openjdk-11-jdk make docker.io
# Make sure our kubeconfig dir exists…
mkdir $HOME/.kube || true
'''
}
}

stage('Cleaning Up') {
steps {

/*
- * Run a find and check for any stacks that currently exist with our generated stack name; this should not
- * happen in normal operation, but could potentially happen if things break so better safe than sorry.
+ * This is currently empty since we are building a new executor for each run. However, we keep this stage
+ * here for anyone who wants to add cleanup steps for their environment.
 *
- * This function also tries to remove both K3S and Microk8s if they are found on the host; this is because we
- * will be installing Microk8s and we want to both make sure we are removing any previous installations as well
- * as ensuring this Jenkins Agent does not already have a Microk8s installation on it.
 * Other cleanup related functions can be placed here as well.
 */

-sh '''
-# Reset our K3S Environment
-/usr/local/bin/k3s-killall.sh || true
-/usr/local/bin/k3s-uninstall.sh || true
-# Reset our Microk8s Environment; true if it’s not there
-microk8s reset --destroy-storage || true
-# True if it’s not there…
-snap remove microk8s || true
-# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \\;
-'''
+sh '''
+# Just return...
+true
+'''
}
}

stage('Microk8s Setup') {
@@ -115,7 +106,7 @@ pipeline {
snap install microk8s --classic --channel=1.23/stable
microk8s.enable storage dns helm3
microk8s.enable metallb 192.168.100.100/30
'''
}
}

@@ -129,7 +120,7 @@ pipeline {

sh '''
microk8s.config > $HOME/.kube/config
'''
}
}

@@ -142,7 +133,7 @@ pipeline {

sh '''
$WORKSPACE/bin/setup_venv.sh
'''
}
}

@@ -159,21 +150,21 @@ pipeline {
*/

sh '''
echo "PULUMI_STACK=marajenk${BUILD_NUMBER}" > $WORKSPACE/config/pulumi/environment
$WORKSPACE/pulumi/python/venv/bin/pulumi stack select --create marajenk${BUILD_NUMBER} -C pulumi/python/config
$WORKSPACE/pulumi/python/venv/bin/pulumi stack select --create marajenk${BUILD_NUMBER} -C pulumi/python/kubernetes/applications/sirius
$WORKSPACE/pulumi/python/venv/bin/pulumi config set certmgr:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kic-helm:fqdn "marajenks${BUILD_NUMBER}.zathras.io" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kic-helm:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:cluster_name "microk8s-cluster" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:infra_type "kubeconfig" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:kubeconfig "$HOME/.kube/config" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set logagent:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set logstore:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:adminpass "password" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenk${BUILD_NUMBER}
'''
echo "PULUMI_STACK=marajenkmk8s${BUILD_NUMBER}" > $WORKSPACE/config/pulumi/environment
$WORKSPACE/pulumi/python/venv/bin/pulumi stack select --create marajenkmk8s${BUILD_NUMBER} -C pulumi/python/config
$WORKSPACE/pulumi/python/venv/bin/pulumi stack select --create marajenkmk8s${BUILD_NUMBER} -C pulumi/python/kubernetes/applications/sirius
$WORKSPACE/pulumi/python/venv/bin/pulumi config set certmgr:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kic-helm:fqdn "marajenkmk8ss${BUILD_NUMBER}.zathras.io" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kic-helm:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:cluster_name "microk8s-cluster" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:infra_type "kubeconfig" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set kubernetes:kubeconfig "$HOME/.kube/config" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set logagent:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set logstore:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:adminpass "password" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
$WORKSPACE/pulumi/python/venv/bin/pulumi config set prometheus:helm_timeout "600" -C pulumi/python/config -s marajenkmk8s${BUILD_NUMBER}
'''
}
}

@@ -188,7 +179,7 @@ pipeline {
sh '''
echo $NGINX_JWT > $WORKSPACE/extras/jwt.token
$WORKSPACE/bin/start_kube.sh
'''
}
}

@@ -209,14 +200,8 @@ pipeline {
microk8s reset --destroy-storage || true
# True if it’s not there…
sudo snap remove microk8s || true
-find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \\;
-# This is a hack to allow additional commands to be issued following cleanup. This is needed because the VMs
-# that currently run as agents for K3S and Microk8s deployments need to be rebooted following some number of
-# runs due to zombie processes and other issues. Long term we want to deploy these VMs via IaC so they only
-# exist for the lifetime of the project. We do it this way in order to provide some flexibility for the
-# Jenkins configuration.
-${POSTRUN_CMD- true}
+find . -mindepth 2 -maxdepth 6 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkmk8s${BUILD_NUMBER} --force --yes \\;
'''
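The removed ${POSTRUN_CMD- true} line uses shell default expansion: with the dash form, the default applies only when the variable is unset. A short demonstration (the configured command is illustrative):

```sh
# Unset: the line expands to the no-op "true", so cleanup always succeeds.
unset POSTRUN_CMD
${POSTRUN_CMD- true}

# Set on the agent (e.g. to reboot it after some number of runs), the same
# line runs the configured command instead:
POSTRUN_CMD='echo agent reboot requested'
${POSTRUN_CMD- true}
```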

}
}
@@ -233,14 +218,14 @@ pipeline {
*/

sh '''
-# Destroy our partial build...
-$WORKSPACE/bin/destroy.sh || true
-# Reset our Microk8s Environment; true if it’s not there
-microk8s reset --destroy-storage || true
-# True if it’s not there…
-snap remove microk8s || true
-# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
-find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenk${BUILD_NUMBER} --force --yes \;
+# Destroy our partial build...
+$WORKSPACE/bin/destroy.sh || true
+# Reset our Microk8s Environment; true if it’s not there
+microk8s reset --destroy-storage || true
+# True if it’s not there…
+snap remove microk8s || true
+# Clean up the Pulumi stack if it exists for our run - which it shouldn\'t, but you never know.
+find $WORKSPACE -mindepth 2 -maxdepth 7 -type f -name Pulumi.yaml -execdir $WORKSPACE/pulumi/python/venv/bin/pulumi stack rm marajenkmk8s${BUILD_NUMBER} --force --yes \\;
'''
}
}
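This file spells the new stack name out in every stage; a hedged refactoring sketch (not part of this PR) would define it once in a declarative environment block and let each sh step expand it at run time:

```groovy
pipeline {
    agent any
    environment {
        // Defined once; exported to every sh step as $PULUMI_STACK.
        PULUMI_STACK = "marajenkmk8s${BUILD_NUMBER}"
    }
    stages {
        stage('configure') {
            steps {
                // '''-strings do not interpolate in Groovy, so the shell,
                // not Groovy, expands ${PULUMI_STACK} below.
                sh '''
                    $WORKSPACE/pulumi/python/venv/bin/pulumi stack select --create ${PULUMI_STACK} -C pulumi/python/config
                '''
            }
        }
    }
}
```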