Jenkins declarative pipeline. Conditional statement in post block - if-statement

I have a Jenkins pipeline and need to send emails when a build succeeds: an email for every branch to maillist-1, plus an additional email to maillist-master only for builds of the master branch.
I tried using both the `if` statement and the `when` directive, but both of them fail in the post block.
// Failing example from the question: Declarative Pipeline does not allow a
// bare Groovy if-statement directly inside a post { success { ... } } block,
// so this snippet does not compile as written.
pipeline {
agent ...
stages {...}
post{
success{
archiveArtifacts: ...
// FAILS here: a plain 'if' is not a valid declarative step; it must be
// wrapped in a script { } block.
if( env.BRANCH_NAME == 'master' ){
// Extra notification for master builds only.
emailext( to: 'maillist-master#domain.com'
, replyTo: 'maillist-master#domain.com'
, subject: 'Jenkins. Build succeeded :^) 😎'
, body: params.EmailBody
, attachmentsPattern: '**/App*.tar.gz'
)
}
// Notification sent for every successful build, regardless of branch.
emailext( to: 'maillist-1#domain.com'
, replyTo: 'maillist-1#domain.com'
, subject: 'Jenkins. Build succeeded :^) 😎'
, body: params.EmailBody
, attachmentsPattern: '**/App*.tar.gz'
)
}
}
}
How can the wanted behavior be achieved?

It's true that you currently can't use `when` in the global post block; `when` must be used inside a stage directive.
The logical choice is to use `if`/`else` instead, but you'll need a `script` block inside the declarative pipeline to make this work:
// Working answer: wrap the branch check in a script { } block, the
// declarative-pipeline escape hatch that allows imperative Groovy
// (if/else) inside post conditions.
pipeline {
agent any
parameters {
// A string parameter stands in for the branch so the example can run
// standalone; a multibranch job would use env.BRANCH_NAME instead.
string(defaultValue: "master", description: 'Which branch?', name: 'BRANCH_NAME')
}
stages {
stage('test'){
steps {
echo "my branch is " + params.BRANCH_NAME
}
}
}
post {
// 'success' runs only after the whole pipeline finished successfully.
success{
script {
// Imperative branch routing - legal only inside script { }.
if( params.BRANCH_NAME == 'master' ){
echo "mail list master"
}
else {
echo "mail list others"
}
}
}
}
}
Output when parameter is master:
[Pipeline] {
[Pipeline] stage
[Pipeline] { (test)
[Pipeline] echo
my branch is master
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Declarative: Post Actions)
[Pipeline] script
[Pipeline] {
[Pipeline] echo
mail list master
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // node
[Pipeline] End of Pipeline
Finished: SUCCESS
output when parameter is 'test':
[Pipeline] {
[Pipeline] stage
[Pipeline] { (test)
[Pipeline] echo
my branch is test
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Declarative: Post Actions)
[Pipeline] script
[Pipeline] {
[Pipeline] echo
mail list others
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // node
[Pipeline] End of Pipeline
Finished: SUCCESS
Or to make it even more clean you can call the script as a function:
// Cleaner variant: delegate the branch routing to a plain Groovy method
// defined after the pipeline block; a method call is valid directly inside
// a post condition, so no script { } wrapper is needed here.
pipeline {
agent any
parameters {
string(defaultValue: "master", description: 'Which branch?', name: 'BRANCH_NAME')
}
stages {
stage('test'){
steps {
echo "my branch is " + params.BRANCH_NAME
}
}
}
post {
success{
// Branch-dependent notification logic lives in getMailList() below.
getMailList(params.BRANCH_NAME)
}
}
}
// Announces which mailing list applies to the given branch name:
// master builds get the dedicated master list, all others share one.
def getMailList(String branch){
    if( branch != 'master' ){
        echo "mail list others"
        return
    }
    echo "mail list master"
}

Related

Docker Image not uploading to ecr from jenkins

I created a CI/CD pipeline which builds a Docker image and pushes the Docker image to Amazon ECR.
The Docker image builds perfectly but does not upload to ECR.
After building the image, it keeps retrying to upload the image to ECR but exits after several attempts.
pipeline {
agent any
environment {
// Docker-plugin credential spec: 'ecr:<region>:<jenkins-credential-id>'.
registryCredential = 'ecr:us-east-1:aws-cred'
// Full repository URI used to tag the built image.
appRegistry = "xxxxxx.dkr.ecr.us-east-1.amazonaws.com/myappimg"
// Registry endpoint passed to docker.withRegistry (needs https:// scheme).
// NOTE(review): the redacted hosts in appRegistry and vprofileRegistry
// differ in length - verify both point at the SAME registry/account; a
// mismatch would make the push retry and eventually fail.
vprofileRegistry = "https://xxxxxxx.dkr.ecr.us-east-1.amazonaws.com"
dockerImage = ''
}
stages{
stage('Fetch code') {
steps{
// Check out the 'docker' branch containing the Dockerfile and sources.
git branch: 'docker', url: 'https://github.com/xxxxxxxxx/xxxxxxxxxxx.git'
}
}
stage('Build') {
steps {
sh 'mvn clean install -DskipTests'
}
}
stage('Test'){
steps {
sh 'mvn test'
}
}
stage('Build App Image') {
steps {
script {
// Tag with the Jenkins build number; context is the multistage dir.
dockerImage = docker.build( appRegistry + ":$BUILD_NUMBER", "./Docker-files/app/multistage/")
}
}
}
stage('Upload App Image') {
steps{
script {
// Authenticates against ECR via the plugin credential, then pushes
// both a build-number tag and a moving 'latest' tag.
// NOTE(review): repeated retry-then-abort on push is typically an
// auth/permission problem (e.g. missing ecr:GetAuthorizationToken or
// push permissions on the IAM identity) - verify the IAM policy.
docker.withRegistry( vprofileRegistry, registryCredential ) {
dockerImage.push("$BUILD_NUMBER")
dockerImage.push('latest')
}
}
}
}
}
}

Using Jenkins and Terraform EC2 auto deploy issues

I am new to DevOps. I am trying to deploy EC2 instances using the following script. It is failing at the Terraform initialization stage. I do have `terraform init` in my code. Where is the issue?
pipeline {
parameters {
// Terraform workspace to target for this run.
string(name: 'environment', defaultValue: 'terraform', description: 'Workspace/environment file to use for deployment')
// When true, skips the manual plan-review gate below.
booleanParam(name: 'autoApprove', defaultValue: false, description: 'Automatically run apply after generating plan?')
}
environment {
// Injected from the Jenkins credential store for the AWS provider.
AWS_ACCESS_KEY_ID = credentials('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
}
agent any
options {
timestamps ()
}
stages {
stage('checkout') {
steps {
script{
// Clone into ./terraform so the later cd paths line up.
dir("terraform")
{
git "https://github.com/Ravinderhub/Jenkins-Terraform-AWS.git"
}
}
}
}
stage('Plan') {
steps {
// Each sh step is a fresh shell, hence the repeated cd.
sh 'pwd;cd terraform/aws-instance-first-script ; terraform init -input=false'
// Single-quoted Groovy string: ${environment} is expanded by the
// SHELL, relying on Jenkins exporting the parameter as an env var
// (presumably it does - verify on this Jenkins version).
// NOTE(review): 'workspace new' fails on reruns once the workspace
// exists; the select on the next line is what actually matters.
sh 'pwd;cd terraform/aws-instance-first-script ; terraform workspace new ${environment}'
sh 'pwd;cd terraform/aws-instance-first-script ; terraform workspace select ${environment}'
sh "pwd;cd terraform/aws-instance-first-script ;terraform plan -input=false -out tfplan "
// Human-readable plan used by the Approval gate below.
sh 'pwd;cd terraform/aws-instance-first-script ;terraform show -no-color tfplan > tfplan.txt'
}
}
stage('Approval') {
// Skipped entirely when autoApprove is true.
when {
not {
equals expected: true, actual: params.autoApprove
}
}
steps {
script {
def plan = readFile 'terraform/aws-instance-first-script/tfplan.txt'
// Pauses the build until a human reviews the rendered plan.
input message: "Do you want to apply the plan?",
parameters: [text(name: 'Plan', description: 'Please review the plan', defaultValue: plan)]
}
}
}
stage('Apply') {
steps {
// Applies the exact saved plan file, so no re-plan drift is possible.
sh "pwd;cd terraform/aws-instance-first-script ; terraform apply -input=false tfplan"
}
}
}
}
There is an invalid variable in the configuration which prevents initialization.
ami_id variable defines an invalid type.
Correct that to:
# Region-to-AMI lookup consumed by the EC2 instance resource.
# The legacy quoted form `type = "map"` is deprecated since Terraform 0.12;
# use an explicit type constraint instead.
variable "ami_id" {
  type = map(string)
  default = {
    us-east-1    = "ami-035b3c7efe6d061d5"
    eu-west-2    = "ami-132b3c7efe6sdfdsfd"
    eu-central-1 = "ami-9787h5h6nsn"
  }
}

Jenkins Pipeline S3Upload hangs

I have a pipeline that hangs when it tries to upload a file to S3:
// Fetches temporary AWS credentials from the ECS task metadata endpoint
// and returns the parsed JSON map (AccessKeyId / SecretAccessKey / Token).
def get_temp_credentials() {
// 'def' keeps the variable local to this function; the original omitted it,
// leaking credentials_json into the global script binding.
def credentials_json = sh(returnStdout: true, script: "curl 169.254.170.2$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI -ss | python -mjson.tool").trim()
def c = readJSON text: credentials_json
return c
}
// Script-level holders so credentials captured on the master node are
// visible to the later stage running on the macOS node.
def AWS_ACCESS_KEY_ID
def AWS_SECRET_ACCESS_KEY
def AWS_SECRET_ACCESS_TOKEN
pipeline {
agent none
stages {
stage('GetMasterCredentials') {
agent { label 'master' }
steps {
script {
// NOTE(review): each call re-hits the metadata endpoint, so the
// three values may come from different credential rotations -
// consider calling get_temp_credentials() once and reusing the map.
AWS_ACCESS_KEY_ID=get_temp_credentials()['AccessKeyId']
AWS_SECRET_ACCESS_KEY=get_temp_credentials()['SecretAccessKey']
AWS_SECRET_ACCESS_TOKEN=get_temp_credentials()['Token']
echo "Master AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}"
}
}
}
stage('BUILD') {
agent { label 'macOS' }
steps {
echo "Mac AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}"
// Fabricate a small artifact tree to upload.
sh 'mkdir -p js'
sh 'echo "not a artifact file" > js/build.js'
sh 'echo "artifact file" > js/build.min.js'
sh 'mkdir -p css'
sh 'echo "not a artifact file" > css/build.css'
sh 'echo "artifact file" > css/build.min.css'
withEnv([
"AWS_ACCESS_KEY_ID="+"${AWS_ACCESS_KEY_ID}",
"AWS_SECRET_ACCESS_KEY="+"${AWS_SECRET_ACCESS_KEY}",
// NOTE(review): AWS SDKs read the session token from
// AWS_SESSION_TOKEN, not AWS_SECRET_ACCESS_TOKEN - with temporary
// credentials a missing session token would plausibly make the
// upload stall/retry; verify this variable name.
"AWS_SECRET_ACCESS_TOKEN="+"${AWS_SECRET_ACCESS_TOKEN}",
"AWS_DEFAULT_REGION=us-east-2"]) {
s3Upload bucket:"build-artifacts", path:'Test/js/build.min.js', file: 'js/build.min.js'
}
}
}
}
}
This example is able to move credentials from an AWS EC2 node to an on-premises node.
The only problem is that it hangs during the upload — no feedback or error message, just a job timeout.

how to invoke groovy function with Docker Registry in jenkins shared lib

I want to push an image to ECR.
stage('login_ecr') {
steps {
script {
// BUG (this is what the question is about): withDockerRegistry is a
// block-scoped step and must be given a closure body, e.g.
//   withDockerRegistry(credentialsId: ..., url: ...) { <steps> }
// Calling it without a body raises
// "java.lang.IllegalStateException: There is no body to invoke".
// The registry login only applies to steps INSIDE that closure, so the
// image build/push below would need to run within it.
withDockerRegistry(credentialsId: 'ecr:us-east-1:ecr_credentials', url: 'https://***********.dkr.ecr.us-east-1.amazonaws.com')
}
}
}
stage('build-image'){
steps{
script{
// Shared-library helper (defined outside this snippet) that builds
// the Docker image from the supplied name/tag/region.
build_docker_image (IMAGE_NAME: "${server}", IMAGE_TAG:"${TAG}",AWS_REGION: "us-east-1")
}
}
}
but I am facing the error below:
java.lang.IllegalStateException: There is no body to invoke at
org.jenkinsci.plugins.workflow.cps.CpsStepContext.newBodyInvoker(CpsStepContext.java:282)

Unable to read values from contract deployed in hyperledger blockchain

I have set up the Hyperledger blockchain locally and run the Hyperledger blockchain service within a Docker container. I am able to bring up the node successfully, and able to deploy and write to the blockchain using a sample contract.
But I couldn't read back the data from the blockchain. Below is the error message it throws. Can anyone please explain what's wrong here?
[ibc-js] Deploy Chaincode - Complete
{"query":{},"invoke":{},"details":{"deployed_name":"c123c14a65a511ee79e2a41b23726f473478d002064c01c3ce035cffa1229af083d73f1db220fc2f267b9ae31d66ce2e10113548e7abdf8812986ac3c5770a9c","func":{"invoke":["init","write"],"query":["read"]},"git_url":"https://github.com/IBM-Blockchain/learn-chaincode/finished","options":{"quiet":true,"timeout":60000,"tls":false},"peers":[{"name":"vp0-vp0...:49155","api_host":"127.0.0.1","api_port":49155,"id":"vp0","tls":false}],"timestamp":1470146338831,"users":[],"unzip_dir":"learn-chaincode-master/finished","version":"github.com/hyperledger/fabric/core/chaincode/shim","zip_url":"https://github.com/IBM-Blockchain/learn-chaincode/archive/master.zip"}}
sdk has deployed code and waited
[ibc-js] write - success: { jsonrpc: '2.0',
result:
{ status: 'OK',
message: '8b340e92-f96f-41f6-9b15-6ccb23304360' },
id: 1470146405598 }
write response: { jsonrpc: '2.0',
result:
{ status: 'OK',
message: '8b340e92-f96f-41f6-9b15-6ccb23304360' },
id: 1470146405598 }
[ibc-js] read - success: { jsonrpc: '2.0',
error:
{ code: -32003,
message: 'Query failure',
data: 'Error when querying chaincode: Error:Failed to launch chaincode spec(Could not get deployment transaction for c123c14a65a511ee79e2xxxxxxxxxxxxxxxxe7abdf8812986ac3c5770a9c - LedgerError - ResourceNotFound: ledger: resource not found)' },
id: 1470146405668 }
read response: null { name: 'query() resp error',
code: 400,
details:
{ code: -32003,
message: 'Query failure',
data: 'Error when querying chaincode: Error:Failed to launch chaincode spec(Could not get deployment transaction for c123c14a65a511ee79e2xxxxxxxxxxxxxxxxe7abdf8812986ac3c5770a9c - LedgerError - ResourceNotFound: ledger: resource not found)' } }
I have used IBM Blockchain JS for interacting with the Go contract.
Below is the Node.js code:
// Step 1 ==================================
// Load the IBM Blockchain SDK and prepare a holder for the chaincode
// handle that cb_ready() fills in once the SDK has finished loading.
// ('const'/'let' replace the original 'var' declarations; behavior is
// unchanged - none of these bindings except chaincode are reassigned.)
const Ibc1 = require('ibm-blockchain-js');
const ibc = new Ibc1(/*logger*/); // you can pass a logger such as winston here - optional
let chaincode = {};               // reassigned inside cb_ready(), hence 'let'
// ==================================
// configure ibc-js sdk
// ==================================
const options = {
  network: {
    // Single validating peer of the locally running fabric network.
    peers: [{
      "api_host": "127.0.0.1",
      "api_port": 49155,
      //"api_port_tls": 49157,
      "id": "vp4"
    }],
    users: null,
    options: { quiet: true, tls: false, maxRetry: 1 }
  },
  chaincode: {
    // Sample "learn-chaincode" contract sources fetched by the SDK.
    zip_url: 'https://github.com/IBM-Blockchain/learn-chaincode/archive/master.zip',
    unzip_dir: 'learn-chaincode-master/finished',
    git_url: 'https://github.com/IBM-Blockchain/learn-chaincode/finished'
  }
};
// Step 2 ==================================
ibc.load(options, cb_ready);
// Step 3 ==================================
// Invoked by ibc.load() when the SDK finishes loading; `cc` exposes the
// chaincode deploy/invoke helpers. Deploys the contract, writes a key,
// then reads it back after a short delay.
function cb_ready(err, cc){ //response has chaincode functions
  // The original ignored `err` entirely; bail out instead of proceeding
  // with a half-initialised SDK handle.
  if (err) {
    console.log('ibc.load error:', err);
    return;
  }
  chaincode = cc;
  console.log(JSON.stringify(cc));
  chaincode.deploy('init', ['Hi hyperledger'], null, cb_deployed);

  // Step 5 ==================================
  function cb_deployed(){
    console.log(JSON.stringify(chaincode));
    console.log('sdk has deployed code and waited');
    console.log('******** Writing to chaincode Now **********');
    chaincode.invoke.write(["mykey","Hi Ledger Systems"], function(err, data){
      if (err) console.log('write error:', err); // surface, don't swallow
      console.log('write response:', data);
      readData();
    });
  }

  // Reads back "mykey" after a delay so the write has time to be
  // committed to the ledger (consensus is asynchronous).
  function readData()
  {
    console.log('\n\n**** Waiting 7 seconds before reading **** \n\n');
    setTimeout(function () {
      console.log('\n\n**** Start reading **** \n\n');
      chaincode.invoke.read(["mykey"], function(err, data){
        if (err) console.log('read error:', err); // surface, don't swallow
        console.log('read response:', data);
      });
    }, 7000)
  }
}