Jenkins Pipeline S3Upload hangs - amazon-web-services

I have a pipeline that hangs when it tries to upload a file to S3:
def get_temp_credentials() {
    credentials_json = sh(returnStdout: true, script: "curl 169.254.170.2$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI -ss | python -mjson.tool").trim()
    def c = readJSON text: credentials_json
    return c
}
def AWS_ACCESS_KEY_ID
def AWS_SECRET_ACCESS_KEY
def AWS_SECRET_ACCESS_TOKEN
pipeline {
    agent none
    stages {
        stage('GetMasterCredentials') {
            agent { label 'master' }
            steps {
                script {
                    AWS_ACCESS_KEY_ID = get_temp_credentials()['AccessKeyId']
                    AWS_SECRET_ACCESS_KEY = get_temp_credentials()['SecretAccessKey']
                    AWS_SECRET_ACCESS_TOKEN = get_temp_credentials()['Token']
                    echo "Master AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}"
                }
            }
        }
        stage('BUILD') {
            agent { label 'macOS' }
            steps {
                echo "Mac AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}"
                sh 'mkdir -p js'
                sh 'echo "not a artifact file" > js/build.js'
                sh 'echo "artifact file" > js/build.min.js'
                sh 'mkdir -p css'
                sh 'echo "not a artifact file" > css/build.css'
                sh 'echo "artifact file" > css/build.min.css'
                withEnv([
                    "AWS_ACCESS_KEY_ID=" + "${AWS_ACCESS_KEY_ID}",
                    "AWS_SECRET_ACCESS_KEY=" + "${AWS_SECRET_ACCESS_KEY}",
                    "AWS_SECRET_ACCESS_TOKEN=" + "${AWS_SECRET_ACCESS_TOKEN}",
                    "AWS_DEFAULT_REGION=us-east-2"]) {
                    s3Upload bucket: "build-artifacts", path: 'Test/js/build.min.js', file: 'js/build.min.js'
                }
            }
        }
    }
}
This example is able to move credentials from an AWS EC2 node to an on-premise node.
The only problem is that it hangs during the upload: there is no error message or feedback, just the job timeout.
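For reference, a minimal sketch of the same withEnv block under one assumption: that the temporary token has to be exported under the AWS SDK's standard variable name AWS_SESSION_TOKEN (AWS_SECRET_ACCESS_TOKEN is not a name the default credential chain looks for), since s3Upload appears to pick up credentials from the environment here:

// Hedged sketch: same stage, but exporting the token as AWS_SESSION_TOKEN,
// the variable name the AWS SDK/CLI credential chain reads.
withEnv([
    "AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}",
    "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}",
    "AWS_SESSION_TOKEN=${AWS_SECRET_ACCESS_TOKEN}",   // assumption: session token goes here
    "AWS_DEFAULT_REGION=us-east-2"]) {
    s3Upload bucket: "build-artifacts", path: 'Test/js/build.min.js', file: 'js/build.min.js'
}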

Related

Docker image not uploading to ECR from Jenkins

I created a CI/CD pipeline which builds a Docker image and pushes it to Amazon ECR.
The Docker image builds perfectly but does not upload to ECR.
After building the image, it keeps retrying to upload the image to ECR but exits after several attempts.
pipeline {
    agent any
    environment {
        registryCredential = 'ecr:us-east-1:aws-cred'
        appRegistry = "xxxxxx.dkr.ecr.us-east-1.amazonaws.com/myappimg"
        vprofileRegistry = "https://xxxxxxx.dkr.ecr.us-east-1.amazonaws.com"
        dockerImage = ''
    }
    stages {
        stage('Fetch code') {
            steps {
                git branch: 'docker', url: 'https://github.com/xxxxxxxxx/xxxxxxxxxxx.git'
            }
        }
        stage('Build') {
            steps {
                sh 'mvn clean install -DskipTests'
            }
        }
        stage('Test') {
            steps {
                sh 'mvn test'
            }
        }
        stage('Build App Image') {
            steps {
                script {
                    dockerImage = docker.build(appRegistry + ":$BUILD_NUMBER", "./Docker-files/app/multistage/")
                }
            }
        }
        stage('Upload App Image') {
            steps {
                script {
                    docker.withRegistry(vprofileRegistry, registryCredential) {
                        dockerImage.push("$BUILD_NUMBER")
                        dockerImage.push('latest')
                    }
                }
            }
        }
    }
}
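For comparison, here is a hedged sketch of the upload stage that authenticates to ECR explicitly with the AWS CLI instead of relying on the ecr: credential helper. The registry host and region are placeholders copied from the question, and it assumes the agent has the AWS CLI, Docker, and valid AWS credentials available:

stage('Upload App Image') {
    steps {
        script {
            // Hypothetical alternative: log in to ECR with the AWS CLI, then push.
            sh '''
                aws ecr get-login-password --region us-east-1 | \
                  docker login --username AWS --password-stdin xxxxxxx.dkr.ecr.us-east-1.amazonaws.com
            '''
            dockerImage.push("$BUILD_NUMBER")
            dockerImage.push('latest')
        }
    }
}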

Using Jenkins and Terraform EC2 auto deploy issues

I am new to DevOps. I am trying to deploy EC2 instances using the following script. It fails at the Terraform initialization stage, even though I do have terraform init in my code. Where is the issue?
pipeline {
    parameters {
        string(name: 'environment', defaultValue: 'terraform', description: 'Workspace/environment file to use for deployment')
        booleanParam(name: 'autoApprove', defaultValue: false, description: 'Automatically run apply after generating plan?')
    }
    environment {
        AWS_ACCESS_KEY_ID = credentials('AWS_ACCESS_KEY_ID')
        AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
    }
    agent any
    options {
        timestamps()
    }
    stages {
        stage('checkout') {
            steps {
                script {
                    dir("terraform") {
                        git "https://github.com/Ravinderhub/Jenkins-Terraform-AWS.git"
                    }
                }
            }
        }
        stage('Plan') {
            steps {
                sh 'pwd; cd terraform/aws-instance-first-script; terraform init -input=false'
                sh 'pwd; cd terraform/aws-instance-first-script; terraform workspace new ${environment}'
                sh 'pwd; cd terraform/aws-instance-first-script; terraform workspace select ${environment}'
                sh "pwd; cd terraform/aws-instance-first-script; terraform plan -input=false -out tfplan"
                sh 'pwd; cd terraform/aws-instance-first-script; terraform show -no-color tfplan > tfplan.txt'
            }
        }
        stage('Approval') {
            when {
                not {
                    equals expected: true, actual: params.autoApprove
                }
            }
            steps {
                script {
                    def plan = readFile 'terraform/aws-instance-first-script/tfplan.txt'
                    input message: "Do you want to apply the plan?",
                        parameters: [text(name: 'Plan', description: 'Please review the plan', defaultValue: plan)]
                }
            }
        }
        stage('Apply') {
            steps {
                sh "pwd; cd terraform/aws-instance-first-script; terraform apply -input=false tfplan"
            }
        }
    }
}
There is an invalid variable in the configuration which prevents initialization: the ami_id variable defines an invalid type. Correct it to:
variable "ami_id" {
type = "map"
default = {
us-east-1 = "ami-035b3c7efe6d061d5"
eu-west-2 = "ami-132b3c7efe6sdfdsfd"
eu-central-1 = "ami-9787h5h6nsn"
}
}
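Note that the quoted type constraint above is the legacy (Terraform 0.11) form; newer Terraform versions deprecate it and may reject it outright. On Terraform 0.12 and later the same variable would be written with an unquoted type constraint:

variable "ami_id" {
  type = map(string)
  default = {
    us-east-1    = "ami-035b3c7efe6d061d5"
    eu-west-2    = "ami-132b3c7efe6sdfdsfd"
    eu-central-1 = "ami-9787h5h6nsn"
  }
}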

How to invoke a Groovy function with Docker Registry in a Jenkins shared lib

I want to push an image to ECR:
stage('login_ecr') {
    steps {
        script {
            withDockerRegistry(credentialsId: 'ecr:us-east-1:ecr_credentials', url: 'https://***********.dkr.ecr.us-east-1.amazonaws.com')
        }
    }
}
stage('build-image') {
    steps {
        script {
            build_docker_image(IMAGE_NAME: "${server}", IMAGE_TAG: "${TAG}", AWS_REGION: "us-east-1")
        }
    }
}
but I am facing the error below:
java.lang.IllegalStateException: There is no body to invoke at
org.jenkinsci.plugins.workflow.cps.CpsStepContext.newBodyInvoker(CpsStepContext.java:282)
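The stack trace suggests that withDockerRegistry is a block-scoped step, so it expects a closure body. A minimal sketch, reusing the placeholders from the question and assuming the push should happen inside the authenticated block:

stage('login_ecr_and_push') {
    steps {
        script {
            // withDockerRegistry needs a body; everything that must run
            // authenticated against the registry goes inside the closure.
            withDockerRegistry(credentialsId: 'ecr:us-east-1:ecr_credentials',
                               url: 'https://***********.dkr.ecr.us-east-1.amazonaws.com') {
                build_docker_image(IMAGE_NAME: "${server}", IMAGE_TAG: "${TAG}", AWS_REGION: "us-east-1")
            }
        }
    }
}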

Checking status of AWS Data Pipeline using Go SDK

Situation: I have 2 data pipelines that run on-demand. Pipeline B cannot run until Pipeline A has completed. I'm trying to automate running both pipelines in a single script/program but I'm unsure how to do all of this in Go.
I have some Go code that activates a data pipeline:
func awsActivatePipeline(pipelineID, region string) (*datapipeline.ActivatePipelineOutput, error) {
    svc := datapipeline.New(session.New(&aws.Config{Region: aws.String(region)}))
    input := &datapipeline.ActivatePipelineInput{
        PipelineId: aws.String(pipelineID),
    }
    result, err := svc.ActivatePipeline(input)
    if err != nil {
        fmt.Println("error activating pipeline: ", err)
    }
    fmt.Println(result)
    return result, nil
}
After activating, I want to be able to monitor that pipeline and determine when it's finished so that I can run the second pipeline. This is similar to the list-runs CLI command, but I'm not sure what the corresponding Go function would be.
$ aws datapipeline list-runs --region us-west-2 --pipeline-id df-EXAMPLE
Name Scheduled Start Status
ID Started Ended
---------------------------------------------------------------------------------------------------
1. EC2ResourceObj 2017-09-12T17:49:55 FINISHED
#EC2ResourceObj_2017-09-12T17:49:55 2017-09-12T17:49:58 2017-09-12T17:56:52
2. Installation 2017-09-12T17:49:55 FINISHED
#Installation_#ShellCommandActivityObj_2017-09-12T 2017-09-12T17:49:57 2017-09-12T17:54:09
3. S3OutputLocation 2017-09-12T17:49:55 FINISHED
#S3OutputLocation_2017-09-12T17:49:55 2017-09-12T17:49:58 2017-09-12T17:54:50
4. ShellCommandActivityObj 2017-09-12T17:49:55 FINISHED
#ShellCommandActivityObj_2017-09-12T17:49:55 2017-09-12T17:49:57 2017-09-12T17:54:49
So once all actions are marked 'FINISHED', I want to activate my second pipeline. What's the best way to accomplish this?
FYI, in case anyone else comes across this, here is how I resolved it.
A Go AWS API call that describes the objects/actions of a data pipeline; it returns true if all objects are finished:
func awsDescribeObjects(pipelineID, region string, objects []string) bool {
    var r Object
    var s []string
    var f bool
    svc := datapipeline.New(session.New(&aws.Config{Region: aws.String(region)}))
    input := &datapipeline.DescribeObjectsInput{
        PipelineId: aws.String(pipelineID),
        ObjectIds:  aws.StringSlice(objects),
    }
    result, err := svc.DescribeObjects(input)
    if err != nil {
        fmt.Println("error describing pipeline objects: ", err)
        f = false
        return f
    }
    //fmt.Println("original result: ", result)
    result2 := re.ReplaceAllString(result.String(), `"$1"$2`) // add "" around keys
    result3 := re1.ReplaceAllString(result2, `$3$2`)          // remove key and string/ref value from fields struct
    result4 := strings.Replace(result3, "#", "", -1)          // remove # from keys and values
    result5 := re2.ReplaceAllString(result4, `$1$3$5$7$9`)    // remove "" from timestamps
    result6 := re3.ReplaceAllString(result5, `$1,`)           // remove {} from fields struct
    json.Unmarshal([]byte(result6), &r)
    // fmt.Printf("R: %+v\n", r)
    p := r.PipelineObjects
    // fmt.Printf("P: %+v\n", p)
    for i := range p {
        for m := range p[i].Fields {
            fmt.Printf("%v STATUS: %v\n", p[i].Name, p[i].Fields[m].Status)
            s = append(s, p[i].Fields[m].Status)
            if p[i].Fields[m].Status != "FINISHED" {
                f = false
            } else {
                f = true
            }
        }
        // fmt.Println("bool: ", f)
    }
    return f
}
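For what it's worth, here is a simpler sketch of the same check that reads the typed DescribeObjects response directly instead of regex-rewriting the string into JSON. It uses the same aws-sdk-go imports as above and assumes the run objects report their state in a field keyed "@status", which is what list-runs appears to display:

// allObjectsFinished returns true only if every requested object reports
// a "@status" field equal to "FINISHED".
func allObjectsFinished(pipelineID, region string, objects []string) (bool, error) {
    svc := datapipeline.New(session.New(&aws.Config{Region: aws.String(region)}))
    out, err := svc.DescribeObjects(&datapipeline.DescribeObjectsInput{
        PipelineId: aws.String(pipelineID),
        ObjectIds:  aws.StringSlice(objects),
    })
    if err != nil {
        return false, err
    }
    for _, obj := range out.PipelineObjects {
        for _, field := range obj.Fields {
            if aws.StringValue(field.Key) == "@status" &&
                aws.StringValue(field.StringValue) != "FINISHED" {
                return false, nil // at least one object is still running
            }
        }
    }
    return true, nil
}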
My main Go function:
func main() {
    if *action == "describe" {
        obj := strings.Split(*object, ",")
        for i := 0; i <= 20; i++ {
            f := awsDescribeObjects(*pipeline, *region, obj)
            fmt.Printf("%v - Status Check %v - Finished?: %v\n", time.Now(), i, f)
            if f == true {
                fmt.Println("FINISHED describing pipeline complete")
                break
            }
            time.Sleep(5 * time.Minute)
            if i == 20 {
                fmt.Println("TIME OUT - describe pipeline timed out, max time reached")
                os.Exit(1)
            }
        }
    }
}
Shell script that calls the Go executable:
#PIPELINE 1
echo "Starting Pipeline 1..."
echo ./runpipeline.linux -region $REGION1 -pipeline-id $PIPELINEID1 -action activate
echo sleep 1m
echo ./runpipeline.linux -region $REGION1 -pipeline-id $PIPELINEID1 -action describe -object ShellCommandActivityObj
echo "Pipeline 1 complete"
#PIPELINE 2
echo "Starting Pipeline 2..."
echo ./runpipeline.linux -region $REGION2 -pipeline-id $PIPELINEID2 -action activate
echo sleep 1m
echo ./runpipeline.linux -region $REGION2 -pipeline-id $PIPELINEID2 -action describe -object ShellCommandActivityObj,CliActivity
echo "Pipeline 2 complete"
echo "FINISHED"

Download an already uploaded Lambda function

I created a Lambda function in AWS (Python) using "upload .zip".
I lost those files and I need to make some changes; is there any way to download that .zip?
Yes!
Navigate to your Lambda function's settings; at the top right you will see a button called "Actions". In the drop-down menu select "Export", and in the popup click "Download deployment package"; the function will download as a .zip file.
[Screenshots: the "Actions" button at the top right, and the export popup with the "Download deployment package" option.]
Update: added a link to the script by sambhaji-sawant; fixed typos and improved the answer and script based on comments.
You can use the aws-cli to download the zip of any Lambda.
First, you need to get the URL of the Lambda's deployment package:
$ aws lambda get-function --function-name $functionName --query 'Code.Location'
Then use wget or curl to download the zip from that URL:
$ wget -O myfunction.zip URL_from_step_1
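The two steps can also be combined into a single command; for example (my-function is a placeholder name, and --output text strips the quotes that the default JSON output would put around the URL):
$ wget -O myfunction.zip "$(aws lambda get-function --function-name my-function --query 'Code.Location' --output text)"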
Additionally, you can list all functions in your AWS account using:
$ aws lambda list-functions
I made a simple bash script to download all the Lambda functions from your AWS account in parallel. You can see it here :)
Note: you will need to set up the aws-cli with aws configure before using the above commands (or any aws-cli command).
Full guide here
You can use the shell script available here.
If you want to download all the functions in a given region, here is my workaround.
I have created a simple Node script to download the functions. Install all the required npm packages and set your AWS CLI to the region you want before running the script.
let download = require('download-file');
let extract = require('extract-zip');
let cmd = require('node-cmd');

let downloadFile = async function (dir, filename, url) {
    let options = {
        directory: dir,
        filename: filename
    }
    return new Promise((success, failure) => {
        download(url, options, function (err) {
            if (err) {
                failure(err)
            } else {
                success('done');
            }
        })
    })
}

let extractZip = async function (source, target) {
    return new Promise((success, failure) => {
        extract(source, { dir: target }, function (err) {
            if (err) {
                failure(err)
            } else {
                success('done');
            }
        })
    })
}

let getAllFunctionList = async function () {
    return new Promise((success, failure) => {
        cmd.get(
            'aws lambda list-functions',
            function (err, data, stderr) {
                if (err || stderr) {
                    failure(err || stderr)
                } else {
                    success(data)
                }
            }
        );
    })
}

let getFunctionDescription = async function (name) {
    return new Promise((success, failure) => {
        cmd.get(
            `aws lambda get-function --function-name ${name}`,
            function (err, data, stderr) {
                if (err || stderr) {
                    failure(err || stderr)
                } else {
                    success(data)
                }
            }
        );
    })
}

let init = async function () {
    try {
        let { Functions: getAllFunctionListResult } = JSON.parse(await getAllFunctionList());
        let getFunctionDescriptionResult, downloadFileResult, extractZipResult;
        getAllFunctionListResult.map(async (f) => {
            var { Code: { Location: getFunctionDescriptionResult } } = JSON.parse(await getFunctionDescription(f.FunctionName));
            downloadFileResult = await downloadFile('./functions', `${f.FunctionName}.zip`, getFunctionDescriptionResult)
            extractZipResult = await extractZip(`./functions/${f.FunctionName}.zip`, `/Users/malhar/Desktop/get-lambda-functions/final/${f.FunctionName}`)
            console.log('done', f.FunctionName);
        })
    } catch (e) {
        console.log('error', e);
    }
}

init()
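A possible way to run it (the package names are taken from the require calls above; the script filename is hypothetical):
$ npm install download-file extract-zip node-cmd
$ node download-lambdas.js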
Here is a bash script that I used; it downloads all the functions in the default region:
download_code () {
    # strip the trailing comma and surrounding quotes from the JSON-formatted function name
    local OUTPUT=$1
    OUTPUT=`sed -e 's/,$//' -e 's/^"//' -e 's/"$//g' <<<"$OUTPUT"`
    # --output text returns the bare URL instead of a quoted JSON string
    url=$(aws lambda get-function --function-name "$OUTPUT" --query 'Code.Location' --output text)
    wget "$url" -O "$OUTPUT.zip"
}
FUNCTION_LIST=$(aws lambda list-functions --query Functions[*].FunctionName)
for run in $FUNCTION_LIST
do
    download_code "$run"
done
echo "Finished!!!!"
You can find a Python script to download all the Lambda functions here.
import os
import sys
from urllib.request import urlopen
import zipfile
from io import BytesIO

import boto3


def get_lambda_functions_code_url():
    client = boto3.client("lambda")
    lambda_functions = [n["FunctionName"] for n in client.list_functions()["Functions"]]
    functions_code_url = []
    for fn_name in lambda_functions:
        fn_code = client.get_function(FunctionName=fn_name)["Code"]
        fn_code["FunctionName"] = fn_name
        functions_code_url.append(fn_code)
    return functions_code_url


def download_lambda_function_code(fn_name, fn_code_link, dir_path):
    function_path = os.path.join(dir_path, fn_name)
    if not os.path.exists(function_path):
        os.mkdir(function_path)
    with urlopen(fn_code_link) as lambda_extract:
        with zipfile.ZipFile(BytesIO(lambda_extract.read())) as zfile:
            zfile.extractall(function_path)


if __name__ == "__main__":
    inp = sys.argv[1:]
    print("Destination folder {}".format(inp))
    if inp and os.path.exists(inp[0]):
        dest = os.path.abspath(inp[0])
        fc = get_lambda_functions_code_url()
        print("There are {} lambda functions".format(len(fc)))
        for i, f in enumerate(fc):
            print("Downloading Lambda function {} {}".format(i + 1, f["FunctionName"]))
            download_lambda_function_code(f["FunctionName"], f["Location"], dest)
    else:
        print("Destination folder doesn't exist")