Using Jenkins and Terraform EC2 auto deploy issues

I am new to DevOps. I am trying to deploy EC2 instances using the following script. It is failing at the Terraform initialization stage, even though I do have terraform init in my code. Where is the issue?
pipeline {
    parameters {
        string(name: 'environment', defaultValue: 'terraform', description: 'Workspace/environment file to use for deployment')
        booleanParam(name: 'autoApprove', defaultValue: false, description: 'Automatically run apply after generating plan?')
    }
    environment {
        AWS_ACCESS_KEY_ID     = credentials('AWS_ACCESS_KEY_ID')
        AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY')
    }
    agent any
    options {
        timestamps()
    }
    stages {
        stage('checkout') {
            steps {
                script {
                    dir("terraform") {
                        git "https://github.com/Ravinderhub/Jenkins-Terraform-AWS.git"
                    }
                }
            }
        }
        stage('Plan') {
            steps {
                sh 'pwd; cd terraform/aws-instance-first-script; terraform init -input=false'
                sh 'pwd; cd terraform/aws-instance-first-script; terraform workspace new ${environment}'
                sh 'pwd; cd terraform/aws-instance-first-script; terraform workspace select ${environment}'
                sh "pwd; cd terraform/aws-instance-first-script; terraform plan -input=false -out tfplan"
                sh 'pwd; cd terraform/aws-instance-first-script; terraform show -no-color tfplan > tfplan.txt'
            }
        }
        stage('Approval') {
            when {
                not {
                    equals expected: true, actual: params.autoApprove
                }
            }
            steps {
                script {
                    def plan = readFile 'terraform/aws-instance-first-script/tfplan.txt'
                    input message: "Do you want to apply the plan?",
                        parameters: [text(name: 'Plan', description: 'Please review the plan', defaultValue: plan)]
                }
            }
        }
        stage('Apply') {
            steps {
                sh "pwd; cd terraform/aws-instance-first-script; terraform apply -input=false tfplan"
            }
        }
    }
}

There is an invalid variable in the configuration which prevents initialization: the ami_id variable defines an invalid type. Correct it to:
variable "ami_id" {
type = "map"
default = {
us-east-1 = "ami-035b3c7efe6d061d5"
eu-west-2 = "ami-132b3c7efe6sdfdsfd"
eu-central-1 = "ami-9787h5h6nsn"
}
}
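For context, a region-keyed map like this is normally looked up from the instance resource. Below is a minimal sketch of such a lookup; the resource and the aws_region variable are illustrative, not the repo's actual code. Note also that on Terraform 0.12 and later the quoted constraint type = "map" is rejected, and the unquoted form type = map(string) is required instead.
# Hypothetical usage; the real resource lives in aws-instance-first-script
resource "aws_instance" "example" {
  ami           = "${lookup(var.ami_id, var.aws_region)}"  # pick the AMI that matches the deployment region
  instance_type = "t2.micro"
}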

Related

Error in AWS CDK V2 construct for AWS ECR

I have written code to create a repo and a few properties. Even though I am passing the repo name as the string 'testing' as part of an interface, my code goes through the else condition and creates the repo name as undefined plus a date.
Second issue: can you also help me find the issue with the principal in the permission policy? I am receiving an error saying props.accountIds.map is wrong; I am passing an array to accountIds.
import * as ecr from 'aws-cdk-lib/aws-ecr';
import { Duration, RemovalPolicy, Stack } from 'aws-cdk-lib';
import { Repository, RepositoryEncryption, TagMutability } from 'aws-cdk-lib/aws-ecr';
import { AWSAccountDetails } from '../lib/utils/definition';
import * as cdk from 'aws-cdk-lib';

export class ecrStack extends cdk.Stack {
    constructor(scope: cdk.App, id: string, props: any) {
        super(scope, id);
        const repository = this.createEcr(props);
        this.createAdditionalProperty(repository, props);
    }

    // Method to check and create the AWS ECR repo
    private createEcr(props: AWSAccountDetails): any {
        let imageTagMutability: ecr.TagMutability = ecr.TagMutability.IMMUTABLE;
        let imageScanOnPush: Boolean = true;
        let encryption: ecr.RepositoryEncryption = ecr.RepositoryEncryption.KMS;
        if (props.imageTagMutability in ecr.TagMutability) {
            imageTagMutability = props.imageTagMutability;
        }
        if (typeof props.imageScanOnPush !== 'boolean') {
            imageScanOnPush = props.imageScanOnPush;
        }
        if (typeof props.encryption !== 'undefined') {
            encryption = props.encryption;
        }
        if (!props.repositoryName) {
            throw Error('No repository name provided');
        }
        let repository = ecr.Repository.fromRepositoryName(this, 'ecrRepo', props.repositoryName);
        if (!repository.repositoryArn) {
            // Repository does not exist, create a new one with the original name
            repository = new ecr.Repository(this, props.repositoryName, {
                repositoryName: props.repositoryName,
                imageTagMutability: props.imageTagMutability,
                encryption: RepositoryEncryption.KMS,
                imageScanOnPush: props.imageScanOnPush,
                removalPolicy: RemovalPolicy.DESTROY
            });
        } else {
            const modifiedRepositoryName = `${props.repositoryName}-${Date.now()}`;
            repository = new ecr.Repository(this, modifiedRepositoryName, {
                repositoryName: modifiedRepositoryName,
                imageTagMutability: props.imageTagMutability,
                encryption: RepositoryEncryption.KMS,
                imageScanOnPush: props.imageScanOnPush,
                removalPolicy: RemovalPolicy.DESTROY
            });
        }
        return repository;
    }

    // Method to add the lifecycle policy, Tags and create AWS account permissions.
    private createAdditionalProperty(repository: any, props: AWSAccountDetails) {
        let AgeOfImage: number = 180;
        if (typeof props.ImageAge !== 'undefined') {
            repository.addLifecycleRule({
                rulePriority: 1,
                maxImageAge: Duration.days(AgeOfImage)
            });
        } else {
            repository.addLifecycleRule({
                rulePriority: 1,
                maxImageAge: Duration.days(props.ImageAge)
            });
        }
        // Tags
        const Tags: { [key: string]: string } = {
            Name: props.repositoryName,
        }
        // Permission to external AWS account to grant permission for ECR pull and push
        // const policy = new iam.PolicyDocument();
        // policy.addStatements(new iam.PolicyStatement({
        //     actions: ['ecr:*'],
        //     // actions: ['ecr:BatchCheckLayerAvailability', 'ecr:GetDownloadUrlForLayer', 'ecr:BatchGetImage', 'ecr:PutImage']
        //     resources: [repository.repositoryArn],
        //     principals: props.accountIds.map(id => new iam.AccountPrincipal(id))
        // }));
    }

    addLifecycleRule(arg0: { rulePriority: number; maxImageAge: Duration; }) {
        throw new Error('Method not implemented.');
    }
}
The interface file:
import * as ecr from 'aws-cdk-lib/aws-ecr';
import { ecrStack } from '../ecrstack-stack';

export interface AWSAccountDetails {
    ImageCount: any;
    readonly repositoryName: 'abcd'; /* Repo Name */
    readonly ImageAge: 110; // Number of days before an image is deleted, i.e. 90. Need to change to imageAge
    readonly imageTagMutability: ecr.TagMutability.IMMUTABLE; /* If the Repo should enable Tag Immutability or not; Default setting is Enabled */
    readonly imageScanOnPush: true; /* If the Repo should enable ScanOnPush or not; Default setting is Enabled */
    readonly encryption: 'KMS'; /* If the Repo should use KMS or not; Default setting is Enabled for AWS managed KMS Key */
    readonly accountIds: string; // Account number to grant access to pull and push.
    readonly encruptionproperty: 'KMS';
}
I had to pass the props as an object and then export it to the main stack. This solved the issue.
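For reference, here is a minimal sketch of what passing the props as a single object can look like in the CDK app entry point. The file path, stack id, and concrete values are assumptions that mirror the interface above, not the actual project code.
// bin/app.ts (hypothetical entry point)
import * as cdk from 'aws-cdk-lib';
import * as ecr from 'aws-cdk-lib/aws-ecr';
import { ecrStack } from '../lib/ecrstack-stack';
import { AWSAccountDetails } from '../lib/utils/definition';

// Build the props as one object that satisfies AWSAccountDetails
const ecrProps: AWSAccountDetails = {
    ImageCount: 1,
    repositoryName: 'abcd',
    ImageAge: 110,
    imageTagMutability: ecr.TagMutability.IMMUTABLE,
    imageScanOnPush: true,
    encryption: 'KMS',
    accountIds: '111111111111',
    encruptionproperty: 'KMS',
};

const app = new cdk.App();
// The whole object is handed to the stack, so props.repositoryName is defined inside createEcr
new ecrStack(app, 'EcrStack', ecrProps);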

Docker Image not uploading to ecr from jenkins

I created a CI/CD pipeline which builds a Docker image and pushes it to Amazon ECR.
The Docker image builds perfectly but does not upload to ECR.
After building the image, it keeps retrying to upload the image to ECR but exits after several attempts.
pipeline {
    agent any
    environment {
        registryCredential = 'ecr:us-east-1:aws-cred'
        appRegistry = "xxxxxx.dkr.ecr.us-east-1.amazonaws.com/myappimg"
        vprofileRegistry = "https://xxxxxxx.dkr.ecr.us-east-1.amazonaws.com"
        dockerImage = ''
    }
    stages {
        stage('Fetch code') {
            steps {
                git branch: 'docker', url: 'https://github.com/xxxxxxxxx/xxxxxxxxxxx.git'
            }
        }
        stage('Build') {
            steps {
                sh 'mvn clean install -DskipTests'
            }
        }
        stage('Test') {
            steps {
                sh 'mvn test'
            }
        }
        stage('Build App Image') {
            steps {
                script {
                    dockerImage = docker.build(appRegistry + ":$BUILD_NUMBER", "./Docker-files/app/multistage/")
                }
            }
        }
        stage('Upload App Image') {
            steps {
                script {
                    docker.withRegistry(vprofileRegistry, registryCredential) {
                        dockerImage.push("$BUILD_NUMBER")
                        dockerImage.push('latest')
                    }
                }
            }
        }
    }
}

Value for Terraform Composer airflow_config_override secrets-backend_kwargs

I need to change, using Terraform, the default project_id in my Composer environment so that I can access secrets from another project. To do so, according to the Terraform documentation, I need the variable airflow_config_overrides. I guess I should have something like this:
resource "google_composer_environment" "test" {
# ...
config {
software_config {
airflow_config_overrides = {
secrets-backend = "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend",
secrets-backend_kwargs = {"project_id":"9999999999999"}
}
}
}
}
The secrets-backend section-key seems to be working. On the other hand, secrets-backend_kwargs is returning the following error:
Inappropriate value for attribute "airflow_config_overrides": element "secrets-backend_kwargs": string required
It seems that the problem is that GCP expects a JSON format and Terraform requires a string. How can I get Terraform to provide it in the format needed?
You can convert a map such as {"project_id":"9999999999999"} into a JSON encoded string by using the jsonencode function.
So, merging the example given in the google_composer_environment resource documentation with the config in your question, you can do something like this:
resource "google_composer_environment" "test" {
name = "mycomposer"
region = "us-central1"
config {
software_config {
airflow_config_overrides = {
secrets-backend = "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend",
secrets-backend_kwargs = jsonencode({"project_id":"9999999999999"})
}
pypi_packages = {
numpy = ""
scipy = "==1.1.0"
}
env_variables = {
FOO = "bar"
}
}
}
}

how to invoke groovy function with Docker Registry in jenkins shared lib

I want to push an image to ECR:
stage('login_ecr') {
    steps {
        script {
            withDockerRegistry(credentialsId: 'ecr:us-east-1:ecr_credentials', url: 'https://***********.dkr.ecr.us-east-1.amazonaws.com')
        }
    }
}
stage('build-image') {
    steps {
        script {
            build_docker_image(IMAGE_NAME: "${server}", IMAGE_TAG: "${TAG}", AWS_REGION: "us-east-1")
        }
    }
}
but I am facing the error below:
java.lang.IllegalStateException: There is no body to invoke at
org.jenkinsci.plugins.workflow.cps.CpsStepContext.newBodyInvoker(CpsStepContext.java:282)

Download an already uploaded Lambda function

I created a Lambda function in AWS (Python) using "upload .zip".
I lost those files and I need to make some changes. Is there any way to download that .zip?
Yes!
Navigate to your Lambda function settings; at the top right you will see a button called "Actions". In the drop-down menu select "Export", and in the popup click "Download deployment package"; the function will download as a .zip file.
Update: Added link to script by sambhaji-sawant. Fixed Typos, improved answer and script based on comments!
You can use the aws-cli to download the zip of any Lambda.
First you need to get the URL of the Lambda zip:
$ aws lambda get-function --function-name $functionName --query 'Code.Location'
Then you need to use wget/curl to download the zip from the URL.
$ wget -O myfunction.zip URL_from_step_1
Additionally, you can list all the functions in your AWS account using:
$ aws lambda list-functions
I made a simple bash script to download all the Lambda functions from your AWS account in parallel. You can see it here :)
Note: You will need to set up the aws-cli before using the above commands (or any aws-cli command) by running aws configure.
Full guide here
You can use the shell script available here.
If you want to download all the functions in a given region, here is my workaround.
I have created a simple Node script to download the functions. Install all the required npm packages and set your AWS CLI to the region you want before running the script.
let download = require('download-file');
let extract = require('extract-zip');
let cmd = require('node-cmd');

let downloadFile = async function (dir, filename, url) {
    let options = {
        directory: dir,
        filename: filename
    }
    return new Promise((success, failure) => {
        download(url, options, function (err) {
            if (err) {
                failure(err)
            } else {
                success('done');
            }
        })
    })
}

let extractZip = async function (source, target) {
    return new Promise((success, failure) => {
        extract(source, { dir: target }, function (err) {
            if (err) {
                failure(err)
            } else {
                success('done');
            }
        })
    })
}

let getAllFunctionList = async function () {
    return new Promise((success, failure) => {
        cmd.get(
            'aws lambda list-functions',
            function (err, data, stderr) {
                if (err || stderr) {
                    failure(err || stderr)
                } else {
                    success(data)
                }
            }
        );
    })
}

let getFunctionDescription = async function (name) {
    return new Promise((success, failure) => {
        cmd.get(
            `aws lambda get-function --function-name ${name}`,
            function (err, data, stderr) {
                if (err || stderr) {
                    failure(err || stderr)
                } else {
                    success(data)
                }
            }
        );
    })
}

let init = async function () {
    try {
        let { Functions: getAllFunctionListResult } = JSON.parse(await getAllFunctionList());
        let getFunctionDescriptionResult, downloadFileResult, extractZipResult;
        getAllFunctionListResult.map(async (f) => {
            var { Code: { Location: getFunctionDescriptionResult } } = JSON.parse(await getFunctionDescription(f.FunctionName));
            downloadFileResult = await downloadFile('./functions', `${f.FunctionName}.zip`, getFunctionDescriptionResult)
            extractZipResult = await extractZip(`./functions/${f.FunctionName}.zip`, `/Users/malhar/Desktop/get-lambda-functions/final/${f.FunctionName}`)
            console.log('done', f.FunctionName);
        })
    } catch (e) {
        console.log('error', e);
    }
}

init()
Here is a bash script that I used; it downloads all the functions in the default region:
download_code () {
    local OUTPUT=$1
    # strip the JSON quoting/commas from the function name
    OUTPUT=`sed -e 's/,$//' -e 's/^"//' -e 's/"$//g' <<<"$OUTPUT"`
    # fetch the presigned URL of this function's deployment package
    url=$(aws lambda get-function --function-name "$OUTPUT" --query 'Code.Location' --output text)
    wget $url -O $OUTPUT.zip
}

FUNCTION_LIST=$(aws lambda list-functions --query 'Functions[*].FunctionName')

for run in $FUNCTION_LIST
do
    download_code $run
done

echo "Finished!!!!"
You can find a Python script to download all the Lambda functions here.
import os
import sys
from urllib.request import urlopen
import zipfile
from io import BytesIO
import boto3


def get_lambda_functions_code_url():
    client = boto3.client("lambda")
    lambda_functions = [n["FunctionName"] for n in client.list_functions()["Functions"]]
    functions_code_url = []
    for fn_name in lambda_functions:
        fn_code = client.get_function(FunctionName=fn_name)["Code"]
        fn_code["FunctionName"] = fn_name
        functions_code_url.append(fn_code)
    return functions_code_url


def download_lambda_function_code(fn_name, fn_code_link, dir_path):
    function_path = os.path.join(dir_path, fn_name)
    if not os.path.exists(function_path):
        os.mkdir(function_path)
    with urlopen(fn_code_link) as lambda_extract:
        with zipfile.ZipFile(BytesIO(lambda_extract.read())) as zfile:
            zfile.extractall(function_path)


if __name__ == "__main__":
    inp = sys.argv[1:]
    print("Destination folder {}".format(inp))
    if inp and os.path.exists(inp[0]):
        dest = os.path.abspath(inp[0])
        fc = get_lambda_functions_code_url()
        print("There are {} lambda functions".format(len(fc)))
        for i, f in enumerate(fc):
            print("Downloading Lambda function {} {}".format(i+1, f["FunctionName"]))
            download_lambda_function_code(f["FunctionName"], f["Location"], dest)
    else:
        print("Destination folder doesn't exist")