How can I save the output of an API call to DynamoDB? - amazon-web-services

I am calling a REST API from a Lambda function in AWS and I can see the response in the terminal. How can I store these API responses in DynamoDB? I am sorry if this seems naive to some; I am a beginner and self-learner.

Here is a full working example for a Python-based Lambda function:
# Import the AWS SDK for Python
import boto3

def lambda_handler(event, context):
    # Resource interface to the DynamoDB service
    db = boto3.resource('dynamodb')
    table_name = 'my-awesome-table'
    # Table instance
    table = db.Table(table_name)

    # Example data to query an entry from the table
    primary_column_name = 'email'
    primary_key = 'john@gmail.com'
    # We get an object for the retrieved table entry
    table_entry = get_table_item(table, primary_column_name, primary_key)

    # Example data to put into the table
    new_entry = {
        'email': 'jack@outlook.com',
        'name': 'Jack Ma',
        'age': 56
    }
    put_table_item(table, new_entry)

# Function to retrieve an entry from the DynamoDB table
def get_table_item(table, primary_column_name, primary_key):
    response = table.get_item(
        Key={
            primary_column_name: primary_key
        }
    )
    return response['Item']

# Function to put an entry into the DynamoDB table
def put_table_item(table, item):
    response = table.put_item(
        Item=item
    )
    # If there were errors, raise an exception
    if response['ResponseMetadata']['HTTPStatusCode'] != 200:
        raise Exception('Failed to update table!')
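To tie this back to the question: once the Lambda has the API response, the same put_item call can store it. Below is a minimal sketch under a few assumptions: the URL and table name are placeholders, the requests library is bundled with the deployment package, and the table's partition key is 'email' as in the example above. DynamoDB rejects Python floats, so numbers are parsed as Decimal.
import json
from decimal import Decimal

import boto3
import requests  # third-party; must be bundled with the Lambda package or a layer

def lambda_handler(event, context):
    table = boto3.resource('dynamodb').Table('my-awesome-table')  # placeholder name

    # Call the REST API (placeholder URL)
    api_response = requests.get('https://api.example.com/data')
    api_response.raise_for_status()

    # Parse numbers as Decimal because DynamoDB does not accept floats
    item = json.loads(api_response.text, parse_float=Decimal)

    # The item must contain the table's partition key ('email' in this example)
    item.setdefault('email', 'john@gmail.com')
    table.put_item(Item=item)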

Related

How to authenticate requests made to AWS AppSync in Python?

I have a website with an AWS Amplify backend. For a post-payment function, I am creating a Lambda function to update the database. I am trying to query certain fields with the help of AppSync and then run a mutation. This is my function code:
import json
import boto3
import os
import decimal
import requests
from requests_aws4auth import AWS4Auth

def lambda_handler(event, context):
    dynamoDB = boto3.resource('dynamodb', region_name='ap-northeast-1')
    # load event data (hidden); sentData comes from this hidden parsing code
    userid = sentData.get("userid")
    slots = sentData.get("slots")
    url = os.environ.get("AWS_GRAPHQL_API_ENDPOINT")
    api_key = os.environ.get("AWS_GRAPHQL_API_KEY")
    session = requests.Session()
    query = """
        query MyQuery {
            getUserPlan(id: "9ddf437a-55b1-445d-8ae6-254c77493c30") {
                traits
                traitCount
            }
        }
    """
    credentials = boto3.session.Session().get_credentials()
    session.auth = AWS4Auth(
        credentials.access_key,
        credentials.secret_key,
        'ap-northeast-1',
        'appsync',
        session_token=credentials.token
    )
    # response = session.request(
    #     url=url,
    #     method="POST",
    #     json={"query": query},
    #     headers={"Authorization": api_key},
    # )
    # response = requests.post(
    #     url=url,
    #     json={"query": query},
    #     headers={"x-api-key": api_key}
    # )
    response = session.request(
        url=url,
        method="POST",
        json={"query": query},
    )
    print(response.json())
    return {
        "statusCode": 200,
    }
I get the following error when I execute the function:
{'data': {'getUserPlan': None}, 'errors': [{'path': ['getUserPlan'], 'data': None, 'errorType': 'Unauthorized', 'errorInfo': None, 'locations': [{'line': 3, 'column': 9, 'sourceName': None}], 'message': 'Not Authorized to access getUserPlan on type UserPlan'}]}
I have referred to this and this. I have tried their solutions, but they haven't worked for me. I have confirmed that all the environment variables are working properly, and I even added the local aws-cli IAM user to the custom-roles.json file for admin privileges by Amplify. When I was trying with the API key, I made sure that it hadn't expired as well.
I figured out how to fix it. I had to create a function through the Amplify CLI, give it access to the API, push the function, and then add the name of its role to adminRoleNames in custom-roles.json.
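For reference, a minimal sketch of what that custom-roles.json override can look like; the role name below is a placeholder for the Lambda function's execution role:
{
  "adminRoleNames": ["my-function-execution-role"]
}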

Unit testing with AWS Cognito and GraphQL

I'm currently writing tests for my software but got stuck at this point.
I try to fetch data from my DB with a normal GraphQL query, but my endpoint first checks whether the idToken in the header is valid.
For the user handling I'm using AWS Cognito, but I couldn't find a good way to mock the login to retrieve a valid token for querying and mutating data on the various endpoints.
Any idea how to handle this case?
Here is my code from the graphene docs (https://docs.graphene-python.org/projects/django/en/latest/testing/):
# Create a fixture using the graphql_query helper and `client` fixture from `pytest-django`.
import json
import pytest
from graphene_django.utils.testing import graphql_query

# https://docs.graphene-python.org/projects/django/en/latest/testing/
@pytest.fixture
def client_query(client):
    def func(*args, **kwargs):
        return graphql_query(*args, **kwargs, client=client)
    return func

# Test your query using the client_query fixture
def test_some_query(client_query):
    response = client_query(
        '''
        query GetAllProjectConfig {
          getAllProjectConfig {
            project {
              id
              slug
              name
            }
            config {
              id
            }
          }
        }
        ''',
    )
    content = json.loads(response.content)
    assert 'errors' not in content
The answer was not so hard:
auth_data = {'USERNAME': username, 'PASSWORD': password}

# auth the user on cognito
def auth_cognito_user():
    provider_client = boto3.client(
        'cognito-idp', region_name=os.environ.get('region_name'))
    resp = provider_client.admin_initiate_auth(
        UserPoolId=userpool_id,
        AuthFlow='ADMIN_NO_SRP_AUTH',
        AuthParameters=auth_data,
        ClientId=client_id)
    # print("RESPONSE COGNITO", resp['AuthenticationResult']['IdToken'])
    return resp['AuthenticationResult']['IdToken']
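With the IdToken in hand, it can be attached to the test client's requests. A small sketch, assuming the backend reads the token from the Authorization header (the exact header name and Bearer prefix depend on your auth middleware); graphql_query forwards extra headers to the Django test client, which expects custom headers prefixed with HTTP_:
def test_some_authed_query(client_query):
    token = auth_cognito_user()
    response = client_query(
        '''
        query GetAllProjectConfig {
          getAllProjectConfig {
            project { id }
          }
        }
        ''',
        # Django test client header convention: HTTP_ prefix
        headers={'HTTP_AUTHORIZATION': f'Bearer {token}'},
    )
    content = json.loads(response.content)
    assert 'errors' not in content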

HTTP request to GCP BigQuery via Cloud Function with Multiple Tables

I'm looking to create an HTTP request to BigQuery via a Cloud Function within GCP, where the request passes a value, that value is passed to the query, and another value is returned from a joined table. The SQL works in BQ, but I'm having issues getting a value returned when I apply it to the Cloud Function.
Here's the URL I'm calling as well.
https://us-central1-something.cloudfunctions.net/something/something2?client_Id=823754783.2
Thanks in advance.
from flask import escape
from google.cloud import bigquery

def cors_enabled_function(request):
    if request.method == 'OPTIONS':
        headers = {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'GET',
            'Access-Control-Allow-Headers': 'Content-Type',
            'Access-Control-Max-Age': '3600'
        }
        return ('', 204, headers)

client = bigquery.Client()

def something2(request):
    request_json = request.get_json(silent=True)
    request_args = request_args
    table = ['`audience-cookie.ecommerce.traffic` as traffic JOIN `audience-cookie.ecommerce.cardholder` as cardholder ON traffic.customerId = cardholder.customerId']
    QUERY = ('SELECT '+cardholder+' from `'+table+'` WHERE client_Id='+client_Id)
    try:
        query_job = client.query(QUERY)
        rows = query_job.result()
        row_list = []
        for row in rows:
            row_list.append(str(row[cardholder]))
        return("<p>"+"</p><p>".join(row(row_list) + "</p>"))
    except e:
        return(e)
I recommend you print your query. There are several mistakes:
Why are you using [] in the table var definition? That makes table a list, not a string.
You use backquotes (`) in table and again when you insert table into the query, so at the end you have doubled backquotes.
client_Id isn't defined in your code.
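To make those fixes concrete, here is a sketch of the corrected query construction. The selected column and the table holding client_Id are assumptions based on the original code, and a query parameter replaces the string concatenation so the value is passed safely:
from flask import escape
from google.cloud import bigquery

client = bigquery.Client()

def something2(request):
    # Read client_Id from the query string, e.g. ?client_Id=823754783.2
    client_id = escape(request.args.get('client_Id', ''))

    # Plain string for the joined tables: no list brackets, no doubled backquotes
    query = """
        SELECT cardholder.cardholder_field  -- placeholder column name
        FROM `audience-cookie.ecommerce.traffic` AS traffic
        JOIN `audience-cookie.ecommerce.cardholder` AS cardholder
          ON traffic.customerId = cardholder.customerId
        WHERE traffic.client_Id = @client_id
    """
    job_config = bigquery.QueryJobConfig(
        query_parameters=[
            bigquery.ScalarQueryParameter('client_id', 'STRING', client_id)
        ]
    )
    try:
        rows = client.query(query, job_config=job_config).result()
        return '<p>' + '</p><p>'.join(str(row[0]) for row in rows) + '</p>'
    except Exception as e:
        return str(e), 500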

How should I set the integration request for a Lambda function in API Gateway

I have the below Lambda function, which is invoked by API Gateway:
import sys
import logging
import pymysql
import json

rds_host = "rds.amazonaws.com"
name = "name"
password = "pass"
db_name = "DB"
port = 3306

def save_events(event):
    result = []
    conn = pymysql.connect(host=rds_host, user=name, passwd=password, db=db_name,
                           connect_timeout=30)
    Bid = pymysql.escape_string("3")
    with conn.cursor(pymysql.cursors.DictCursor) as cur:
        cur.execute("select exid,exercise_name,image from exercise where bid = 3")
        result = cur.fetchall()
        cur.close()
    print("Data from RDS...")
    print(result)
    workout = json.dumps(result)
    workouts = workout.replace("\"", "'")
    return workouts

def lambda_handler(event, context):
    return save_events(event)
Now, in the Integration Request of the GET method in API Gateway, what should I add in the mapping template to get data in JSON format from the user, and how can I pass that user-supplied value into the query (e.g. select exid,exercise_name,image from exercise where bid = "user supplied value")? I am a beginner to AWS and backend development. Thanks in advance.
You can pass a query argument. For that you have to declare the query string parameter in the Integration Request, and in the mapping template add an application/json template that maps the parameter into the Lambda event. Then, in the Lambda code, replace this line:
cur.execute("select exid,exercise_name,image from exercise where bid = 3")
with a parameterized query that reads the value from the event:
cur.execute("select exid,exercise_name,image from exercise where bid = %s", (event['params']['querystring']['parameter_name'],))
For more information, check this:
pass parameter to lambda from api gateway
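Putting it together, here is a minimal sketch of the Lambda side, assuming the mapping template forwards query string parameters under event['params']['querystring'] (the shape used by the common API Gateway passthrough template) and that bid is the parameter name; the connection settings are the ones from the question:
import json
import pymysql

rds_host = "rds.amazonaws.com"
name = "name"
password = "pass"
db_name = "DB"

def lambda_handler(event, context):
    # With the mapping template in place, ?bid=3 arrives here
    bid = event['params']['querystring']['bid']

    conn = pymysql.connect(host=rds_host, user=name, passwd=password,
                           db=db_name, connect_timeout=30)
    with conn.cursor(pymysql.cursors.DictCursor) as cur:
        # Parameterized query: pymysql escapes the value safely
        cur.execute(
            "select exid, exercise_name, image from exercise where bid = %s",
            (bid,)
        )
        result = cur.fetchall()
    return json.dumps(result)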

DynamoDB pagination using Boto3

We are using boto3 for our DynamoDB and we need to do a full scan of our tables. Based on another post, we need to use pagination for that. However, we were unable to find a working sample of pagination. Here is what we did.
import boto3

client_setting = boto3.client('dynamodb', region_name='ap-southeast-2')
paginator = client_setting.get_paginator('scan')

esk = {}
data = []
unconverted_ga = ourQuery(params1, params2)
for page in unconverted_ga:
    data.append(page)
    esk = page['LastEvaluatedKey']
We don't know exactly how to use esk as the ExclusiveStartKey of our next query. What should be the expected value of the ExclusiveStartKey parameter? We are still new to DynamoDB and there are many things we need to learn, including this. Thanks!
From the answer by Tay B at https://stackoverflow.com/a/38619425/3176550
import boto3

dynamodb = boto3.resource('dynamodb',
                          aws_session_token=aws_session_token,
                          aws_access_key_id=aws_access_key_id,
                          aws_secret_access_key=aws_secret_access_key,
                          region_name=region)

table = dynamodb.Table('widgetsTableName')

response = table.scan()
data = response['Items']
while 'LastEvaluatedKey' in response:
    response = table.scan(ExclusiveStartKey=response['LastEvaluatedKey'])
    data.extend(response['Items'])
After hours of searching, I've finally found a better solution. For those who are new to DynamoDB, you shouldn't miss this: http://docs.aws.amazon.com/amazondynamodb/latest/gettingstartedguide/GettingStarted.Python.04.html
from __future__ import print_function  # Python 2/3 compatibility
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr

# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            if o % 1 > 0:
                return float(o)
            else:
                return int(o)
        return super(DecimalEncoder, self).default(o)

dynamodb = boto3.resource('dynamodb', region_name='us-west-2', endpoint_url="http://localhost:8000")
table = dynamodb.Table('Movies')

fe = Key('year').between(1950, 1959)
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = {"#yr": "year"}
esk = None

response = table.scan(
    FilterExpression=fe,
    ProjectionExpression=pe,
    ExpressionAttributeNames=ean
)
for i in response['Items']:
    print(json.dumps(i, cls=DecimalEncoder))

# As long as LastEvaluatedKey is in the response, there are still items to fetch
while 'LastEvaluatedKey' in response:
    response = table.scan(
        ProjectionExpression=pe,
        FilterExpression=fe,
        ExpressionAttributeNames=ean,
        ExclusiveStartKey=response['LastEvaluatedKey']
    )
    for i in response['Items']:
        print(json.dumps(i, cls=DecimalEncoder))
You can try the following code (note this uses the older boto-style scan interface, and the stop condition is left as a placeholder):
esk = None
while True:
    scan_generator = YourTableName.scan(max_results=10, exclusive_start_key=esk)
    for item in scan_generator:
        # your code for processing
        pass
    # condition to check if the entire table has been scanned
    if entire_table_scanned:  # placeholder for your own stop condition
        break
    # Load the last keys
    esk = scan_generator.kwargs['exclusive_start_key'].values()
Here is the reference documentation link.
Hope that helps
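Since the question started with get_paginator('scan'), here is a short boto3 sketch of that approach (the table name is a placeholder); the paginator follows LastEvaluatedKey internally, so no manual ExclusiveStartKey handling is needed:
import boto3

client = boto3.client('dynamodb', region_name='ap-southeast-2')
paginator = client.get_paginator('scan')

items = []
# Each page is one Scan response; the paginator chains them automatically
for page in paginator.paginate(TableName='my-table'):
    items.extend(page['Items'])

print(len(items))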
Bit more verbose but I like it.
import boto3
from boto3.dynamodb.conditions import Key

table = boto3.resource('dynamodb').Table('yourTableName')  # placeholder name

def fetch_from_table(last_key=None):
    if last_key:
        response = table.query(
            IndexName='advertCatalogIdx',
            KeyConditionExpression=Key('sk').eq('CATALOG'),
            Limit=5,
            ExclusiveStartKey=last_key
        )
    else:
        response = table.query(
            IndexName='advertCatalogIdx',
            KeyConditionExpression=Key('sk').eq('CATALOG'),
            Limit=5
        )
    # print(response)
    for item in response['Items']:
        print(item['address'])
    print('***************************')
    return response.get('LastEvaluatedKey')

last_key = fetch_from_table()
while last_key is not None:
    print("Running again : ")
    last_key = fetch_from_table(last_key)
import sys
import boto3

client = boto3.client('dynamodb')

marker = None
while True:
    paginator = client.get_paginator('list_tables')
    page_iterator = paginator.paginate(
        PaginationConfig={
            'MaxItems': 1000,
            'PageSize': 100,
            'StartingToken': marker})
    for page in page_iterator:
        tables = page['TableNames']
        for table in tables:
            print(table)
        try:
            marker = page['NextToken']
        except KeyError:
            sys.exit()