I'm going to back up my Cognito users with a Lambda function, but I can't get the Cognito users list in JSON format with boto3. I do:
import boto3
import os
import json
from botocore.exceptions import ClientError

# Backup job configuration, all taken from the Lambda environment.
COGNITO_POOL_ID = os.getenv('POOL_ID')
S3_BUCKET = os.getenv('BACKUP_BUCKET')
ENV_NAME = os.getenv('ENV_NAME')
REGION = os.getenv('REGION')
filename = ENV_NAME + "-cognito-backup.json"

cognito = boto3.client('cognito-idp', region_name=REGION)
s3 = boto3.resource('s3')


def lambda_handler(event, context):
    """Dump the pool's users (email fields only) into an S3 object."""
    try:
        users = cognito.list_users(
            UserPoolId=COGNITO_POOL_ID,
            AttributesToGet=['email_verified', 'email'],
        )['Users']
        # NOTE: str() is applied before json.dumps, so the uploaded
        # payload is one JSON-quoted string, not a JSON array.
        payload = json.dumps(str(users)).encode('UTF-8')
        s3.Object(S3_BUCKET, filename).put(Body=bytes(payload))
    except ClientError as error:
        print(error)
But I get one string, and I'm not sure it's valid JSON at all:
[{'Username': 'user1', 'Attributes': [{'Name': 'email_verified', 'Value': 'true'}, {'Name': 'email', 'Value': 'user1#xxxx.com'}], 'UserCreateDate': datetime.datetime(2020, 2, 10, 13, 13, 34, 457000, tzinfo=tzlocal()), 'UserLastModifiedDate': datetime.datetime(2020, 2, 10, 13, 13, 34, 457000, tzinfo=tzlocal()), 'Enabled': True, 'UserStatus': 'FORCE_CHANGE_PASSWORD'}]
I need something like this:
[
{
"Username": "user1",
"Attributes": [
{
"Name": "email_verified",
"Value": "true"
},
{
"Name": "email",
"Value": "user1#xxxx.com"
}
],
"Enabled": "true",
"UserStatus": "CONFIRMED"
}
]
Try this:
import json

# ast.literal_eval cannot parse JSON's true/false/null literals, and
# json.dumps raises TypeError on the datetime values present in the
# list_users response; default=str stringifies those so the round
# trip succeeds.
print(json.loads(json.dumps(response, default=str)))
This handles the dict response from the SDK.
Edit: I just realized that since the list_users SDK response also contains a UserCreateDate object, json.dumps will complain about the conversion due to the datetime value of the UserCreateDate key. If you strip that out, this will work without the ast module -
import json

# A list_users-style record with the datetime fields removed
# serializes cleanly; note Python's True becomes JSON's true.
data = {
    'Username': 'Google_11761250',
    'Attributes': [{'Name': 'email', 'Value': 'abc#gmail.com'}],
    'Enabled': True,
    'UserStatus': 'EXTERNAL_PROVIDER',
}
print(json.dumps(data))
> {"Username": "Google_1176125910", "Attributes": [{"Name": "email", "Value": "123#gmail.com"}], "Enabled": true, "UserStatus": "EXTERNAL_PROVIDER"}
You can check the output type by using
type(output)
I guess that it can be list type, so you can convert it into JSON and prettyprint by using:
print(json.dumps(output, indent=4))
Related
Context
On Airflow using the GoogleCloudStorageToBigQueryOperator to load files from Google cloud storage into BigQuery.
Schema as per Bigquery documentation table schema.
Policy tags implemented as per documentation, tested manually via the UI - works as expected.
Blocker
The policy tags are not implemented when the load completes, even though it's specified in the schema fields. The other schema fields work as expected.
import airflow
from airflow import DAG
from google.cloud import bigquery
from airflow.contrib.operators.gcs_to_bq import GoogleCloudStorageToBigQueryOperator

# Default task arguments shared by every task in this DAG.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': airflow.utils.dates.days_ago(2),
    'email': ['airflow#example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 0,
}

# BigQuery schema for the load; the "email" column carries a policy
# tag (column-level security) alongside the usual field attributes.
SCHEMA_FIELDS = [
    {
        "name": "id",
        "mode": "NULLABLE",
        "type": "INT64",
        "fields": []
    },
    {
        "name": "email",
        "mode": "REQUIRED",
        "type": "STRING",
        "description": "test policy tags",
        "policyTags": {
            "names": ["projects/project-id/locations/location/taxonomies/taxonomy-id/policyTags/policytag-id"]
        }
    },
    {
        "name": "created_at",
        "mode": "NULLABLE",
        "type": "DATE",
        "fields": []
    },
]

with DAG(
        'gcs_to_bq',
        catchup=False,
        default_args=default_args,
        schedule_interval=None) as dag:
    DATASET_NAME = "temp"
    TABLE_NAME = "table"
    gcs_to_bq_load = GoogleCloudStorageToBigQueryOperator(
        task_id='gcs_to_bq_load',
        bucket="temp-bucket",
        source_objects=['dummy_data/data.csv'],
        source_format='CSV',
        skip_leading_rows=1,
        write_disposition='WRITE_TRUNCATE',
        destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",
        schema_fields=SCHEMA_FIELDS,
        # dag is already inherited from the context manager; passing it
        # explicitly is redundant but harmless.
        dag=dag)
    gcs_to_bq_load
I am trying to update security group tags using boto3. I have several security groups to update. This script filters based on a common tag and then adds other tags. I am using the following script:
import boto3
import sys

ec2 = boto3.client('ec2')

# Find every security group tagged type=test.
sgs = ec2.describe_security_groups(
    Filters=[{'Name': 'tag:type', 'Values': ['test']}])

# NOTE(review): this collects group *names*; create_tags takes
# resource IDs (sg-xxxxxxxx), which is what the error below is about.
ids = [group['GroupName'] for group in sgs['SecurityGroups']]

print("Changing tags for %d sgs" % len(ids))

# Apply the full tag set to every matched group in one call.
ec2.create_tags(
    Resources=ids,
    Tags=[
        {'Key': 'bu', 'Value': 'HR'},
        {'Key': 'product', 'Value': 'shared'},
        {'Key': 'environment', 'Value': 'dev'},
        {'Key': 'acc-no', 'Value': '883356'},
        {'Key': 'type', 'Value': 'client'},
        {'Key': 'app-id', 'Value': 'ae1'},
        {'Key': 'name', 'Value': 'all-enterprise'},
        {'Key': 'owner', 'Value': 'enterprise'},
        {'Key': 'role', 'Value': 'enterprise'}
    ]
)
I get the following error:
Changing tags for 1 sgs
Traceback (most recent call last):
File "C:\Users\charl\scv\boto\sgtest.py", line 18, in <module>
ec2.create_tags(
File "C:\python390\lib\site-packages\botocore\client.py", line 391, in _api_call
return self._make_api_call(operation_name, kwargs)
File "C:\python390\lib\site-packages\botocore\client.py", line 719, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (InvalidID) when calling the CreateTags operation: The ID 'packersg' is not valid
Can you tell me what I am doing wrong please?
# Original (incorrect): gathers GroupName values, which create_tags
# rejects with the InvalidID error shown above.
for reservation in sgs['SecurityGroups']:
ids.append(reservation['GroupName'])
This should be changed to
# Collect the security group IDs (sg-xxxxxxxx) that create_tags expects.
for reservation in sgs['SecurityGroups']:
    # The describe_security_groups response exposes the ID under the
    # key 'GroupId' (singular) — 'GroupIds' is not a key in each
    # group dict and would raise KeyError.
    ids.append(reservation['GroupId'])
This is because the create_tags function expects Security Group IDs (IDs for the security groups are in the format sg-xxxxxxxx) and not Security Group names.
The code for the lambda function is the following:
import json
import logging
import os
import time
import uuid
import boto3

dynamodb = boto3.resource('dynamodb')


def create(event, context):
    """Create a todo item from an API Gateway proxy event.

    NOTE(review): event['body'] exists only for API Gateway proxy
    requests; a bare JSON test event has no 'body' key and this
    lookup raises KeyError.
    """
    data = json.loads(event['body'])
    if 'text' not in data:
        logging.error("Validation Failed")
        raise Exception("Couldn't create the todo item.")

    timestamp = str(time.time())
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])

    # Every user-supplied field is filled from the single 'text' key.
    item = {
        'id': str(uuid.uuid1()),
        'name': data['text'],
        'description': data['text'],
        'price': data['text'],
        'createdAt': timestamp,
        'updatedAt': timestamp,
    }

    # write the todo to the database
    table.put_item(Item=item)

    # create a response
    return {
        "statusCode": 200,
        "body": json.dumps(item),
    }
The test using AWS' Lambda's testing feature is:
{
"name": "Masks",
"description": "A box of 50 disposable masks",
"price": "$10"
}
The log output is:
START RequestId: 5cf1c00a-dba5-4ef6-b5e7-b692d8235ffe Version: $LATEST
[ERROR] KeyError: 'body'
Traceback (most recent call last):
File "/var/task/todos/create.py", line 12, in create
data = json.loads(event['body'])END RequestId: 5cf1c00a-dba5-4ef6-b5e7-b692d8235ffe
Why is "body" giving me a KeyError? How do I fix this? The template is directly from www.serverless.com, and based on online tutorials, people have used the exact same code, albeit with different values, successfully.
I've tried changing variable names and value to no avail.
sls deploy
Does successfully create the table, but I am unable to input any data into it.
Edit 1: For those of you unfamiliar with AWS Lambda's test feature, using Postman to input the same data leads to a 502 Bad Gateway error.
Assuming that this is the correct event object:
{
"name": "Masks",
"description": "A box of 50 disposable masks",
"price": "$10"
}
your code which matches this event should be:
import json
import logging
import os
import time
import uuid
import boto3

dynamodb = boto3.resource('dynamodb')


def create(event, context):
    """Store a todo item built directly from the event's top-level keys."""
    now = str(time.time())
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])

    # The event itself carries name/description/price (there is no
    # 'body' envelope), matching a direct Lambda test invocation.
    item = {
        'id': str(uuid.uuid1()),
        'name': event['name'],
        'description': event['description'],
        'price': event['price'],
        'createdAt': now,
        'updatedAt': now,
    }

    # write the todo to the database
    table.put_item(Item=item)

    # create a response
    return {
        "statusCode": 200,
        "body": json.dumps(item),
    }
I'm trying to do the following request with Django:
I tried the following code but it doesn't work :
# Credentials must be string literals: the original used a bare
# `admin` (an undefined name, so NameError) and the integer 123.
data = {'username': 'admin',
        'password': '123',
        'grant_type': 'password',
        'client_id': 'xxxx',
        'client_secret': 'xxxx'}
# requests form-encodes a dict body by default, so this header is
# redundant but harmless.
headers = {'content-type': 'application/x-www-form-urlencoded'}
r = requests.post(url, data=data, headers=headers)
Thanks for your help !
It is form-encoded by default.
Typically, you want to send some form-encoded data — much like an HTML
form. To do this, simply pass a dictionary to the data argument. Your
dictionary of data will automatically be form-encoded when the request
is made.
>>> payload = {'key1': 'value1', 'key2': 'value2'}
>>> r = requests.post("http://httpbin.org/post", data=payload)
>>> print r.text
{
"origin": "179.13.100.4",
"files": {},
"form": {
"key2": "value2",
"key1": "value1"
},
"url": "http://httpbin.org/post",
"args": {},
"headers": {
"Content-Length": "23",
"Accept-Encoding": "identity, deflate, compress, gzip",
"Accept": "*/*",
"User-Agent": "python-requests/0.8.0",
"Host": "127.0.0.1:7077",
"Content-Type": "application/x-www-form-urlencoded"
},
"data": ""
}
http://docs.python-requests.org/en/v0.10.7/user/quickstart/#make-a-post-request
I use django-social-auth as my authentication mechanism and I need to test my app with logged in users. I'm trying:
from django.test import Client

# The original username argument had mismatched quotes ('..."),
# which is a SyntaxError; both arguments now use matching quotes.
c = Client()
c.login(username='myfacebook#username.com', password='myfacebookpassword')
The user which is trying to login succeeds to login from a browser. The app is already allowed to access user's data.
Any ideas how to login from a unittest when using django-social-auth as the authentication mechanism?
Thanks
Create a fixture with User instances
{
"pk": 15,
"model": "auth.user",
"fields": {
"username": "user",
"first_name": "user",
"last_name": "userov",
"is_active": true,
"is_superuser": false,
"is_staff": false,
"last_login": "2012-07-20 15:37:03",
"groups": [],
"user_permissions": [],
"password": "!",
"email": "",
"date_joined": "2012-07-18 13:29:53"
}
}
Create a fixture with SocialAuthUser instances like this
{
"pk": 7,
"model": "social_auth.usersocialauth",
"fields": {
"uid": "1234567",
"extra_data": "%some account social data%",
"user": 15,
"provider": "linkedin"
}
}
So you will get the user, who has the same behavior as a real user and has all the social data you need.
Set the new password and then you can use the auth mechanism for log this user in:
...
# The fixture stored the unusable password "!", so assign a real one
# first; the standard ModelBackend login then succeeds.
user.set_password('password')
user.save()
logged_in = self.client.login(username='user', password='password')
and then just call the view with login required
self.client.get("some/url")
Don't forget that django.contrib.auth.backends.ModelBackend is needed, and django.contrib.sessions should be in your INSTALLED_APPS tuple.
Also, the advantage of using standard auth is that you don't need to make a server request for getting oauth tokens and so on.