I have the code below, which posts a single calendar event to the Office 365 REST API. I need to add about 100 events to my calendar. Is there any way to place multiple events in the JSON payload, or should I use a for loop?
import urllib2
import getpass
import os
import json
import sys
import base64
# Set the request parameters
url = 'https://outlook.office365.com/api/v1.0/me/events?$Select=Start,End'
user = 'emailuser@email.com'
pwd = getpass.getpass('Please enter your AD password: ')
# Create JSON payload
data = {
    "Subject": "My Subject",
    "Body": {
        "ContentType": "HTML",
        "Content": ""
    },
    "Start": "2015-08-11T07:00:00-05:00",
    "StartTimeZone": "Central Standard Time",
    "End": "2015-08-11T15:00:00-05:00",
    "EndTimeZone": "Central Standard Time",
}
json_payload = json.dumps(data)
# Build the HTTP request
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(url, data=json_payload)
auth = base64.encodestring('%s:%s' % (user, pwd)).replace('\n', '')
request.add_header('Authorization', 'Basic %s' % auth)
request.add_header('Content-Type', 'application/json')
request.add_header('Accept', 'application/json')
request.get_method = lambda: 'POST'
# Perform the request
result = opener.open(request)
Batch processing is on our road map, but it isn't there today.
I ended up making a function to create one event at a time and calling the function on each iteration:
def create_event(date1, date2):
    # Create JSON payload
    data = {
        "Subject": admin.longname,
        "Body": {
            "ContentType": "HTML",
            "Content": ""
        },
        "Start": date1,
        "StartTimeZone": "Central Standard Time",
        "End": date2,
        "EndTimeZone": "Central Standard Time",
    }
    json_payload = json.dumps(data)
    # Build the HTTP request
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    request = urllib2.Request(url, data=json_payload)
    auth = base64.encodestring('%s:%s' % (user, pwd)).replace('\n', '')
    request.add_header('Authorization', 'Basic %s' % auth)
    request.add_header('Content-Type', 'application/json')
    request.add_header('Accept', 'application/json')
    request.get_method = lambda: 'POST'
    # Perform the request
    result = opener.open(request)
# (these imports are needed for the date math below)
import datetime
from dateutil.relativedelta import relativedelta, SU

def A(weeka, weekb):
    today = datetime.date.today()
    firstday = today + relativedelta(weekday=SU(+weeka))
    for i in range(5):
        firstday += datetime.timedelta(days=1)
        date1 = '%sT07:00:00-05:00' % firstday
        date2 = '%sT16:00:00-05:00' % firstday
        create_event(date1, date2)
A(1,2)
Currently I am doing unit testing in FastAPI using from fastapi.testclient import TestClient.
def test_login_api_returns_token(session, client):
    form_data = {
        "username": "mike@gmail.com",
        "password": "mike"
    }
    response = client.post(
        "/api/login",
        data=form_data,
        headers={"content-type": "multipart/form-data"}
        # headers={"content-type": "application/x-www-form-urlencoded"}
    )
    result = response.json()
    assert response.status_code == 200
I am supposed to get a token as the response, which I do get when I run the FastAPI application, but I am not able to get the same result in the unit test.
(An example of the Postman request for the same endpoint was attached here.)
How do I make sure form data is being sent from TestClient?
api/login.py
@router.post("/login")
async def user_login(form_data: OAuth2PasswordRequestForm = Depends(), session: Session = Depends(get_session)):
    user = authenticated_user(form_data.username, form_data.password, session)
    user = user[0]
    if not user:
        raise token_exception()
    token_expires = timedelta(minutes=120)
    token = create_access_token(username=user.username, user_id=user.id, expires_delta=token_expires)
    token_exp = jwt.decode(token, SECRET, algorithms=[ALGORITHM])
    return {
        "status_code": status.HTTP_200_OK,
        "data": {
            "username": user.username,
            "token": token,
            "expiry": token_exp['exp'],
            "user_id": user.id
        }
    }
Try sending the payload as form data with the Content-Type set to application/x-www-form-urlencoded, like:
def test_login_api_returns_token(session, client):
    form_data = {
        "username": "mike@gmail.com",
        "password": "mike"
    }
    response = client.post(
        "/api/login",
        data=form_data,
        headers={'Content-Type': 'application/x-www-form-urlencoded'}
    )
    result = response.json()
    assert response.status_code == 200
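For reference, a minimal variant of that test (a sketch, assuming the same client fixture and /api/login route shown above): passing a dict via data= makes TestClient encode the body as application/x-www-form-urlencoded on its own, which is what OAuth2PasswordRequestForm parses, so the Content-Type header does not need to be set by hand.
def test_login_returns_token_minimal(client):
    # data= (not json=) sends form-encoded fields, which OAuth2PasswordRequestForm expects
    response = client.post(
        "/api/login",
        data={"username": "mike@gmail.com", "password": "mike"},
    )
    assert response.status_code == 200
    # the login route above nests the token under "data"
    assert "token" in response.json()["data"]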
I would like to create a script in Python that creates events for each individual user. I know how to get the list of calendars, but I would like the script to check whether a calendar with that user's name already exists and, if there isn't one, create it.
Thanks
from datetime import datetime, timedelta
from cal_setup import get_calendar_service
currentDateTime = datetime.now()
userfinder = 'Tuxone'
#Create a new calendar
calendar_name = userfinder
calendar = {
    'summary': calendar_name,
    'timeZone': 'America/Los_Angeles'
}
created_calendar = get_calendar_service().calendars().insert(body=calendar).execute()
#Print calendar list
# # calendar_list_entry = get_calendar_service().calendarList().get(calendarId=created_calendar['id']).execute()
# # print(calendar_list_entry)
def main():
    # creates one hour event tomorrow 10 AM IST
    service = get_calendar_service()
    d = datetime.now().date()
    tomorrow = datetime(d.year, d.month, d.day, 10) + timedelta(days=1)
    start = tomorrow.isoformat()
    end = (tomorrow + timedelta(hours=1)).isoformat()
    event_result = service.events().insert(calendarId='ici1sgh7untsn7650edoknqblk@group.calendar.google.com',
        body={
            "summary": f'{currentDateTime}',
            "description": 'This is a tutorial example of automating google calendar with python',
            "start": {"dateTime": start, "timeZone": 'Asia/Kolkata'},
            "end": {"dateTime": end, "timeZone": 'Asia/Kolkata'},
        }
    ).execute()
    print("created event")
    print("id: ", event_result['id'])
    print("summary: ", event_result['summary'])
    print("starts at: ", event_result['start']['dateTime'])
    print("ends at: ", event_result['end']['dateTime'])

if __name__ == '__main__':
    main()
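For the "create the calendar only if it does not already exist" part, a rough, untested sketch along these lines should work; it assumes get_calendar_service() from cal_setup returns an authorized Calendar API v3 service and that the calendar's summary field holds the user name:
def get_or_create_calendar(service, name, time_zone='America/Los_Angeles'):
    # Look for an existing calendar whose summary matches the user name
    page_token = None
    while True:
        calendar_list = service.calendarList().list(pageToken=page_token).execute()
        for entry in calendar_list.get('items', []):
            if entry.get('summary') == name:
                return entry['id']
        page_token = calendar_list.get('nextPageToken')
        if not page_token:
            break
    # Not found: create it and return the new calendar's id
    created = service.calendars().insert(
        body={'summary': name, 'timeZone': time_zone}
    ).execute()
    return created['id']
main() above could then pass calendarId=get_or_create_calendar(service, userfinder) to events().insert() instead of the hard-coded calendar ID.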
I am aware of the HTTP Data Collector API that can be used to push data into Azure Log Analytics; my question here is about getting AWS CloudWatch data into Azure. We have an Azure-hosted application and external AWS-hosted serverless Lambda functions, and we want to import the logs of those 13 serverless functions into Azure. I know from the documentation that there is a Python function that can be used as an AWS Lambda function, and the Python example is in the MSFT documentation. What I am failing to understand is what JSON format the AWS collector needs to create so it can be sent to Azure Log Analytics. Any examples of this? Any help on how this can be done? I have come across this blog as well, but it is Splunk specific: https://www.splunk.com/blog/2017/02/03/how-to-easily-stream-aws-cloudwatch-logs-to-splunk.html
Hey, never mind, I was able to dig a little deeper and found that in AWS I can stream the logs from one Lambda to another Lambda function through a subscription. Once that was set up, all I did was consume that, create the JSON on the fly, and send it to Azure Logs. In case you or anyone else is interested, the following is the code:
import json
import datetime
import hashlib
import hmac
import base64
import boto3
import datetime
import gzip
from botocore.vendored import requests
from datetime import datetime
# Update the customer ID to your Log Analytics workspace ID
customer_id = "XXXXXXXYYYYYYYYYYYYZZZZZZZZZZ"
# For the shared key, use either the primary or the secondary Connected Sources client authentication key
shared_key = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
# The log type is the name of the event that is being submitted
log_type = 'AWSLambdafuncLogReal'
json_data = [{
    "slot_ID": 12345,
    "ID": "5cdad72f-c848-4df0-8aaa-ffe033e75d57",
    "availability_Value": 100,
    "performance_Value": 6.954,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "true"
},
{
    "slot_ID": 67890,
    "ID": "b6bee458-fb65-492e-996d-61c4d7fbb942",
    "availability_Value": 100,
    "performance_Value": 3.379,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "false"
}]
#body = json.dumps(json_data)
#####################
######Functions######
#####################
# Build the API signature
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
    x_headers = 'x-ms-date:' + date
    string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
    bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
    decoded_key = base64.b64decode(shared_key)
    encoded_hash = base64.b64encode(
        hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
    authorization = "SharedKey {}:{}".format(customer_id, encoded_hash)
    return authorization
# Build and send a request to the POST API
def post_data(customer_id, shared_key, body, log_type):
    method = 'POST'
    content_type = 'application/json'
    resource = '/api/logs'
    rfc1123date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    print(rfc1123date)
    content_length = len(body)
    signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
    uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
    headers = {
        'content-type': content_type,
        'Authorization': signature,
        'Log-Type': log_type,
        'x-ms-date': rfc1123date
    }
    response = requests.post(uri, data=body, headers=headers)
    if (response.status_code >= 200 and response.status_code <= 299):
        print("Accepted")
    else:
        print("Response code: {}".format(response.status_code))
        print(response.text)
def lambda_handler(event, context):
    cloudwatch_event = event["awslogs"]["data"]
    decode_base64 = base64.b64decode(cloudwatch_event)
    decompress_data = gzip.decompress(decode_base64)
    log_data = json.loads(decompress_data)
    print(log_data)
    awslogdata = json.dumps(log_data)
    post_data(customer_id, shared_key, awslogdata, log_type)
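In case it helps anyone reading the handler above: after base64-decoding and gunzipping event["awslogs"]["data"], log_data follows the standard CloudWatch Logs subscription payload shape, roughly like the sketch below (all values are made-up placeholders), and that is the JSON that ends up being re-serialized and posted to Log Analytics:
# Rough shape of log_data after the decode/decompress steps (illustrative values only)
log_data = {
    "messageType": "DATA_MESSAGE",
    "owner": "123456789012",
    "logGroup": "/aws/lambda/my-serverless-func",      # hypothetical log group name
    "logStream": "2020/01/01/[$LATEST]abcdef1234567890",
    "subscriptionFilters": ["my-subscription-filter"],
    "logEvents": [
        {
            "id": "31953106606966983378809025079804211143289615424298221568",
            "timestamp": 1578123456789,
            "message": "START RequestId: 6f6b... Version: $LATEST"
        }
    ]
}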
I read the documentation yesterday and did some coding with Python to fetch data in the following way. It's working fine.
import logging as log
import adal
import requests
import json
import datetime
from pprint import pprint
# Details of workspace. Fill in details for your workspace.
resource_group = 'Test'
workspace = 'FirstMyWorkspace'
# Details of query. Modify these to your requirements.
query = "Type=*"
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(hours=24)
num_results = 2 # If not provided, a default of 10 results will be used.
# IDs for authentication. Fill in values for your service principal.
subscription_id = '{subscription_id}'
tenant_id = '{tenant_id}'
application_id = '{application_id}'
application_key = '{application_key}'
# URLs for authentication
authentication_endpoint = 'https://login.microsoftonline.com/'
resource = 'https://management.core.windows.net/'
# Get access token
context = adal.AuthenticationContext('https://login.microsoftonline.com/' + tenant_id)
token_response = context.acquire_token_with_client_credentials('https://management.core.windows.net/', application_id, application_key)
access_token = token_response.get('accessToken')
# Add token to header
headers = {
    "Authorization": 'Bearer ' + access_token,
    "Content-Type": 'application/json'
}
# URLs for retrieving data
uri_base = 'https://management.azure.com'
uri_api = 'api-version=2015-11-01-preview'
uri_subscription = 'https://management.azure.com/subscriptions/' + subscription_id
uri_resourcegroup = uri_subscription + '/resourcegroups/'+ resource_group
uri_workspace = uri_resourcegroup + '/providers/Microsoft.OperationalInsights/workspaces/' + workspace
uri_search = uri_workspace + '/search'
# Build search parameters from query details
search_params = {
    "query": query,
    "top": num_results
}
# Build URL and send post request
uri = uri_search + '?' + uri_api
response = requests.post(uri, json=search_params,headers=headers)
# Response of 200 if successful
if response.status_code == 200:
    # Parse the response to get the ID and status
    data = response.json()
    if data.get("__metadata", {}).get("resultType", "") == "error":
        log.warn("oms_fetcher;fetch_job;error: " + ''.join('{}={}, '.format(key, val) for key, val in
                 data.get("error", {}).items()))
    else:
        print data["value"]
        search_id = data["id"].split("/")
        id = search_id[len(search_id)-1]
        status = data["__metadata"]["Status"]
        print status
        # If status is pending, then keep checking until complete
        while status == "Pending":
            # Build URL to get search from ID and send request
            uri_search = uri_search + '/' + id
            uri = uri_search + '?' + uri_api
            response = requests.get(uri, headers=headers)
            # Parse the response to get the status
            data = response.json()
            status = data["__metadata"]["Status"]
            print id
else:
    # Request failed
    print (response.status_code)
    response.raise_for_status()
Today I went to the same webpage I followed yesterday, but the documentation has changed. Do I need to follow the new documentation? I tried the new documentation too, but ran into an issue:
url = "https://api.loganalytics.io/v1/workspaces/{workspace_id}/query"
headers = {
    "X-Api-Key": "{api_key}",
    "Content-Type": 'application/json'
}
search_param = {
}
res = requests.post(url=url, json=search_param, headers=headers)
print res.status_code
print res.json()
{u'error': {u'innererror': {u'message': u'The given API Key is not valid for the request',
                            u'code': u'UnsupportedKeyError'},
            u'message': u'Valid authentication was not provided',
            u'code': u'AuthorizationRequiredError'}}
Here is the link to the documentation.
The api_key is not the OMS primary key shown on the Portal. You could check the example in this link. The token should look like below:
Authorization: Bearer <access token>
So, you need to change the "X-Api-Key": "{api_key}" header to Authorization: Bearer <access token>.
You need to create a service principal first, please check this link.
Then, you can use the service principal to get a token, please check this link.
Note: you could use your existing code to get the token, but you need to change the resource to https://api.loganalytics.io, like below:
# Get access token
context = adal.AuthenticationContext('https://login.microsoftonline.com/' + tenant_id)
token_response = context.acquire_token_with_client_credentials('https://api.loganalytics.io', application_id, application_key)
access_token = token_response.get('accessToken')
# Add token to header
headers = {
    "Authorization": 'Bearer ' + access_token,
    "Content-Type": 'application/json'
}
Working prototype to query an OMS / Log Analytics workspace:
import adal
import requests
import json
import datetime
from pprint import pprint
# Details of workspace. Fill in details for your workspace.
resource_group = 'xxx'
workspace = 'xxx'
workspaceid = 'xxxx'
# Details of query. Modify these to your requirements.
query = "AzureActivity | limit 10"
# IDs for authentication. Fill in values for your service principal.
subscription_id = 'xxxx'
# subscription_id = 'xxxx'
tenant_id = 'xxxx'
application_id = 'xxxx'
application_key = 'xxxxx'
# Get access token
context = adal.AuthenticationContext('https://login.microsoftonline.com/' + tenant_id)
token_response = context.acquire_token_with_client_credentials('https://api.loganalytics.io', application_id, application_key)
access_token = token_response.get('accessToken')
# Add token to header
headers = {
    "Authorization": 'Bearer ' + access_token,
    "Content-Type": 'application/json'
}
search_params = {
    "query": query
}
url = "https://api.loganalytics.io/v1/workspaces/{workspaceID}/query"
res = requests.post(url=url, json=search_params, headers=headers)
print (res.status_code)
print (res.json())
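One extra note on reading the result: the /query endpoint returns tables (columns plus rows) rather than a flat list of records, so a small, untested sketch for flattening it into dicts, assuming the documented tables/columns/rows response shape, could look like:
# Flatten the tables/columns/rows response into a list of dicts (sketch)
result = res.json()
records = []
for table in result.get('tables', []):
    column_names = [col['name'] for col in table['columns']]
    for row in table['rows']:
        records.append(dict(zip(column_names, row)))
print(records[:2])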
I'm trying to use this event calendar in Django: http://jquery-week-calendar.googlecode.com/svn/trunk/jquery.weekcalendar/full_demo/weekcalendar_full_demo.html
I'm supposed to write the AJAX code myself, but I'm a newbie with jQuery/AJAX. I want to send event data (startTime, endTime, etc.) to show on the calendar:
$('#calendar').weekCalendar({
    data: function(callback){
        $.getJSON("{% url DrHub.views.getEvents %}",
            {},
            function(result) {
                callback(result);
            }
        );
    }
});
The calendar expects data in this format:
return {
    events : [
        {
            "id": 1,
            "start": new Date(year, month, day, 12),
            "end": new Date(year, month, day, 13, 30),
            "title": "Lunch with Mike"
        },
        {
            "id": 2,
            "start": new Date(year, month, day, 14),
            "end": new Date(year, month, day, 14, 45),
            "title": "Dev Meeting"
        },
        ...
    ]
};
How can I format the data fetched from the database in the getEvents view?
from django.utils import simplejson
from django.http import HttpResponse

def some_view(request):
    # Build the output -> it's a standard Python dict
    output = {
        "events": [
            {
                "id": 1,
                "start": "2009-05-10T13:15:00.000+10:00",
                "end": "2009-05-10T14:15:00.000+10:00",
                "title": "Lunch with Mike"
            },
        ]
    }
    # With db data you would do something like:
    # events = Event.objects.all()
    # for event in events:
    #     event_out = {
    #         "title": event.title,
    #         # other fields here
    #     }
    #     output['events'].append(event_out)
    # Return the output as JSON
    return HttpResponse(simplejson.dumps(output), mimetype='application/json')
You can construct the dictionary as usual; just take into account that date strings will not be interpreted as dates in JavaScript without extra processing. My advice is to directly send JavaScript-interpretable dates (millisecond timestamps), not strings, as follows:
from django.utils import simplejson
import datetime
import time
occ.start = time.mktime(occ.start.timetuple())*1000
occ.end = time.mktime(occ.end.timetuple())*1000
event = {
    'id': occ.id,
    'title': occ.title,
    'start': occ.start,
    'end': occ.end,
    'body': occ.description,
    'readOnly': '%r' % occ.read_only,
    'recurring': '%r' % occ.recurring,
    'persisted': '%r' % occ.persisted,
    'event_id': occ.event.id
}
mimetype = 'application/json'
return HttpResponse(simplejson.dumps(event),mimetype)
Take into account that the calendar expects the 'events' key, so:
$.getJSON(url, function(data){
    res = {events: data};
    //alert(JSON.stringify(res, null, 4));
    callback(res);
});
If you prefer processing on the JavaScript side, try the Datejs library, which can convert a date from text.