How to dynamically configure an AWS SageMaker task with AWS Step Functions - amazon-web-services

I'm trying to build an ML pipeline using AWS Step Functions.
I would like to configure the 'CreateHyperParameterTuningJob' task dynamically, depending on the input of the task.
Here is a screenshot of the State Machine that I'm trying to build:
ML State Machine
When I try to create this State Machine, I get the following error:
The value for the field 'MaxParallelTrainingJobs' must be an INTEGER
I'm struggling to figure out what the issue is here.
Do you have any suggestions for making the State Machine configuration dynamic with Step Functions? Is this even possible?
Here is the input data passed to the 'Run training job' task:
{
"client_id": "test",
"training_job_definition": {
"AlgorithmSpecification": {
"TrainingImage": "433757028032.dkr.ecr.us-west-2.amazonaws.com/xgboost:latest",
"TrainingInputMode": "File"
},
"ResourceConfig": {
"InstanceCount": 1,
"InstanceType": "ml.m5.large",
"VolumeSizeInGB": 5
},
"StaticHyperParameters": {
"num_round": 750
},
"StoppingCondition": {
"MaxRuntimeInSeconds": 900
},
"InputDataConfig": [
{
"ChannelName": "train",
"CompressionType": "None",
"ContentType": "csv",
"DataSource": {
"S3DataSource": {
"S3DataDistributionType": "FullyReplicated",
"S3DataType": "S3Prefix",
"S3Uri": "..."
}
}
},
{
"ChannelName": "validation",
"CompressionType": "None",
"ContentType": "csv",
"DataSource": {
"S3DataSource": {
"S3DataDistributionType": "FullyReplicated",
"S3DataType": "S3Prefix",
"S3Uri": "..."
}
}
}
],
"OutputDataConfig": {
"S3OutputPath": "..."
},
"RoleArn": "arn:aws:iam::679298748479:role/landingzone_sagemaker_role"
},
"hyper_parameter_tuning_job_config": {
"HyperParameterTuningJobObjective": {
"MetricName": "validation:rmse",
"Type": "Minimize"
},
"Strategy": "Bayesian",
"ResourceLimits": {
"MaxParallelTrainingJobs": 2,
"MaxNumberOfTrainingJobs": 10
},
"ParameterRanges": {
"ContinuousParameterRanges": [
{
"Name": "eta",
"MinValue": 0.01,
"MaxValue": 0.04
},
{
"Name": "gamma",
"MinValue": 0,
"MaxValue": 100
},
{
"Name": "subsample",
"MinValue": 0.6,
"MaxValue": 1
},
{
"Name": "lambda",
"MinValue": 0,
"MaxValue": 5
},
{
"Name": "alpha",
"MinValue": 0,
"MaxValue": 2
}
],
"IntegerParameterRanges": [
{
"Name": "max_depth",
"MinValue": 5,
"MaxValue": 10
}
]
}
}
}
Here is the JSON that describes the State Machine:
{
"StartAt": "Generate Training Dataset",
"States": {
"Generate Training Dataset": {
"Resource": "arn:aws:lambda:uswest-2:012345678912:function:StepFunctionsSample-SageMaLambdaForDataGeneration-1TF67BUE5A12U",
"Type": "Task",
"Next": "Run training job"
},
"Run training job": {
"Resource": "arn:aws:states:::sagemaker:createHyperParameterTuningJob.sync",
"Parameters": {
"HyperParameterTuningJobName.$": "$.execution_date",
"HyperParameterTuningJobConfig": {
"HyperParameterTuningJobObjective": {
"MetricName": "$.hyper_parameter_tuning_job_config.HyperParameterTuningJobObjective.MetricName",
"Type": "Minimize"
},
"Strategy": "$.hyper_parameter_tuning_job_config.Strategy",
"ResourceLimits": {
"MaxParallelTrainingJobs": "$.hyper_parameter_tuning_job_config.ResourceLimits.MaxParallelTrainingJobs",
"MaxNumberOfTrainingJobs": "$.hyper_parameter_tuning_job_config.ResourceLimits.MaxNumberOfTrainingJobs"
},
"ParameterRanges": "$.hyper_parameter_tuning_job_config.ParameterRanges"
},
"TrainingJobDefinition": {
"AlgorithmSpecification": "$.training_job_definition.AlgorithmSpecification",
"StoppingCondition": "$.training_job_definition.StoppingCondition",
"ResourceConfig": "$.training_job_definition.ResourceConfig",
"RoleArn": "$.training_job_definition.RoleArn",
"InputDataConfig": "$.training_job_definition.InputDataConfig",
"OutputDataConfig": "$.training_job_definition.OutputDataConfig",
"StaticHyperParameters": "$.training_job_definition.StaticHyperParameters"
},
"HyperParameterTuningJobConfig.ResourceLimits": ""
},
"Type": "Task",
"End": true
}
}
}
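For reference, Amazon States Language only substitutes a JSON path when the parameter key itself ends in ".$"; a key without that suffix is passed through as a literal string, which is what produces the INTEGER error above. A hedged sketch of the ResourceLimits block written with that convention, reusing the paths from the input shown earlier:
"ResourceLimits": {
  "MaxParallelTrainingJobs.$": "$.hyper_parameter_tuning_job_config.ResourceLimits.MaxParallelTrainingJobs",
  "MaxNumberOfTrainingJobs.$": "$.hyper_parameter_tuning_job_config.ResourceLimits.MaxNumberOfTrainingJobs"
}
The same convention applies to the other parameters, and a whole object can be passed at once, e.g. "ResourceLimits.$": "$.hyper_parameter_tuning_job_config.ResourceLimits".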

Related

How to set or not set an audio selector in AWS MediaConvert?

I got this error when creating a job for AWS MediaConvert:
Invalid selector_sequence_id [0] specified for audio_description [1].
I do not even need sound for my output mp4 video.
My intention is to loop an image (png or jpg) for 2 seconds and add a fade effect to the first frames.
How would you change the JSON that is sent?
{
"middlewareStack": {},
"input": {
"Queue": "arn:aws:mediaconvert:eu-central-1:634617701827:queues/Default",
"UserMetadata": {},
"Role": "arn:aws:iam::634617701827:role/service-role/MediaConvert_Default_Role",
"Settings": {
"TimecodeConfig": {
"Anchor": "00:00:00:00",
"Source": "EMBEDDED"
},
"OutputGroups": [
{
"Name": "File Group",
"Outputs": [
{
"Preset": "createPromoVideo",
"Extension": "mp4",
"NameModifier": "_fade",
"VideoDescription": {
"CodecSettings": {
"FilterGraph": "fade=out:150:30"
},
"ScalingBehavior": "DEFAULT",
"TimecodeInsertion": "DISABLED",
"AntiAlias": "ENABLED",
"Sharpness": 50,
"Height": 1080,
"Width": 1080
},
"AudioDescriptions": [
{
"AudioSelector": {
"SelectorSettings": [
{
"AudioSelectorName": "Default"
}
]
},
"CodecSettings": {
"Codec": "AAC",
"AacSettings": {
"Bitrate": 96000,
"CodingMode": "CODING_MODE_2_0",
"SampleRate": 48000
}
}
}
]
}
],
"OutputGroupSettings": {
"Type": "FILE_GROUP_SETTINGS",
"FileGroupSettings": {
"Destination": "s3://t44-post-cover/8fui.mp4",
"DestinationSettings": {
"S3Settings": {
"AccessControl": {
"CannedAcl": "PUBLIC_READ"
}
}
}
}
}
}
],
"Inputs": [
{
"FileInput": "s3://t44-post-cover/8fui",
"VideoSelector": {
"ColorSpace": "FOLLOW"
},
"FilterEnable": "AUTO",
"TimecodeSource": "ZEROBASED",
"InputClippings": [
{
"StartTimecode": "00:00:00:00",
"EndTimecode": "00:00:02:00"
}
],
"FilterGraph": "fade=in:0:30",
"AudioSelectors": {
"Default": {
"DefaultSelection": "DEFAULT"
}
}
}
]
},
"AccelerationSettings": {
"Mode": "DISABLED"
},
"StatusUpdateInterval": "SECONDS_60",
"Priority": 0
}
}
AWS MediaConvert requires you to have at least one Audio Selector.
Just provide it with this simple one:
"Inputs": [
...
{
"AudioSelectors": {
"Audio Selector 1": {
"Offset": 0,
"DefaultSelection": "DEFAULT",
"SelectorType": "LANGUAGE_CODE",
"ProgramSelection": 1,
"LanguageCode": "ENM"
}
},
...
},
UPDATE:
A more barebones one:
"Inputs": [
...
{
"AudioSelectors": {
"Audio Selector 1": {
DefaultSelection: 'DEFAULT',
},
}
},
...
},

Deneb Waffle chart (percentage share) displays correctly in PBI Desktop but shows as blank in PBI Service

Could anyone please help with my issue? I've created a couple of Deneb visuals which seem to work fine in both PBI Desktop and the Service; however, the one I'm sharing doesn't work in the PBI Service: it shows as blank.
Do you know by chance what might be the problem?
Here is the JSON that I'm using:
{
"data": {"name": "dataset"},
"transform": [
{
"joinaggregate": [
{
"op": "sum",
"field": "NrOfSfhifts",
"as": "TotalOrigin"
}
]
},
{
"joinaggregate": [
{
"op": "sum",
"field": "NrOfSfhifts",
"as": "TotalOriginGrouped"
}
],
"groupby": ["NrOfSfhifts"]
},
{
"calculate": "round(datum.TotalOriginGrouped/datum.TotalOrigin * 100)",
"as": "PercentOfTotal"
},
{
"aggregate": [
{
"op": "average",
"field": "PercentOfTotal",
"as": "Percento"
}
],
"groupby": ["Dispatcher"]
},
{
"calculate": "sequence(1,datum.Percento+1)",
"as": "S"
},
{"flatten": ["S"]},
{
"window": [
{"op": "row_number", "as": "id"}
],
"sort": [
{
"op": "sum",
"field": "TotalOriginGrouped",
"order": "ascending"
}
]
},
{
"calculate": "ceil (datum.id / 10)",
"as": "row"
},
{
"calculate": "datum.id - datum.row * 10",
"as": "col"
}
],
"mark": {
"type": "circle",
"filled": true,
"tooltip": true,
"stroke": "black",
"strokeWidth": 2
},
"encoding": {
"x": {
"field": "col",
"type": "ordinal",
"axis": null,
"sort": "x"
},
"y": {
"field": "row",
"type": "ordinal",
"axis": null,
"sort": "y"
},
"color": {
"field": "Dispatcher",
"type": "nominal",
"sort": [
{
"op": "sum",
"field": "TotalOriginGrouped",
"order": "descending"
}
],
"scale": {
"range": [
"#FFD300",
"#ed3419",
"lightgray",
"white",
"black",
"olive",
"lightblue"
]
},
"legend": {
"orient": "right",
"offset": 10,
"labelOffset": 3,
"titlePadding": 5,
"titleFontSize": 10
}
},
"size": {"value": 330},
"tooltip": [
{
"field": "Dispatcher",
"type": "nominal"
},
{
"field": "Percento",
"type": "quantitative",
"format": "0",
"formatType": "pbiFormat"
}
]
}
}
Thank you!
That is my code :).
If this works in Desktop but not in the Service, then your admin has probably disabled non-native visuals. You should ask them to enable at least certified visuals, as there is no danger from those.

Can I partially color a bar based on percentage complete on a temporal axis?

I am trying to create a Gantt chart and I want to color a single task with two colors, based on a percentage complete. Say, make the complete part green and the remaining part orange.
How can I achieve this?
Below is sample code, also available in the editor here.
{
"data": {
"values": [
{"Description": "Task 1", "Start": "2023-01-05", "End": "2023-01-10", "Percentage complete": 0},
{"Description": "Task 2", "Start": "2023-01-01", "End": "2023-01-15", "Percentage complete": 75},
{"Description": "Task 3", "Start": "2023-01-01", "End": "2023-01-03", "Percentage complete": 100}
]
},
"layer": [
{
"mark": "bar",
"encoding": {
"y": {
"field": "Description",
"type": "ordinal",
"stack": null
},
"x": {
"field": "Start",
"type": "temporal"
},
"x2": {
"field": "End",
"type": "temporal"
}
}
}
]
}
The expected result should look like this.
I tried looking at folding, transforms, and scales, but as I am new to Vega-Lite, to no avail.
You have two options.
Reshape your data upstream: each partially coloured bar is then rendered as two bars, one for the complete portion and one for the remaining portion (a rough Vega-Lite sketch of this approach follows below).
Use Reactive Geometry, as described here. This may need Vega rather than Vega-Lite.
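A hedged sketch of the first approach, assuming the split point has been precomputed upstream into a hypothetical Split column (here placed roughly 75% of the way between Start and End for Task 2):
{
  "description": "Sketch only: Split is a hypothetical precomputed column marking the completion point.",
  "data": {
    "values": [
      {"Description": "Task 2", "Start": "2023-01-01", "Split": "2023-01-11", "End": "2023-01-15"}
    ]
  },
  "layer": [
    {
      "mark": {"type": "bar", "color": "green"},
      "encoding": {
        "y": {"field": "Description", "type": "ordinal"},
        "x": {"field": "Start", "type": "temporal"},
        "x2": {"field": "Split"}
      }
    },
    {
      "mark": {"type": "bar", "color": "orange"},
      "encoding": {
        "y": {"field": "Description", "type": "ordinal"},
        "x": {"field": "Split", "type": "temporal"},
        "x2": {"field": "End"}
      }
    }
  ]
}
No stacking transform is needed here because each layer draws a ranged bar from x to x2: the green layer covers Start to Split and the orange layer covers Split to End.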
Here it is using reactive geometry.
{
"$schema": "https://vega.github.io/schema/vega/v5.json",
"background": "white",
"padding": 5,
"width": 200,
"style": "cell",
"data": [
{
"name": "source_0",
"values": [
{
"Description": "Task 1",
"Start": "2023-01-05",
"End": "2023-01-10",
"Percentatecomplete": 0
},
{
"Description": "Task 2",
"Start": "2023-01-01",
"End": "2023-01-15",
"Percentatecomplete": 0.75
},
{
"Description": "Task 3",
"Start": "2023-01-01",
"End": "2023-01-03",
"Percentatecomplete": 1
}
]
},
{
"name": "data_0",
"source": "source_0",
"transform": [
{"type": "formula", "expr": "toDate(datum[\"Start\"])", "as": "Start"},
{"type": "formula", "expr": "toDate(datum[\"End\"])", "as": "End"},
{
"type": "filter",
"expr": "(isDate(datum[\"Start\"]) || (isValid(datum[\"Start\"]) && isFinite(+datum[\"Start\"])))"
}
]
}
],
"signals": [
{"name": "y_step", "value": 20},
{
"name": "height",
"update": "bandspace(domain('y').length, 0.1, 0.05) * y_step"
}
],
"marks": [
{
"name": "layer_0_marks",
"type": "rect",
"style": ["bar"],
"from": {"data": "data_0"},
"encode": {
"update": {
"fill": {"value": "#4c78a8"},
"x": {"scale": "x", "field": "Start"},
"x2": {"scale": "x", "field": "End"},
"y": {"scale": "y", "field": "Description"},
"height": {"signal": "max(0.25, bandwidth('y'))"}
}
}
},
{
"type": "rect",
"from": {"data": "layer_0_marks"},
"encode": {
"update": {
"x": {"field": "x"},
"y": {"field": "y"},
"fill": {"value": "red"},
"width": {"signal": "(datum.x2 - datum.x) * datum.datum.Percentatecomplete"},
"height": {"signal": "max(0.25, bandwidth('y'))"}
}
}
}
],
"scales": [
{
"name": "x",
"type": "time",
"domain": {"data": "data_0", "fields": ["Start", "End"]},
"range": [0, {"signal": "width"}]
},
{
"name": "y",
"type": "band",
"domain": {"data": "data_0", "field": "Description", "sort": true},
"range": {"step": {"signal": "y_step"}},
"paddingInner": 0.1,
"paddingOuter": 0.05
}
],
"axes": [
{
"scale": "x",
"orient": "bottom",
"gridScale": "y",
"grid": true,
"tickCount": {"signal": "ceil(width/40)"},
"domain": false,
"labels": false,
"aria": false,
"maxExtent": 0,
"minExtent": 0,
"ticks": false,
"zindex": 0
},
{
"scale": "x",
"orient": "bottom",
"grid": false,
"title": "Start, End",
"labelFlush": true,
"labelOverlap": true,
"tickCount": {"signal": "ceil(width/40)"},
"zindex": 0
},
{
"scale": "y",
"orient": "left",
"grid": false,
"title": "Description",
"zindex": 0
}
]
}

Highlighting bars in Deneb (Power BI) - fillOpacity

I have the chart on the left (code provided below) and would like to get the chart on the right. The right chart has the bars that correspond to a selected tier highlighted; the selected tier comes from a slicer. (The right chart shows Tier 1; however, the user may prefer a different tier.) I feel like this can be accomplished using fillOpacity. How do I get the highlighting?
{
"data": {
"values": [
{"name": "A", "group": "High", "tier": "Tier 3"},
{"name": "B", "group": "Med", "tier": "Tier 1"},
{"name": "C", "group": "High", "tier": "Tier 1"},
{"name": "D", "group": "High", "tier": "Tier 2"},
{"name": "E", "group": "Low", "tier": "Tier 3"},
{"name": "F", "group": "Low", "tier": "Tier 1"}
]
},
"transform": [
{
"aggregate": [
{
"field": "name",
"op": "count",
"as": "numProj"
}
],
"groupby": [
"name",
"group"
]
},
{
"stack": "numProj",
"groupby": ["group"],
"sort": [
{
"field": "name",
"order": "descending"
}
],
"as": "barTop"
}
],
"layer": [
{
"mark": {
"type": "bar",
"stroke": "black",
"strokeWidth": 1,
"tooltip": true
},
"encoding": {
"y": {
"field": "numProj",
"type": "quantitative",
"axis": {
"title": "Number of Projects",
"tickMinStep": 1
}
},
"fill": {
"field": "group",
"type": "nominal",
"scale": {
"domain": [
"Low",
"Med",
"High"
],
"range": [
"#e15759",
"#ffff00",
"#59a14f"
]
},
"legend": null
}
}
},
{
"mark": {
"type": "text",
"color": "black",
"dy": -10
},
"encoding": {
"y": {
"field": "barTop",
"type": "quantitative"
},
"text": {
"field": "name"
}
}
}
],
"encoding": {
"x": {
"field": "group",
"type": "nominal",
"axis": {
"title": null,
"labelAngle": 0
}
}
}
}
Highlighting in Deneb is quite involved and can be read about here. Having said that, I have a working example.
Code
{
"data": {"name": "dataset"},
"layer": [
{
"mark": {
"type": "bar",
"stroke": "black",
"strokeWidth": 1,
"tooltip": true,
"opacity": 0.3
},
"encoding": {
"y": {
"field": "test",
"type": "quantitative",
"axis": {
"title": "Number of Projects",
"tickMinStep": 1
}
},
"fill": {
"field": "group",
"type": "nominal",
"scale": {
"domain": [
"Low",
"Med",
"High"
],
"range": [
"#e15759",
"#ffff00",
"#59a14f"
]
},
"legend": null
}
}
},
{
"mark": {
"type": "bar",
"stroke": "black",
"strokeWidth": 1,
"tooltip": true,
"opacity": 1
},
"encoding": {
"y": {
"field": "test__highlight",
"type": "quantitative",
"axis": {
"title": "Number of Projects",
"tickMinStep": 1
}
},
"fill": {
"field": "group",
"type": "nominal",
"scale": {
"domain": [
"Low",
"Med",
"High"
],
"range": [
"#e15759",
"#ffff00",
"#59a14f"
]
},
"legend": null
}
}
},
{
"mark": {
"type": "text",
"color": "black",
"dy": 70
},
"encoding": {
"y": {
"field": "test__highlight",
"stack": true,
"type": "quantitative"
},
"text": {
"field": "name"
}
}
}
],
"encoding": {
"x": {
"field": "group",
"type": "nominal",
"axis": {
"title": null,
"labelAngle": 0
}
}
}
}
Things to keep in mind:
You need a measure (the Deneb docs state that highlighting doesn't work without one). The measure named test is simply test = COUNT('Table'[name]).
You can't highlight from a slicer unless it is disconnected, as slicers filter rather than highlight.
You can't highlight stacks in position: the highlighted stacks naturally fall to the bottom as a result of how the data is passed. There may be a way around this, but it would involve further investigation that is probably not worth it.
Edit 1
Highlighting in place.
{
"data": {"name": "dataset"},
"layer": [
{
"mark": {
"type": "bar",
"stroke": "black",
"strokeWidth": 1,
"tooltip": true,
"opacity": 0.3
},
"encoding": {
"y": {
"field": "test",
"type": "quantitative",
"axis": {
"title": "Number of Projects",
"tickMinStep": 1
}
},
"fill": {
"field": "group",
"type": "nominal",
"scale": {
"domain": [
"Low",
"Med",
"High"
],
"range": [
"#e15759",
"#ffff00",
"#59a14f"
]
},
"legend": null
}
}
},
{
"mark": {
"type": "bar",
"stroke": "black",
"strokeWidth": 1,
"tooltip": true
},
"encoding": {
"y": {
"field": "test",
"type": "quantitative",
"axis": {
"title": "Number of Projects",
"tickMinStep": 1
}
},
"opacity": {
"condition": {
"test": "datum['test__highlight']!=null"
,
"value": 1
},
"value": 0
},
"fill": {
"field": "group",
"type": "nominal",
"scale": {
"domain": [
"Low",
"Med",
"High"
],
"range": [
"#e15759",
"#ffff00",
"#59a14f"
]
},
"legend": null
}
}
},
{
"mark": {
"type": "text",
"color": "black",
"dy": 70
},
"encoding": {
"y": {
"field": "test",
"stack": true,
"type": "quantitative"
},
"text": {
"field": "name"
}
}
}
],
"encoding": {
"x": {
"field": "group",
"type": "nominal",
"axis": {
"title": null,
"labelAngle": 0
}
}
}
}

People API returning error when batchCreateContacts is called

I have tried to bulk-add a contact using the Google API try-it editor: https://developers.google.com/people/api/rest/v1/people/batchCreateContacts
{
"contacts": [
{
"contactPerson": {
"addresses": [
{
"formattedValue": "formattedValue",
"type": "type",
"poBox": "poBox",
"streetAddress": "streetAddress",
"extendedAddress": "extendedAddress",
"region": "region",
"postalCode": "postalCode",
"country": "country",
"countryCode": "countryCode"
},
{
"formattedValue": "formattedValue",
"type": "type",
"poBox": "poBox",
"streetAddress": "streetAddress",
"extendedAddress": "extendedAddress",
"city": "city",
"region": "region",
"postalCode": "postalCode",
"country": "country",
"countryCode": "countryCode"
}
],
"biographies": [
{
"value": "biographies-value",
"contentType": "TEXT_PLAIN"
}
],
"birthdays": [
{
"date": {
"year": 1988,
"month": 9,
"day": 22
},
"text": "22/09/1988"
}
],
"calendarUrls": [
{
"url": "https://lh3.googleusercontent.com/ogw/ADea4I4kLm9hsAYNpD_7v-7wXki3joED-eg2ZHcGmp31",
"type": "calendarUrls-type"
}
],
"clientData": [
{
"key": "clientData-key",
"value": "clientData-value"
}
],
"emailAddresses": [
{
"value": "emailAddresses-value",
"type": "emailAddresses-type",
"displayName": "emailAddresses-displayName"
}
],
"events": [
{
"date": {
"year": 1988,
"month": 9,
"day": 22
},
"type": "events-type"
},
{
"date": {
"year": 1988,
"month": 9,
"day": 22
},
"type": "events-type"
},
{
"date": {
"year": 2019,
"month": 12,
"day": 7
},
"type": "marriage"
}
],
"externalIds": [
{
"value": "externalIds-value",
"type": "externalIds-type"
}
],
"fileAses": [
{
"value": "fileAses-value"
}
],
"genders": [
{
"value": "male",
"addressMeAs": "her"
}
],
"imClients": [
{
"username": "imClients-username1",
"type": "imClients-typeA",
"protocol": "imClients-protocol1"
},
{
"username": "imClients-username2",
"type": "imClients-typeA",
"protocol": "imClients-protocol2"
},
{
"username": "imClients-username3",
"type": "imClients-typeB",
"protocol": "imClients-protocol3"
}
],
"interests": [
{
"value": "interests-value"
}
],
"locales": [
{
"value": "locales-value"
}
],
"locations": [
{
"value": "locations-value1",
"type": "desk",
"current": true,
"buildingId": "locations-buildingId",
"floor": "locations-floor",
"floorSection": "buildingId-floorSection",
"deskCode": "locations-deskCode"
},
{
"value": "locations-value2",
"type": "desk",
"current": true,
"buildingId": "locations-buildingId",
"floor": "locations-floor",
"floorSection": "buildingId-floorSection",
"deskCode": "locations-deskCode"
}
],
"memberships": [
{
"contactGroupMembership": {
"contactGroupResourceName": "contactGroups/3616ed318c1125e3"
}
}
],
"miscKeywords": [
{
"value": "SENSITIVITY1",
"type": "OUTLOOK_SENSITIVITY"
},
{
"value": "SENSITIVITY2",
"type": "OUTLOOK_SENSITIVITY"
},
{
"value": "OUTLOOK_SUBJECT",
"type": "OUTLOOK_SUBJECT"
},
{
"value": "OUTLOOK_BILLING_INFORMATION",
"type": "OUTLOOK_BILLING_INFORMATION"
},
{
"value": "OUTLOOK_DIRECTORY_SERVER",
"type": "OUTLOOK_DIRECTORY_SERVER"
},
{
"value": "OUTLOOK_KEYWORD",
"type": "OUTLOOK_KEYWORD"
},
{
"value": "OUTLOOK_MILEAGE",
"type": "OUTLOOK_MILEAGE"
},
{
"value": "OUTLOOK_PRIORITY",
"type": "OUTLOOK_PRIORITY"
},
{
"value": "OUTLOOK_SUBJECT",
"type": "OUTLOOK_SUBJECT"
},
{
"value": "OUTLOOK_USER1-value",
"type": "OUTLOOK_USER"
},
{
"value": "OUTLOOK_USER2-value",
"type": "OUTLOOK_USER"
},
{
"value": "HOME",
"type": "HOME"
},
{
"value": "WORK",
"type": "WORK"
},
{
"value": "OTHER",
"type": "OTHER"
}
],
"names": [
{
"unstructuredName": "unstructuredName",
"familyName": "MrTest",
"givenName": "givenName",
"middleName": "middleName",
"honorificPrefix": "honorificPrefix",
"honorificSuffix": "honorificSuffix",
"phoneticFullName": "phoneticFullName",
"phoneticFamilyName": "phoneticFamilyName",
"phoneticGivenName": "phoneticGivenName",
"phoneticMiddleName": "phoneticMiddleName",
"phoneticHonorificPrefix": "phoneticHonorificPrefix",
"phoneticHonorificSuffix": "phoneticHonorificSuffix"
}
],
"nicknames": [
{
"value": "nicknames-value-alternate-name",
"type": "ALTERNATE_NAME"
},
{
"value": "nicknames-value-default",
"type": "DEFAULT"
}
],
"occupations": [
{
"value": "occupations-value"
}
],
"organizations": [
{
"type": "organizations-type",
"startDate": {
"year": 1988,
"month": 9,
"day": 22
},
"endDate": {
"year": 1988,
"month": 9,
"day": 22
},
"current": true,
"name": "organizations-name",
"phoneticName": "organizations-phoneticName",
"department": "organizations-department",
"title": "organizations-title",
"jobDescription": "organizations-jobDescription",
"symbol": "organizations-symbol",
"domain": "organizations-domain",
"location": "organizations-location"
}
],
"phoneNumbers": [
{
"value": "phoneNumbers-value",
"type": "phoneNumbers-type"
}
],
"relations": [
{
"person": "relations-person",
"type": "relations-type"
}
],
"sipAddresses": [
{
"value": "sipAddresses-value",
"type": "sipAddresses-type"
}
],
"urls": [
{
"value": "https://lh3.googleusercontent.com/ogw/ADea4I4kLm9hsAYNpD_7v-7wXki3joED-eg2ZHcGmp31",
"type": "urls-type"
}
],
"userDefined": [
{
"key": "userDefined-key",
"value": "userDefined-value"
}
]
}
}
],
"readMask": "emailAddresses,phoneNumbers,addresses,birthdays,biographies,calendarUrls,clientData,coverPhotos,events,externalIds,genders,imClients,interests,locales,locations,memberships,miscKeywords,names,nicknames,occupations,organizations,phoneNumbers,photos,relations,sipAddresses,skills,urls,userDefined"
}
I get the following error:
{
"error": {
"code": 500,
"message": "Internal error encountered.",
"status": "INTERNAL"
}
}
Am I missing something?
It looks like the 500 Internal Error you are receiving is caused by the calendarUrls field in the request.
This might in fact be a bug, so I have taken the opportunity to file a report on Google's Issue Tracker here.
I suggest you star the issue, as all updates will be posted there.
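For comparison, a hedged sketch of a trimmed request body, reusing fields from the question with calendarUrls omitted (which, per the above, should avoid the error):
{
  "contacts": [
    {
      "contactPerson": {
        "names": [
          {"givenName": "givenName", "familyName": "MrTest"}
        ],
        "emailAddresses": [
          {"value": "emailAddresses-value", "type": "emailAddresses-type"}
        ],
        "phoneNumbers": [
          {"value": "phoneNumbers-value", "type": "phoneNumbers-type"}
        ]
      }
    }
  ],
  "readMask": "names,emailAddresses,phoneNumbers"
}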