Apollo Server - how to create and add an object in a resolver when the type isn't available in the namespace

In the following example, I am attempting to create a post and add it to the dictionary of posts. How is the Mutation resolver expected to create the item, add it to the hash, and return it when the item's type isn't available in the resolver's namespace?
mutation createPost {
  createPost(input: { name: "Post Name" }) {
    name
  }
}
index.js:
const { ApolloServer, gql } = require('apollo-server');

const dictionary = {};

const typeDefs = gql`
  input PostSpecInput {
    name: String
  }

  type PostSpec {
    id: ID!
    name: String
  }

  type Mutation {
    createPost(input: PostSpecInput): PostSpec
  }

  type Query {
    post_specs: [PostSpec]
  }
`;

const resolvers = {
  Query: {
    post_specs: () => Object.keys(dictionary).map(function(key) {
      return dictionary[key];
    })
  },
  Mutation: {
    createPost(parent, args, context, info) {
      var id = require('crypto').randomBytes(10).toString('hex');
      const postSpec = new PostSpec(id, args.input);
      posts_mock_database[id] = args.input;
      return postSpec;
    }
  }
};

const server = new ApolloServer({ typeDefs, resolvers });

server.listen().then(({ url }) => {
  console.log(`Server Ready at ${url}`);
});
Error:
{
  "errors": [
    {
      "message": "PostSpec is not defined",
      "locations": [
        {
          "line": 2,
          "column": 3
        }
      ],
      "path": [
        "createPost"
      ],
      "extensions": {
        "code": "INTERNAL_SERVER_ERROR",
        "exception": {
          "stacktrace": [
            "ReferenceError: PostSpec is not defined",
            "    at createPost (index.js:38:34)",

Type definitions are not classes or object instances; they only describe and enforce the shape of your schema. Even if PostSpec were in the resolver's namespace, calling new on it would not work. A resolver just needs to return a plain object whose fields match the type. Here is the solution for your mock database:
Mutation: {
  createPost(parent, args, context, info) {
    const id = require('crypto').randomBytes(10).toString('hex');
    // A plain object whose shape matches the PostSpec type definition
    const newPostSpec = { id: id, name: args.input.name };
    // posts_mock_database here is the `dictionary` object declared in index.js
    posts_mock_database[id] = newPostSpec;
    return newPostSpec;
  }
}
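With that change the mutation at the top of the question should succeed. Since its selection set only asks for name, the expected response is simply (the generated id is random and just isn't selected here):
{
  "data": {
    "createPost": {
      "name": "Post Name"
    }
  }
}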

Related

Ember Data Serializer & Adapter with Supabase returns empty (Proxy { })

I'm struggling to create my customised adapter & serializer to integrate Supabase, and I'm stuck on why no data shows up in Ember Data.
I'm trying it out with a simple findAll() method. See below:
Service ⬇️:
export default class SupabaseService extends Service {
  client;

  constructor() {
    super(...arguments);
    const { url, key } = ENV.supabase;
    const supabase = createClient(url, key);
    this.client = supabase;
  }
}
Model ⬇️:
export default class CourseModel extends Model {
  @attr('string') name;
  @attr('date') date_added;
}
Adapter ⬇️:
export default class ApplicationAdapter extends RESTAdapter {
  @service supabase;

  async findAll(store, type, neverSet, snapshotRecordArray) {
    return new Promise(async (resolve, reject) => {
      try {
        const { data, error, status } = await this.supabase.client
          .from(pluralize(type.modelName))
          .select('*');
        if (error) {
          reject(error);
        } else {
          resolve(data);
        }
      } catch (error) {
        reject(error);
      }
    });
  }
}
Serializer ⬇️:
normalizeResponse(store, primaryModelClass, payload, id, requestType) {
  // Parse the response data from the server and return it in the format that Ember Data expects
  let newPayload = {
    data: payload.map(item => {
      let attributes = JSON.parse(JSON.stringify(item));
      delete attributes.id;
      return {
        id: item.id,
        type: primaryModelClass.modelName,
        attributes: attributes
      };
    })
  };
  return super.normalizeResponse(store, primaryModelClass, newPayload, id, requestType);
}
✅ The service works fine. The adapter manages to get data and returns the following:
[
  {
    "id": "259f46fd-3321-4cc9-ad5e-6d6ec880f7f1",
    "date_added": "2022-12-31T00:03:14.618585+00:00",
    "name": "Science"
  },
  {
    "id": "62a6a085-604b-4600-8cc4-59a8c9af284a",
    "date_added": "2022-12-31T00:03:30.010963+00:00",
    "name": "Physics"
  }
]
The serializer's newPayload, built to follow the JSON:API schema, comes out as:
{
  "data": [
    {
      "id": "259f46fd-3321-4cc9-ad5e-6d6ec880f7f1",
      "type": "course",
      "attributes": {
        "name": "Science",
        "date_added": "2022-12-31T00:03:14.618585+00:00"
      }
    },
    {
      "id": "62a6a085-604b-4600-8cc4-59a8c9af284a",
      "type": "course",
      "attributes": {
        "name": "Physics",
        "date_added": "2022-12-31T00:03:30.010963+00:00"
      }
    }
  ]
}
But the problem is that no data ends up in the store. Logging the model in the template shows an empty Proxy {}, and Ember Inspector shows no models under Data. I have no idea why.
Any suggestions?
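One way to narrow this down is to log what the store actually resolves for the model; a minimal debugging sketch, where the route name and logging are just illustrative assumptions:
// app/routes/courses.js - hypothetical route, for debugging only
import Route from '@ember/routing/route';
import { inject as service } from '@ember/service';

export default class CoursesRoute extends Route {
  @service store;

  async model() {
    const courses = await this.store.findAll('course');
    // If normalizeResponse pushed the records, these should not be empty
    console.log('course count:', courses.length);
    console.log('names:', courses.map((c) => c.name));
    return courses;
  }
}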

BatchWriteItemCommand with AWS.DynamoDB class using AWS SDK V3 in Nodejs

I have been trying for hours to perform a DynamoDB DeleteRequest using BatchWriteItemCommand but I keep getting the following error:
Error ValidationException: 1 validation error detected: Value null at 'requestItems.td_notes_sdk.member.1.member.deleteRequest.key' failed to satisfy constraint: Member must not be null
This is what my table looks like:
Partition key: user_id (string)
Sort key: timestamp (number)
DynamoDB Screenshot
This is what my code looks like:
// Import required AWS SDK clients and commands for Node.js
import {
  DynamoDBClient,
  BatchWriteItemCommand,
} from "@aws-sdk/client-dynamodb";

// Set the parameters
export const params = {
  RequestItems: {
    "td_notes_sdk": [
      {
        DeleteRequest: {
          Item: {
            Key: {
              user_id: { S: "bb" },
              timestamp: { N: 2 },
            },
          },
        },
      },
    ],
  },
};
export const run = async () => {
  const ddbClient = new DynamoDBClient({ region: "us-east-2" });
  try {
    const data = await ddbClient.send(new BatchWriteItemCommand(params));
    console.log("Success, items inserted", data);
    return data;
  } catch (err) {
    console.log("Error", err);
  }
};
run();
Here are some resources that I've been trying to follow along with:
Resource 1: Writing items in Batch Example
Resource 2: AWS Javascript SDK v3 Documentation
Update: BatchWrite PutRequest works with the code below, so I know the structure of my keys/attributes is close to correct. It still does not work for DeleteRequest.
export const params = {
  RequestItems: {
    "td_notes_sdk": [
      {
        PutRequest: {
          Item: {
            user_id: { "S": "bb" },
            timestamp: { "N": "5" },
          },
        },
      },
    ],
  },
};
You don't supply an Item when deleting an item. You supply a Key.
Here is a working example:
const params_delete = {
  RequestItems: {
    "td_notes_sdk": [
      {
        DeleteRequest: {
          Key: {
            user_id: { S: "bb" },
            // Note: number (N) attribute values are passed as strings in the low-level format
            timestamp: { N: "2" },
          },
        },
      },
    ],
  },
};

const delete_batch = async () => {
  const ddbClient = new DynamoDBClient({ region: "us-east-2" });
  try {
    const data = await ddbClient.send(new BatchWriteItemCommand(params_delete));
    console.log("Success, item deleted");
    return data;
  } catch (err) {
    console.log("Error", err);
  }
};
delete_batch();
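BatchWriteItem also accepts a mix of put and delete operations against the same table, so the two request shapes above can be combined in one call. A small sketch reusing the client and table from the question; the extra note attribute and the values are purely illustrative:
// Hypothetical combined batch: one put and one delete on td_notes_sdk
const params_mixed = {
  RequestItems: {
    "td_notes_sdk": [
      {
        PutRequest: {
          Item: {
            user_id: { S: "bb" },
            timestamp: { N: "7" },
            note: { S: "hello" }, // assumed attribute, for illustration
          },
        },
      },
      {
        DeleteRequest: {
          Key: {
            user_id: { S: "bb" },
            timestamp: { N: "2" },
          },
        },
      },
    ],
  },
};

// await ddbClient.send(new BatchWriteItemCommand(params_mixed));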

UpdateExpression: Add other attribute's value to list

Given the following DynamoDB document:
{
  "myobject" : { "foo" : "bar" },
  "mylist" : [ { "some" : "stuff" } ]
}
My goal is to update this document to get the following result:
{
  "myobject" : { "foo" : "bar" },
  "mylist" : [ { "some" : "stuff" }, { "foo" : "bar" } ]
}
My request's params look like this:
let params = {
  TableName: doctorSlotsTable,
  Key: {
    hashKey: hash,
    rangeKey: range
  },
  UpdateExpression: 'SET mylist = list_append(if_not_exists(mylist, :empty_list), [myobject])',
  ExpressionAttributeValues: {
    ':empty_list': []
  },
  ReturnValues: "UPDATED_NEW"
};
This obviously does not work, because the [ in the list_append expression triggers a syntax error.
Is there any way to achieve this without having to fetch the data in a separate request and append it to the list manually?
Unfortunately you cannot use an attribute name as an operand to list_append(...) unless that attribute is itself a list. The best you can do, I believe, is to store myobject in the proper (list) shape up front, and then the update works as expected.
Since storage is cheap and network/compute are comparatively expensive here, you could even duplicate the data so that one copy is already in the right form.
Here's a full example, where createTable() and deleteTable() do exactly what you think:
// Assumed setup for this sketch: util for logging, the v2 DocumentClient, and an arbitrary table name
const util = require('util');
const AWS = require('aws-sdk');
const { DocumentClient } = AWS.DynamoDB;
const TableName = 'ListAppendExample'; // any table with a string partition key named PK

const PK = 'the item';

async function createObjAndList() {
  const docClient = new DocumentClient();
  const myObject = { foo: "bar" };
  const theItem = {
    PK,
    myObject,
    myObjectAsList: [ myObject ],
    myList: [ { some: "stuff" } ],
  };
  const putParams = {
    TableName,
    Item: theItem
  };
  await docClient.put(putParams).promise();
  console.log(`Put item ${util.inspect(theItem)}`);
}
async function updateListWithObject() {
  const docClient = new DocumentClient();
  const updateParams = {
    TableName,
    Key: { PK },
    UpdateExpression: `SET #myList = list_append(if_not_exists(#myList, :emptyList), #myObjectAsList)`,
    ExpressionAttributeNames: {
      '#myList': 'myList',
      '#myObjectAsList': 'myObjectAsList',
    },
    ExpressionAttributeValues: {
      ':emptyList': [],
    }
  };
  await docClient.update(updateParams).promise();
  console.log(`Updated list to include object`);
}
async function getObjAndList() {
  const docClient = new DocumentClient();
  const results = await docClient.get({ TableName, Key: { PK } }).promise();
  console.log(`Item is now: ${util.inspect(results.Item)}`);
}

if (module === require.main) {
  (async () => {
    try {
      await createTable();
      await createObjAndList();
      await updateListWithObject();
      await getObjAndList();
    } catch (err) {
      console.log(`Error: ${err.message}`);
    } finally {
      await deleteTable();
    }
  })();
}
The output from this is:
Put item {
  PK: 'the item',
  myObject: { foo: 'bar' },
  myObjectAsList: [ { foo: 'bar' } ],
  myList: [ { some: 'stuff' } ]
}
Updated list to include object
Item is now: {
  myList: [ { some: 'stuff' }, { foo: 'bar' } ],
  myObject: { foo: 'bar' },
  PK: 'the item',
  myObjectAsList: [ { foo: 'bar' } ]
}

AWS EventBridge putEvents does not accept Detail JSON array

I am using AWS SDK v2.796.0
As per the documentation of putEvents, the Detail value needs to be a valid JSON string.
https://docs.aws.amazon.com/eventbridge/latest/APIReference/API_PutEventsRequestEntry.html
However, it does not accept a JSON array serialized as a string.
const eventBridge = new AWS.EventBridge();

const entries = {
  Entries: [
    {
      EventBusName: "busName",
      Source: "api.user",
      DetailType: "detailType",
      Detail: JSON.stringify({ test: { test: ["test", "test2"] } }),
    },
  ],
};

const rs = await eventBridge.putEvents(entries).promise();
console.log(rs);
// this passes
// {
//   FailedEntryCount: 0,
//   Entries: [ { EventId: 'a6176012-7310-2b84-a9b5-819956e2e3f9' } ]
// }

const entries2 = {
  Entries: [
    {
      EventBusName: "busName",
      Source: "api.user",
      DetailType: "detailType",
      Detail: JSON.stringify([{ test: "test" }]),
    },
  ],
};

const rs2 = await eventBridge.putEvents(entries2).promise();
console.log(rs2);
// this fails
// {
//   FailedEntryCount: 1,
//   Entries: [
//     {
//       ErrorCode: 'MalformedDetail',
//       ErrorMessage: 'Detail is malformed.'
//     }
//   ]
// }
Is this expected? Is there a way to use an array in Detail?
This happens because you are using a list (array) as the top-level JSON value in entries2:
Detail: JSON.stringify([{ test: "test" }]),
If you use an object at the top level instead, it will work:
Detail: JSON.stringify({ test: "test" }),
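If you do need to send an array, one option is to nest it under a key so that the top-level Detail value is still a JSON object. A sketch using the same client; the "items" key name is arbitrary:
const entries3 = {
  Entries: [
    {
      EventBusName: "busName",
      Source: "api.user",
      DetailType: "detailType",
      // Top level is an object; the array lives under an arbitrary key
      Detail: JSON.stringify({ items: [{ test: "test" }] }),
    },
  ],
};

// const rs3 = await eventBridge.putEvents(entries3).promise();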

DynamoDB retrieve only attribute values

I have a table that has userId as the PK and a single attribute called userToken.
I have written a batchGet() function to return all the userTokens for specific userIds, however it returns them like this:
[ { userToken: '1234' },
{ userToken: '5678' } ]
I'd like it to just return the values since I already know what the attribute name will be:
['1234', '5678']
How would I go about doing so?
const params = {
  RequestItems: {
    UserTokens: {
      Keys: userIds,
      AttributesToGet: [
        'userToken'
      ]
    }
  }
};

db.batchGet(params, function(err, data) {
  if (err) {
    console.log("Error", err);
  } else {
    console.log(data.Responses);
    sendNotifications(data.Responses);
  }
});
DynamoDB always returns the attribute name along with its value.
You can easily flatten this on the client side:
const val = [ { userToken: '1234' }, { userToken: '5678' } ];

const reducer = (accumulator, currentVal) => {
  accumulator.push(currentVal.userToken);
  return accumulator;
};

console.log(val.reduce(reducer, [])); // [ '1234', '5678' ]
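The same flattening is usually written more concisely with map. Applied to the batchGet response shape from the question (data.Responses is keyed by table name, here UserTokens):
const tokens = data.Responses.UserTokens.map((item) => item.userToken);
sendNotifications(tokens); // e.g. [ '1234', '5678' ]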