Validation error of type UnknownType: Unknown type CreateUserInput - node.js

Using the AppSync query playground, I run this mutation:
mutation UserMutation ($input: CreateUserInput!) {
  createUser (input: $input) {
    uniqueID
    recordType
    userName
    userEmail
    userEmailVerified
    userCreated
    userLastModified
    userEnabled
    userStatus
    userValidFrom
    userValidTo
  }
}
The query variables are:
{
  "input": {
    "uniqueID": "UUID-formatted",
    "recordType": "USER",
    "userName": "username",
    "userEmail": "mailaddress",
    "userEmailVerified": true,
    "userCreated": "2020-12-04T22:32:37.000Z",
    "userLastModified": "2020-12-04T22:34:15.000Z",
    "userEnabled": true,
    "userStatus": "CONFIRMED",
    "userValidFrom": "2021-03-12T11:03:37.283Z",
    "userValidTo": "9999-12-31T23:59:59.999Z"
  }
}
This creates a record in the DynamoDB table as expected, which suggests to me that the model and the resolvers are well defined in AppSync. Running the exact same query in a Node.js Express environment produces the error above. The API key and GraphQL endpoint are correct, and exactly the same method works for another entity.
This is the Node.js code:
exports.userCreate = (cognitosub, userName, cognitoemail, cognitoemail_verified, cognitoUserCreateDate, cognitoUserLastModifiedDate, cognitoEnabled, cognitoUserStatus, userValidFrom, userValidTo) => {
  const mutateQuery = gql(`
    mutation UserMutation ($input: CreateUserInput!) {
      createUser (input: $input) {
        uniqueID
        recordType
        userName
        userEmail
        userEmailVerified
        userCreated
        userLastModified
        userEnabled
        userStatus
        userValidFrom
        userValidTo
      }
    }
  `);
  const mutateVariables = JSON.parse(`{
    "uniqueID" : "${cognitosub}",
    "recordType" : "USER",
    "userName" : "${userName}",
    "userEmail" : "${cognitoemail}",
    "userEmailVerified" : ${cognitoemail_verified},
    "userCreated" : "${cognitoUserCreateDate}",
    "userLastModified" : "${cognitoUserLastModifiedDate}",
    "userEnabled" : ${cognitoEnabled},
    "userStatus" : "${cognitoUserStatus}",
    "userValidFrom" : "${utils.dateConvertToISOString(userValidFrom)}",
    "userValidTo" : "${utils.dateConvertToISOString(userValidTo)}"
  }`)
  return new Promise((resolve, reject) => {
    console.debug(`${Date(Date.now())} - utilities.userCreate.mutateVariables`, mutateVariables)
    graphqlClient.mutate({
      mutation: mutateQuery,
      fetchPolicy: 'no-cache', // mutate only supports 'no-cache'
      variables: {input: mutateVariables}
    })
    .then((success) => {
      // console.debug(`${Date(Date.now())} - utilities.userCreate.then`, success)
      if (success === null) {
        reject('userIsNull')
      } else {
        resolve(success.data.createUser.uniqueID)
      }
    })
    .catch((err) => {
      console.debug(`${Date(Date.now())} - utilities.userCreate.catch\n`, err)
      reject(err)
    })
  })
}
The exact same code is used for a less complicated object with a UUID, an identification, and validFrom/validTo dates, and it works like a charm.
I have checked for every error and spelling mistake, but the code keeps throwing these two errors:
graphQLErrors: [
  {
    path: null,
    locations: [ { line: 1, column: 31, sourceName: null } ],
    message: 'Validation error of type UnknownType: Unknown type CreateUserInput'
  },
  {
    path: null,
    locations: [ { line: 2, column: 3, sourceName: null } ],
    message: "Validation error of type FieldUndefined: Field 'createUser' in type 'Mutation' is undefined # 'createUser'"
  }
]
This is a dump of the error object. The Apollo client is used to access DynamoDB through AppSync. The records created in the AppSync GraphQL playground are perfectly visible in the DB.
I am out of clues here. Can anyone help?

Today I really looked into this problem and decided not to use the AWS AppSync client anymore (which, by the way, caused some dependency build problems with every npm update).
I chose the latest version of the Apollo client instead, which doesn't give any npm update issues and, if I am well informed (i.e. can read the forums properly ;-)), is an up-to-date version of the client that AWS uses in the background.
I had some issues with the authentication on AppSync but managed to get past them.
This code fixes ALL the previous errors (for me):
const gql = require("graphql-tag");
const ApolloClient = require("@apollo/client").ApolloClient;
const ApolloLink = require("@apollo/client").ApolloLink;
const InMemoryCache = require("@apollo/client").InMemoryCache;
const createHttpLink = require("@apollo/client").createHttpLink;
const {createAuthLink} = require("aws-appsync-auth-link");
require('cross-fetch/polyfill');
const clientGraphQL = new ApolloClient({
  link: ApolloLink.from([
    // The auth link must come before the terminating HTTP link
    createAuthLink({
      url: aws_secrets.AWS_DEV_APPSYNC_ENDPOINT,
      region: aws_secrets.REGION,
      auth: {
        type: "API_KEY",
        apiKey: aws_secrets.AWS_DEV_APPSYNC_API_KEY,
      }
    }),
    createHttpLink({
      uri: aws_secrets.AWS_DEV_APPSYNC_ENDPOINT
    }),
  ]),
  cache: new InMemoryCache(),
});
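With this client in place, the earlier mutation can be run like this (a minimal sketch: the createUser mutation and CreateUserInput fields are taken from the question above, and the sample values are placeholders):
const mutateQuery = gql(`
  mutation UserMutation ($input: CreateUserInput!) {
    createUser (input: $input) {
      uniqueID
      userStatus
    }
  }
`);

// Build the variables as a plain object -- no JSON.parse round-trip needed
clientGraphQL.mutate({
  mutation: mutateQuery,
  fetchPolicy: 'no-cache', // mutate only supports 'no-cache'
  variables: {
    input: {
      uniqueID: 'some-uuid', // placeholder values throughout
      recordType: 'USER',
      userName: 'username',
      userEmail: 'user@example.com',
      userEmailVerified: true,
      userCreated: new Date().toISOString(),
      userLastModified: new Date().toISOString(),
      userEnabled: true,
      userStatus: 'CONFIRMED',
      userValidFrom: new Date().toISOString(),
      userValidTo: '9999-12-31T23:59:59.999Z'
    }
  }
})
.then((result) => console.log(result.data.createUser.uniqueID))
.catch((err) => console.error(err));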
Keep the code safe by hiding all the secrets in a separate file. A minimal sketch of such a module (the environment-variable names and the use of dotenv are assumptions of mine):
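// aws_secrets.js -- hypothetical module; the property names match the
// client config above, but reading them from environment variables
// (e.g. via dotenv) is an assumption
require('dotenv').config();

module.exports = {
  REGION: process.env.AWS_REGION,
  AWS_DEV_APPSYNC_ENDPOINT: process.env.AWS_DEV_APPSYNC_ENDPOINT,
  AWS_DEV_APPSYNC_API_KEY: process.env.AWS_DEV_APPSYNC_API_KEY,
};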
createAuthLink is the only thing I still need from the AWS AppSync packages (IMHO).
I didn't manage to get a proper connection using the Apollo client's own links alone.
Hope this helps some of you...

Related

How to pass variables to the metafieldsSet mutation in the Shopify API Node.js GraphQL client?

I was trying to use the metafieldsSet mutation to update metafields in Shopify with the following code:
const client = new Shopify.Clients.Graphql(
  process.env.SHOP,
  process.env.PASSWORD
)
try {
  const metafields = await client.query({
    data: `mutation metafieldsSet($metafields: [MetafieldsSetInput!]!) {
      metafieldsSet(metafields: $metafields) {
        userErrors {
          field
          message
        }
        metafields {
          key
          value
        }
      }
    }
    `,
    query: {
      metafields: [
        {
          key: 'cb_inventory',
          namespace: 'my_fields',
          ownerId: 'gid://shopify/ProductVariant/40576138313890',
          type: 'number_integer',
          value: '25',
        },
      ],
    },
  })
  console.log(metafields)
  res.status(200).json({ values: metafields })
} catch (error) {
  console.log(error)
  res.status(500).json(error)
}
However, the above mutation returns the following error:
Expected value to not be null
Variable $metafields of type [MetafieldsSetInput!]! was provided invalid value
I assume the metafields variable failed to pass into the mutation, because when I run the exact same mutation in the Shopify Admin API GraphiQL explorer there is no error.
[Screenshot: Shopify Admin API GraphiQL explorer mutation result]
I have also looked into the GitHub repo of @shopify/shopify-api. In my understanding, variables are added to the query object.
What am I missing?
Thanks,
Howard
Environment: Next.js 11.1.2
Dependencies: @shopify/shopify-api 1.4.1
Turns out the syntax is incorrect. The variables should be placed inside the variables object, while the mutation statement should be placed inside the query object.
The following code works now:
const metafields = await client.query({
  data: {
    query: `mutation metafieldsSet($metafields: [MetafieldsSetInput!]!) {
      metafieldsSet(metafields: $metafields) {
        userErrors {
          field
          message
        }
        metafields {
          key
          value
        }
      }
    }`,
    variables: {
      metafields: [
        {
          key: 'cb_inventory',
          namespace: 'my_fields',
          ownerId: 'gid://shopify/ProductVariant/40576138313890',
          type: 'number_integer',
          value: '25',
        },
      ],
    },
  },
})
ref: https://github.com/Shopify/shopify-node-api/blob/main/src/clients/graphql/test/graphql_client.test.ts
To use the intended API version, you need to first initialise the context with the following code:
Shopify.Context.initialize({
  API_KEY,
  API_SECRET_KEY,
  SCOPES: ['read_products', 'write_products'],
  HOST_NAME: HOST,
  API_VERSION: '2021-10',
})

GitHub GraphQL API: non-existent repositories showing up in response

I have a little web app called RepoSweeper (https://reposweeper.com/) which I built after my coding bootcamp. Its purpose is to bulk delete unused repos from GitHub.
A while back GitHub changed the way you access repos via the API, and private repos stopped showing up in the results. So I had a freelancer modify the code to use GraphQL to make the request.
Now all the repos show up (hooray!), but there are a huge number of repos that don't exist in my repositories on GitHub. My first thought was that they are archived, but the isArchived flag is false in the response data.
This is the GraphQL query:
export const REPO_INFO_QUERY = gql`
  query ($ghLogin: String!, $after: String) {
    user(login: $ghLogin) {
      id
      name
      login
      avatarUrl
      bioHTML
      repositories(first: 100, after: $after, ownerAffiliations: [OWNER, ORGANIZATION_MEMBER, COLLABORATOR]) {
        totalCount
        pageInfo {
          endCursor
          hasNextPage
          __typename
        }
        nodes {
          id
          viewerCanAdminister
          name
          description
          isFork
          isPrivate
          isArchived
          updatedAt
          createdAt
          url
          parent {
            nameWithOwner
            url
            __typename
          }
          owner {
            login
            url
            __typename
          }
          __typename
        }
        __typename
      }
      __typename
    }
  }
`;
This is the request:
const endpoint = `https://api.github.com/graphql`
const graphQLClient = new GraphQLClient(endpoint, {
  headers: {
    authorization: `Bearer ${token}`,
  },
})
const variables = {ghLogin: user, archived: true, isArchived: true}
return graphQLClient.request(REPO_INFO_QUERY, variables).then(data => {
  debugger
  return data
})
.catch(err => {
  Popup.alert('Error: ' + err)
  return false
})
And this is what the non-existent repo data looks like:
[Screenshot of the response data not included]
Love to hear what you guys think. Thank you!

Rolling back resolve function from GraphQLObjectType to another

I'm currently studying GraphQL and, as part of the development process, I'm interested in modularizing my code. I understand how to write a query, but fail to understand how to correctly implement a query of queries.
This is rootQuery.js:
const {
  GraphQLInt,
  GraphQLList,
  GraphQLObjectType,
  GraphQLSchema,
  GraphQLFloat,
  GraphQLString
} = require("graphql");
const bankRootQuery = require('../graphql/queries/bank.queries')

const rootQuery = new GraphQLObjectType({
  name: "rootQuery",
  fields: {
    bankRootQuery: { type: bankRootQuery, resolve: () => { console.log(bankRootQuery.resolve) } }
  }
});

module.exports = new GraphQLSchema({
  query: rootQuery
});
And here is the bankRootQuery.js:
const { GraphQLObjectType, GraphQLInt, GraphQLNonNull, GraphQLID, GraphQLList } = require("graphql");
const BankType = require('../types/bank.type');
const models = require('../../models/models_handler');

module.exports = new GraphQLObjectType({
  name: "bankRootQuery",
  fields: {
    getbanks: {
      type: new GraphQLList(BankType),
      resolve: () => {
        return models.getBanks()
      }
    },
    getbankByID: {
      type: BankType,
      args: {
        bankID: { name: "bankID", type: GraphQLInt }
      },
      resolve: (_, args) => {
        if (!models.getBanks().has(args.bankID))
          throw new Error(`Bank with ID ${args.bankID} doesn't exist`);
        return models.getBank(args.bankID);
      }
    }
  }
});
Assigning bankRootQuery to the schema object instead of rootQuery works perfectly fine, but using rootQuery yields a null result when querying with GraphiQL. The Documentation Explorer structure looks right, so I'm guessing the problem is with the resolve function, which I don't understand how to define correctly.
Here is the result of the query:
{
  "data": {
    "bankRootQuery": null
  }
}
If a field resolves to null, then execution for that "branch" of the graph ends. Even if the field's type is an object type, none of the resolvers for its "children" fields will be called. Imagine if you had a field like user -- if the field resolves to null, then it makes no sense to try to resolve the user's name or email.
Your resolver for the bankRootQuery field just logs to the console. Because it doesn't have a return statement, its return value is undefined. A value of undefined is coerced into a null. Since the field resolved to null, execution halts.
If you want to return something other than null, then your resolver needs to return something -- even if it's just an empty object ({}). Then the resolvers for any "child" fields will work as expected.
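Applied to the schema in the question, a minimal fix is to return an empty object from the bankRootQuery field's resolver:
const rootQuery = new GraphQLObjectType({
  name: "rootQuery",
  fields: {
    bankRootQuery: {
      type: bankRootQuery,
      // Returning {} instead of undefined lets execution continue into
      // the getbanks/getbankByID resolvers
      resolve: () => ({})
    }
  }
});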
In general, I would advise against nesting your queries like this -- just keep them at the root level, as sketched below. For additional details around how field resolution works, check out this post.
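For example, a sketch of the flattened alternative (it reuses BankType and models from the question's modules and assumes bank.queries is changed to export a plain fields object):
// bank.queries.js -- export plain field configs instead of a GraphQLObjectType
module.exports = {
  getbanks: {
    type: new GraphQLList(BankType),
    resolve: () => models.getBanks()
  },
  getbankByID: {
    type: BankType,
    args: { bankID: { type: GraphQLInt } },
    resolve: (_, args) => models.getBank(args.bankID)
  }
};

// rootQuery.js -- spread those fields directly into the root query
const bankFields = require('../graphql/queries/bank.queries');

const rootQuery = new GraphQLObjectType({
  name: "rootQuery",
  fields: { ...bankFields }
});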

How to get around the "Missing field" warning when executing a mutation with an optimisticResponse in the apollo-client?

I'm using aws-appsync with the apollo-client, and when I try to execute a mutation without providing all fields I get a warning like "Missing field x in {...}". Do I really need to provide all (including optional) fields? How can I handle this gracefully?
I wonder if this is the expected behaviour or whether I'm missing something obvious. I don't want to maintain the added complexity of having to pass all optional fields, and of having those fields stored in the database as null values.
I figured that since they are just warnings I'll just ignore them, but I found that while the updates would be executed in the database, the InMemoryCache would not always update. It would sometimes show the update and other times not.
import {compose, graphql} from "react-apollo";
import gql from "graphql-tag";
import React from "react";

export const EditCard = (props) => {
  const handleSave = () => {
    props.update({
      givenName: 'someGivenName',
      //middleName omitted on purpose
      familyName: 'someFamilyName',
    });
  };
  return (
    <>...more stuff here...</>
  );
};

export const card = gql`
  fragment card on Identity {
    givenName
    middleName
    familyName
  }
`;

export const CardsGraphQL = gql`
  query GetCards {
    cards: listIdentitys(filter: {type: {eq: "CARD"}}) {
      items {
        ...card
      }
    }
  }
  ${card}
`;

export const UpdateCardGraphQL = gql`
  mutation UpdateCard($input: UpdateIdentityInput!) {
    updateObject: updateIdentity(input: $input) {
      ...card
    }
  }
  ${card}
`;

export const selectConfig = () => {
  return {
    options: {
      fetchPolicy: 'cache-and-network',
    },
    props: (props) => {
      return {
        cards: props.data.cards ? props.data.cards.items : [],
      };
    },
  };
};

export const updateConfig = (query) => {
  return {
    options: {
      update: (cache, {data: {updateObject}}) => {
        // Read query from cache
        const data = cache.readQuery({query});
        // Add updated object to the cache
        data.cards.items = data.cards.items.map(item => item.id === updateObject.id ? updateObject : item);
        // Overwrite the cache with the new results
        cache.writeQuery({query, data});
      },
    },
    props: (props) => {
      return {
        update: (input) => {
          props.mutate({
            variables: {input},
            optimisticResponse: () => ({
              updateObject: input,
            }),
          });
        },
      };
    },
  };
};

export default compose(
  graphql(CardsGraphQL, selectConfig),
  graphql(UpdateCardGraphQL, updateConfig(CardsGraphQL)))
(EditCard);
For GraphQL this mutation seems to run without problems, and the result in DynamoDB is what I expect:
{
  givenName: 'someGivenName',
  familyName: 'someFamilyName'
}
However, the cache is not always updated with the mutation result, and the apollo-client shows the warning:
"Missing field middleName in {..."
If I add the middleName field, the warning goes away and the cache updates correctly, but the result in DynamoDB is:
{
  givenName: 'someGivenName',
  middleName: null,
  familyName: 'someFamilyName'
}
This approach results in additional complexity in my client that I would like to avoid maintaining.
Does anyone else have this problem? How to solve this gracefully?
Any help is appreciated.

What is the replacement for the toInclude() assertion in the new version of expect?

I'm stuck with this. I am trying to test my login API using expect, and the new version of expect is throwing an error.
This is my testing code:
it('should login user and return auth token', (done) => {
  request(app)
    .post('/users/login')
    .send({
      email: users[1].email,
      password: users[1].password
    })
    .expect((res) => {
      expect(res.headers['x-auth']).toBeTruthy();
    })
    .end((error, res) => {
      if (error) {
        return done(error);
      }
      User.findById(users[1]._id).then((user) => {
        expect(user.tokens[0]).toMatchObject({
          access: 'auth',
          token: res.headers['x-auth']
        });
        done();
      }).catch((error) => done(error));
    });
});
And the error is:
1) POST /users/login
should login user and return auth token:
Error: expect(received).toMatchObject(expected)
Expected value to match object:
{"access": "auth", "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJfaWQiOiI1OWYwMzM0ZGExMzRmYjFmNzg4NTkzOTciLCJhY2Nlc3MiOiJhdX
RoIiwiaWF0IjoxNTA4OTE0MDEzfQ.S0KCmLADcCLPWTK1khxNPO03tVMTW0HU117xapm56MM"}
Received:
{"_id": "59f0335da134fb1f788593b3", "access": "auth", "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJfaWQiOiI1OWYwMzM0ZGExMzR
mYjFmNzg4NTkzOTciLCJhY2Nlc3MiOiJhdXRoIiwiaWF0IjoxNTA4OTE0MDEzfQ.S0KCmLADcCLPWTK1khxNPO03tVMTW0HU117xapm56MM"}
Difference:
- Expected
+ Received
Object {
+ "_id": "59f0335da134fb1f788593b3",
"access": "auth",
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJfaWQiOiI1OWYwMzM0ZGExMzRmYjFmNzg4NTkzOTciLCJhY2Nlc3MiOiJhdXRoIiwiaWF0IjoxNTA
4OTE0MDEzfQ.S0KCmLADcCLPWTK1khxNPO03tVMTW0HU117xapm56MM",
}
I am testing two things, but the returned object includes _id and gives me that error. In the previous version of expect (before it became part of Jest) this was simple using the toInclude() assertion, but now both toContain() and toMatchObject() show the same error.
This is my seed file:
const {ObjectID} = require('mongodb');
const jwt = require('jsonwebtoken');
const {Todo} = require('./../../models/todo');
const {User} = require('./../../models/user');

const userOneId = new ObjectID();
const userTwoId = new ObjectID();
const users = [{
  _id: userOneId,
  email: 'adil.aj95@gmail.com',
  password: 'userOnePass',
  tokens: [{
    access: 'auth',
    token: jwt.sign({_id: userOneId, access: 'auth'}, 'abc123').toString()
  }]
},
{
  _id: userTwoId,
  email: 'adil2.aj95@gmail.com',
  password: 'userTwoPass',
  // tokens: [{
  //   access: 'auth',
  //   token: jwt.sign({_id: userTwoId, access: 'auth'}, 'abc123').toString()
  // }]
}];
You need just a slight change. Instead of using
expect(user.tokens[0]).toMatchObject({
  access: 'auth',
  token: res.headers['x-auth']
});
call .toObject() on user, like this:
expect(user.toObject().tokens[0]).toMatchObject({
  access: 'auth',
  token: res.headers['x-auth']
});
Why? Your user is a Mongoose document that carries more info than you would expect. You can see that there is an extra _id property in the token (the error that is thrown shows it). toObject() returns just the plain object as you would expect it, without the Mongoose-specific properties (stuff like _id, __v, etc.).
You can use .toHaveProperty(keyPath, value) in the new version of expect shipped with Jest.
So the code becomes:
expect(user.tokens[0]).toHaveProperty('access', 'auth');
expect(user.tokens[0]).toHaveProperty('token', res.headers['x-auth']);
