buildSchema is not a function graphql npm - node.js

The official GraphQL website refers to the graphql npm package for creating the hello-world example:
var { graphql, buildSchema } = require('graphql');
var schema = buildSchema(`
type Query {
hello: String
}
`);
var root = { hello: () => 'Hello world!' };
graphql(schema, '{ hello }', root).then((response) => {
console.log(response);
});
When I run this file with the node command, I get:
var schema = buildSchema(`
^
TypeError: buildSchema is not a function
at Object. (
I guess the official GraphQL website is not up to date with this repository, so I tried to use GraphQLSchema instead of buildSchema:
var schema = GraphQLSchema(`
...
`)
I got another error:
TypeError: Cannot call a class as a function
Thus, I called new GraphQLSchema(... instead of GraphQLSchema(...
I got yet another error:
Error: Must provide configuration object.
So, as a next step, I explored the exported object and filtered for functions with "schema" in their name:
const graphql = require('graphql');
const functionsContainsSchema = Object.keys(graphql).filter(e => e.toLowerCase().includes('schema') && typeof graphql[e] === 'function');
console.log(
functionsContainsSchema
)
/* STDOUT
[ 'GraphQLSchema',
'buildClientSchema',
'buildASTSchema',
'extendSchema',
'printSchema' ]
*/
What is the current replacement for buildSchema, and what is its signature?
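For what it's worth, the "Must provide configuration object." error means GraphQLSchema expects the programmatic form rather than an SDL string. A hedged equivalent of the hello-world using that programmatic API (with the same positional graphql() call as above) could look like the sketch below; alternatively, upgrading the graphql package should bring back buildSchema itself:

var { graphql, GraphQLSchema, GraphQLObjectType, GraphQLString } = require('graphql');

// Build the schema programmatically: a configuration object with a root Query type
var schema = new GraphQLSchema({
  query: new GraphQLObjectType({
    name: 'Query',
    fields: {
      hello: {
        type: GraphQLString,
        resolve: () => 'Hello world!',
      },
    },
  }),
});

graphql(schema, '{ hello }').then((response) => {
  console.log(response);
});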

Related

Unable to stub an exported function with Sinon

I need to test the following createFacebookAdVideoFromUrl(), which consumes a retryAsyncCall that I'd like to stub with Sinon:
async function createFacebookAdVideoFromUrl(accountId, videoUrl, title, facebookToken = FACEBOOK_TOKEN, options = null, businessId = null) {
const method = 'POST';
const url = `${FACEBOOK_URL}${adsSdk.FacebookAdsApi.VERSION}/${accountId}/advideos`;
const formData = {
access_token: businessId ? getFacebookConfig(businessId).token : facebookToken,
title,
name: title,
file_url: videoUrl,
};
const callback = () => requestPromise({ method, url, formData });
const name = 'createFacebookAdVideoFromUrl';
const retryCallParameters = buildRetryCallParameters(name, options);
const adVideo = await retryAsyncCall(callback, retryCallParameters);
logger.info('ADVIDEO', adVideo);
return { id: JSON.parse(adVideo).id, title };
}
This retryAsyncCall function is exported as such:
module.exports.retryAsyncCall = async (callback, retryCallParameters, noRetryFor = [], customRetryCondition = null) => {
// Implementation details ...
}
Here is how I wrote my test so far:
it.only("should create the video calling business's Facebook ids", async () => {
const payload = createPayloadDataBuilder({
businessId: faker.internet.url(),
});
const retryAsyncCallStub = sinon.stub(retryAsyncCallModule, 'retryAsyncCall').resolves('random');
const createdFacebookAd = await FacebookGateway.createFacebookAdVideoFromUrl(
payload.accountId,
payload.videoUrl,
payload.title,
payload.facebookToken,
payload.options,
payload.businessId,
);
assert.strictEqual(retryAsyncCallStub.calledOnce, true);
assert.strictEqual(createdFacebookAd, { id: 'asdf', title: 'asdf' });
});
I don't expect it to work straight away, since I am working in a TDD fashion, but I do expect retryAsyncCall to be stubbed out. Yet I still get this TypeError: Cannot read property 'inc' of undefined error from Mocha, which refers to an inner function of retryAsyncCall.
How can I make sinon stubbing work?
I fixed it by changing the way the import is done in my SUT:
// from
const { retryAsyncCall } = require('../../../helpers/retry-async');
// to
const retry = require('../../../helpers/retry-async');
and in my test file :
// from
import * as retryAsyncCallModule from '../../../src/common/helpers/retry-async';
// to
import retryAsyncCallModule from '../../../src/common/helpers/retry-async';
Destructuring copies the function reference into a local binding at require time instead of going through the module object, so the stub, which replaces the property on the module object, never affected the reference actually being called.
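A tiny standalone sketch of that behaviour, with a hypothetical module object m:

const m = { fn: () => 'real' };

const { fn } = m;          // copies the current function reference into a local binding
m.fn = () => 'stubbed';    // effectively what sinon.stub(m, 'fn') does later

console.log(fn());         // 'real'    – the destructured copy never sees the stub
console.log(m.fn());       // 'stubbed' – calling through the module object does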

Parameter obj to Document() must be an object when trying to convert array to mongoose document with redis

I am using Redis to cache my queries. It works fine when the query returns an object, but not when it returns an array. It gives me the error "Parameter "obj" to Document() must be an object, got kids". It also happens with count queries. Here is my code:
const mongoose = require("mongoose");
const redis = require("redis");
const util = require("util");
const client = redis.createClient(process.env.REDIS_URL);
client.hget = util.promisify(client.hget);
const exec = mongoose.Query.prototype.exec;
mongoose.Query.prototype.cache = async function (options = {}) {
this.useCache = true;
this.hashKey = JSON.stringify(options.key || "");
this.time = JSON.stringify(options.time || 36000);
return this;
};
mongoose.Query.prototype.exec = async function () {
if (!this.useCache) {
return exec.apply(this, arguments);
}
const key = JSON.stringify(
Object.assign({}, this.getQuery(), {
collection: this.mongooseCollection.name,
})
);
// client.flushdb(function (err, succeeded) {
// console.log(succeeded); // will be true if successfull
// });
const cacheValue = await client.hget(this.hashKey, key);
if (cacheValue) {
const doc = JSON.parse(cacheValue);
/*
this.model refers to the Class of the corresponding Mongoose Model of the query being executed, example: User,Blog
this function must return a Promise of Mongoose model objects due to the nature of the mongoose model object having other
functions attached once is created ( validate,set,get etc)
*/
console.log("Response from Redis");
console.log(doc);
console.log(Array.isArray(doc));
return Array.isArray(doc)
? doc.map((d) => new this.model(d))
: new this.model(doc);
}
//await the results of the query once executed, with any arguments that were passed on.
const result = await exec.apply(this, arguments);
client.hset(this.hashKey, key, JSON.stringify(result));
client.expire(this.hashKey, this.time);
console.log("Response from MongoDB");
return result;
};
module.exports = {
clearHash(hashKey) {
client.del(JSON.stringify(hashKey));
},
};
Data in redis - [ 'kids', 'men', 'women' ]
Query - const collectionType = await Product.find().distinct("collectionType").cache({ key: "COLLECTION_TYPE" });
Can anyone please tell me what I am doing wrong?
I solved it by returning doc directly, and it works fine. I'm not sure whether that is the right way, though: if I just return doc, the data served on a cache hit comes straight from Redis without being rebuilt into Mongoose documents.
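For reference, a hedged variant of the cache-hit branch above that keeps the hydration for real documents but passes primitives straight through, so values coming back from distinct() or count() (such as 'kids' or a number) are never handed to Document():

// only plain objects become Mongoose documents; strings and numbers are returned as-is
const hydrate = (d) => (d !== null && typeof d === 'object' ? new this.model(d) : d);
return Array.isArray(doc) ? doc.map(hydrate) : hydrate(doc);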

Can't use #cypher in GraphQL schema when using ApolloWebserver

I want to query a field on a node using the #cypher directive in my GraphQL schema.
However, when I query the field I get Resolve function for "Link.x" returned undefined.
My schema, with the directive on x of Link, is the following:
scalar URI
interface IDisplayable{
"Minimal data necessary for the object to appear on screen"
id: ID!
label: String
story: URI
}
interface ILink{
"""
A link must know to what nodes it is connected to
"""
x: Node! #cypher(statement: "MATCH (this)-[:X_NODE]->(n:Node) RETURN n")
y: Node!
"""
if optional=true then sequence MAY be used to define a set of options
"""
optional: Boolean
}
interface INode{
synchronous: Boolean
unreliable: Boolean
}
type Node implements INode & IDisplayable{
id: ID!
label: String!
story: URI
synchronous: Boolean
unreliable: Boolean
}
type Link implements ILink & IDisplayable{
id: ID!
label: String!
x: Node! #cypher(statement: "MATCH (this)-[:X_NODE]->(n:Node) RETURN n")
y: Node!
story: URI
optional: Boolean
}
When querying for a link and its x property I get undefined. With the custom resolver that I wrote for y, however, it works. Of course I could keep the hand-written resolvers, but that is a lot of code that shouldn't be necessary.
This is index.js:
require( 'dotenv' ).config();
const express = require( 'express' );
const { ApolloServer } = require( 'apollo-server-express' );
const neo4j = require( 'neo4j-driver' );
const cors = require( 'cors' );
const { makeAugmentedSchema } = require( 'neo4j-graphql-js' );
const typeDefs = require( './graphql-schema' );
const resolvers = require( './resolvers' );
const app = express();
app.use( cors() );
const URI = `bolt://${ process.env.DB_HOST }:${ process.env.DB_PORT }`;
const driver = neo4j.driver(
URI,
neo4j.auth.basic( process.env.DB_USER, process.env.DB_PW ),
);
const schema = makeAugmentedSchema( { typeDefs, resolvers } );
const server = new ApolloServer( {
context: { driver },
schema,
formatError: ( err ) => {
return {
message: err.message,
code: err.extensions.code,
success: false,
stack: err.path,
};
},
} );
const port = process.env.PORT;
const path = process.env.ENDPOINT;
server.applyMiddleware( { app, path } );
app.listen( { port, path }, () => {
console.log( `Server listening at http://localhost:${ port }${ path }` );
} );
With "graphql-schema.js" being
const fs = require( 'fs' );
const path = require( 'path' );
const schema = './schemas/schema.graphql';
const encoding = 'utf-8';
let typeDefs = '';
typeDefs += fs.readFileSync( path.join( __dirname, schema ) )
.toString( encoding );
module.exports = typeDefs;
Thanks for any tips
I found out that if I write a custom resolver for a query, the directives in the schema are not applied. However, I realized I can let the augmented schema generate the queries I need, so I simply deleted my custom implementations, which works for me.
So in my resolvers I had to remove the implementations for queries that fetch fields annotated with a #cypher statement; after that I could put the directive into my schema and it worked fine.
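A hypothetical before/after of the resolvers file, just to illustrate the change:

// before – a hand-written resolver shadowed the generated one, so the directive never ran
// module.exports = {
//   Link: {
//     x: async (parent, args, context) => { /* manual Cypher call */ },
//   },
// };

// after – nothing is registered for Link.x, so the resolver generated by
// makeAugmentedSchema executes the Cypher statement from the schema directive
module.exports = {};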

ObjectionJS - Group models in a data layer file

I have a NodeJS app running fastify with fastify-objectionjs.
For tidiness, I'd like to group all models in a single file called _main.js, where I export an array of the models inside the models folder.
Since the fastify-objectionjs registration requires an array of models, I thought I could just import the array from my _main.js and feed it as it is to the registration function.
But ObjectionJS is telling me that The supplied models are invalid.
/app.js (node entry point)
const fastify = require('fastify')({
logger: true
})
const knexConfig = require('./knexfile')
const dataLayer = require('./models/_main')
fastify.register(require('fastify-objectionjs'), {
knexConfig: knexConfig,
models: dataLayer
})
// Also tried:
// fastify.register(require('fastify-objectionjs'), {
// knexConfig: knexConfig,
// models: [dataLayer]
// })
/models/_main.js
const User = require('./user.model')
var dataLayer = [User]
module.exports = dataLayer
// Also tried without var:
// module.exports = {
// dataLayer: [
// User
// ]
// }
/models/user.model.js
const Knex = require('knex')
const connection = require('../knexfile')
const { Model } = require('objection')
const knexConnection = Knex(connection)
Model.knex(knexConnection)
class User extends Model {
static get tableName () {
return 'users'
}
}
module.exports = { User }
I can't seem to find a problem in the file flow, but if I create the models array on the fly, the app starts smoothly:
/app.js (node entry point)
const fastify = require('fastify')({
logger: true
})
const knexConfig = require('./knexfile')
const User = require('./models/user.model') // changed
fastify.register(require('fastify-objectionjs'), {
knexConfig: knexConfig,
models: [User] // changed
})
Any idea why this isn't working?
Thanks in advance for your time.
Found the gotcha: I just needed to use destructuring in the require of User, like this:
/models/_main.js
// BAD
// const User = require('./user.model')
// GOOD
const { User } = require('./user.model')
module.exports = [User]
Works like a charm.
Useful question that explains the difference:
Curly brackets (braces) in node require statement
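For completeness, a small sketch of what each require form returns, given that user.model.js ends with module.exports = { User }:

const wrapper = require('./user.model');  // -> { User: [class User] } – a plain wrapper object, not a model
const { User } = require('./user.model'); // -> [class User] – the model class itself
module.exports = [User];                  // fastify-objectionjs receives actual Objection model classes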

Sequelize query is giving TypeError: undefined is not a function

I'm using Express and Sequelize for my Node application. In the controller file, I have the following:
var models = require('../models'),
Property = models.property,
Sequelize = require('sequelize');
module.exports = function(req, res){
Sequelize.query("SELECT * FROM 'property'", { type:Sequelize.QueryTypes.SELECT})
.then(function(properties) {
res.json(properties)
})
}
I can use model.findAll fine, but when I try to run a raw query, I get TypeError: undefined is not a function. Can you point out what I'm doing wrong in this code?
You need to be calling query() on a Sequelize instance instead:
var Sequelize = require('sequelize');
var sequelize = new Sequelize('database', 'username', 'password');
sequelize.query("SELECT * FROM 'property'", { type:Sequelize.QueryTypes.SELECT})
.then(function(properties) {
res.json(properties)
})
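If you'd rather not open a second connection, a sketch of the original controller reusing the instance that ../models already creates (assuming the standard sequelize-cli models/index.js, which exports it as models.sequelize; the backtick quoting assumes MySQL):

var models = require('../models'),
    Sequelize = require('sequelize');

module.exports = function (req, res) {
  // run the raw query on the existing connection instead of the Sequelize constructor
  models.sequelize.query('SELECT * FROM `property`', { type: Sequelize.QueryTypes.SELECT })
    .then(function (properties) {
      res.json(properties);
    });
};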
You can use
const sql = "select * from ..."
model.sequelize.query(sql, { type: model.sequelize.QueryTypes.SELECT })
.then(function (rows) {
// ... do a job on the query here ...
})
In my case, I was reading the contents of a file and importing it in a migration with:
const sqlFile = fs.readFileSync(file, {encoding: "UTF-8"});
await queryInterface.sequelize.query(sqlFile, {
raw: true,
type: Sequelize.QueryTypes.RAW
});
But I was receiving the error:
Error importing: sql.trim is not a function.
Long story short, my SQL contained the ` (backquote/backtick) character, and I needed to escape it.
So I escaped it as soon as I read the file:
const sqlFile = fs.readFileSync(file, {encoding: "UTF-8"}).replace(/`/g, '\\`'); // escape every backtick
Problem solved. But it took me a bit over an hour to figure that out.
