I've been trying to automate the creation of my Sequelize models by using a single generic model that I can pass definitions into, rather than creating a dedicated model file for each one.
I have an array of model definitions which looks something like this:
const modelDefinitions = [
{
name: "User",
fieldDefinitions: [
{
name: "first_name",
label: "First Name",
column_type: Sequelize.DataTypes.STRING,
},
{
name: "last_name",
label: "Last Name",
column_type: Sequelize.DataTypes.STRING,
},
{
name: "email",
label: "Email",
column_type: Sequelize.DataTypes.STRING,
},
{
name: "password",
label: "Password",
restricted: true,
column_type: Sequelize.DataTypes.STRING,
},
],
},
{
name: "Audit",
fieldDefinitions: [
{
name: "ref",
column_type: Sequelize.DataTypes.STRING,
label: "Audit Ref",
},
{
name: "result",
column_type: Sequelize.DataTypes.STRING,
label: "Result",
},
{
name: "auditor_id",
column_type: Sequelize.DataTypes.INTEGER,
label: "Auditor",
},
],
},
];
When my array of models contains just one model it works perfectly fine, but when I have multiple, the GenericModel of the previously defined models is then "changed" to be the last one in the list that was initialised.
I'm new to Node, so I think I'm either missing something or there's some sort of model caching happening that makes every instance of GenericModel become whatever it was last initialised as.
Please see my example below (the commented-out code is how I used to define the models, and the reduce is my new way of defining them):
// {
// User: User.init(sequelize, modelDef),
// Article: Article.init(sequelize, modelDef),
// Audit: Audit.init(sequelize, modelDef),
// Form: Form.init(sequelize, modelDef),
// };
const models = modelDefinitions.reduce((acc, modelDef) => {
return { ...acc, [modelDef.name]: GenericModel.init(sequelize, modelDef) };
}, {});
console.log({ models });
My console.log() returns the following (notice both are Group):
{
models: {
User: Group,
Group: Group
}
}
As you can see, whatever the last model is defined as, the previous ones inherit that instead of keeping what I originally defined them as.
But what I actually want is:
{
models: {
User: User,
Group: Group
}
}
If my list only has User in it, it works fine.
I managed to get this working in the end.
I think my issue was that my GenericModel was being treated as a singleton, so to get around this I stopped having GenericModel extend Sequelize.Model and instead made a plain class with a constructor to consume my arguments, then added a method on the new class to return the Sequelize model.
The main change is that instead of defining the models with GenericModel.init(), I now define them by calling sequelize.define(modelName, attributes, options).
So my reduce now looks like this:
const models = modelDefinitions.reduce((acc, modelDef) => {
return { ...acc, [modelDef.name]: new GenericModel(sequelize, modelDef).getDBModel() };
}, {});
and my class:
class GenericModel {
constructor(sequelize, modelDef) {
this.sequelize = sequelize;
this.modelDef = modelDef;
this.modelName = modelDef?.name;
this.definitions = modelDef?.fieldDefinitions;
this.restrictedFieldList = this.definitions.filter((field) => field?.restricted).map((definition) => definition.name);
}
getDBModel() {
const model = this.sequelize.define(
this.modelName,
this.definitions.reduce((acc, definition) => {
return { ...acc, [definition.name]: definition.column_type };
}, {}),
{
defaultScope: {
attributes: {
exclude: this.restrictedFieldList,
},
},
sequelize: this.sequelize,
modelName: this.modelName,
}
);
return model;
}
}
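For anyone who wants to keep the Model.init() approach: the root cause seems to be that GenericModel is a single class, and every call to init() reconfigures that same class, so only the last definition survives. Below is a minimal sketch of my own (not part of the original fix) that creates a fresh subclass per definition and uses the standard Model.init(attributes, options) signature instead:
const { Model } = require("sequelize");

const defineModel = (sequelize, modelDef) => {
  // A fresh class per definition, so each init() configures its own model
  // instead of re-configuring a shared GenericModel class.
  class DynamicModel extends Model {}

  const attributes = modelDef.fieldDefinitions.reduce(
    (acc, definition) => ({ ...acc, [definition.name]: definition.column_type }),
    {}
  );

  const restrictedFieldList = modelDef.fieldDefinitions
    .filter((definition) => definition.restricted)
    .map((definition) => definition.name);

  // Model.init() returns the configured model class.
  return DynamicModel.init(attributes, {
    sequelize,
    modelName: modelDef.name,
    defaultScope: { attributes: { exclude: restrictedFieldList } },
  });
};

const models = modelDefinitions.reduce(
  (acc, modelDef) => ({ ...acc, [modelDef.name]: defineModel(sequelize, modelDef) }),
  {}
);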
I'm trying to build a nearby-search platform with Keystone.js, but the nearby queries are not working.
How can I run a Mongoose $nearSphere query, or aggregation queries, in Keystone?
I've added my best attempt and the errors below.
Index.js
require('dotenv').config({ path: './.env' });
const { Keystone } = require('@keystonejs/keystone');
const { GraphQLApp } = require('@keystonejs/app-graphql');
const { AdminUIApp } = require('@keystonejs/app-admin-ui');
const { PasswordAuthStrategy } = require('@keystonejs/auth-password');
const { MongooseAdapter } = require('@keystonejs/adapter-mongoose');
const initialiseData = require('./initial-data');
const {
Post,
User,
Category,
SubCategory,
Comment,
ForgottenPasswordToken,
} = require('./schema');
const PROJECT_NAME = 'rentospot';
const keystone = new Keystone({
appVersion: {
version: '1.0.0',
addVersionToHttpHeaders: false,
access: false,
},
cookie: {
secure: process.env.NODE_ENV === 'production', // Default to true in production
maxAge: 1000 * 60 * 60 * 24 * 30, // 30 days
sameSite: false,
},
cookieSecret: process.env.COOKIE_SECRET,
adapter: new MongooseAdapter({
mongoUri: 'mongodb+srv://*******:******.mongodb.net/my-app-key-stone'
}),
onConnect: process.env.CREATE_TABLES !== 'true' && initialiseData,
});
const postList = keystone.createList('Post', Post);
keystone.createList('Category', Category);
keystone.createList('SubCategory', SubCategory);
keystone.createList('Comment', Comment);
keystone.createList('User', User);
keystone.createList('ForgottenPasswordToken', ForgottenPasswordToken);
const authStrategy = keystone.createAuthStrategy({
type: PasswordAuthStrategy,
list: 'User',
});
const extendSchema = {
queries: [{
schema: 'nearBy: [Post]',
resolver: async(_) => {
try {
const {
adapter
} = postList;
const result = await adapter.find({
// "feature": true,
"venue.location": {
"$nearSphere": {
"$geometry": {
"type": "Point",
"coordinates": [12, 75],
},
"$minDistance": 0,
"$maxDistance": 500,
},
}
});
console.log(result)
return result;
} catch (e) {
console.log("errrrrrrrrrrrrrror: ", e)
return null
}
},
}, ]
};
keystone.extendGraphQLSchema(extendSchema)
module.exports = {
keystone,
apps: [new GraphQLApp(),
new AdminUIApp({
name: PROJECT_NAME,
enableDefaultRoute: true,
authStrategy
})
],
};
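A note on the error itself: $nearSphere with $geometry only works when the queried path is covered by a 2dsphere index, and nothing in my setup creates one, so my guess is that Mongo rejects the query for lack of a geo index. As a quick sanity check, the index can be created by hand in the mongo shell (assuming the Post list maps to Mongoose's default pluralised posts collection):
// Run against the my-app-key-stone database
db.posts.createIndex({ "venue.location": "2dsphere" });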
Schema.js
const fetch = require('node-fetch');
const { v4: uuid } = require('uuid');
const { sendEmail } = require('./emails');
const {
File,
Text,
Slug,
Relationship,
Select,
Password,
Checkbox,
CalendarDay,
DateTime,
Integer
} = require('@keystonejs/fields');
const { S3Adapter } = require('@keystonejs/file-adapters');
const { AuthedRelationship } = require('@keystonejs/fields-authed-relationship');
const { formatISO } = require('date-fns');
const { Wysiwyg } = require('@keystonejs/fields-wysiwyg-tinymce');
const LocationGoogle = require('./LocationGoogle');
const fileAdapter = new S3Adapter({
bucket: process.env.BUCKET_NAME,
folder: process.env.S3_PATH,
publicUrl: ({ filename }) =>
// `https://${process.env.CF_DISTRIBUTION_ID}.cloudfront.net/${process.env.S3_PATH}/${filename}`,
`https://${process.env.CF_DISTRIBUTION_ID}/${process.env.S3_PATH}/${filename}`,
s3Options: {
// Optional parameters to be supplied directly to the AWS.S3 constructor
apiVersion: '2006-03-01',
accessKeyId: process.env.API_KEY_ID,
secretAccessKey: process.env.SECRET_ACCESS_KEY,
region: process.env.REGION
},
uploadParams: ({ id }) => ({
Metadata: {
keystone_id: `${id}`,
},
}),
});
// Access control functions
const userIsAdmin = ({ authentication: { item: user } }) => Boolean(user && user.isAdmin);
const userOwnsItem = ({ authentication: { item: user } }) => {
if (!user) {
return false;
}
return { id: user.id };
};
const userIsCurrentAuth = ({ authentication: { item: user } }) => Boolean(user); // item will be undefined for anonymous user
const userIsAdminOrOwner = auth => {
const isAdmin = access.userIsAdmin(auth);
const isOwner = access.userOwnsItem(auth);
return isAdmin ? isAdmin : isOwner;
};
const access = { userIsAdmin, userIsCurrentAuth, userOwnsItem, userIsAdminOrOwner };
// Read: public / Write: admin
const DEFAULT_LIST_ACCESS = {
create: access.userIsCurrentAuth,
read: true,
update: access.userIsAdminOrOwner,
delete: access.userIsAdmin,
};
exports.User = {
access: {
update: access.userIsCurrentAuth,
delete: access.userIsAdmin,
},
fields: {
name: { type: Text },
dob: {
type: CalendarDay,
format: 'do MMMM yyyy',
dateFrom: '1901-01-01',
dateTo: formatISO(new Date(), { representation: 'date' }),
},
phone: {
type: Text,
isUnique: true,
access: { read: access.userIsCurrentAuth },
// hooks: {
// validateInput:
// }
},
email: { type: Text, isUnique: true, access: { read: access.userIsCurrentAuth } },
password: { type: Password, isRequired: true },
isAdmin: { type: Checkbox, access: { update: access.userIsAdmin } },
twitterHandle: { type: Text },
image: { type: File, adapter: fileAdapter },
},
hooks: {
afterChange: async ({ updatedItem, existingItem }) => {
if (existingItem && updatedItem.password !== existingItem.password) {
const url = process.env.SERVER_URL || 'http://localhost:3000';
const props = {
recipientEmail: updatedItem.email,
signinUrl: `${url}/signin`,
};
const options = {
subject: 'Your password has been updated',
to: updatedItem,
from: process.env.MAILGUN_FROM,
domain: process.env.MAILGUN_DOMAIN,
apiKey: process.env.MAILGUN_API_KEY,
};
await sendEmail('password-updated.jsx', props, options);
}
},
},
};
// exports.Organiser = {
// access: DEFAULT_LIST_ACCESS,
// fields: {
// user: { type: Relationship, ref: 'User' },
// order: { type: Integer },
// role: { type: Text },
// },
// };
// TODO: We can't access the existing item at the list update level yet,
// read access needs to check if event is "active" or if the user is admin
// read: ({ existingItem, authentication }) => access.userIsAdmin({ authentication }) || !!(existingItem && existingItem.status === 'active'),
exports.Post = {
access: DEFAULT_LIST_ACCESS,
fields: {
title: { type: Text },
slug: { type: Slug, from: 'title' },
author: {
type: AuthedRelationship,
ref: 'User',
isRequired: true,
access: {
read: userIsCurrentAuth,
update: userIsAdmin,
},
},
status: {
type: Select,
defaultValue: 'published',
options: [
{ label: 'Draft', value: 'draft' },
{ label: 'Published', value: 'published' },
],
},
feature: { type: Checkbox, access: { update: access.userIsAdmin } },
assured: { type: Checkbox, access: { update: access.userIsAdmin } },
posted: { type: DateTime, format: 'dd/MM/yyyy' },
categories: {
type: Relationship,
ref: 'Category',
many: false,
},
subCategories: {
type: Relationship,
ref: 'SubCategory',
many: true,
},
description: { type: Wysiwyg },
venue: {
type: LocationGoogle,
googleMapsKey: process.env.GOOGLE_MAPS_KEY,
// hooks: {
// resolveInput: async ({ originalInput }) => {
// try {
// const placeId = originalInput.venue;
// if (typeof placeId === 'undefined') {
// // Nothing was passed in, so we can bail early.
// return undefined;
// }
// const r = await fetch(
// `https://maps.googleapis.com/maps/api/geocode/json?place_id=${placeId}&key=${process.env.GOOGLE_MAPS_KEY}`
// )
// const response = await r.json()
// if (response.results && response.results[0]) {
// const { place_id, formatted_address } = response.results[0];
// const { lat, lng } = response.results[0].geometry.location;
// return {
// googlePlaceID: place_id,
// formattedAddress: formatted_address,
// lat,
// lng,
// loc: {
// type: "Point",
// coordinates: [lng, lat]
// }
// };
// }
// return null;
// }
// catch (e) {
// console.log("error:", e)
// }
// },
// validateInput: async (x) => {
// console.log("---------------------------------item 2-------------------------------------- : ",x);
// },
// beforeChange: async (x) => {
// console.log("========", x)
// },
// afterChange: async (x) => {
// console.log("++++", x)
// }
// },
},
locationAddress: { type: Text },
locationDescription: { type: Text },
createdby: {
type: AuthedRelationship,
ref: 'User',
isRequired: true,
access: {
read: userIsCurrentAuth,
update: userIsAdmin,
},
},
image: {
type: File,
adapter: fileAdapter,
hooks: {
beforeChange: async ({ existingItem }) => {
if (existingItem && existingItem.image) {
await fileAdapter.delete(existingItem.image);
}
},
},
},
},
hooks: {
afterDelete: ({ existingItem }) => {
if (existingItem.image) {
fileAdapter.delete(existingItem.image);
}
},
},
adminConfig: {
defaultPageSize: 20,
defaultColumns: 'title, status',
defaultSort: 'title',
},
labelResolver: item => item.title,
};
exports.Category = {
// access: userIsAdmin,
fields: {
name: { type: Text },
slug: { type: Slug, from: 'name' },
image: {
type: File,
adapter: fileAdapter,
hooks: {
beforeChange: async ({ existingItem }) => {
if (existingItem && existingItem.image) {
await fileAdapter.delete(existingItem.image);
}
},
},
},
},
};
exports.SubCategory = {
// access: userIsAdmin,
fields: {
name: { type: Text },
slug: { type: Slug, from: 'name' },
categories: {
type: Relationship,
ref: 'Category',
many: false,
},
image: {
type: File,
adapter: fileAdapter,
hooks: {
beforeChange: async ({ existingItem }) => {
if (existingItem && existingItem.image) {
await fileAdapter.delete(existingItem.image);
}
},
},
},
},
};
exports.Comment = {
access: {
create: userIsCurrentAuth,
update: userIsAdminOrOwner,
},
fields: {
body: { type: Text, isMultiline: true },
originalPost: {
type: Relationship,
ref: 'Post',
},
author: {
type: AuthedRelationship,
ref: 'User',
isRequired: true,
access: {
create: userIsAdmin,
update: userIsAdmin,
},
},
posted: { type: CalendarDay },
},
labelResolver: item => item.body,
};
// exports.Talk = {
// access: DEFAULT_LIST_ACCESS,
// fields: {
// name: { type: Text },
// event: { type: Relationship, ref: 'Event.talks' },
// speakers: { type: Relationship, ref: 'User.talks', many: true },
// isLightningTalk: { type: Checkbox },
// description: { type: Wysiwyg },
// },
// };
// exports.Rsvp = {
// access: {
// create: true,
// read: true,
// update: ({ authentication: { item } }) => {
// if (!item) {
// return false;
// }
// return { user: { id: item.id } };
// },
// delete: access.userIsAdmin,
// },
// fields: {
// event: { type: Relationship, ref: 'Event' },
// user: { type: Relationship, ref: 'User' },
// status: { type: Select, options: 'yes, no' },
// },
// hooks: {
// validateInput: async ({ context, resolvedData, existingItem }) => {
// const { status } = resolvedData;
// const { event: eventId } = existingItem ? existingItem : resolvedData;
// if (status === 'no') {
// return;
// }
// const { data } = await context.executeGraphQL({
// query: `query {
// event: Event(where: { id: "${eventId}" }) {
// id
// startTime
// maxRsvps
// isRsvpAvailable
// }
// allRsvps(where: { event: { id: "${eventId}" }}) {
// id
// }
// }`,
// });
// const { event, allRsvps } = data;
// if (
// !event ||
// !event.isRsvpAvailable ||
// !event.startTime ||
// new Date() > new Date(event.startTime) ||
// allRsvps.length >= event.maxRsvps
// ) {
// throw 'Error rsvping to event';
// }
// },
// },
// };
// exports.Sponsor = {
// access: DEFAULT_LIST_ACCESS,
// fields: {
// name: { type: Text },
// website: { type: Text },
// logo: { type: CloudinaryImage, adapter: cloudinaryAdapter },
// },
// };
exports.ForgottenPasswordToken = {
access: {
create: true,
read: true,
update: access.userIsAdmin,
delete: access.userIsAdmin,
},
fields: {
user: {
type: Relationship,
ref: 'User',
access: {
read: access.userIsAdmin,
},
},
token: {
type: Text,
isRequired: true,
isUnique: true,
access: {
read: access.userIsAdmin,
},
},
requestedAt: { type: DateTime, isRequired: true },
accessedAt: { type: DateTime },
expiresAt: { type: DateTime, isRequired: true },
},
hooks: {
afterChange: async ({ context, updatedItem, existingItem }) => {
if (existingItem) return null;
const now = new Date().toISOString();
const { errors, data } = await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `
query GetUserAndToken($user: ID!, $now: DateTime!) {
User( where: { id: $user }) {
id
email
}
allForgottenPasswordTokens( where: { user: { id: $user }, expiresAt_gte: $now }) {
token
expiresAt
}
}
`,
variables: { user: updatedItem.user.toString(), now },
});
if (errors) {
console.error(errors, `Unable to construct password updated email.`);
return;
}
const { allForgottenPasswordTokens, User } = data;
const forgotPasswordKey = allForgottenPasswordTokens[0].token;
const url = process.env.SERVER_URL || 'http://localhost:3000';
const props = {
forgotPasswordUrl: `${url}/change-password?key=${forgotPasswordKey}`,
recipientEmail: User.email,
};
const options = {
subject: 'Request for password reset',
to: User.email,
from: process.env.MAILGUN_FROM,
domain: process.env.MAILGUN_DOMAIN,
apiKey: process.env.MAILGUN_API_KEY,
};
await sendEmail('forgot-password.jsx', props, options);
},
},
};
exports.customSchema = {
mutations: [
{
schema: 'startPasswordRecovery(email: String!): ForgottenPasswordToken',
resolver: async (obj, { email }, context) => {
const token = uuid();
const tokenExpiration =
parseInt(process.env.RESET_PASSWORD_TOKEN_EXPIRY) || 1000 * 60 * 60 * 24;
const now = Date.now();
const requestedAt = new Date(now).toISOString();
const expiresAt = new Date(now + tokenExpiration).toISOString();
const { errors: userErrors, data: userData } = await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `
query findUserByEmail($email: String!) {
allUsers(where: { email: $email }) {
id
email
}
}
`,
variables: { email: email },
});
if (userErrors || !userData.allUsers || !userData.allUsers.length) {
console.error(
userErrors,
`Unable to find user when trying to create forgotten password token.`
);
return;
}
const userId = userData.allUsers[0].id;
const result = {
userId,
token,
requestedAt,
expiresAt,
};
const { errors } = await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `
mutation createForgottenPasswordToken(
$userId: ID!,
$token: String,
$requestedAt: DateTime,
$expiresAt: DateTime,
) {
createForgottenPasswordToken(data: {
user: { connect: { id: $userId }},
token: $token,
requestedAt: $requestedAt,
expiresAt: $expiresAt,
}) {
id
token
user {
id
}
requestedAt
expiresAt
}
}
`,
variables: result,
});
if (errors) {
console.error(errors, `Unable to create forgotten password token.`);
return;
}
return true;
},
},
{
schema: 'changePasswordWithToken(token: String!, password: String!): User',
resolver: async (obj, { token, password }, context) => {
const now = Date.now();
const { errors, data } = await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `
query findUserFromToken($token: String!, $now: DateTime!) {
passwordTokens: allForgottenPasswordTokens(where: { token: $token, expiresAt_gte: $now }) {
id
token
user {
id
}
}
}`,
variables: { token, now },
});
if (errors || !data.passwordTokens || !data.passwordTokens.length) {
console.error(errors, `Unable to find token`);
throw errors.message;
}
const user = data.passwordTokens[0].user.id;
const tokenId = data.passwordTokens[0].id;
const { errors: passwordError } = await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `mutation UpdateUserPassword($user: ID!, $password: String!) {
updateUser(id: $user, data: { password: $password }) {
id
}
}`,
variables: { user, password },
});
if (passwordError) {
console.error(passwordError, `Unable to change password`);
throw passwordError.message;
}
await context.executeGraphQL({
context: context.createContext({ skipAccessControl: true }),
query: `mutation DeletePasswordToken($tokenId: ID!) {
deleteForgottenPasswordToken(id: $tokenId) {
id
}
}
`,
variables: { tokenId },
});
return true;
},
},
],
};
LocationGoogle is a local directory containing an exact copy of the files from @keystonejs/fields-location-google, with small changes in its Implementation.js.
Implementation.js
const { Implementation } = require('@keystonejs/fields');
const { MongooseFieldAdapter } = require('@keystonejs/adapter-mongoose');
const fetch = require('node-fetch');
// Disabling the getter of mongoose >= 5.1.0
// https://github.com/Automattic/mongoose/blob/master/migrating_to_5.md#checking-if-a-path-is-populated
class LocationGoogleImplementation extends Implementation {
constructor(_, { googleMapsKey }) {
super(...arguments);
this.graphQLOutputType = 'LocationGoogle';
if (!googleMapsKey) {
throw new Error(
'You must provide a `googleMapsKey` to LocationGoogle Field. To generate a Google Maps API please visit: https://developers.google.com/maps/documentation/javascript/get-api-key'
);
}
this._googleMapsKey = googleMapsKey;
}
get _supportsUnique() {
return false;
}
extendAdminMeta(meta) {
return {
...meta,
googleMapsKey: this._googleMapsKey,
};
}
gqlOutputFields() {
return [`${this.path}: ${this.graphQLOutputType}`];
}
gqlQueryInputFields() {
return [...this.equalityInputFields('String'), ...this.inInputFields('String')];
}
getGqlAuxTypes() {
return [
`
type Location{
coordinates: [Float]
},
type ${this.graphQLOutputType} {
location: Location
googlePlaceID: String
formattedAddress: String
lat: Float
lng: Float
}
`,
];
}
// Called on `User.avatar` for example
gqlOutputFieldResolvers() {
return {
[this.path]: item => {
const itemValues = item[this.path];
if (!itemValues) {
return null;
}
return itemValues;
},
};
}
async resolveInput({ resolvedData }) {
const placeId = resolvedData[this.path];
// NOTE: The following two conditions could easily be combined into a
// single `if (!inputId) return inputId`, but that would lose the nuance of
// returning `undefined` vs `null`.
// Premature Optimisers; be ware!
if (typeof placeId === 'undefined') {
// Nothing was passed in, so we can bail early.
return undefined;
}
if (placeId === null) {
// `null` was specifically set, and we should set the field value to null
// To do that we... return `null`
return null;
}
const response = await fetch(
`https://maps.googleapis.com/maps/api/geocode/json?place_id=${placeId}&key=${this._googleMapsKey}`
).then(r => r.json());
if (response.results && response.results[0]) {
const { place_id, formatted_address } = response.results[0];
const { lat, lng } = response.results[0].geometry.location;
return {
location: {
type: "Point",
coordinates: [lng, lat]
},
googlePlaceID: place_id,
formattedAddress: formatted_address,
lat: lat,
lng: lng,
};
}
return null;
}
gqlUpdateInputFields() {
return [`${this.path}: String`];
}
gqlCreateInputFields() {
return [`${this.path}: String`];
}
getBackingTypes() {
const type = `null | {
location: {
type: "Point",
coordinates: [lng, lat]
};
googlePlaceID: string;
formattedAddress: string;
lat: number;
lng: number;
}
`;
return { [this.path]: { optional: true, type } };
}
}
const CommonLocationInterface = superclass =>
class extends superclass {
getQueryConditions(dbPath) {
return {
...this.equalityConditions(dbPath),
...this.inConditions(dbPath),
};
}
};
class MongoLocationGoogleInterface extends CommonLocationInterface(MongooseFieldAdapter) {
addToMongooseSchema(schema) {
const schemaOptions = {
type: {
location: {
type: { type: String, default: "Point" },
coordinates: [Number]
},
googlePlaceID: String,
formattedAddress: String,
lat: Number,
lng: Number,
},
};
schema.add({
[this.path]: this.mergeSchemaOptions(schemaOptions, this.config)
});
}
}
module.exports = {
LocationGoogleImplementation,
MongoLocationGoogleInterface
}
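If the missing 2dsphere index mentioned above really is the problem, my guess is that the cleanest place to declare it is here, right after the schema.add() call, so Mongoose creates it automatically. A sketch of what addToMongooseSchema could look like with that extra line (the `${this.path}.location` key matches the nested GeoJSON point defined above):
addToMongooseSchema(schema) {
  const schemaOptions = {
    type: {
      location: {
        type: { type: String, default: "Point" },
        coordinates: [Number],
      },
      googlePlaceID: String,
      formattedAddress: String,
      lat: Number,
      lng: Number,
    },
  };
  schema.add({ [this.path]: this.mergeSchemaOptions(schemaOptions, this.config) });
  // Assumption: $nearSphere with $geometry needs a 2dsphere index on the GeoJSON point.
  schema.index({ [`${this.path}.location`]: "2dsphere" });
}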
package.json
{
"name": "#keystonejs/example-projects-blank",
"description": "A blank KeystoneJS starter project.",
"private": true,
"version": "5.0.15",
"author": "The KeystoneJS Development Team",
"repository": "https://github.com/keystonejs/keystone/tree/master/packages/create-keystone-app/example-projects/blank",
"homepage": "https://github.com/keystonejs/keystone",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"scripts": {
"dev": "cross-env NODE_ENV=development ENABLE_DEV_FEATURES=false DISABLE_LOGGING=false nodemon --exec keystone dev --harmony",
"build": "cross-env NODE_ENV=production ENABLE_DEV_FEATURES=false keystone build",
"start": "cross-env NODE_ENV=production keystone start",
"create-tables": "cross-env keystone create-tables"
},
"dependencies": {
"#arch-ui/layout": "^0.2.14",
"#arch-ui/typography": "^0.0.18",
"#keystonejs/adapter-mongoose": "^10.1.2",
"#keystonejs/app-admin-ui": "^7.3.13",
"#keystonejs/app-graphql": "^6.2.1",
"#keystonejs/auth-password": "^6.0.0",
"#keystonejs/email": "^5.2.0",
"#keystonejs/fields-authed-relationship": "^1.0.16",
"#keystonejs/fields-location-google": "^3.2.1",
"#keystonejs/fields-wysiwyg-tinymce": "^5.3.15",
"#keystonejs/file-adapters": "^7.0.8",
"#keystonejs/keystone": "^18.1.0",
"cross-env": "^7.0.3",
"date-fns": "^2.16.1",
"dotenv": "^8.2.0",
"google-maps-react": "^2.0.6",
"node-fetch": "^2.6.1",
"react": "^16.14.0",
"react-dom": "^16.14.0",
"uuid": "^8.3.2"
},
"devDependencies": {
"nodemon": "^2.0.7"
}
}
The error screenshot is attached below.
I'm running into a funny problem. I'm using NextJS for its server-side rendering capabilities and am using ReactQuill as my rich-text editor. To get around ReactQuill's tie to the DOM, I'm dynamically importing it. However, that presents another problem which is that when I try to attach a ref to the ReactQuill component, it's treated as a loadable component instead of the ReactQuill component. I need the ref in order to customize how images are handled when uploaded into the rich-text editor. Right now, the ref returns current:null instead of the function I can use .getEditor() on to customize image handling.
Anybody have any thoughts on how I can address this? I tried ref-forwarding, but it's still applying refs to a loadable component, instead of the React-Quill one. Here's a snapshot of my code.
const ReactQuill = dynamic(import('react-quill'), {
  ssr: false,
  loading: () => <p>Loading ...</p>,
});

const ForwardedRefComponent = React.forwardRef((props, ref) => {
  return <ReactQuill {...props} forwardedRef={(el) => { ref = el; }} />;
});
class Create extends Component {
constructor() {
super();
this.reactQuillRef = React.createRef();
}
imageHandler = () => {
console.log(this.reactQuillRef); //this returns current:null, can't use getEditor() on it.
}
render() {
const modules = {
toolbar: {
container: [
  [{ 'header': [2, 3, false] }],
  ['bold', 'italic', 'underline', 'strike'],
  [{ 'list': 'ordered' }, { 'list': 'bullet' }],
  [{ 'script': 'sub' }, { 'script': 'super' }],
  ['link', 'image'],
  [{ 'indent': '-1' }, { 'indent': '+1' }],
  [{ 'align': [] }],
  ['blockquote', 'code-block'],
],
handlers: {
'image': this.imageHandler
}
}
};
return(
<ForwardedRefComponent
value={this.state.text}
onChange={this.handleChange}
modules={modules}
ref={this.reactQuillRef}/> //this.reactQuillRef is returning current:null instead of the ReactQuill function for me to use .getEditor() on
)
}
}
const mapStateToProps = state => ({
tutorial: state.tutorial,
});
export default connect(
mapStateToProps, {createTutorial}
)(Create);
I'm sharing my solution in the hope that it helps you too.
It was adapted from https://github.com/zenoamaro/react-quill/issues/642#issuecomment-717661518
const ReactQuill = dynamic(
async () => {
const { default: RQ } = await import("react-quill");
return ({ forwardedRef, ...props }) => <RQ ref={forwardedRef} {...props} />;
},
{
ssr: false
}
);
export default function QuillWrapper() {
const quillRef = React.useRef(false)
return <>
<ReactQuill forwardedRef={quillRef} />
</>
}
For example, you can use the ref to upload images with a custom handler:
import React, { useMemo } from "react";
import dynamic from "next/dynamic";
import axios from "axios"; // used by the upload handler below
const ReactQuill = dynamic(
async () => {
const { default: RQ } = await import("react-quill");
return ({ forwardedRef, ...props }) => <RQ ref={forwardedRef} {...props} />;
},
{
ssr: false,
}
);
export default function QuillWrapper({ value, onChange, ...props }) {
const quillRef = React.useRef(false);
// Custom image upload handler
function imgHandler() {
// from https://github.com/quilljs/quill/issues/1089#issuecomment-318066471
const quill = quillRef.current.getEditor();
let fileInput = quill.root.querySelector("input.ql-image[type=file]");
// to prevent duplicate initialization I guess
if (fileInput === null) {
fileInput = document.createElement("input");
fileInput.setAttribute("type", "file");
fileInput.setAttribute(
"accept",
"image/png, image/gif, image/jpeg, image/bmp, image/x-icon"
);
fileInput.classList.add("ql-image");
fileInput.addEventListener("change", () => {
const files = fileInput.files;
const range = quill.getSelection(true);
if (!files || !files.length) {
console.log("No files selected");
return;
}
const formData = new FormData();
formData.append("file", files[0]);
formData.append("uid", uid);
formData.append("img_type", "detail");
quill.enable(false);
console.log(files[0]);
axios
.post("the/url/for/handle/uploading", formData)
.then((response) => {
// after uploading succeed add img tag in the editor.
// for detail visit https://quilljs.com/docs/api/#editor
quill.enable(true);
quill.insertEmbed(range.index, "image", response.data.url);
quill.setSelection(range.index + 1);
fileInput.value = "";
})
.catch((error) => {
console.log("quill image upload failed");
console.log(error);
quill.enable(true);
});
});
quill.root.appendChild(fileInput);
}
fileInput.click();
}
I don't know much about useMemo, but if I don't use the hook the editor keeps getting re-rendered, which makes it lose focus, and I guess it causes performance trouble too.
const modules = useMemo(
() => ({
toolbar: {
container: [
[{ font: [] }],
[{ size: ["small", false, "large", "huge"] }], // custom dropdown
["bold", "italic", "underline", "strike"], // toggled buttons
[{ color: [] }, { background: [] }], // dropdown with defaults from theme
[{ script: "sub" }, { script: "super" }], // superscript/subscript
[{ header: 1 }, { header: 2 }], // custom button values
["blockquote", "code-block"],
[{ list: "ordered" }, { list: "bullet" }],
[{ indent: "-1" }, { indent: "+1" }], // outdent/indent
[{ direction: "rtl" }], // text direction
[{ align: [] }],
["link", "image"],
["clean"], // remove formatting button
],
handlers: { image: imgHandler }, // Custom image handler
},
}),
[]
);
return (
<ReactQuill
forwardedRef={quillRef}
modules={modules}
value={value}
onChange={onChange}
{...props}
/>
);
}
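A quick usage sketch for the wrapper above (the page path and import location are my own illustration, not part of the original answer). ReactQuill calls onChange with the HTML content as its first argument, so a plain state setter is enough here:
// pages/editor.jsx (hypothetical)
import React, { useState } from "react";
import QuillWrapper from "../components/QuillWrapper"; // assumed location of the wrapper above

export default function EditorPage() {
  const [body, setBody] = useState("");
  return <QuillWrapper value={body} onChange={setBody} />;
}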
In NextJS, React.useRef and React.createRef do not work with dynamic imports.
You should replace
const ReactQuill = dynamic(import('react-quill'), { ssr: false, loading: () => <p>Loading ...</p> }
);
with
import ReactQuill from 'react-quill';
and render it only after the window has loaded.
import ReactQuill from 'react-quill';
class Create extends Component {
constructor() {
super();
this.reactQuillRef = React.createRef();
this.state = {isWindowLoaded: false};
}
componentDidMount() {
this.setState({...this.state, isWindowLoaded: true});
}
.........
.........
render(){
return (
<div>
{this.state.isWindowLoaded && <ReactQuill {...this.props} />}
</div>
)
}
}
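A function-component version of the same "render only after mount" idea (my own sketch, mirroring the class example above rather than anything from react-quill's docs):
import React, { useEffect, useRef, useState } from "react";
import ReactQuill from "react-quill";

export default function Editor(props) {
  const quillRef = useRef(null);
  const [mounted, setMounted] = useState(false);

  // Wait until the component has mounted in the browser before rendering
  // ReactQuill, so it never renders during server-side rendering.
  useEffect(() => {
    setMounted(true);
  }, []);

  return <div>{mounted && <ReactQuill ref={quillRef} {...props} />}</div>;
}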
Use onChange and pass all the arguments; here is an example using editor.getHTML():
import React, { Component } from 'react'
import dynamic from 'next/dynamic'
import { render } from 'react-dom'
const QuillNoSSRWrapper = dynamic(import('react-quill'), {
ssr: false,
loading: () => <p>Loading ...</p>,
})
const modules = {
toolbar: [
[{ header: '1' }, { header: '2' }, { font: [] }],
[{ size: [] }],
['bold', 'italic', 'underline', 'strike', 'blockquote'],
[
{ list: 'ordered' },
{ list: 'bullet' },
{ indent: '-1' },
{ indent: '+1' },
],
['link', 'image', 'video'],
['clean'],
],
clipboard: {
// toggle to add extra line breaks when pasting HTML:
matchVisual: false,
},
}
/*
* Quill editor formats
* See https://quilljs.com/docs/formats/
*/
const formats = [
'header',
'font',
'size',
'bold',
'italic',
'underline',
'strike',
'blockquote',
'list',
'bullet',
'indent',
'link',
'image',
'video',
]
class BlogEditor extends Component {
constructor(props) {
super(props)
this.state = { value: null } // You can also pass a Quill Delta here
this.handleChange = this.handleChange.bind(this)
this.editor = React.createRef()
}
handleChange = (content, delta, source, editor) => {
this.setState({ value: editor.getHTML() })
}
render() {
return (
<>
<div dangerouslySetInnerHTML={{ __html: this.state.value }} />
<QuillNoSSRWrapper ref={this.editor} onChange={this.handleChange} modules={modules} formats={formats} theme="snow" />
<QuillNoSSRWrapper value={this.state.value} modules={modules} formats={formats} theme="snow" />
</>
)
}
}
export default BlogEditor
If you want to use a ref in Next.js with a dynamic import, you can use the React.forwardRef API (more info).