I have this in my .js file:
exports.generate = function(details) {
// bunch of code
// returns Promise
}
exports.save = function(details){
exports.generate(details)
.then(function(id){
//save in db
})
}
Is it okay to use an exported function like this? Or is there a better way..?
It depends on whether you want consumers of the module to be able to influence the module's behavior by overwriting exports.generate (e.g. require('foo').generate = function() {...}).
If you don't want users to be able to influence it in this way, then your best bet is going to be pulling out the generate() function and naming it, then exporting that and using the function directly by name inside save():
function generate(details) {
// ...
}
exports.generate = generate;
exports.save = function(details) {
generate(details).then(function(id) {
// ...
});
};
Otherwise if you do want to allow users to override the generate() functionality, then what you are currently doing is fine.
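For example, a consumer could swap the implementation out at runtime and save() would pick up the change, because the call goes through the shared exports object (a small sketch, reusing the foo module name from the example above):
// consumer.js (illustrative)
var foo = require('foo');

// Override generate(), e.g. to stub it out in a test
foo.generate = function (details) {
  return Promise.resolve('fake-id');
};

// save() now uses the overridden generate() internally
foo.save({ name: 'example' });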
Another option is to capture the module context (at the top level of a CommonJS module, this refers to module.exports) and call generate() through it, which preserves the same override behavior:
var _this = this;
exports.save = function(details) {
  _this.generate(details) ...
};
Background:
I am building a discord bot that operates as a Dungeons & Dragons DM of sorts. We want to store game data in a database and during the execution of certain commands, query data from said database for use in the game.
All of the connections between our Discord server, our VPS, and the VPS' backend are functional and we are now implementing slash commands since traditional ! commands are being removed from support in April.
We are running into problems making the slash commands though. We want to set them up to be as efficient as possible which means no hard-coded choices for options. We want to build those choice lists via data from the database.
The problem we are running into is that we can't figure out the proper way to implement the fetch to the database within the SlashCommandBuilder.
Here is what we currently have:
const {SlashCommandBuilder} = require('@discordjs/builders');
const fetch = require('node-fetch');
const {REST} = require('@discordjs/rest');
const test = require('../commonFunctions/test.js');
var options = async function getOptions(){
let x = await test.getClasses();
console.log(x);
return ['test','test2'];
}
module.exports = {
data: new SlashCommandBuilder()
.setName('get-test-data')
.setDescription('Return Class and Race data from database')
.addStringOption(option =>{
option.setName('class')
.setDescription('Select a class for your character')
.setRequired(true)
for(let op of options()){
//option.addChoice(op,op);
}
return option
}
),
async execute(interaction){
},
};
This code produces the following error when we start the bot's npm process on our server:
options is not a function or its return value is not iterable
I thought that maybe the function wasn't properly defined, so I replaced its contents with a simple array return; the bot then started without errors and the values I had passed showed up in the server.
This leads me to think that the function call in the module.exports block immediately attempts to get the function's return value, and since the function is async, it isn't ready yet and returns undefined, a promise, or something else that isn't iterable.
Is there a proper way to implement the code as shown? Or is this way too complex for discord.js to handle?
Is there a proper way to implement the idea at all? Like creating a json object that contains the option data which is built and saved to a file at some point prior to this command being registered and then having the code above just pull in that file for the option choices?
Alright, I found a way. Ian Malcolm would be proud (LMAO).
Here is what I had to do for those with a similar issues:
I had to basically re-write our entire application. It sucks, I know, but it works so who cares?
When you run your index file for your npm, make sure that you do the following things.
Note: you can structure this however you want, this is just how I set up my js files.
Set up a function that will prepare the data you need. It needs to be async, as does everything downstream from this point relating to the creation and registration of the slash commands.
Create a js file to act as your application setup "module". "Module" because we're faking a real module by just using the module.exports method. No package.json needed.
In the setup file, you will need two requires. The first is an as-yet non-existent data manager file; we'll create that next. The second is a require for node:fs.
Create an async function in your setup file called setup and add it to your module.exports like so:
module.exports = { setup }
In your async setup function, or in a function that it calls, make a call to the function in your still non-existent data manager file. Use await so that the application doesn't proceed until something is returned. Here is what mine looks like; note that I am writing my data to a file to read in later because of my use case, so you may or may not have to do the same for yours:
async function setup(){
console.log('test');
//build option choice lists
let listsBuilt = await buildChoiceLists();
if (listsBuilt){
return true;
} else {
return false;
}
}
async function buildChoiceLists(){
let classListBuilt = await buildClassList();
return true;
}
async function buildClassList(){
let classData = await classDataManager.getClassData();
console.log(classData);
classList = classData;
await writeFiles();
return true;
}
async function writeFiles(){
fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}
Before we finish off this file, if you want to store anything as a property in this file and then get it later on, you can do so. In order for the data to return properly though, you will need to define a getter function in your exports. Here is an example:
var classList;
module.exports={
getClassList: () => classList,
setup
};
So, with everything above you should have something that looks like this:
const classDataManager = require('./DataManagers/ClassData.js')
const fs = require('node:fs');
var classList;
async function setup(){
console.log('test');
//build option choice lists
let listsBuilt = await buildChoiceLists();
if (listsBuilt){
return true;
} else {
return false;
}
}
async function buildChoiceLists(){
let classListBuilt = await buildClassList();
return true;
}
async function buildClassList(){
let classData = await classDataManager.getClassData();
console.log(classData);
classList = classData;
await writeFiles();
return true;
}
async function writeFiles(){
fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}
module.exports={
getClassList: () => classList,
setup
};
Next, that pesky non-existent DataManager file. For mine, each data type gets its own file, but you might want to combine them all into a single .js file for yours.
Same with the folder name: I called mine DataManagers. If you're combining them all into one, you could just call the file DataManager and leave it in the same folder as your appSetup.js file.
For the data manager file, all we really need is a function to get our data and return it in the format we want. I am using node-fetch; if you are using some other module for data requests, write your code as needed.
Instead of explaining everything up front, here are the contents of my file; not much needs explaining:
const fetch = require('node-fetch');
async function getClassData(){
return new Promise((resolve) => {
let data = "action=GetTestData";
fetch('http://xxx.xxx.xxx.xx/backend/characterHandler.php', {
method: 'post',
headers: { 'Content-Type':'application/x-www-form-urlencoded'},
body: data
}).then(response => {
response.json().then(res => {
let status = res.status;
let clsData = res.classes;
let rcData = res.races;
if (status == "Success"){
let text = '';
let classes = [];
let races = [];
if (Object.keys(clsData).length > 0){
for (let key of Object.keys(clsData)){
let cls = clsData[key];
classes.push({
"name": key,
"code": key.toLowerCase()
});
}
}
if (Object.keys(rcData).length > 0){
for (let key of Object.keys(rcData)){
let rc = rcData[key];
races.push({
"name": key,
"desc": rc.Desc
});
}
}
resolve(classes);
}
});
});
});
}
module.exports = {
getClassData
};
This file contacts our backend PHP and requests data from it. It queries the data and returns it, and we format it into a JSON structure for later use with the option choices for the slash command.
Once all of your appSetup and data manager files are complete, we still need to create the commands and register them with the server. So, in your index file add something similar to the following:
async function getCommands(){
let cmds = await comCreator.appSetup();
console.log(cmds);
client.commands = cmds;
}
getCommands();
This should go at or near the top of your index.js file. Note that comCreator refers to a file we haven't created yet; you can name this require const whatever you wish. That's it for this file.
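For reference, the require for comCreator near the top of index.js might look like this (the path is an assumption; point it at wherever you put the file created below):
const comCreator = require('./deploy-commands.js');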
Now, the "comCreator" file. I named mine deploy-commands.js, but you can name it whatever you like. Once again, here are the full file contents; I will explain anything that needs explaining afterwards:
const {Collection} = require('discord.js');
const {REST} = require('@discordjs/rest');
const {Routes} = require('discord-api-types/v9');
const app = require('./appSetup.js');
const fs = require('node:fs');
const config = require('./config.json');
async function appSetup(){
console.log('test2');
let setupDone = await app.setup();
console.log(setupDone);
console.log(app.getClassList());
return new Promise((resolve) => {
const cmds = [];
const cmdFiles = fs.readdirSync('./commands').filter(f => f.endsWith('.js'));
for (let file of cmdFiles){
let cmd = require('./commands/' + file);
console.log(file + ' added to commands!');
cmds.push(cmd.data.toJSON());
}
const rest = new REST({version: '9'}).setToken(config.token);
rest.put(Routes.applicationGuildCommands(config.clientId, config.guildId), {body: cmds})
.then(() => console.log('Successfully registered application commands.'))
.catch(console.error);
let commands = new Collection();
for (let file of cmdFiles){
let cmd = require('./commands/' + file);
commands.set(cmd.data.name, cmd);
}
resolve(commands);
});
}
module.exports = {
appSetup
};
Most of this is boilerplate for slash command creation, though I did combine the creation and registration of the commands into the same process. As you can see, we are grabbing our command files, registering them with Discord, and then resolving the promise with a Collection of the commands.
You might have noticed that the resolved Collection is what gets assigned to client.commands in the index.js file.
config.json just contains the connection details for your Discord application.
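For reference, config.json is assumed to hold at least the three values used above (all placeholders):
{
  "token": "your-bot-token",
  "clientId": "your-application-client-id",
  "guildId": "your-guild-id"
}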
Finally, how I accessed the data we wrote for the SlashCommandBuilder:
data: new SlashCommandBuilder()
.setName('get-test-data')
.setDescription('Return Class and Race data from database')
.addStringOption(option =>{
option.setName('class')
.setDescription('Select a class for your character')
.setRequired(true)
let ops = [];
let data = fs.readFileSync('./CommandData/classList.json','utf-8');
ops = JSON.parse(data);
console.log('test data class options: ' + ops);
for(let op of ops){
option.addChoice(op.name,op.code);
}
return option
}
),
Hopefully this helps someone in the future!
I love generators in nodejs. They help nodejs look more like server-side code. I'm trying to use generators with my Sails app. This is my controller and it works when I visit 'get /hi':
/**
* FooController
*
* @description :: Server-side logic for managing foos
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
hi: function (req, res){
return res.send("Hi there!");
}
};
However when I change that hi action to a generator function...
module.exports = {
hi: function* (req, res){
return res.send("Hi there!");
}
};
this action never gets to return the response, as if it's waiting for something. How does one utilize ES6 generators within SailsJS controllers and, in fact, in all of Sails?
You can use them, and in fact it would be great if we all used this style: it adds a lot of readability and flexibility to your code (no more callback hell). But that is not the way to use it.
As @Cohars stated, Sails expects a function as a controller action, so you can't pass a generator like you can in Koa. That does not prevent you from using them, though. A generator by itself is fairly useless; you need a function that calls it and iterates it, and I believe Koa does this for you at the framework level. You do have some options available at the library level, like co or yortus/asyncawait, which is what I use, because node-fibers exposed as functions are more flexible than ES6 generators (though they are almost the same), and I'll show you why in a sec.
So here is how you use it:
First, npm install co and require it in your controller, or add it as a global in config/bootstrap.js since you will be using it a lot. Once that's done, you can use it in your controllers.
// co needs to be required here (or exposed as a global in config/bootstrap.js)
var co = require('co');

module.exports = {
  hi: function(req, res){
    co(function* (){
      // And you can use it with your model calls like this
      let user = yield User.findOne(req.param('id'))
      return res.send("Hi there " + user.name + "!");
    })
  }
};
That's it
Now if you'd rather use async/await, it's pretty similar; it goes like this:
// async and await here come from the asyncawait package (yortus/asyncawait)
var async = require('asyncawait/async');
var await = require('asyncawait/await');

module.exports = {
  hi: function(req, res){
    async(function(){
      // Since await is a function, you can access properties
      // of the returned values like this, which is nice for
      // object properties or array indices
      let userName = await(User.findOne(req.param('id'))).name
      return res.send("Hi there " + userName + "!");
    })();
  }
};
There is a caveat though: if you call other methods of your controller via this, remember that this will refer to the generator fn (or the passed regular fn if you use async/await), so be sure to save its context. Or, since you are already using ES6 syntax, you can use fat arrows with async/await; unfortunately not with co, since there is no fat-arrow version of generators (..yet?).
So it'll be like this:
module.exports = {
hi: function(req, res){
async(() => {
let params = this._parseParams(req);
let userName = await(User.findOne(params.id)).name
return res.send("Hi there" + userName + "!");
})();
},
_parseParams: function(req){
let params = req.allParams()
// Do some parsing here...
return params
}
};
I've been using the second method for months now and it works perfectly. I've tried co too and it works as well; I just like the async/await module more (and it's supposed to be a little bit faster), and it's a perfect match if you are using CoffeeScript, since your syntax will look like do async => await User.find()
UPDATE:
I've created a wrapper that you can use if you use yortus async/await, or you can check the source code and modify it to work with co or something else if you wish.
Here is the link: https://www.npmjs.com/package/async-handler. It's in alpha, but I'm using it in my own apps and it works as expected; if not, submit an issue.
With that, the last example would look like this:
module.exports = {
hi: asyncHandler((req, res) => {
let params = this._parseParams(req);
let userName = await(User.findOne(params.id)).name
return res.send("Hi there" + userName + "!");
}),
_parseParams: function(req){
let params = req.allParams()
// Do some parsing here...
return params
}
};
With this handler you get the extra benefit that async/promise errors propagate correctly in Sails if they are not caught by a try/catch.
Sails expects a regular function here, not a generator. Maybe you could take a look at co, though I'm not sure it would really help with Sails. If you really want to use generators, you should probably try Koa, which has several frameworks based on it.
The way I am doing it is like this:
const Promise = require('bluebird');
module.exports = {
hi: Promise.coroutine(function* (req, res) {
let id = req.param('id'),
userName;
try {
userName = (yield User.findOne(id)).name;
}
catch (e) {
sails.log.error(e);
return res.serverError(e.message);
}
return res.ok(`Hi there ${userName}!`);
})
}
Works great. You just need to ensure any functions you call from your controllers return promises.
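For example, if a helper you want to call still exposes a callback-style API, you can promisify it first; a minimal sketch using Bluebird's promisify (sendWelcomeEmail is a hypothetical callback-style helper):
const Promise = require('bluebird');

// Hypothetical callback-style helper standing in for a legacy API
function sendWelcomeEmail(userId, cb) {
  setTimeout(() => cb(null, 'sent to ' + userId), 10);
}

// Wrapped once, it returns a promise and can be yielded inside the coroutine,
// e.g. let result = yield sendWelcomeEmailAsync(id);
const sendWelcomeEmailAsync = Promise.promisify(sendWelcomeEmail);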
Put this code in your bootstrap.js, and everything works like a charm!
var _ = require('lodash');
var coExpress = require('co-express');
sails.modules.loadControllers(function (err, modules) {
if (err) {
return callback(err);
}
sails.controllers = _.merge(sails.controllers, modules);
// hacking every action of all controllers
_.each(sails.controllers, function(controller, controllerId) {
_.each(controller, function(action, actionId) {
actionId = actionId.toLowerCase();
console.log('hacking route:', controllerId, actionId);
// co.wrap,generator => callback
action = coExpress(action);
sails.hooks.controllers.middleware[controllerId][actionId] = action;
});
});
// reload routes
sails.router.load(function () {
// reload blueprints
sails.hooks.blueprints.initialize(function () {
sails.hooks.blueprints.extendControllerMiddleware();
sails.hooks.blueprints.bindShadowRoutes();
callback();
});
});
});
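Note that this snippet assumes it is running inside the standard bootstrap function, which is where callback comes from; a sketch of the surrounding file:
// config/bootstrap.js
module.exports.bootstrap = function (callback) {
  // ...the loadControllers snippet above goes here...
  // callback() is invoked once routes and blueprints have been reloaded
};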
What's the best way to pass thisArg to a require()d module?
I want to do something like this:
index.js
function Main(arg) {
return {
auth: auth,
module: require('/some/module')
}
}
module.js
module.exports = {
someMethod: function() {...}
}
Then, in my code somewhere I call Main(), which returns the object.
So Main().auth exists, cool. But how do I access it from Main().module?
The thisArg in Main().module.someMethod() points to the module itself, but I need the parent.
Is there any way to do this without using the new keyword, functions and prototypes?
EDIT:
Thanks for all the answers guys! Some additional info:
Main() is the module that I want to require() and use in my app. The "module" Main tries to import is really just sub-functionality of Main; it's a piece of code I moved into a separate "module" to better organize things.
So a better example would be:
function RestApi(param) {
return {
common_param: param,
commonFunc: function() {...}
endpoint1: require('/some/module'),
endpoint2: require('/some/module2'),
endpoint3: require('/some/module3')
}
}
And my app would use it like this:
RestApi = require('./RestApi')
RestApi().endpoint1.someHTTPCall(...)
But inside someHTTPCall(), both "common_param" and "commonFunc" should be accessible via thisArg, like this.commonFunc().
So this is kind of a general question: how do you properly merge multiple modules using require() so that "this" points to the right object (i.e. the parent)?
I know this could be achieved using Function.prototype and inheritance, just would like to know if there is a simpler way.
The best I found so far is something like this:
var _ = require('lodash');
function Module(auth) {
this.auth = auth || {};
}
Module.prototype = {
endpoint1: function() { return _.extend(require('./endpoint1'),{auth: this.auth, commonFunc: commonFunc})}
}
function commonFunc() {...}
However, this is not ideal, since RestApi.endpoint1() would create a new object on every call.
Is there a better way to handle this?
Thanks in advance!
Create your own "require" module with an auth param and always use it.
project/module/require2.js
module.exports = function(path, auth){
if (!check(auth))
return null
return require(path)
}
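Usage would then look something like this (the paths and the check() implementation are assumptions carried over from the sketch above):
var require2 = require('./module/require2.js');

var mod = require2('/some/module', auth);
if (mod) {
  mod.someMethod();
}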
You could change the module to return a function, like this:
// some/module.js
module.exports = function(mainModule) {
var main = mainModule;
return {
someMethod: function() {
main.doSomethingElse();
}
}
}
Then require it passing the main object:
function Main(arg) {
var main = {
auth: auth,
other: stuff,
};
main.module = require('/some/module')(main);
return main;
}
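Applied to the RestApi example from the edit, an endpoint module would then reach the shared pieces through the object it was handed instead of relying on this (a sketch; the names follow the question's code):
// some/module.js - endpoint1 in the question's terms
module.exports = function (restApi) {
  return {
    someHTTPCall: function () {
      // shared state and helpers come from the parent object
      var param = restApi.common_param;
      return restApi.commonFunc();
    }
  };
};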
I'm very familiar with JavaScript's console.log() and PHP's var_dump() functions, which let us see what's in a variable. I want to know if there is some function like this in hogan.js that lets us inspect the content of a variable.
Add some method to your data and include it at the location where you need to inspect the scope:
var data = {
...
// your vars,
...
inspect: function () {
return function () {
console.log(this);
}
}
};
template.render(data);
Anywhere you use {{inspect}}, it will log the current render context to the console.
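A minimal sketch of how that plays out, assuming Hogan is already loaded and using a made-up items list:
var Hogan = require('hogan.js');

var template = Hogan.compile('{{inspect}}{{#items}}<li>{{name}}{{inspect}}</li>{{/items}}');

var data = {
  items: [{ name: 'first' }, { name: 'second' }],
  inspect: function () {
    return function () {
      console.log(this); // logs the context active at this point in the template
    };
  }
};

template.render(data); // the top-level call logs data, the per-item calls log each item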
I slightly modified it to add the function to the data packet that is passed to Hogan in a centralized position, which, in my code, is a function called render().
Thank you for this clever trick.
function render(template, data, destination) {
data.inspect = function() {
return function() {
console.log("inspect:")
console.log(this);
};
};
// localized strings
data.strings = app.strings;
var tmpl = Hogan.compile(template);
var content = tmpl.render(data);
document.querySelector(destination).innerHTML = content;
}
I would like to have the ability to load a module that has been changed. I would have to of course unload the module first. Since this is a webserver setup, I am asking if there is a way to load the module in an async fashion, to avoid freezing the webserver for the duration of the read of the updated file.
A while back, Node.JS removed the require.async function. So, on the latest version of Node.JS, what would be the recommended alternative?
Should I read the entire file first, and then use the Module library to parse the file contents (as opposed to the default behavior of taking a file name and reading and parsing it internally)? How?
Should I outsource this job to some open-source library? Which one?
Should I write my own module handler - my own implementation of requireAsync? (I know how.)
Note: I do not want to do anything more than load a module async, so please do not recommend that I replace my setup with a new webserver routing framework.
I posted this answer, but you are welcome to post an improvement.
See the Node.JS source code.
Module.prototype.require = function(path) { return Module._load(path, this); };
Abridged version of _load
Module._load = function(request, parent, isMain) {
var filename = Module._resolveFilename(request, parent);
if (Module._cache[filename]) return Module._cache[filename].exports;
if (NativeModule.exists(filename)) {
if (filename == 'repl') { // special case, needs the real require.
var replModule = new Module('repl');
replModule._compile(NativeModule.getSource('repl'), 'repl.js');
NativeModule._cache.repl = replModule;
return replModule.exports;
}
return NativeModule.require(filename);
}
var module = new Module(filename, parent);
if (isMain) process.mainModule = module, module.id = '.';
Module._cache[filename] = module;
var hadException = true;
try {
module.load(filename);
hadException = false;
} finally {
if (hadException) delete Module._cache[filename];
}
return module.exports;
};
My version of require.async.js would be something like:
var NativeModule = require('native_module'); // note: this is an internal module and may not be requirable from userland in newer Node versions
var Module = require('module');
var fs = require('fs');
if (!require.async) require.async = function (path, callback) { module.exports(path, this, callback); } // Comment out if you dislike using globals
module.exports = function(request, parent, callback) {
  var filename = Module._resolveFilename(request, parent); // This is a sync function. TODO: change it to an async function with a callback.
  if (Module._cache[filename]) callback(null, Module._cache[filename].exports);
  else if (NativeModule.exists(filename)) callback(new Error('What are you thinking?'));
  else fs.readFile(filename, 'utf8', function(err, file) {
    if (Module._cache[filename]) callback(null, Module._cache[filename].exports); // For the case when there are two calls to require.async at a time.
    else if (err) callback(err);
    else {
      var module = new Module(filename, parent);
      try {
        module._compile(file, filename);
        Module._cache[filename] = module;
      } catch (ex) {
        callback(ex);
      }
      if (Module._cache[filename]) callback(null, module.exports);
    }
  });
};
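Usage would then look something like this (a sketch; the file name and the target module path are made up):
var requireAsync = require('./require.async.js');

// pass the current module as the parent so relative paths resolve from here
requireAsync('./routes/updated-handler.js', module, function (err, handler) {
  if (err) return console.error(err);
  // handler is the freshly loaded module's exports
  handler();
});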
Caveats
There is one TODO in the code, which is to make the (multiple) stat calls behind filename resolution async. The actual reading of the file is already async, so that is good.
If you are async-loading a module, and the module you are loading is sync-loading another module, then you have not fully gone async with your code - have you?
It uses private module internals - most notably _compile.