Is there a better way to run CLI commands with Node.js? - node.js

I just wrote a script to release a build of one of the products I'm working on. The script does the job, but I don't really like the code itself, looks like spaghetti code and callback hell combined.
Is there a cleaner way to do this? I'd like to be able to run commands in series, log their output (stdout.on('data')), and log when each task has finished. (This makes further debugging easier, and while waiting for a task to complete it's reassuring to know what's happening in the background.)
Maybe using Promises would help clean the mess a bit, but still, I feel like there should be a cleaner way to deal with multiple commands.
Some explanation about what the code does:
Create a tag with the commit you want and the tag version you want, i.e: git tag 1.2.5.
Build the release file with gulp build.
Create a folder doc/<tag>.
Convert doc/doc_reader.odt to doc/<tag>/documentation.pdf. (Open it and export as PDF)
Copy build/reader.js and doc/changelog.txt in the created folder.
Zip the 3 files.
Commit everything with commit message: Release 1.2.11 (for example)
Push.
Create a new release on GitHub using the commit you just pushed and the same tag.
Here is the code, as an example. (ES5, Node 4.6.0+)
// Release script (ES5, Node 4.6+).
// Pipeline, strictly in series: git tag -> gulp build -> create doc/<version>
// -> build the PDF doc -> copy artifacts into doc/<version>/ -> zip them.
var mkdirp = require('mkdirp');
var fs = require('fs-extra');
var path = require('path');
var spawn = require('child_process').spawn;
var zip = new require('node-zip')();
// "package" is a FutureReservedWord (illegal in strict mode); use a neutral name.
var pkg = require('../package.json');
var version = pkg.version;
var releaseDirectory = 'doc';

console.log(`Running "git tag ${version}"...`);
var gitTagProcess = spawn('git', ['tag', version]);
gitTagProcess.stdout.on('data', function (chunk) {
  console.log(chunk.toString('utf8'));
});
// NOTE(review): 'close' fires on failure too — the exit code is ignored here.
gitTagProcess.on('close', function () {
  console.log('Tag created.');
  console.log('Running "gulp build"...');
  // Bug fix: the original spawned "gulp build" at load time, so it ran
  // concurrently with "git tag" instead of after it. Spawn it here, once
  // tagging has finished, to keep the steps truly sequential.
  var gulpBuildProcess = spawn('gulp', ['build']);
  gulpBuildProcess.stdout.on('data', function (chunk) {
    console.log(chunk.toString('utf8'));
  });
  gulpBuildProcess.on('close', function () {
    console.log('"gulp build" done.');
    console.log(`Creating "${releaseDirectory}/${version}" directory.`);
    mkdirp(`${releaseDirectory}/${version}`, function () {
      console.log('Directory created.');
      var docFile = `${releaseDirectory}/doc_reader.md`;
      console.log(`Converting ${docFile} to pdf ...`);
      var docBuildProcess = spawn('npm', ['run', 'build:doc']);
      docBuildProcess.stdout.on('data', function (chunk) {
        console.log(chunk.toString('utf8'));
      });
      docBuildProcess.on('close', function () {
        console.log('Doc created.');
        console.log('Copying changelog.txt ...');
        fs.copySync('doc/changelog.txt', `doc/${version}/changelog.txt`);
        console.log('changelog.txt copied.');
        console.log(`Copying "build/reader.js" to "doc/reader-${version}.js" and "doc/reader.js" ...`);
        fs.copySync('build/reader.js', `doc/${version}/reader.js`);
        fs.copySync('build/reader.js', `doc/${version}/reader-${version}.js`);
        console.log('reader.js copied.');
        console.log('Zipping all files ...');
        zip.file('changelog.txt', fs.readFileSync(`doc/${version}/changelog.txt`));
        zip.file('doc_reader.pdf', fs.readFileSync(`doc/${version}/doc_reader.pdf`));
        zip.file('reader.js', fs.readFileSync(`doc/${version}/reader.js`));
        zip.file(`reader-${version}.js`, fs.readFileSync(`doc/${version}/reader-${version}.js`));
        var data = zip.generate({ base64: false, compression: 'DEFLATE' });
        var zipFilename = `doc/${version}/HTML5Reader_${version}.zip`;
        fs.writeFileSync(zipFilename, data, 'binary'); // it's important to use *binary* encode
        console.log(`${zipFilename} created.`);
        console.log(`\nRelease ${version} done. Please add generated files and commit using:`);
        console.log(`\n\tgit add * && git commit -m "Release ${version}"`);
        console.log(`\n\nDon't forget to push and create a new release on GitHub at https://github.com/$domain/$product/releases/new?tag=${version}`);
      });
    });
  });
});
Edit:
Here is the implementation using async/await (node 7.8.0)
I used special mkdirp and exec modules, that allow usage with await. But I couldn't find an equivalent for spawn.
const mkdirp = require('async-mkdirp');
const fs = require('fs-extra');
const spawn = require('child-process-promise').spawn;
const exec = require('mz/child_process').exec;
const zip = new require('node-zip')();
const c = require('chalk');
const error = c.bold.red;
const warn = c.yellow;
const info = c.cyan;
const info2 = c.magenta;
const version = require('../package.json').version;
const releaseDirectory = 'doc'
// Tags the current HEAD with the package version.
// A failure (e.g. the tag already exists) is logged as a warning and
// deliberately swallowed so the rest of the release pipeline keeps going;
// the completion message is printed in every case.
async function git_tag() {
  console.log(info(`Creating git tag ${version}`));
  try {
    await exec(`git tag ${version}`);
    console.log(info(`Git tag created for ${version}`));
  } catch (err) {
    console.log(warn('warn', err));
  } finally {
    console.log(info(`"git tag ${version}" - Completed`));
  }
};
// Runs "gulp build", streaming the child's stdout (magenta) and stderr (red)
// to the console as it arrives. Build errors are logged but not re-thrown;
// the completion message is printed in every case.
async function gulp_build() {
  console.log(info('Running "gulp build"...'));
  const buildPromise = spawn('gulp', ['build']);
  const child = buildPromise.childProcess;
  child.stdout.on('data', (data) => console.log(info2(data.toString())));
  child.stderr.on('data', (data) => console.log(error(data.toString())));
  try {
    await buildPromise;
  } catch (err) {
    console.error(error(err));
  } finally {
    console.log(info('"gulp build" - Completed'));
  }
}
// Creates the doc/<version> output directory (and any missing parents).
async function create_dir() {
  const targetDir = `${releaseDirectory}/${version}`;
  console.log(info(`Creating "${targetDir}" directory.`));
  await mkdirp(targetDir);
  console.log(info(`Directory ${targetDir} created.`));
}
// Converts doc/doc_reader.md to PDF via the npm "build:doc" script.
// Bug fix: the original chained `.catch().then()`, so the `Doc "…" created.`
// success message was printed even when the conversion had just failed.
// The success message is now only logged when `npm run build:doc` succeeded;
// the error itself is still logged and swallowed (best-effort step).
async function build_doc() {
  const docFile = `${releaseDirectory}/doc_reader.md`;
  console.log(info(`Converting ${docFile} to pdf ...`));
  try {
    await exec(`npm run build:doc`);
    console.log(info(`Doc "${docFile}" created.`));
  } catch (err) {
    console.error(error(err));
  }
}
// Copies the changelog and the built reader bundle into doc/<version>/.
// All copies are synchronous; reader.js is written twice, once under a
// versioned filename.
function copy_files() {
  const versionDir = `doc/${version}`;
  console.log(info('Copying changelog.txt ...'));
  fs.copySync('doc/changelog.txt', `${versionDir}/changelog.txt`);
  console.log(info('changelog.txt copied.'));
  console.log(info(`Copying "build/reader.js" to "doc/reader-${version}.js" and "doc/reader.js" ...`));
  for (const destination of [`${versionDir}/reader.js`, `${versionDir}/reader-${version}.js`]) {
    fs.copySync('build/reader.js', destination);
  }
  console.log(info('reader.js copied.'));
}
// Bundles the four release artifacts from doc/<version>/ into a single
// HTML5Reader_<version>.zip written back into the same directory.
function zip_files() {
  console.log(info('Zipping all files ...'));
  const versionDir = `doc/${version}`;
  const artifacts = [
    'changelog.txt',
    'doc_reader.pdf',
    'reader.js',
    `reader-${version}.js`,
  ];
  for (const name of artifacts) {
    zip.file(name, fs.readFileSync(`${versionDir}/${name}`));
  }
  const data = zip.generate({ base64: false, compression: 'DEFLATE' });
  const zipFilename = `${versionDir}/HTML5Reader_${version}.zip`;
  fs.writeFileSync(zipFilename, data, 'binary'); // it's important to use *binary* encode
  console.log(info(`${zipFilename} created.`));
}
// Orchestrates the whole release. Each awaited step runs strictly after the
// previous one has settled, so the pipeline is fully sequential.
async function release() {
await git_tag();
await gulp_build();
await create_dir();
await build_doc();
// The remaining steps use synchronous fs APIs, so no await is needed.
copy_files();
zip_files();
console.log(`\nRelease ${version} done. Please add generated files and commit using:`);
console.log(`\n\tgit add . && git commit -m "Release ${version}"`);
}
// Entry point. NOTE(review): the returned promise is not awaited or given a
// .catch(), so an unexpected rejection would surface as an unhandled rejection.
release();

There is an mz module that can be very helpful here. See:
https://www.npmjs.com/package/mz
This, combined with async/await will allow you to write code like this:
let exec = require('mz/child_process').exec;
// Minimal async/await example using mz's promisified child_process.
// NOTE: mz's exec resolves to a [stdout, stderr] tuple, so these logs print
// an array, not a plain string.
(async () => {
let version = await exec('node --version');
console.log(version);
let result = await exec('some other command');
console.log(result);
// ...
})();
This is a simple example but you can use all functions from the child_process, fs and many other modules that way.
What's important here is that this code is still asynchronous and non-blocking.
Note that you can only use await inside of a function created with the async keyword. For more info, see:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
For support in browsers, see:
http://caniuse.com/async-functions
For support in Node, see:
http://node.green/#ES2017-features-async-functions
In places where you don't have native support for async and await you can use Babel:
https://babeljs.io/docs/plugins/transform-async-to-generator/
or with a slightly different syntax a generator based approach like in co or Bluebird coroutines:
https://www.npmjs.com/package/co
http://bluebirdjs.com/docs/api/promise.coroutine.html

Related

onValue triggering multiple times

I'm using Node.js v18.12.1 and Discord.js v14 for developing the Discord bot. I need to read some data from Firebase. I'm confused because I'm used to how Java with Hibernate fetches data differently. Here, I need to use the onValue() listener.
My onValue() acts strange. Instead of just reading the data from Firebase, it skips entirely, then it triggers multiple times, each time skipping the body block of its code, and then it actually does the code after.
I've read somewhere on this forum that this can happen because there are more onValue() listeners that are subscribed and they are all fired up. Someone mentioned I need to use the off() function somewhere "before" the onValue(). This confuses me because I'm using this listener in many locations. I need it in each command file, in execute(interaction) functions. You know, when you need to execute slash commands in Discord. I have it something like this:
// Replies with the image URL stored under /infographics/arena/<team>.
// NOTE(review): onValue() registers a *persistent* realtime listener — it
// stays subscribed after this command finishes and fires again on every DB
// change (and once per registration), which matches the "triggers multiple
// times" symptom. For a one-shot read, get() is the v9 equivalent of the
// old once() method.
async execute(interaction) {
const infographicRef = ref(db, '/infographics/arena/' + interaction.options.getString("arena-team"));
// NOTE(review): postUrl is never used, and imageUrl is only assigned inside
// the callback — TODO confirm both are still needed.
var imageUrl = null;
var postUrl = null;
onValue(infographicRef, (snapshot) => {
imageUrl = snapshot.child("image-url").val();
// Each listener invocation replies again; a second reply to the same
// interaction will throw in discord.js.
interaction.reply(imageUrl);
})
},
And I planned for each command, in each command.js file to have onValue(). I'm not sure exactly what to do.
Also, I tried to work around this with once() method, I see it in Firebase documentation, but I got the error: ref.once() is not a function.
It seems that after first triggering of onValue method when the body is not executed, my code in interactionCreate.js is triggered as well, it points for a command to be executed again:
// events/interactionCreate.js — dispatches slash-command interactions to the
// matching command module stored in client.commands (populated in bot.js).
const { Events } = require('discord.js');
module.exports = {
name: Events.InteractionCreate,
async execute(interaction) {
// Only handle chat-input (slash) commands; ignore buttons, menus, etc.
if (!interaction.isChatInputCommand()) return;
const command = interaction.client.commands.get(interaction.commandName);
if (!command) {
console.error(`No command matching ${interaction.commandName} was found.`);
return;
}
try {
await command.execute(interaction);
} catch (error) {
// NOTE(review): failures are only logged — the user gets no reply, so the
// interaction will appear to hang on their end.
console.error(`Error executing ${interaction.commandName}`);
console.error(error);
}
},
};
my bot.js (which is in my case an index file)
// bot.js — entry point: builds the Discord client, wires up event handlers
// and slash-command modules, then logs in.
const fs = require('node:fs');
const path = require('node:path');
const { Client, Collection, Events, GatewayIntentBits } = require('discord.js');
const { token } = require('./config.json');

const client = new Client({ intents: [GatewayIntentBits.Guilds] });

// Register every handler from ./events — this includes interactionCreate.js,
// which dispatches slash commands.
const eventsPath = path.join(__dirname, 'events');
const eventFiles = fs.readdirSync(eventsPath).filter(file => file.endsWith('.js'));
for (const file of eventFiles) {
  const filePath = path.join(eventsPath, file);
  const event = require(filePath);
  if (event.once) {
    client.once(event.name, (...args) => event.execute(...args));
  } else {
    client.on(event.name, (...args) => event.execute(...args));
  }
}

// Load slash-command modules into client.commands, keyed by command name.
client.commands = new Collection();
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
  const filePath = path.join(commandsPath, file);
  const command = require(filePath);
  client.commands.set(command.data.name, command);
}

client.once(Events.ClientReady, () => {
  console.log('Ready!');
});

// Bug fix: the original ALSO registered an inline Events.InteractionCreate
// handler here, while events/interactionCreate.js registers a handler for
// the same event above — so every interaction executed its command twice
// (the "triggers multiple times" symptom). The inline duplicate has been
// removed; events/interactionCreate.js is now the single dispatcher.

client.login(token);
The onValue function registers a realtime listener, that continues to monitor the value on the database.
If you want to read a value once, that'd be done with get() function in v9 (which is the equivalent of the once method in earlier SDK versions). Have a look at the code sample in the documentation on reading data once.

Getting 'Cannot use import statement outside a module' in my test

I've written a small program using Playwright and I'm trying to make it use worker_threads. Now I've written a test for it but for some reason I'm getting the above error in my test.
I tried tweaking the tsconfig settings, but that also didn't work. What I read most about is using commonjs as the module, which I'm doing. I also installed ts-node, but that also didn't do anything.
Below is the function I wrote to create a worker.
import { Worker } from "worker_threads";

/**
 * Spawns a worker thread for `file`, passing `tasksData` as its workerData,
 * and resolves with the first message the worker posts back.
 *
 * Fixes over the original:
 *  - rejects with an Error instance (not a bare string), so stack traces
 *    and `instanceof Error` checks work for callers;
 *  - handles the "exit" event: a non-zero exit code now rejects, so the
 *    promise can no longer hang forever when the worker dies without
 *    posting a message or emitting "error".
 */
const create_worker = (file: string, tasksData: {}) =>
  new Promise((resolve, reject) => {
    const worker = new Worker(file, {
      workerData: tasksData,
    });
    worker.on("message", (data) => {
      resolve(data);
    });
    worker.on("error", (msg) => {
      reject(new Error(`An error occurred: ${msg}`));
    });
    worker.on("exit", (code) => {
      if (code !== 0) {
        reject(new Error(`Worker stopped with exit code ${code}`));
      }
    });
  });

export default create_worker;
And below is my test.
const browser = await chromium.launch();
const context = await browser.newContext();
const page = await context.newPage();

const search_grid = await construct_search_grid();
const chunks = chunkify_array(search_grid, 4);

// Bug fixes:
//  - the original looped `i < 5` over only 4 chunks, handing `undefined`
//    to a fifth worker;
//  - it also `await`ed each create_worker call inside the loop, which
//    serialized the workers and pushed resolved values (not promises)
//    into workerPromises, making the Promise.all below a no-op.
// Start one worker per chunk and await them all in parallel instead.
const workerPromises: unknown[] = [];
for (const chunk of chunks!) {
  workerPromises.push(
    create_worker("./src/adapters/playwright.ts", {
      search_grid: chunk,
    })
  );
}
const thread_results = await Promise.all(workerPromises);
Now I believe everything checks out so I'm not really sure what to look for now. Does anyone know?
It feels like you don't have tsconfig.json "module" configured?
I have mine set to "commonjs". Without it, I also get that same error.
I was able to overcome this error by following this guide: https://wanago.io/2019/05/06/node-js-typescript-12-worker-threads/

fs.watchFile() a json file until a specific value appear

So I have a JSON file that changes continuously, and I need to read it AFTER a value called auth-token is written to the file. Here is what I have now:
const json = fs.readFileSync("some-json.json")
const headers = JSON.parse(json);
return headers
But it reads the file before anything can be written to it, is there anyway that I can use fs.watchFile() and watch the file UNTIL the value is written?
Thanks
You can use fs.watch although its behavior is a bit unreliable with multiple events triggered upon file change (but I don't think it would be a problem here).
Here is a small sample:
const { watch } = require('fs');
const { readFile } = require('fs/promises');

// Waits until some-json.json contains an "auth-token" key, then logs the
// parsed headers. Parse errors are ignored: the file may be mid-write.
(async () => {
  const result = await new Promise((resolve) => {
    let watcher;
    const checkFile = async (filename) => {
      try {
        const fileContent = await readFile(filename);
        const headers = JSON.parse(fileContent.toString());
        if (headers['auth-token']) { // or whatever test you need here
          if (watcher) watcher.close();
          resolve(headers);
        }
      } catch (e) {}
    };
    watcher = watch('some-json.json', (eventType, filename) => {
      checkFile(filename);
    });
    // Bug fix: also check once up front. In the original, if the token had
    // been written *before* the watcher was installed, no change event ever
    // fired and the promise waited forever.
    checkFile('some-json.json');
  });
  console.log(result);
})();
Note that if your file gets modified many times before it contains the desired header, it might be preferable to replace the usage of fs.watch by a setInterval to read the file at regular intervals until it contains the value you expect.
Here is what it would look like:
const { readFile } = require('fs/promises');

// Polls some-json.json once per second until it contains an "auth-token"
// key, then logs the parsed headers.
(async () => {
  const waitingTime = 1000;
  const result = await new Promise((resolve) => {
    // Bug fixes vs. the original:
    //  - readFile() sat *outside* the try block, so a missing/unreadable
    //    file caused an unhandled promise rejection inside the setInterval
    //    callback (fatal on modern Node);
    //  - the (eventType, filename) parameters were leftovers from the
    //    fs.watch version and were always undefined here.
    const interval = setInterval(async () => {
      try {
        const fileContent = await readFile('some-json.json');
        const headers = JSON.parse(fileContent.toString());
        if (headers['auth-token']) { // or whatever test you need here
          clearInterval(interval);
          resolve(headers);
        }
      } catch (e) {}
    }, waitingTime);
  });
  console.log(result);
})();

Receiving error: html-pdf: PDF generation timeout. Phantom.js script did not exit. within Firebase Cloud Functions

I'm building out a firebase function that uses the html-pdf package (which uses PhantomJS). The function works fine on my local machine, but whenever I deploy the function on Firebase, I get the following error:
Error: html-pdf: PDF generation timeout. Phantom.js script did not exit.
I've changed the timeout parameter for pdf.create() and keep getting the same result. Any idea on what might be creating this issue that is unique to only when a deploy this to Firebase? Code is below.
const pdf = require('html-pdf');
// Extended Cloud Function limits: 540 s is the HTTPS-function maximum.
const runtimeOpts = {
timeoutSeconds: 540, // in seconds
memory: '2GB'
}
exports.sendToKindle = functions.runWith(runtimeOpts).https.onRequest(async (req, res) => {
// REMOVED A BLOCK OF CODE FOR SIMPLICITY, BUT CAN PUT BACK IN IF NEEDED//
var options = {
format: 'Letter',
directory: "/tmp",
timeout: 540000, // in milliseconds
};
// Write the HTML to /tmp, then read it back for pdf.create().
const blookFileName = createFileName(blookData.title) + '.pdf';
const tempFilePath = path.join(os.tmpdir(), `${blookFileName}`);
const htmlFilePath = path.join(os.tmpdir(), 'book.html');
const htmlFs = fs.openSync(htmlFilePath, 'w');
await fs.promises.appendFile(htmlFilePath, bookHTML);
const fd = fs.openSync(tempFilePath, 'w');
var html = fs.readFileSync(htmlFilePath, 'utf8');
let mailgunObject = null;
// NOTE(review): toFile() is asynchronous, but execution falls through to
// send the 200 response below before the PDF (and the email) are done —
// presumably the platform then freezes the instance, which would explain
// the PhantomJS timeout occurring only when deployed. Compare the fixed
// version that keeps this work inside the cors() handler.
pdf.create(html, options).toFile(tempFilePath, async (err, res) => {
if (err) return console.error(err);
mailgunObject = await sendEmail(tempFilePath, kindleEmail);
return console.log(res);
});
// NOTE(review): these descriptors are closed while toFile() may still be
// writing to tempFilePath — confirm the intended ordering.
fs.closeSync(fd);
fs.closeSync(htmlFs);
return cors(req, res, () => {
res.status(200).type('application/json').send({'response': 'Success'})
})
I was able to solve this issue by modifying the code by having the pdf.create().toFile() placed within the return of the cloud function.
const pdf = require('html-pdf');
const runtimeOpts = {
timeoutSeconds: 300, // in seconds
memory: '1GB'
}
exports.sendToKindle = functions.runWith(runtimeOpts).https.onRequest(async (req, res) => {
// REMOVED A BLOCK OF CODE FOR SIMPLICITY, BUT CAN PUT BACK IN IF NEEDED//
var options = {
format: 'Letter',
directory: "/tmp",
timeout: 540000, // in milliseconds
};
const blookFileName = createFileName(blookData.title) + '.pdf';
const tempFilePath = path.join(os.tmpdir(), `${blookFileName}`);
const htmlFilePath = path.join(os.tmpdir(), 'book.html');
const htmlFs = fs.openSync(htmlFilePath, 'w');
await fs.promises.appendFile(htmlFilePath, bookHTML);
const fd = fs.openSync(tempFilePath, 'w');
var html = fs.readFileSync(htmlFilePath, 'utf8');
// The fix: pdf.create().toFile() now runs inside the cors() handler, and the
// file descriptors are closed from within its callback, after the PDF has
// been written and the email sent.
// NOTE(review): res.status(200) is still sent before toFile() completes, so
// the same early-response risk remains — verify this reliably works on the
// deployed runtime.
return cors(req, res, () => {
pdf.create(html, options).toFile(tempFilePath, async (err, res) => {
if (err) return console.error(err);
let mailgunObject = await sendEmail(tempFilePath, kindleEmail);
fs.closeSync(fd);
fs.closeSync(htmlFs);
return console.log(res);
});
res.status(200).type('application/json').send({'response': 'Success'})
})
I got the same issue. Actually, I realized that when I called the function using html-pdf through Postman, or simply through a request in Google Chrome, the PDF would generate within 2 or 3 seconds, whereas it took more like 2 or 3 minutes when calling it directly from my app.
So this is what I did : putting html-pdf in a separate function that I deployed, and then calling it :
// Fire-and-forget call to the separately deployed html-pdf function.
// Fixes: the URL was missing quotes (a syntax error), and `https` was an
// implicit global — declare it properly.
const https = require('https');
https.get('https://us-central1-your-project-name.cloudfunctions.net/your-function-using-html-pdf');

nodejs shallow git clone using simple-git

I'm trying to create shallow clone using simple-git. I'm trying to create an equivalent of this command: git clone --depth 1 https://github.com/steveukx/git-js.git. My code is as follows:
// Attempted equivalent of: git clone --depth 1 https://github.com/steveukx/git-js.git
const git = require('simple-git')()
const repoURL = 'https://github.com/steveukx/git-js.git';
const localPath= './git-js';
const options = ['--depth', '1'];
const handlerFn = () => {
console.log('DONE')
};
// NOTE(review): this is the bug — handlerFn() is *invoked* here, so its
// return value (undefined) is passed instead of the callback, and the
// options are not applied as intended. Pass the reference: handlerFn
git.clone(repoURL, localPath, options, handlerFn());
I've specified --depth 1 in options, but the code copies the entire repo history, it seems to completely ignore the options given. Am I doing this correctly, what can cause this behaviour?
After some digging the issue was in git.clone(repoURL, localPath, options, handlerFn());, you have to pass the reference to function instead of actual callback, like this git.clone(repoURL, localPath, options, handlerFn);.
The full implementation is below:
const git = require('simple-git')();
const fs = require('fs')
const url = require('url');

this.gitURL = 'https://github.com/torvalds/linux.git';

// Derive a local directory name such as "github.torvalds.linux" from the
// clone URL. Fixes over the original: the deprecated url.parse() is replaced
// by the WHATWG URL class, and the chain of single-occurrence
// .replace('com','')/.replace('/','') calls — which only produced the right
// name by accident — is replaced by an explicit, order-independent
// transformation.
const parsedURL = new URL(this.gitURL);
const localRepoName = (parsedURL.hostname.replace(/\.com$/, '') + parsedURL.pathname)
  .replace(/\.git$/, '') // drop the trailing ".git"
  .split('/')
  .filter(Boolean)
  .join('.');            // "github/torvalds/linux" -> "github.torvalds.linux"
this.localPath = `./${localRepoName}`;
this.options = ['--depth', '1'];
this.callback = () => {
  console.log('DONE')
}

if (fs.existsSync(this.localPath)) {
  // something
} else {
  // Stream git's own progress output while cloning.
  git.outputHandler((command, stdout, stderr) => {
    stdout.pipe(process.stdout);
    stderr.pipe(process.stderr)
    stdout.on('data', (data) => {
      // Print data
      console.log(data.toString('utf8'))
    })
  })
  // Pass the callback by reference (not invoked) so the shallow-clone
  // options are honoured.
  .clone(this.gitURL, this.localPath, this.options, this.callback)
}

Resources