Jest functions export - jestjs

I'm using Jest with Puppeteer and have some code that I'd like to reuse across my project.
I followed these instructions:
https://jestjs.io/docs/en/getting-started
//adminLogin2.js
const admLog = function admLog () {
  return
  page.goto(data.config.env.host);
  page.waitForSelector(data.selectors.admin.auth.input_login);
  page.click(data.selectors.admin.auth.input_login);
  page.type(data.selectors.admin.auth.input_login, data.credentials.business_email.login);
  page.click(data.selectors.admin.auth.form_button_first);
  // second step
  page.waitForSelector(data.selectors.admin.auth.input_login_password);
  page.click(data.selectors.admin.auth.input_login_password);
  page.type(data.selectors.admin.auth.input_login_password, data.credentials.business_email.password);
  page.click(data.selectors.admin.auth.form_button_second);
  page.waitForSelector(data.selectors.admin.auth.business_login_button);
  page.click(data.selectors.admin.auth.business_login_button);
  page.waitForSelector(data.selectors.admin.auth.business_body);
}
module.exports = admLog;
//test
const data = require('../config');
const admLog = require('../struct/Login/adminLogin2');

describe('GetPackage :: Auth', () => {
  it('Admin Email', async () => {
    await admLog();
  });
});
The test runs without exceptions, but nothing happens; with headless: false, Chrome just opens and closes.
What should be fixed?

Add async/await to the admLog function.
Remove the bare return statement so that automatic semicolon insertion doesn't end the function before any of the page calls run.
So the final adminLogin2.js file should be like this:
//adminLogin2.js
const admLog = async function() {
  await page.goto(data.config.env.host);
  await page.waitForSelector(data.selectors.admin.auth.input_login);
  await page.click(data.selectors.admin.auth.input_login);
  await page.type(data.selectors.admin.auth.input_login, data.credentials.business_email.login);
  await page.click(data.selectors.admin.auth.form_button_first);
  // second step
  await page.waitForSelector(data.selectors.admin.auth.input_login_password);
  await page.click(data.selectors.admin.auth.input_login_password);
  await page.type(data.selectors.admin.auth.input_login_password, data.credentials.business_email.password);
  await page.click(data.selectors.admin.auth.form_button_second);
  await page.waitForSelector(data.selectors.admin.auth.business_login_button);
  await page.click(data.selectors.admin.auth.business_login_button);
  await page.waitForSelector(data.selectors.admin.auth.business_body);
}
module.exports = admLog;
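For reference, the reason the bare return mattered: automatic semicolon insertion turns a return followed by a line break into return;, so every page call after it was dead code and the function resolved immediately. A minimal, generic sketch of the pitfall (not project code):
// ASI inserts a semicolon right after `return`, so the next line never runs.
function broken() {
  return
    'value'; // unreachable; broken() returns undefined
}

// Keeping the returned expression on the same line avoids the problem.
function working() {
  return 'value';
}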

Related

onValue triggering multiple times

I'm using Node.js v18.12.1 and Discord.js v14 to develop a Discord bot, and I need to read some data from Firebase. I'm confused because I'm used to how Java with Hibernate fetches data, which works differently. Here, I need to use the onValue() listener.
My onValue() acts strangely. Instead of just reading the data from Firebase, it skips its body entirely, then it triggers multiple times, each time skipping the body of its callback, and only afterwards does it actually run the code.
I've read somewhere on this forum that this can happen when multiple onValue() listeners are subscribed and they all fire. Someone mentioned I need to call the off() function somewhere "before" the onValue(). This confuses me because I use this listener in many locations: I need it in each command file, in the execute(interaction) functions (the ones that run slash commands in Discord). I have it something like this:
async execute(interaction) {
  const infographicRef = ref(db, '/infographics/arena/' + interaction.options.getString("arena-team"));
  var imageUrl = null;
  var postUrl = null;
  onValue(infographicRef, (snapshot) => {
    imageUrl = snapshot.child("image-url").val();
    interaction.reply(imageUrl);
  })
},
And I planned for each command, in each command.js file, to have its own onValue(). I'm not sure exactly what to do.
I also tried to work around this with the once() method, which I saw in the Firebase documentation, but I got the error: ref.once is not a function.
It also seems that after the first trigger of onValue, when its body is not executed, my code in interactionCreate.js fires as well, causing the command to be executed again:
const { Events } = require('discord.js');

module.exports = {
  name: Events.InteractionCreate,
  async execute(interaction) {
    if (!interaction.isChatInputCommand()) return;
    const command = interaction.client.commands.get(interaction.commandName);
    if (!command) {
      console.error(`No command matching ${interaction.commandName} was found.`);
      return;
    }
    try {
      await command.execute(interaction);
    } catch (error) {
      console.error(`Error executing ${interaction.commandName}`);
      console.error(error);
    }
  },
};
My bot.js (which in my case is the index file):
const fs = require('node:fs');
const path = require('node:path');
const { Client, Collection, Events, GatewayIntentBits } = require('discord.js');
const { token } = require('./config.json');

const client = new Client({ intents: [GatewayIntentBits.Guilds] });

const eventsPath = path.join(__dirname, 'events');
const eventFiles = fs.readdirSync(eventsPath).filter(file => file.endsWith('.js'));

for (const file of eventFiles) {
  const filePath = path.join(eventsPath, file);
  const event = require(filePath);
  if (event.once) {
    client.once(event.name, (...args) => event.execute(...args));
  } else {
    client.on(event.name, (...args) => event.execute(...args));
  }
}

client.commands = new Collection();
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));

for (const file of commandFiles) {
  const filePath = path.join(commandsPath, file);
  const command = require(filePath);
  client.commands.set(command.data.name, command);
}

client.once(Events.ClientReady, () => {
  console.log('Ready!');
});

client.on(Events.InteractionCreate, async interaction => {
  if (!interaction.isChatInputCommand()) return;
  const command = client.commands.get(interaction.commandName);
  if (!command) return;
  try {
    await command.execute(interaction);
  } catch (error) {
    console.error(error);
    await interaction.reply({ content: 'There was an error while executing this command!', ephemeral: true });
  }
});

client.login(token);
The onValue function registers a realtime listener that keeps monitoring the value in the database.
If you want to read a value once, use the get() function in v9 (the equivalent of the once() method in earlier SDK versions). Have a look at the code sample in the documentation on reading data once.
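As a rough sketch of what that could look like in one of the command files (adapted from the snippet above; db is assumed to be an already-initialized Realtime Database instance):
// at the top of the command file (v9 modular SDK)
const { ref, get } = require('firebase/database');

// ...inside module.exports:
async execute(interaction) {
  const infographicRef = ref(db, '/infographics/arena/' + interaction.options.getString('arena-team'));
  // get() resolves once with the current value and leaves no listener attached,
  // so the reply is sent exactly once per interaction.
  const snapshot = await get(infographicRef);
  const imageUrl = snapshot.child('image-url').val();
  await interaction.reply(imageUrl);
},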

Node async await not waiting

When I run the code below and remove the setTimeout calls, all the methods run at once, even with the await. Any ideas why the await does not wait?
const dotenv = require("dotenv");
dotenv.config({ path: "./config/.env" });
const { setTimeout } = require("timers/promises");
const module1 = require("./utils/module1");
const module2 = require("./utils/module2");
const module3 = require("./utils/module3");

const run = async () => {
  await module1.import();
  await setTimeout(60000);
  await module2.import();
  await setTimeout(120000);
  await module3.import();
};

run();
Example of a module:
exports.import = async () => {
  // do something
}
As @robertklep stated in the comments, the problem was inside the actual method of the module.
Nothing was properly awaited inside the first method, causing the second method to kick off early.
Specifically, I was using readline incorrectly and had to change the code as follows:
instead of
rl.on()
use
for await (const line of rl) { }
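For context, a minimal sketch of what that looks like inside one of the modules (the file path and stream source are placeholders, not the asker's actual code):
const fs = require('node:fs');
const readline = require('node:readline');

exports.import = async () => {
  const rl = readline.createInterface({
    input: fs.createReadStream('./data/input.txt'), // placeholder path
    crlfDelay: Infinity,
  });

  // The async iterator only completes when the stream ends, so the caller's
  // `await moduleX.import()` genuinely waits before moving on.
  for await (const line of rl) {
    // process the line
  }
};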

fs.writeFile crashes node app after writing first json file

I'm trying to crawl several web pages to check for broken links and write the results to JSON files. However, after the first file is completed the app crashes with no error at all...
I'm using Puppeteer to crawl, Bluebird to run each link concurrently and fs to write the files.
What I've tried:
Switching the file type to '.txt' or '.php'. This works, but I'd need another loop outside the current workflow to convert the files from '.txt' to '.json'. Renaming the file right after writing to it also causes the app to crash.
Wrapping fs.writeFile in try/catch statements, but it never throws an error.
Running the entire app outside of Express. This worked at some point, but I'm trying to use it within the framework.
const express = require('express');
const router = express.Router();
const puppeteer = require('puppeteer');
const bluebird = require("bluebird");
const fs = require('fs');

router.get('/', function(req, res, next) {
  (async () => {
    // Our (multiple) URLs.
    const urls = ['https://www.testing.com/allergy-test/', 'https://www.testing.com/genetic-testing/'];

    const withBrowser = async (fn) => {
      const browser = await puppeteer.launch();
      try {
        return await fn(browser);
      } finally {
        await browser.close();
      }
    }

    const withPage = (browser) => async (fn) => {
      const page = await browser.newPage();
      // Turns request interceptor on.
      await page.setRequestInterception(true);
      // Ignore all the asset requests, just get the document.
      page.on('request', request => {
        if (request.resourceType() === 'document') {
          request.continue();
        } else {
          request.abort();
        }
      });
      try {
        return await fn(page);
      } finally {
        await page.close();
      }
    }

    const results = await withBrowser(async (browser) => {
      return bluebird.map(urls, async (url) => {
        return withPage(browser)(async (page) => {
          await page.goto(url, {
            waitUntil: 'domcontentloaded',
            timeout: 0 // Removes timeout.
          });
          // Search for urls we want to "crawl".
          const hrefs = await page.$$eval('a[href^="https://www.testing.com/"]', as => as.map(a => a.href));
          // Predefine our arrays.
          let links = [];
          let redirect = [];
          // Loops through each /goto/ url on page
          for (const href of Object.entries(hrefs)) {
            response = await page.goto(href[1], {
              waitUntil: 'domcontentloaded',
              timeout: 0 // Remove timeout.
            });
            const chain = response.request().redirectChain();
            const link = {
              'source_url': href[1],
              'status': response.status(),
              'final_url': response.url(),
              'redirect_count': chain.length,
            };
            // Loops through the redirect chain for each href.
            for (const ch of chain) {
              redirect = {
                status: ch.response().status(),
                url: ch.url(),
              };
            }
            // Push all info of target link into links
            links.push(link);
          }
          // JSONify the data.
          const linksJson = JSON.stringify(links);
          fileName = url.replace('https://www.testing.com/', '');
          fileName = fileName.replace(/[^a-zA-Z0-9\-]/g, '');
          // Write data to file in /tmp directory.
          fs.writeFile(`./tmp/${fileName}.json`, linksJson, (err) => {
            if (err) {
              return console.log(err);
            }
          });
        });
      }, {concurrency: 4}); // How many pages to run at a time.
    });
  })();
});

module.exports = router;
UPDATE:
So there is nothing wrong with my code... I realized nodemon was stopping the process after each file was saved. Since nodemon detected a "file change", it kept restarting my server after the first file was written.
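If anyone else runs into this, one workaround (assuming the JSON files really are written under ./tmp inside the project) is to tell nodemon not to watch that directory, for example with a nodemon.json in the project root:
{
  "ignore": ["tmp/*"]
}
The same thing can be done on the command line with nodemon --ignore 'tmp/*'.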

Node async await code writing style which one is good and optimized

I used to write async/await code as in Style 1, but another dev suggested I write it as in Style 2.
Can someone please explain the difference between the two styles? To me they seem the same.
Code Style 1:
const fixtures = await fixtureModel.fetchAll();
const team = await teamModel.fetch(teamId);
Code Style 2:
const fixturesPromise = fixtureModel.fetchAll();
const teamPromise = teamModel.fetch(teamId);
const fixtures = await fixturesPromise;
const team = await teamPromise;
They are not the same.
The first will initialize a Promise, wait for it to complete, then initialize another Promise, and wait for the second Promise to complete.
The second will initialize both Promises at once and wait for both to complete. So, it will take less time. Here's a similar example:
// Takes twice as long as the other:
const makeProm = () => new Promise(resolve => setTimeout(resolve, 1000));
console.log('start');
(async () => {
  const foo = await makeProm();
  const bar = await makeProm();
  console.log('done');
})();

// Takes half as long as the other:
const makeProm = () => new Promise(resolve => setTimeout(resolve, 1000));
console.log('start');
(async () => {
  const fooProm = makeProm();
  const barProm = makeProm();
  const foo = await fooProm;
  const bar = await barProm;
  console.log('done');
})();
But you might consider making the code even clearer with Promise.all instead:
const [fixtures, team] = await Promise.all([
  fixtureModel.fetchAll(),
  teamModel.fetch(teamId)
]);

nodejs How to fix `browser.newPage is not a function` using puppeteer-core?

I am trying to use puppeteer-core, but when I run my code:
const puppeteer = require('puppeteer-core');

module.exports = run = () => {
  const url = 'https://example.com'
  const browser = puppeteer.launch();
  const page = browser.newPage().then(function(page){
    page.goto(url)
    return browser
  });
};

run().catch(console.error.bind(console))
I get this error
TypeError: browser.newPage is not a function
The problem in your code is that Puppeteer works with Promises, meaning that most functions return a Promise instead of the value directly. This means that you either have to use the then function or await expressions to get the value.
Code sample
module.exports = run = async () => {
  const url = 'https://example.com';
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  await page.goto(url);
  return browser;
};
Note that the function is marked as async now, making it implicitly return a Promise. That means that to wait for the run() function to finish, you would have to call it from within another async function, like this:
(async () => {
  const browser = await run();
})();
