Asynchronously writing files in a loop, how to manage streams - node.js

I am trying to write over 100 png files via node-canvas in a loop. Only 40 files get generated and then the process completes.
I have tried creating a png stream via createPNGStream() and piping the results to a write stream created by fs.createWriteStream().
Write function:
function writeFile(row, col) {
  // canvas code ...
  const stream = canvas.createPNGStream();
  let path = __dirname + '/imgs/heatmapRow' + row + "Col" + col + '.png';
  const out = fs.createWriteStream(path);
  stream.pipe(out);
  out.on('finish', () => console.log('The PNG file was created.'))
}
Calling function:
function generateImages(){
  var numRows = 20;
  var numCols = 5;
  for (let row = 0; row < numRows; ++row) {
    for (let col = 0; col < numCols; ++col) {
      writeFile(row, col);
    }
  }
}
The loop runs and completes, and at the end I get a bunch of the following lines all at once:
The PNG file was created.
The PNG file was created.
The PNG file was created.
The PNG file was created.
I'm thinking that a write stream is being created on each loop iteration and is asynchronous, and the process is terminating because I can have only so many streams open at once.
How can I write all of my files asynchronously to keep the process fast? (I'd prefer not to write the files synchronously.) Do I need to add each writeFile call to a queue? How do I limit the number of streams I have open, and how do I manage them?

You have to use Promises to track your asynchronous calls.
Here is a solution:
function writeFile(row, col) {
  // canvas code ...
  const stream = canvas.createPNGStream();
  let path = __dirname + "/imgs/heatmapRow" + row + "Col" + col + ".png";
  return new Promise(resolve => {
    const out = fs.createWriteStream(path);
    stream.pipe(out);
    out.on("finish", () => {
      console.log("The PNG file was created.");
      resolve();
    });
  });
}
function generateImages() {
  var numRows = 20;
  var numCols = 5;
  var imagesToGenerate = [];
  for (let row = 0; row < numRows; ++row) {
    for (let col = 0; col < numCols; ++col) {
      imagesToGenerate.push(writeFile(row, col));
    }
  }
  Promise.all(imagesToGenerate).then(() => {
    console.log("All images generated");
  });
}
Take a look at the Promise.all docs if it's not clear how this works.
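Note that Promise.all still fires every write at once, so it doesn't by itself cap how many streams are open, and the promise in writeFile only ever resolves (you may also want to reject on the streams' 'error' events). If you need a ceiling on open streams, one simple approach is to await the writes in fixed-size batches. A minimal sketch building on the promisified writeFile above; the batch size of 10 is an arbitrary assumption to tune for your system:
async function generateImagesBatched() {
  var numRows = 20;
  var numCols = 5;
  const batchSize = 10; // assumed cap on concurrently open write streams
  let batch = [];
  for (let row = 0; row < numRows; ++row) {
    for (let col = 0; col < numCols; ++col) {
      batch.push(writeFile(row, col));
      if (batch.length === batchSize) {
        await Promise.all(batch); // wait for this batch to finish before opening more streams
        batch = [];
      }
    }
  }
  await Promise.all(batch); // flush the final partial batch
  console.log("All images generated");
}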

Related

Why am I getting timeout and document missing errors while making parallel calls to Apps Script API?

I'm writing a Google Sheets add-on that copies some data from one spreadsheet to another and then reformats it. The data sets involved are often large (~100k rows), so to avoid hitting the 6-minute timeout limit I break the data into chunks and then run the data-copying function in parallel on each chunk using google.script.run calls from the client side.
On my sample data set of ~100k rows, the first couple of chunks to complete are copied successfully, and the rest are throwing the error "Service Spreadsheets timed out while accessing document with id [spreadsheet id]."
And here's what it looks like in the Apps Script dashboard (screenshot omitted):
I'm confused by the timeout errors because:
I've successfully run the script on a dataset that contains 5000 rows
Apps Script dashboard shows the executions failing before 6 minutes (more like 4-5 minutes)
Apps Script dashboard logging shows the failed (timed out) executions logging successfully. Logging happens after the setValues() operation (see code below); the only thing that comes after the logging is the return, so I don't understand how it could log successfully and then time out (I thought Apps Script was synchronous... but maybe I'm wrong?)
I'm also not sure about those "Uncaught" errors, but they seem to be showing up on the dashboard as "Document [spreadsheet id] is missing (perhaps it was deleted, or you don't have read access?)"
This is the document I'm copying to, and I've confirmed that it still exists on my Drive and I can open it and see the data that was successfully copied. Can a document go "missing" if too many instances of a script are trying to access it simultaneously?
I've experimented with smaller chunk sizes (1000 and 2000 rows) and get the same types of errors.
Here's what my client-side Javascript looks like:
// This function is the success handler that runs after another function (which grabs the total # of rows
// from the sheet to be copied, and then creates the new spreadsheet to be copied into) completes
function dataParamsSuccess(dataParameters) {
  // dataParameters = [busHrs, outputSsUrl, lastRow, maxRows, maxColumns]
  var busHrs = dataParameters[0];
  var outputSsUrl = dataParameters[1];
  var lastRow = dataParameters[2];
  var maxRows = dataParameters[3];
  var maxColumns = dataParameters[4];
  console.log(maxRows);
  console.log(maxColumns);

  // Set chunk size
  var chunkSize = 5000; // number of rows in chunk

  // Determine number of chunks
  var numChunks = Math.ceil(lastRow / chunkSize);
  var lastChunkSize = lastRow % chunkSize;
  if ((numChunks - 1) * chunkSize + lastChunkSize == lastRow) {
    console.log("Math checks out");
  } else {
    console.log("oops, check your math");
  }

  // Generate status message
  var statusHtml = numChunks + " chunks to be copied";
  for (i = 0; i < numChunks; i++) {
    var chunkNum = i + 1;
    var chunkNumStr = chunkNum.toString();
    statusHtml += "<div id=\"chunk" + chunkNumStr + "Status\"></div>";
  }
  document.getElementById("statusMsg").innerHTML = statusHtml;

  var startRow = 1;
  // Call copyData once for each chunk
  for (i = 0; i < numChunks; i++) {
    var chunkNum = i + 1;
    var chunkNumStr = chunkNum.toString();
    var chunkDivId = "chunk" + chunkNumStr + "Status";
    if (chunkNum == numChunks) { // if this is the last chunk, chunk size is smaller
      chunkSize = lastChunkSize;
    }
    var copyParams = [chunkNum, chunkSize, startRow, outputSsUrl];
    google.script.run
      .withSuccessHandler(copyChunkSuccess)
      .copyData(copyParams);
    document.getElementById(chunkDivId).innerHTML = "Chunk " + chunkNumStr + " copying in progress";
    startRow += chunkSize;
    console.log("startRow: " + startRow.toString());
  }
  // Haven't gotten to the part where I figure out what to do after all chunks are complete yet
}
And here's the server-side Apps Script function being called:
function copyData(copyParams) {
  try {
    // copyParams = [chunkNum, chunkSize, startRow, outputSsUrl]
    var chunkNum = copyParams[0];
    var chunkSize = copyParams[1];
    var startRow = copyParams[2];
    var outputSsUrl = copyParams[3];
    var lastRow = startRow + chunkSize;

    // Get input and output sheets
    var dataSheet = SpreadsheetApp.getActiveSheet();
    var outputSpreadsheet = SpreadsheetApp.openByUrl(outputSsUrl);
    var outputSheet = outputSpreadsheet.getActiveSheet();

    // Copy values
    var values = dataSheet.getRange(startRow, 1, chunkSize, 22).getValues();
    outputSheet.getRange(startRow, 1, chunkSize, 22).setValues(values);

    // Logging
    var dataSpreadsheetId = dataSheet.getParent().getId();
    var outputSpreadsheetId = outputSpreadsheet.getId();
    console.log("Chunk " + chunkNum.toString() + " (rows " + startRow.toString() + " through " + lastRow.toString() + ") copied successfully");
    return [chunkNum, startRow, lastRow, "success"];
  } catch (e) {
    return [chunkNum, startRow, lastRow, e.message]; // Return error to client-side; server-side logging is taking too long
  }
}
How about this answer?
In my experience, even when the Spreadsheet service is used, continuous access from asynchronous processes can cause this kind of issue. When I ran into it, I used the lock service and setTimeout. I'm not sure whether this method can resolve your issue, so please test the following modification. Here, I would like to propose using the lock service on the Google Apps Script side and setTimeout on the Javascript side. When your script is modified, it becomes as follows.
The flow of this workaround is as follows.
Flow:
1. 10 workers are sent to the Google Apps Script side.
2. After the 10 workers are sent, the Javascript side waits for 5 seconds.
3. On the Google Apps Script side, the 10 workers are received and processed under the lock service.
4. After the 5 seconds, the Javascript side sends the next 10 workers.
This cycle repeats until all chunks have been sent.
Google Apps Script side:
Please modify copyData as follows.
function copyData(copyParams) {
  var lock = LockService.getDocumentLock();
  if (lock.tryLock(10000)) {
    try {
      // copyParams = [chunkNum, chunkSize, startRow, outputSsUrl]
      var chunkNum = copyParams[0];
      var chunkSize = copyParams[1];
      var startRow = copyParams[2];
      var outputSsUrl = copyParams[3];
      var lastRow = startRow + chunkSize;

      // Get input and output sheets
      var dataSheet = SpreadsheetApp.getActiveSheet();
      var outputSpreadsheet = SpreadsheetApp.openByUrl(outputSsUrl);
      var outputSheet = outputSpreadsheet.getActiveSheet();

      // Copy values
      var values = dataSheet.getRange(startRow, 1, chunkSize, 22).getValues();
      outputSheet.getRange(startRow, 1, chunkSize, 22).setValues(values);

      // Logging
      var dataSpreadsheetId = dataSheet.getParent().getId();
      var outputSpreadsheetId = outputSpreadsheet.getId();
      console.log("Chunk " + chunkNum.toString() + " (rows " + startRow.toString() + " through " + lastRow.toString() + ") copied successfully");
      return [chunkNum, startRow, lastRow, "success"];
    } catch (e) {
      return [chunkNum, startRow, lastRow, e.message]; // Return error to client-side; server-side logging is taking too long
    } finally {
      lock.releaseLock();
    }
  }
}
HTML & Javascript side:
Please modify dataParamsSuccess as follows.
// This function is the success handler that runs after another function (which grabs the total # of rows
// from the sheet to be copied, and then creates the new spreadsheet to be copied into) completes
async function dataParamsSuccess(dataParameters) { // <--- Modified
  const wait = (s) => new Promise(r => setTimeout(r, s)); // <--- Added
  // dataParameters = [busHrs, outputSsUrl, lastRow, maxRows, maxColumns]
  var busHrs = dataParameters[0];
  var outputSsUrl = dataParameters[1];
  var lastRow = dataParameters[2];
  var maxRows = dataParameters[3];
  var maxColumns = dataParameters[4];
  console.log(maxRows);
  console.log(maxColumns);

  // Set chunk size
  var chunkSize = 5000; // number of rows in chunk

  // Determine number of chunks
  var numChunks = Math.ceil(lastRow / chunkSize);
  var lastChunkSize = lastRow % chunkSize;
  if ((numChunks - 1) * chunkSize + lastChunkSize == lastRow) {
    console.log("Math checks out");
  } else {
    console.log("oops, check your math");
  }

  // Generate status message
  var statusHtml = numChunks + " chunks to be copied";
  for (i = 0; i < numChunks; i++) {
    var chunkNum = i + 1;
    var chunkNumStr = chunkNum.toString();
    statusHtml += "<div id=\"chunk" + chunkNumStr + "Status\"></div>";
  }
  document.getElementById("statusMsg").innerHTML = statusHtml;

  var count = 0; // <--- Added
  var startRow = 1;
  // Call copyData once for each chunk
  for (i = 0; i < numChunks; i++) {
    count++; // <--- Added
    var chunkNum = i + 1;
    var chunkNumStr = chunkNum.toString();
    var chunkDivId = "chunk" + chunkNumStr + "Status";
    if (chunkNum == numChunks) { // if this is the last chunk, chunk size is smaller
      chunkSize = lastChunkSize;
    }
    var copyParams = [chunkNum, chunkSize, startRow, outputSsUrl];
    google.script.run
      .withSuccessHandler(copyChunkSuccess)
      .copyData(copyParams);
    if (count == 10) { // <--- Added
      console.log("wait");
      await wait(5000);
      count = 0;
    }
    document.getElementById(chunkDivId).innerHTML = "Chunk " + chunkNumStr + " copying in progress";
    startRow += chunkSize;
    console.log("startRow: " + startRow.toString());
  }
  // Haven't gotten to the part where I figure out what to do after all chunks are complete yet
}
Note:
I'm not sure whether 5000 in await wait(5000) is suitable for your situation, so please adjust this value by testing it against your data. The current value of 5000 means 5 seconds.
Reference:
Lock Service
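As for the question's remaining to-do (knowing when every chunk is complete): one approach is to count completions in the success handler. A minimal sketch, assuming copyChunkSuccess is your handler and that the counters below are declared in a scope it can see; the names are hypothetical:
var completedChunks = 0; // hypothetical counter
var totalChunks = 0;     // set this to numChunks inside dataParamsSuccess

function copyChunkSuccess(result) {
  // result = [chunkNum, startRow, lastRow, status] as returned by copyData
  var chunkDivId = "chunk" + result[0].toString() + "Status";
  document.getElementById(chunkDivId).innerHTML = "Chunk " + result[0] + ": " + result[3];
  completedChunks++;
  if (completedChunks === totalChunks) {
    document.getElementById("statusMsg").innerHTML += "<div>All chunks complete</div>";
  }
}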

Make Initialization Asynchronous in node.js

I am trying to initialize a key class in a node.js program, but the instructions are running in arbitrary order and therefore it is initializing wrong. I've tried both making initialization happen in the definition and in a separate function; neither works. Is there something that I'm missing?
Current code:
class BotState {
  constructor() {
    this.bios = {};
    this.aliases = {};
    this.stories = {};
    this.nextchar = 0;
  }
}

var ProgramState = new BotState();

BotState.prototype.Initialize = function() {
  this.bios = {};
  var aliases = {};
  var nextchar = 0;
  this.nextchar = 0;
  fs.readdir(biosdir, function (err, files) {
    if (err) throw err;
    for (var file in files) {
      fs.readFile(biosdir + file + ".json", {flag: 'r'}, (err, data) => {
        if (err) throw err;
        var bio = JSON.parse(data);
        var index = bio["charid"];
        this.bios[index] = bio;
        for (var alias in bio["aliaslist"]) {
          this.aliases[bio["aliaslist"][alias].toLowerCase()] = index;
        }
        if (index >= nextchar) {
          nextchar = index + 1;
        }
      })
    }
    this.stories = {};
    this.nextchar = Math.max(Object.keys(aliases).map(key => aliases[key])) + 1;
  });
}

ProgramState.Initialize();
ProgramState.Initialize();
Is there some general way to make node.js just... run commands in the order they're written, as opposed to some arbitrary one?
(Apologies if the code is sloppy; I was more concerned with making it do the right thing than making it look nice.)
You are running an asynchronous operation in a loop, which lets the loop keep running while the asynchronous operations finish in some random order, so you process them in some random order. The simplest way to control your loop is to switch to the promise-based version of the fs library and then use async/await so your for loop pauses and waits for each asynchronous operation to complete. You can do that like this:
const fsp = require('fs').promises;

class BotState {
  constructor() {
    this.bios = {};
    this.aliases = {};
    this.stories = {};
    this.nextchar = 0;
  }
}

var ProgramState = new BotState();

BotState.prototype.Initialize = async function() {
  this.bios = {};
  this.nextchar = 0;
  let aliases = {};
  let nextchar = 0;
  const files = await fsp.readdir(biosdir);
  for (const file of files) {
    const data = await fsp.readFile(biosdir + file + ".json", {flag: 'r'});
    const bio = JSON.parse(data);
    const index = bio.charid;
    const list = bio.aliaslist;
    this.bios[index] = bio;
    for (const alias of list) {
      this.aliases[alias.toLowerCase()] = index;
    }
    if (index >= nextchar) {
      nextchar = index + 1;
    }
  }
  this.stories = {};
  // there is something wrong with this line of code because you NEVER
  // put any data in the variable aliases
  this.nextchar = Math.max(Object.keys(aliases).map(key => aliases[key])) + 1;
}

ProgramState.Initialize();
Note, there's a problem with your usage of the aliases local variable because you never put anything in that data structure, yet you're trying to use it in the last line of the function. I don't know what you're trying to accomplish there so you will have to fix that.
Also, note that you should never use for/in to iterate an array: it iterates the properties of an object, which can include more than just the array elements. for/of is made precisely for iterating an iterable like an array, and it also saves the array dereference, since it gives you each value rather than each index.
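A quick illustration of the difference, using a hypothetical array rather than anything from the question:
const arr = ['a', 'b'];
arr.extra = true; // a non-index property

for (const k in arr) console.log(k); // logs "0", "1", "extra" -- keys, including non-index properties
for (const v of arr) console.log(v); // logs "a", "b" -- only the array elements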

How to get over 1000 records from a SuiteScript Saved Search?

Below is code I came up with to run a Saved Search in NetSuite using SuiteScript, create a CSV with the Saved Search results and then email the CSV. The trouble is, the results are limited to 1000 records. I've researched this issue and it appears the solution is to run a loop that slices in increments of 1000. A sample of what I believe is used to slice searches is also below.
However, I cannot seem to be able to incorporate the slicing into my code. Can anyone help me combine the slicing code with my original search code?
var search = nlapiSearchRecord('item', 'customsearch219729');

// Creating some arrays that will be populated from the saved search results
var content = new Array();
var cells = new Array();
var temp = new Array();
var x = 0;

// Looping through the search results
for (var i = 0; i < search.length; i++) {
  var resultSet = search[i];
  // Returns an array of column internal ids
  var columns = resultSet.getAllColumns();
  // Looping through each column and assigning it to the temp array
  for (var y = 0; y <= columns.length; y++) {
    temp[y] = resultSet.getValue(columns[y]);
  }
  // Taking the content of the temp array and assigning it to the content array.
  content[x] += temp;
  // Incrementing the index of the content array
  x++;
}

// Inserting headers
content.splice(0, 0, "sku,qty,");

// Creating a string variable that will be used as the CSV content
var contents;

// Looping through the content array and assigning it to the contents string variable.
for (var z = 0; z < content.length; z++) {
  contents += content[z].replace('undefined', '') + '\n';
}

// Creating a csv file and passing the contents string variable.
var file = nlapiCreateFile('InventoryUpdate.csv', 'CSV', contents.replace('undefined', ''));

// Emailing the script.
function SendSSEmail()
{
  nlapiSendEmail(768, 5, 'Inventory Update', 'Sending saved search via scheduled script', 'cc#email.com', null, null, file, true, null, 'cc#email.com');
}
The following code is an example of what I found that is used to return more than a 1000 records. Again, as a novice, I can't seem to incorporate the slicing into my original, functioning SuiteScript. Any help is of course greatly appreciated.
var filters = [...];
var columns = [...];
var results = [];
var savedsearch = nlapiCreateSearch('customrecord_mybigfatlist', filters, columns);
var resultset = savedsearch.runSearch();
var searchid = 0;
do {
  var resultslice = resultset.getResults(searchid, searchid + 1000);
  for (var rs in resultslice) {
    results.push(resultslice[rs]);
    searchid++;
  }
} while (resultslice.length >= 1000);
return results;
Try out this one:
function returnCSVFile() {
  function escapeCSV(val) {
    if (!val) return '';
    if (!(/[",\s]/).test(val)) return val;
    val = val.replace(/"/g, '""');
    return '"' + val + '"';
  }

  function makeHeader(firstLine) {
    var cols = firstLine.getAllColumns();
    var hdr = [];
    cols.forEach(function(c) {
      var lbl = c.getLabel(); // column must have a custom label to be included.
      if (lbl) {
        hdr.push(escapeCSV(lbl));
      }
    });
    return hdr.join(",");
  }

  function makeLine(srchRow) {
    var cols = srchRow.getAllColumns();
    var line = [];
    cols.forEach(function(c) {
      if (c.getLabel()) {
        line.push(escapeCSV(srchRow.getText(c) || srchRow.getValue(c)));
      }
    });
    return line.join(",");
  }

  function getDLFileName(prefix) {
    function pad(v) { if (v >= 10) return v; return "0" + v; }
    var now = new Date();
    return prefix + '-' + now.getFullYear() + pad(now.getMonth() + 1) + pad(now.getDate()) + pad(now.getHours()) + pad(now.getMinutes()) + ".csv";
  }

  var srchRows = getItems('item', 'customsearch219729'); // function that returns your saved search results
  if (!srchRows) throw nlapiCreateError("SRCH_RESULT", "No results from search");

  var fileLines = [makeHeader(srchRows[0])];
  srchRows.forEach(function(soLine) {
    fileLines.push(makeLine(soLine));
  });

  var file = nlapiCreateFile('InventoryUpdate.csv', 'CSV', fileLines.join('\r\n'));
  nlapiSendEmail(768, 5, 'Test csv Mail', 'csv', null, null, null, file);
}
function getItems(recordType, searchId) {
  var savedSearch = nlapiLoadSearch(recordType, searchId);
  var resultset = savedSearch.runSearch();
  var returnSearchResults = [];
  var searchid = 0;
  do {
    var resultslice = resultset.getResults(searchid, searchid + 1000);
    for (var rs in resultslice) {
      returnSearchResults.push(resultslice[rs]);
      searchid++;
    }
  } while (resultslice.length >= 1000);
  return returnSearchResults;
}
I looked into your code, but it seems you're missing the label headers in the generated CSV file. If you are bound to use your existing code, then just replace
var search = nlapiSearchRecord('item', 'customsearch219729');
with
var search = getItems('item', 'customsearch219729');
and use the getItems helper function above to get rid of the 1000-result limit.
Cheers!
I appreciate it has been a while since this was posted and replied to, but for others looking for a more generic response to the original question, the following code should suffice:
var search = nlapiLoadSearch('record_type', 'savedsearch_id');
var searchresults = search.runSearch();
var resultIndex = 0;
var resultStep = 1000;
var resultSet;
do {
  resultSet = searchresults.getResults(resultIndex, resultIndex + resultStep); // retrieves all possible results up to the 1000 max returned
  resultIndex = resultIndex + resultStep; // increment the starting point for the next batch of records
  for (var i = 0; !!resultSet && i < resultSet.length; i++) { // loop through the search results
    // Your code goes here to work on the current resultSet (up to 1000 records per pass)
  }
} while (resultSet.length > 0);
Also worth mentioning, if your code is going to be updating fields / records / creating records you need to bear in mind script governance.
Moving your code to a scheduled script to process large volumes of records is more efficient and allows you to handle governance.
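For instance, a SuiteScript 1.0 scheduled script can check its remaining governance units and yield before hitting the limit. A minimal sketch; the threshold of 200 units is an arbitrary assumption:
// inside the scheduled script's processing loop
var context = nlapiGetContext();
if (context.getRemainingUsage() < 200) { // assumed safety margin
  nlapiSetRecoveryPoint(); // save state so the script can resume from here
  nlapiYieldScript();      // yield; the script is rescheduled and continues
}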
The following line:
var savedsearch = nlapiCreateSearch( 'customrecord_mybigfatlist', filters, columns );
can be adapted to your own saved search like this:
var savedsearch = nlapiLoadSearch('item', 'customsearch219729');
Hope this helps.

Recursive function in node.js giving unexpected results

Well the results are unexpected for me at least. I'm trying to search through a directory and get all files and subfolders, however I'm having some problems with working with subdirectories of subdirectories. The function itself is designed to find all the contents of all the folders and all the files within a given folder, and call itself when one of the things inside a folder is a folder.
However, it never finishes: it keeps feeding the same folder into itself without ever doing anything with it, and eventually it just crashes.
var fs = require('fs');
var array = fs.readdirSync(__dirname);

function getAllSub(array) {
  for (i = 0; i < array.length; i++) {
    if (array[i].indexOf(".") == (-1)) {
      array = array.concat(array[i] + "/" + fs.readdirSync(__dirname + "/" + array[i]));
    }
    if (array[i].indexOf("/") != (-1)) {
      var foldcon = array[i].substr(array[i].indexOf("/") + 1);
      var folder = array[i].substr(0, array[i].indexOf("/"));
      foldcon = foldcon.split(",");
      for (n = 0; n < foldcon.length; n++) {
        foldcon[n] = folder + "/" + foldcon[n]
        if (foldcon[n].indexOf(".") == (-1)) {
          console.log([foldcon[n]]);
          foldcon[n] = getAllSub([foldcon[n]]);
        }
      }
      array.splice(i, 1, foldcon);
    }
  }
  return array;
}

array = getAllSub(array);
console.log(array);
You have a couple of problems with your code. You should be using the fs.stat call to get information about files to determine whether they are directories or files; parsing the paths themselves is really error-prone. Also, readdir just returns the names of the files in the directory, not the full path to the files. Finally, I'd suggest you use the async version of readdir so that you don't lock up the node.js thread while you're doing this.
Here's some sample code for getting all of the folders and files from a start path:
var parseDirectory = function (startPath) {
  fs.readdir(startPath, function (err, files) {
    for (var i = 0, len = files.length; i < len; i++) {
      checkFile(startPath + '/' + files[i]);
    }
  });
};

var checkFile = function (path) {
  fs.stat(path, function (err, stats) {
    if (stats.isFile()) {
      console.log(path + ' is a file.');
    }
    else if (stats.isDirectory()) {
      console.log(path + ' is a directory.');
      parseDirectory(path);
    }
  });
};

parseDirectory(__dirname + '/../public');
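On more recent Node.js versions the same recursion can be written with the promise-based fs API. A sketch under the assumption of Node 10.10+, where fs.promises and the withFileTypes option are available:
const fsp = require('fs').promises;
const path = require('path');

async function walk(startPath) {
  const entries = await fsp.readdir(startPath, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(startPath, entry.name);
    if (entry.isDirectory()) {
      console.log(full + ' is a directory.');
      await walk(full); // recurse into the subdirectory
    } else {
      console.log(full + ' is a file.');
    }
  }
}

walk(__dirname + '/../public').catch(console.error);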

Getting NaN for variables in Node.js.. Arg?

ok, I have a homework assignment where I have to read in files and calculate the distance between a bunch of numbers in the files and then print out the mean and standard deviation of each set of numbers. The end of the script, where the console.log stuff is, is giving all NaN for the variables. Can anyone help me out?
*I've omitted repeating parts of the script to make it shorter (there are more arrays than just the lHipJoint array, and calculations for them, but I left them out).
var fs = require('fs');
var lHipJoint = new Array();

//open the first text file
fs.readFile('file.txt', 'utf8', function (err, data) {
  if (err) throw err;
  //split the data into an array with each line as an element
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    //function that processes each line into an array
    //with each number as an element and does the euclidean dis.
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//do the same for the next file
fs.readFile('file2.txt', 'utf8', function (err, data) {
  if (err) throw err;
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//and again
fs.readFile('file3.txt', 'utf8', function (err, data) {
  if (err) throw err;
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//and again
fs.readFile('file4.txt', 'utf8', function (err, data) {
  if (err) throw err;
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//and again
fs.readFile('file5.txt', 'utf8', function (err, data) {
  if (err) throw err;
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//and again
fs.readFile('file6.txt', 'utf8', function (err, data) {
  if (err) throw err;
  stuff = data.split('\n');
  for (var i = 0; i < stuff.length; i++) {
    processLine(stuff[i]);
  }
  data.length = 0;
  stuff.length = 0;
});

//function to split each line into an array with each number as an element
//then parse the number strings into floats and do the euclidean distances,
//storing the values in arrays for each bone.
function processLine(line) {
  var line1 = line;
  var numbers = line1.split(" ");
  line1.length = 0;
  for (var i = 0; i < numbers.length; i++) {
    var number = parseFloat(numbers[i]);
    line1[i] = number[i];
  }
  lHipJoint = Math.sqrt((line1[6] - line1[9])*(line1[6] - line1[9]) + (line1[7] - line1[10])*(line1[7] - line1[10]) + (line1[8] - line1[11])*(line1[8] - line1[11]));
  //reset the arrays so they can be reused
  line1.length = 0;
  numbers.length = 0;
  number.length = 0;
}

//calculations and output for the mean and SD of each bone's distance from the root bone.
for (var i = 0; i < lHipJoint.length; i++) {
  var lHipJointTotal = lHipJointTotal + lHipJoint[i];
}
var lHipJointMean = lHipJointTotal / lHipJoint.length;
for (var i = 0; i < lHipJoint.length; i++) {
  var lHipJointSDSum = lHipJointSDSum + (lHipJoint[i] - lHipJointMean)*(lHipJoint[i] - lHipJointMean);
}
var lHipJointSD = Math.sqrt(lHipJointSDSum / lHipJoint.length);
console.log("The mean distance of the left hip joint from the root bone is " + lHipJointMean + " and the standard deviation is " + lHipJointSD + ".\n");
You are doing a lot of strange things here in your script; I will try to bring up as many as I can.
First of all, don't reset arrays. You're in a garbage-collected language, so just allocate new ones.
Also, in the processLine function you are assigning numbers to the indexes of a string. I assume you think it's an array, but that's not the same thing: strings are immutable (can't be changed) in javascript.
In the aggregating for loops at the bottom of the file you are declaring the variable in every iteration. You want to declare it before the loop, like this:
var x = 0;
for (var i = 0; i < list.length; i++) {
  x = x + ......
}
Your calls to read the files all do the same thing, so you want to use a single function for that: write it once.
You are assigning to the lHipJoint array in the processLine function; my understanding is that you want to add the calculated value to the array. You can do this with the push method, like this:
lHipJoint.push(Math.sqrt(........
Also, there's a problem with using the async file reading, since you're printing the result before you have even read the files. If you want to use the async ones, you need to coordinate so that you only print the result when all the file reading is done. A tip is to use the non-async ones in this case.
I understand this is an assignment, so you might not want to read my attempt to correct the program beneath. Maybe read it after you've handed in yours, but I'm leaving it here as a Q&A reference for others reading this.
var fs = require("fs");
var filePaths = ["file.txt", "file2.txt",
"file3.txt", "file4.txt",
"file5.txt", "file6.txt"];
var lHipJoint = [];
filePaths.forEach(function(path) {
var content = fs.readFileSync(path, "utf-8");
var lines = content.split("\n");
lines.forEach(function(line) {
if(line.trim() === "") return;
var numbers = line.split("\t").map(parseFloat);
// im not touching your calculation :D
lHipJoint.push(Math.sqrt((numbers[6] - numbers[9])*(numbers[6] - numbers[9])
+ (numbers[7] - numbers[10])*(numbers[7] - numbers[10]) + (numbers[8] - numbers[11])
* (numbers[8] - numbers[11])));
});
});
var lHipJointTotal = lHipJoint.reduce(function(p, c) {
return p + c;
});
var lHipJointMean = lHipJointTotal / lHipJoint.length;
var lHipJointSDSum = lHipJoint.reduce(function(p, c) {
return p + (c - lHipJointMean) * (c - lHipJointMean);
}, 0);
var lHipJointSD = Math.sqrt(lHipJointSDSum/lHipJoint.length);
console.log("The mean distance of the left hip joint from the root bone is "
+ lHipJointMean + " and the standard deviation is " + lHipJointSD + ".\n");
There might be some errors in this program since I don't know how the data looks, but I hope this helps you.
