Downloading multiple files from Google Cloud Storage using Node.js fails without errors

My code successfully downloads some of the files but not all. Execution just stops in the middle without any errors and the last file may be only partially downloaded. It always fails to download some files even if I change the number of files or if I use different files. File size doesn't seem to matter.
I've tried many things, but I don't seem to be able to catch any exceptions or errors when it stops. I've tried try-catch and process.on event handlers, but nothing is ever caught.
I'm pretty sure that a few months ago I used this kind of code to download hundreds of files without any problems.
Here is a simplified version of my current code.
const { Storage } = require('@google-cloud/storage');
const storage = new Storage({ keyFilename: 'D:/myProject/myKeyFile.json' });
var folder = 'D:/myProject/downloadedFiles';
var bucketName = 'bucket_1';

async function downloadFile(fileName) {
  var fullPath = folder + '/' + fileName;
  const options = {
    destination: fullPath,
  };
  await storage.bucket(bucketName).file(fileName).download(options);
  console.log(`gs://${bucketName}/${fileName} downloaded to ${fullPath}.`);
}

async function downloadFiles() {
  var filenames = ['file1', 'file2', 'file3', 'file4', 'file5', 'file6'];
  for (var i = 0; i < filenames.length; i++) {
    await downloadFile(filenames[i]).catch(console.error);
  }
}

downloadFiles().catch(console.error);

It turns out the download only fails on that specific computer, so my code is not the problem. My guess is that it's network related, perhaps because a network switch was replaced some time ago.
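For anyone debugging a similar silent stall: a stream-based download with explicit error listeners (a hedged sketch, not the code I actually ran) at least surfaces transport-level failures instead of letting them disappear:

const { Storage } = require('@google-cloud/storage');
const fs = require('fs');

const storage = new Storage({ keyFilename: 'D:/myProject/myKeyFile.json' });

// Streams one object to disk and rejects on any stream error,
// so network problems show up instead of stalling silently.
function downloadViaStream(bucketName, fileName, destination) {
  return new Promise((resolve, reject) => {
    storage
      .bucket(bucketName)
      .file(fileName)
      .createReadStream()
      .on('error', reject)
      .pipe(fs.createWriteStream(destination))
      .on('error', reject)
      .on('finish', resolve);
  });
}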

Related

Unzip a MacOS .app file in Electron using Node.js

I am trying to unzip a file called Restart.Manager.zip which contains a single item, Restart Manager.app. This code seems to unzip the file correctly but upon launching the outputted .app folder, I get an error "The application “Restart Manager” can’t be opened."
const JSZip = require('jszip');
const fs = require('fs');
const jetpack = require('fs-jetpack');
const originalFs = require('original-fs');

async function extractZip(filePath, destination) {
  fs.readFile(filePath, function(err, data) {
    if (!err) {
      var zip = new JSZip();
      zip.loadAsync(data).then(function(contents) {
        Object.keys(contents.files).forEach(function(filename) {
          const file = zip.file(filename);
          if (file) {
            file.async('nodebuffer').then(function(content) {
              var dest = destination + '/' + filename;
              if (filename.endsWith('.asar')) {
                originalFs.writeFileSync(dest, content);
              } else {
                jetpack.write(dest, content);
              }
            });
          }
        });
      });
    }
  });
}

extractZip('/Users/me/Desktop/Restart.Manager.zip', '/Users/me/Desktop');
Manually unzipping the .zip file creates a working .app so I'm not sure where the code is messing up.
Here is the file on GitHub releases for testing: https://github.com/itw-creative-works/restart-manager-download-server/releases/download/installer/Restart.Manager.zip, but feel free to use your own zipped .app file (although it should probably be an Electron app, in which case you can find one here: https://www.electronjs.org/apps).
I have tried zipping things like a .png and it unzips fine, which makes me think the problem is with .app files specifically, or possibly with the fact that the .app contains a .asar file, which Electron supposedly has trouble handling through the fs module: https://github.com/electron/electron/issues/1658
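One possible culprit (an assumption on my part, not a confirmed fix): writing plain buffers discards the Unix execute bits that a .app bundle's binaries need, and it also ignores symlinks. JSZip exposes the original mode on each entry as unixPermissions, so a sketch that restores it after writing would look roughly like this (it omits the .asar/original-fs special-casing from the code above):

const JSZip = require('jszip');
const fs = require('fs');
const path = require('path');

async function extractZipPreservingModes(zipPath, destination) {
  const data = fs.readFileSync(zipPath);
  const zip = await JSZip.loadAsync(data);
  for (const entry of Object.values(zip.files)) {
    const dest = path.join(destination, entry.name);
    if (entry.dir) {
      fs.mkdirSync(dest, { recursive: true });
      continue;
    }
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    fs.writeFileSync(dest, await entry.async('nodebuffer'));
    if (entry.unixPermissions) {
      // Restore mode bits (e.g. 0o755) so Contents/MacOS/* stays executable.
      fs.chmodSync(dest, entry.unixPermissions & 0o777);
    }
  }
}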

Use .sfz soundfonts to render audio with WebMScore

I'm using WebMScore to render audio of music scores (it's a fork of MuseScore that runs in the browser or node).
I can successfully load my own local .sf2 or .sf3 files; however, trying to load an .sfz soundfont throws error 15424120 (and error.message is simply 'undefined').
Unlike .sf2 and .sf3, which contain the sounds and instructions in a single file, the .sfz format is just a text instruction file that refers to a separate folder of samples.
The reason I need .sfz is that I need to be able to edit the file textually and programmatically, without an intervening soundfont generator.
Is there a way to use .sfz's? Do I need to specify Zerberus (the Musescore .sfz player)? Do I need a different file structure? Please see below.
My environment is node js, with the following test case and file structure:
File Structure
Project Folder
  app.js
  testScore.mscz
  mySFZ.sfz
  samples
    one.wav
    two.wav
    etc.wav
Test Case (works with .sf3, errors with .sfz)
const WebMscore = require('webmscore');
const fs = require('fs');

// free example scores available at https://musescore.com/openscore/scores
const name = 'testScore.mscz';
const exportedPrefix = 'exported';
const filedata = fs.readFileSync(`./${name}`);

WebMscore.ready.then(async () => {
  const score = await WebMscore.load('mscz', filedata, [], false);
  await score.setSoundFont(fs.readFileSync('./mySFZ.sfz'));
  try { fs.writeFileSync(`./${exportedPrefix}.mp3`, await score.saveAudio('mp3')); }
  catch (err) { console.log(err); }
  score.destroy();
});
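As an aside, the appeal of .sfz here is exactly that it's plain text: the instrument definition can be generated or edited straight from Node without any soundfont tooling. A hypothetical snippet (the opcodes are standard SFZ, but the mapping values are made up):

const fs = require('fs');

// Each <region> maps a sample file (relative to the .sfz) onto a key range.
const sfz = [
  '<region> sample=samples/one.wav lokey=36 hikey=59 pitch_keycenter=48',
  '<region> sample=samples/two.wav lokey=60 hikey=84 pitch_keycenter=72',
].join('\n');

fs.writeFileSync('./mySFZ.sfz', sfz);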

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
  saveFile: function(blob, name, path, encoding) {
    var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
        name = cleanName(name || 'file'), encoding = encoding || 'binary',
        chroot = Meteor.chroot || 'public';

    // Clean up the path. Remove any initial and final '/' -we prefix them-,
    // any sort of attempt to go to the parent directory '..' and any empty directories in
    // between '/////' - which may happen after removing '..'
    path = chroot + (path ? '/' + path + '/' : '/');

    // TODO Add file existance checks, etc...
    fs.writeFile(path + name, blob, encoding, function(err) {
      if (err) {
        throw (new Meteor.Error(500, 'Failed to save file.', err));
      } else {
        console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
      }
    });

    function cleanPath(str) {
      if (str) {
        return str.replace(/\.\./g, '').replace(/\/+/g, '').
          replace(/^\/+/, '').replace(/\/+$/, '');
      }
    }

    function cleanName(str) {
      return str.replace(/\.\./g, '').replace(/\//g, '');
    }
  }
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, it repeats the call several times, and each time it causes Meteor to reset (I'm using the Windows version, 0.5.4). The F12 console ends up looking like this: [screenshot omitted]. The Meteor console loops over the startup code each time the 503 happens and repeats the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps alternating between displaying correctly and showing as broken, as if fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor automatically scans directories for file changes (in order to relaunch the application on the newest code base), the file you are creating is actually what's causing the server reset.
Meteor doesn't scan directories beginning with a dot (so-called "hidden" directories), such as .git for example, so you could use this behaviour to your advantage by writing your files into a .directory of your own.
You should also consider using writeFileSync, since Meteor methods are intended to run synchronously (inside Node fibers), contrary to the usual Node pattern of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback.
asynchronousCall(function(error, result) {
  if (error) {
    // handle error
  } else {
    // do something with result
    Collection.update(id, result); // error! Meteor code must run inside a fiber
  }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/Future, but that's beyond the scope of this question; I recommend this EventedMind episode on node Futures to understand this specific area.
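Putting both suggestions together, a minimal sketch of the method (my assumption for illustration, reusing the old __meteor_bootstrap__ require from the question; the path/name sanitising is omitted for brevity):

Meteor.methods({
  saveFile: function(blob, name) {
    var fs = __meteor_bootstrap__.require('fs');
    var dir = '.uploads'; // dot-directory: Meteor's file watcher ignores it, so no restart
    if (!fs.existsSync(dir)) fs.mkdirSync(dir);
    fs.writeFileSync(dir + '/' + name, blob, 'binary');
    // Still inside the method's fiber, so Meteor collections can be used safely here.
    return dir + '/' + name;
  }
});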

Node.js - fs.exists not working?

I'm a beginner in Node.js, and was having trouble with this piece of code.
var fs = require('fs');

Framework.Router = function() {
  this.run = function(req, res) {
    fs.exists(global.info.controller_file, function(exists) {
      if (exists) {
        // Here's the problem
        res.writeHead(200, { 'Content-Type': 'text/html' });
        var cname = App.ucfirst(global.info.controller) + 'Controller';
        var c = require(global.info.controller_file);
        var c = new App[cname]();
        var action = global.info.action;
        c[action].apply(global.info.action, global.info.params);
        res.end();
      } else {
        App.notFound();
        return false;
      }
    });
  };
};
The problem lies in the part after checking whether global.info.controller_file exists; I can't seem to get the code inside if (exists) { ... } to work properly.
I tried logging the values of all the variables in that section, and they have their expected values; however, the line c[action].apply(global.info.action, global.info.params) is not running as expected.
It is supposed to call a function in the controller_file that does a simple res.write('hello world');. I wasn't having this problem before I started checking for the file with fs.exists; everything inside the if statement worked perfectly fine before this check.
Why is the code not running as expected? Why does the request just time out?
Does it have something to do with the whole synchronous vs asynchronous thing? (Sorry, I'm a complete beginner)
Thank you
Like others have commented, I would suggest you rewrite your code to bring it more in-line with the Node.js design patterns, then see if your problem still exists. In the meantime, here's something which may help:
The advice about not using require dynamically at "run time" should be heeded, and calling fs.exists() on every request is tremendously wasteful. However, say you want to load all *.js files in a directory (perhaps a "controllers" directory). This is best accomplished using an index.js file.
For example, save the following as app/controllers/index.js
var fs = require('fs');
var files = fs.readdirSync(__dirname);
var dotJs = /\.js$/;

for (var i in files) {
  if (files[i] !== 'index.js' && dotJs.test(files[i]))
    exports[files[i].replace(dotJs, '')] = require('./' + files[i]);
}
Then, at the start of app/router.js, add:
var controllers = require('./controllers');
Now you can access the app/controllers/test.js module by using controllers.test. So, instead of:
fs.exists(controllerFile, function (exists) {
  if (exists) {
    ...
  }
});
simply:
if (controllers[controllerName]) {
  ...
}
This way you can retain the dynamic functionality you desire without unnecessary disk IO.
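To tie it back to the original router, dispatch could then look roughly like this (a sketch: the controller/action names and App helpers come from the question, but exactly what each controller module exports is an assumption on my part):

var controllers = require('./controllers');

Framework.Router = function() {
  this.run = function(req, res) {
    var Controller = controllers[global.info.controller]; // e.g. controllers.test
    if (!Controller) {
      App.notFound();
      return;
    }
    res.writeHead(200, { 'Content-Type': 'text/html' });
    var c = new Controller();
    // Invoke the action with the controller instance as `this`.
    c[global.info.action].apply(c, global.info.params);
    res.end();
  };
};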

Why append rather than write when using knox / node.js to grab file from Amazon s3

I'm experimenting with the knox module for node.js as a way of managing some small files in an Amazon S3 bucket. Everything works fine stand-alone: I can upload a file, download a file, etc. However, I want to be able to download a file on recurring schedule. When I modify the code to run on an interval, I'm getting the downloaded file appending to the previous instance instead of overwriting.
I'm not sure if I've made a mistake in the file write code or in the knox handling code. I've tried several different write approaches (writeFile, writeStream, etc.) and I've looked at the knox source code. Nothing obvious to me stands out as a problem. Here's the code I'm using:
var knox = require('knox');
var fs = require('fs');

var downFile = DOWNFILE;
var downTxt = '';
var timer = INTERVAL;
var path = S3PATH + downFile;

setInterval(function() {
  var s3client = knox.createClient({
    key: '********************',
    secret: '**********************************',
    bucket: '********'
  });
  s3client.get(path).on('response', function(response) {
    response.setEncoding('ascii');
    response.on('data', function(chunk) {
      downTxt += chunk;
    });
    response.on('end', function() {
      fs.writeFileSync(downFile, downTxt, 'ascii');
    });
  }).end();
}, timer);
The problem is with the placement of var downTxt = '';. That is the only place you set downTxt to an empty string, so every time you retrieve more data you append it to the data from the previous request, because the previous contents are never cleared. The simplest fix is to move that line to just before the setEncoding line.
However, the way you are processing the data is unnecessarily complicated. Try something like this instead. You don't need to recreate the client every time, and setting the encoding will just break things if you are downloading non-text files (it makes no difference for text files). Next, you shouldn't collect the data manually; you can start writing it to the file as soon as you receive it. Lastly, since the request is a standard stream, you don't need to monitor the 'data' event because you can just use pipe.
var knox = require('knox'),
    fs = require('fs'),
    downFile = DOWNFILE,
    timer = INTERVAL,
    path = S3PATH + downFile,
    s3client = knox.createClient({
      key: '********************',
      secret: '**********************************',
      bucket: '********'
    });

(function downloadFile() {
  var str = fs.createWriteStream(downFile);
  s3client.get(path).pipe(str);
  str.on('close', function() {
    setTimeout(downloadFile, timer);
  });
})();
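If piping the request object itself ever gives you trouble, an alternative sketch (my assumption, not part of the answer above) keeps the question's 'response' handler and .end() call but pipes the response straight into a fresh write stream, which truncates the file on every run:

function downloadOnce(s3client, remotePath, localPath, done) {
  s3client.get(remotePath).on('response', function(res) {
    var out = fs.createWriteStream(localPath); // createWriteStream truncates by default
    res.pipe(out);
    out.on('close', done);
  }).end();
}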
