I have a function which uploads a file to a server and returns a promise.
I'd like to check when each promise finishes and display a log of "Successfully deployed filename..".
I don't care about the order of the promises, but Q.all() doesn't help me since it only resolves when all of the promises have finished, or fails fast when one of them fails.
I'd like to write something that checks whenever one of my promises finishes and displays the log.
Since each file can be large, I want the user to be alerted about what has been uploaded so far.
When one of my files fails, the user will see something like:
Successfully deployed file1.txt
Successfully deployed file2.txt
Failed deploying file3.txt
Why not simply loop over your files (or promises)?
var files = ['file1.txt', 'file2.txt'/*, ...*/];

files.forEach(function (file) {
  uploadFile(file).done(function () {
    console.log('Successfully deployed ' + file);
  }, function () {
    console.log('Failed deploying ' + file);
  });
});
@kamituel's answer works perfectly. If you need fail-fast behaviour though (as you requested in the comment), you can just set a flag on failure.
var failed = false;
var files = ['file1.txt', 'file2.txt'/*, ...*/];

files.forEach(function (file) {
  uploadFile(file).done(function () {
    if (failed) return;
    console.log('Successfully deployed ' + file);
  }, function () {
    if (failed) return;
    failed = true;
    console.log('Failed deploying ' + file);
  });
});
I am using Formidable with Express in Node.js in an attempt to have a simple single-file upload scheme. I have confirmed that a file is actually sent over from the client side, but it seems to run into trouble on the server side.
index.js
const express = require('express');
const formidable = require('formidable');
const fs = require('fs');

const app = express();

app.post('/', (req, res) => {
  const form = formidable();

  form.on('file', (filename, file) => {
    fs.rename(file.path, `./data/nodes.csv`, err => {
      if (err) {
        console.log(`There was an error in downloading a CSV file: ${err}`);
        return null;
      } else {
        console.log("CSV file has been uploaded correctly.");
      }
    });
  });

  form.on('error', err => {
    console.log(`There was an error in downloading a CSV file: ${err}`);
    return null;
  });

  form.on('end', () => {
    console.log(fs.readFileSync('./data/nodes.csv')); // test to see if file exists
    const nodes = assignMetrics();
    console.log(nodes);
    return nodes;
  });

  form.parse(req);
});
The main trouble I seem to find is that the form.on('end', ...) event does not wait until the file has finished uploading before it fires. I have confirmed this by trying to read the file in that handler, but by that point it doesn't exist. The documentation, though, appears to suggest the event only fires "after all files have been flushed" (from the API's pipe, it implies).
There appear to be no other events available that would wait until the file has been uploaded before being called. I also don't want to start throwing in layers of promises and such unless it is the only option, as each new layer of promises I find is a chance for unintended effects to happen.
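For what it's worth, the 'end' event only tells you formidable has finished parsing the request; it knows nothing about the fs.rename started in the 'file' handler, which may still be in flight. A minimal sketch (not code from the question) that avoids extra promise layers by doing the follow-up work inside the rename callback itself, reusing the question's own setup (assignMetrics() is the question's helper):

app.post('/', (req, res) => {
  const form = formidable();

  form.on('file', (filename, file) => {
    fs.rename(file.path, './data/nodes.csv', err => {
      if (err) {
        console.log(`There was an error in downloading a CSV file: ${err}`);
        return res.sendStatus(500);
      }
      console.log("CSV file has been uploaded correctly.");
      console.log(fs.readFileSync('./data/nodes.csv')); // the file is guaranteed to exist here
      const nodes = assignMetrics();                    // the question's own helper
      res.json(nodes);
    });
  });

  form.on('error', err => {
    console.log(`There was an error in downloading a CSV file: ${err}`);
  });

  form.parse(req);
});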
So I need to deploy a Google Cloud Function that allows me to do two things.
The first one is to DOWNLOAD any file from an SFTP/FTP server into the /tmp local directory of the Cloud Function. The second step is to UPLOAD this file to a bucket on Google Cloud Storage.
Actually, I know how to upload, but I don't get how to DOWNLOAD files from the FTP server to my local /tmp directory.
So, actually, I have written a GCF that receives as parameters (in the body) the configuration (config) that allows me to connect to the FTP server, the filename, and the path.
For my test I used the following public SFTP test server: https://www.sftp.net/public-online-sftp-servers with this configuration.
{
  config:
  {
    hostname: 'test.rebex.net',
    username: 'demo',
    port: 22,
    password: 'password'
  },
  filename: 'FtpDownloader.png',
  path: '/pub/example'
}
After my DOWNLOAD, I start my UPLOAD. For that, I check whether the DOWNLOADED file is present at '/tmp/filename' before the UPLOAD, but the file is never there.
See the following code:
exports.transferSFTP = (req, res) =>
{
  let body = req.body;
  if (body.config)
  {
    if (body.filename)
    {
      //DOWNLOAD
      const Client = require('ssh2-sftp-client');
      const fs = require('fs');
      const client = new Client();

      let remotePath;
      if (body.path)
        remotePath = body.path + "/" + body.filename;
      else
        remotePath = "/" + body.filename;

      let dst = fs.createWriteStream('/tmp/' + body.filename);

      client.connect(body.config)
        .then(() => {
          console.log("Client is connected !");
          return client.get(remotePath, dst);
        })
        .catch(err =>
        {
          res.status(500);
          res.send(err.message);
        })
        .finally(() => client.end());

      //UPLOAD
      const {Storage} = require('@google-cloud/storage');
      const storage = new Storage({projectId: 'my-project-id'});
      const bucket = storage.bucket('my-bucket-name');
      const file = bucket.file(body.filename);

      fs.stat('/tmp/' + body.filename, (err, stats) =>
      {
        if (stats.isDirectory())
        {
          fs.createReadStream('/tmp/' + body.filename)
            .pipe(file.createWriteStream())
            .on('error', (err) => console.error(err))
            .on('finish', () => console.log('The file upload is completed !!!'));
          console.log("File exist in tmp directory");
          res.status(200).send('Successfully executed !!!')
        }
        else
        {
          console.log("File is not on the tmp Google directory");
          res.status(500).send('File is not loaded in tmp Google directory')
        }
      });
    }
    else res.status(500).send('Error: no filename on the body (filename)');
  }
  else res.status(500).send('Error: no configuration elements on the body (config)');
}
So I receive the following message: "File is not loaded in tmp Google directory", because after the fs.stat() call stats.isDirectory() is false. Before I added the fs.stat() check to see whether the file is there, I was just getting files with the same filenames but without any content.
So I conclude that my UPLOAD works, but without the DOWNLOADED file it is really hard to copy it to Google Cloud Storage.
Thanks for your time and I hope I will find a solution.
The problem is that you're not waiting for the download to complete before the code which performs the upload starts running. While you do have a catch() statement, that is not sufficient.
Think of the first part (the download) as a separate block of code. You have told JavaScript to go off and do that block asynchronously. As soon as your script has done that, it immediately goes on to the rest of your script; it does not wait for the 'block' to complete. As a result, your code to do the upload is running before the download has been completed.
There are two things you can do. The first would be to move all the code which does the uploading into a 'then' block following the get() call (BTW, you could simplify things by using fastGet()). e.g.
client.connect(body.config)
  .then(() => {
    console.log("Client is connected !");
    return client.fastGet(remotePath, localPath);
  })
  .then(() => {
    // do the upload
  })
  .catch(err => {
    res.status(500);
    res.send(err.message);
  })
  .finally(() => client.end());
The other alternative would be to use async/await, which will make your code look a little more 'synchronous'. Something along the lines of (untested):
async function doTransfer(remotePath, localPath) {
  try {
    let client = new Client();
    await client.connect(config);
    await client.fastGet(remotePath, localPath);
    await client.end();
    uploadFile(localPath);
  } catch (err) {
    ....
  }
}
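uploadFile() in the snippet above is left to you; a minimal sketch of what it could look like, reusing the @google-cloud/storage client and the placeholder project/bucket names from the question (bucket.upload() reads a local file and resolves once the object has been written):

const {Storage} = require('@google-cloud/storage');

function uploadFile(localPath, destination) {
  const storage = new Storage({projectId: 'my-project-id'}); // placeholder id from the question
  const bucket = storage.bucket('my-bucket-name');           // placeholder name from the question
  return bucket.upload(localPath, {destination: destination}); // returns a promise
}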
Here is a GitHub project that answers a similar issue to yours.
There they deploy a Cloud Function that downloads the file from the FTP server and uploads it directly to the bucket, skipping the step of having the temporary file.
The code works, but the deployment instructions in that GitHub repo are out of date, so here are the deploy steps as I suggest them, which I verified work:
Activate Cloud Shell and run:
1. Clone the repository from GitHub: git clone https://github.com/RealKinetic/ftp-bucket.git
2. Change to the directory: cd ftp-bucket
3. Adapt your code as needed
4. Create a GCS bucket if you don't have one already; you can create one with gsutil mb -p [PROJECT_ID] gs://[BUCKET_NAME]
5. Deploy: gcloud functions deploy importFTP --stage-bucket [BUCKET_NAME] --trigger-http --runtime nodejs8
In my personal experience this is more efficient than splitting it into two functions, unless you need to do some file editing within the same Cloud Function.
I've written a program that creates HTML files. I then attempt to upload the files to my S3 bucket at the end of the program. It seems that the problem is that my program terminates before allowing the function to complete or receiving a callback from the function.
Here is the gist of my code:
let aws = require('aws-sdk');

aws.config.update({
  //Censored keys for security
  accessKeyId: '*****',
  secretAccessKey: '*****',
  region: 'us-west-2'
});

let s3 = new aws.S3({
  apiVersion: "2006-03-01",
});

function upload(folder, platform, browser, title, data){
  s3.upload({
    Bucket: 'html',
    Key: folder + platform + '/' + browser + '/' + title + '.html',
    Body: data
  }, function (err, data) {
    if (err) {
      console.log("Error: ", err);
    }
    if (data) {
      console.log("Success: ", data.Location);
    }
  });
}
/*
*
* Here is where the program generates HTML files
*
*/
upload(folder, platform, browser, title, data);
If I call the upload() function (configured with test/dummy data) before the HTML generation section of my code, the upload succeeds. The test file successfully uploads to S3. However, when the function is called at the end of my code, I do not receive an error or success response. Rather, the program simply terminates and the file isn't uploaded to S3.
Is there a way to wait for the callback from my upload() function before continuing the program? How can I prevent the program from terminating before uploading my files to S3? Thank you!
Edit: After implementing Deiv's answer, I found that the program is still not uploading my files. I still am not receiving a success or error message of any kind. In fact, it seems like the program just skips over the upload() function. To test this, I added a console.log("test") after calling upload() to see if it would execute. Sure enough, the log prints successfully.
Here's some more information about the project: I'm utilizing WebdriverIO v4 to create HTML reports of various tests passing/failing. I gather the results of the tests via multiple event listeners (ex. this.on('test:start'), this.on('suite:end'), etc.). The final event is this.on('end'), which is called when all of the tests have completed execution. It is here were the test results are sorted based on which Operating System it was run on, Browser, etc.
I'm now noticing that my program won't do anything S3-related in the this.on('end') event handler, even if I put it at the very beginning of the handler, though I'm still convinced it's because it isn't given enough time to execute, because the handler is able to process the results and create HTML files very quickly. I have this bit of code that lists all buckets in my S3:
s3.listBuckets(function (err, data) {
  if (err) {
    console.log("Error: ", err);
  } else {
    console.log("Success: ", data.Buckets);
  }
});
Even this doesn't return a result of any kind when run at the beginning of this.on('end'). Does anyone have any ideas? I'm really stumped here.
Edit: Here is my new code which implements Naveen's suggestion:
this.on('end', async (end) => {
  /*
   * Program sorts results and creates variable 'data', the contents of the HTML file.
   */
  await s3.upload({
    Bucket: 'html',
    Key: key,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log("Error: ", err);
    }
    if (data) {
      console.log("Success: ", data.Location);
    }
  }).on('httpUploadProgress', event => {
    console.log(`Uploaded ${event.loaded} out of ${event.total}`);
  });
});
The logic seems sound, but still I get no success or error message, and I do not see the upload progress. The HTML file does not get uploaded to S3.
You can use promises to wait for your upload function to finish. Here's what it will look like:
function upload(folder, platform, browser, title, data) {
  return new Promise((resolve, reject) => {
    s3.upload({
      Bucket: 'html',
      Key: folder + platform + '/' + browser + '/' + title + '.html',
      Body: data
    }, function(err, data) {
      if (err) {
        console.log("Error: ", err);
        return reject(err);
      }
      if (data) {
        console.log("Success: ", data.Location);
        return resolve(); //potentially return resolve(data) if you need the data
      }
    });
  });
}
/*
 *
 * Here is where the program generates HTML files
 *
 */
upload(folder, platform, browser, title, data)
  .then(data => { //if you don't care for the data returned, you can also do .then(() => {
    //handle success, do whatever else you want, such as calling callback to end the function
  })
  .catch(error => {
    //handle error
  });
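As a side note, the request object returned by s3.upload() in the v2 AWS SDK also exposes a .promise() method, so if you prefer async/await over the manual wrapper, a sketch along the same lines (reusing the bucket and key from the question) would be:

async function upload(folder, platform, browser, title, data) {
  const result = await s3.upload({
    Bucket: 'html',
    Key: folder + platform + '/' + browser + '/' + title + '.html',
    Body: data
  }).promise(); // resolves once the upload completes, rejects on error
  console.log("Success: ", result.Location);
  return result;
}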
I'm using React, Electron, Node.js, async.js, Redux and thunk.
I wrote the following code, which is supposed to download a list of files and write them to disk. In my code, when the user presses a button I call this action creator:
export function downloadList(pack) {
  return (dispatch, getState) => {
    const { downloadManager } = getState();
    async.each(downloadManager.downloadQueue[pack].libs, async (url, callback) => {
      const filename = url.split('/').pop().split('#')[0].split('?')[0];
      await downloadFile(url, `dl/${filename}`);
      callback();
    }, (err) => {
      if (err) {
        console.log('A file failed to process');
      } else {
        dispatch({
          type: DOWNLOAD_COMPLETED,
          packName: pack
        });
      }
    });
  };
}

async function downloadFile(url, path) {
  const file = fs.createWriteStream(path);
  const request = https.get(url, (response) => {
    response.pipe(file);
    file.on('finish', () => {
      file.close();
    });
  }).on('error', (err) => { // Handle errors
    fs.unlink(path); // Delete the file async. (But we don't check the result)
  });
}
It does what it's supposed to do, but while it does that it blocks the entire UI. I really can't understand why this happens, since if I use a setTimeout with a 3000 ms delay inside the async.each it doesn't block the UI.
Another strange behaviour is that if I use the eachLimit function of asyncJS it only downloads that limit of files: if I want to download 100 files but set eachLimit to 10 in parallel, it just downloads the first 10 files and then stops. Can you enlighten me about this?
I wanted to use axios to download the files, since it doesn't need to know whether the URLs are http or https, but I can't find any resource on using axios with a stream responseType.
I can answer the first part. Pretty much every existing implementation of JavaScript runs on one thread. This means that the runtime is concurrent, but not parallel, i.e. the runtime does one and exactly one thing at a time. This means that if there is a function call that takes a while, it will block everything else. Therefore, something in the downloadList function is blocking the event loop. However, if you use setTimeout, then the work is pushed onto the message queue, which unblocks the event loop and allows the UI to be rendered. For more information on the event loop, check out this video.
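A tiny, self-contained illustration of that point (an assumed example, not code from the question): a long synchronous loop blocks every callback and repaint until it returns, while setTimeout only defers work onto the message queue.

console.log('start');

setTimeout(() => console.log('timeout callback'), 0); // queued, runs once the call stack is empty

// A long synchronous loop: no callback, timer or UI repaint can run until it finishes.
const end = Date.now() + 1000;
while (Date.now() < end) {}

console.log('end'); // logged before 'timeout callback'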
I am using express.js and Uploadify to upload large files to a Node server. Everything works fine, except when more than one user logs in and tries to upload a file at the same time. It still works, but it seems that the server can only handle one upload at a time, so each user has to wait until the other users finish their uploads, which is unacceptable.
Here is the server-side code:
exports.upload = function(req, res, next){
  // console.log( req.body);
  // console.log(req.files);
  var tmp_path = req.files.product_video.path;
  var target_path = 'F:/shopping/shop/' + req.body.shop_id + '/' + req.files.product_video.name;
  fs.rename(tmp_path, target_path, function(err) {
    if (err) {
      console.log(err)
    }
    else{
      fs.unlink(tmp_path, function() {
        if (err){
          console.log(err)
        }else{
          exec("C:/ffmpeg/bin/ffmpeg -i shop/"+ req.body.shop_id+ '/' + req.files.product_video.name + " -ss 00:01:00.00 -r 1 -an -vframes 1 -s 250x150 -f mjpeg shop/"+ req.body.shop_id+ '/' + req.files.product_video.name + "_thumbnail.jpg", function(err){
            var data = {
              'thum_src':'shop/'+ req.body.shop_id+ '/' + req.files.product_video.name + "_thumbnail.jpg",
              'video_name':req.files.product_video.name,
            }
            res.send(data);
          });
        }
      });
    }
  });
};
Here is the front-end code:
$('#input_product_video').uploadify({
  'formData': {'shop_id': $('#shop_id').val()},
  'buttonText': 'add',
  'fileSizeLimit': '100MB',
  'fileObjName': 'product_video',
  'uploader': '/uploads',
  'swf': '/public/javascripts/lib/uploadify/uploadify.swf',
  'onUploadSuccess': function(file, data){
    console.log(file);
    console.log(JSON.parse(data));
    console.log(response);
  }
});
You shouldn't need the fs.unlink call, because fs.rename moves the file to the target path rather than copying it, so if fs.rename succeeds the temporary file is already gone. Remove the whole fs.unlink block, which doesn't check for an error anyway.
Then make sure that on every possible path through the code you are either calling next(err) with an error or calling res.send. It looks like there are code paths here where you never respond and just let the request time out. Make those changes and see if that gets it working.
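A rough sketch of that suggestion (untested; the paths, field names and ffmpeg command are copied from the question unchanged):

exports.upload = function(req, res, next) {
  var tmp_path = req.files.product_video.path;
  var target_path = 'F:/shopping/shop/' + req.body.shop_id + '/' + req.files.product_video.name;
  fs.rename(tmp_path, target_path, function(err) {
    if (err) return next(err); // always either respond or forward the error
    exec("C:/ffmpeg/bin/ffmpeg -i shop/" + req.body.shop_id + '/' + req.files.product_video.name +
         " -ss 00:01:00.00 -r 1 -an -vframes 1 -s 250x150 -f mjpeg shop/" +
         req.body.shop_id + '/' + req.files.product_video.name + "_thumbnail.jpg",
      function(err) {
        if (err) return next(err);
        res.send({
          thum_src: 'shop/' + req.body.shop_id + '/' + req.files.product_video.name + "_thumbnail.jpg",
          video_name: req.files.product_video.name
        });
      });
  });
};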