Reading local filestream with fallback to other source - node.js

I store files in Amazon S3, but also maintain a local file cache. When I need a file I want to check the cache first. I want to avoid testing for local file existance before reading, both because fs.exists will be deprecated and the file can actually be deleted between the exists-check and the file read.
I want to use promises and streams. The below example has a fallback to another local file. My real code will have S3 as fallback.
Would the below be a good solution?
The only way I found to get information of a failed read was to hook up an error handler to the stream. Once I get the "readable" event I unhook my temporary error handler.
I also wonder If I really need to unhook the handler when I use "once" to hook it up.
'use strict';
const fs = require('fs');
function tryToReadLocalFile() {
return new Promise(function(resolve, reject) {
let rs = fs.createReadStream('./test1.txt');
let errorListener = function(err) {
reject(err);
};
rs.once('error', errorListener)
rs.on('readable', () => {
rs.removeListener('error', errorListener);
resolve(rs)
});
});
}
function tryToReadAnotherFile() {
return new Promise(function(resolve, reject) {
let rs = fs.createReadStream('./test2.txt');
let errorListener = function(err) {
reject(err);
};
rs.once('error', errorListener)
rs.on('readable', () => {
rs.removeListener('error', errorListener);
resolve(rs)
});
});
}
tryToReadLocalFile()
.catch(function(err) {
if(err.code === 'ENOENT') {
console.log('test1.txt not found. Fallback to test2.txt')
//Reading from another file as a test. Should read from S3 as fallback
return tryToReadAnotherFile();
} else {
return Promise.reject(err);
}
}).then(function(file) {
console.log('writing to test.txt');
let ws = fs.createWriteStream('./test.txt');
file.pipe(ws);
});
---------- edit -->
I have now implemented a more compact version of the above. I would still be grateful for any input on this, though. Is this a good way to solve this?
As you can see I don't bother to check for ENOENT anymore. Whatever the error is, I want to fall back to S3.
function getFileFromStorageP(options) {
return new Promise(function(resolve, reject) {
let rs = fs.createReadStream(path.join(env.fsp.cacheDir, options.fileId));
rs.once('error', (err) => {reject(err)});
rs.once('readable', () => {resolve(rs)});
}).catch(function(err) {
return srvS3.download({
fileId: options.fileId
});
});
}

I'll post an answer for others to read..
The above solution is now implemented and works great. I haven't found any easier way to get hold of the error, though.

Related

Promise around event-stream mapSync() not working

Currently i am trying to create a CSV reader that can handle very large CSV files. I chose for a streaming implementation with the event-stream NPM package.
I have created a function getNextp() that should return a promise and give me the next piece of data every time i call it.
"use strict";
const fs = require('fs');
const es = require('event-stream');
const csv = require('csv-parser');
class CsvFileReader {
constructor(file) {
this.file = file;
this.isStreamReading = false;
this.stream = undefined;
}
getNextP() {
return new Promise( (resolve) => {
if (this.isStreamReading === true) {
this.stream.resume();
} else {
this.isStreamReading = true;
// Start reading the stream.
this.stream = fs.createReadStream(this.file)
.pipe(csv())
.pipe(es.mapSync( (row) => {
this.stream.pause();
resolve(row);
}))
.on('error', (err) => {
console.error('Error while reading file.', err);
})
.on("end", () => {
resolve(undefined);
})
}
});
}
}
I call this then with this code.
const csvFileReader = new CsvFileReader("small.csv");
setInterval( () => {
csvFileReader.getNextP().then( (frame) => {
console.log(frame);
})
}, 1000);
However every time when i try this out i only get the first row and the subsequent rows i do not get. I can not figure out why this it not working. I have tried the same with a good old callback function and then it works without any problem.
Update:
So what i basically want is a function (getNext()) that returns me every time when i call it the next row of the CSV. Some rows can be buffered, but yeah until now i could not figure out how to do this with streams. So if somebody could give me a pointer on how to create a correct getNext() function that would be great.
I would like to ask if somebody understands what is going wrong here, and ask kindly to share his/hers knowledge.
Thank you in advance.

Problem with async when downloading a series of files with nodejs

I'm trying to download a bunch of files. Let's say 1.jpg, 2.jpg, 3.jpg and so on. If 1.jpg exist, then I want to try and download 2.jpg. And if that exist I will try the next, and so on.
But the current "getFile" returns a promise, so I can't loop through it. I thought I had solved it by adding await in front of the http.get method. But it looks like it doesn't wait for the callback method to finish. Is there a more elegant way to solve this than to wrap the whole thing in a new async method?
// this returns a promise
var result = getFile(url, fileToDownload);
const getFile = async (url, saveName) => {
try {
const file = fs.createWriteStream(saveName);
const request = await http.get(url, function(response) {
const { statusCode } = response;
if (statusCode === 200) {
response.pipe(file);
return true;
}
else
return false;
});
} catch (e) {
console.log(e);
return false;
}
}
I don't think your getFile method is returning promise and also there is no point of awaiting a callback. You should split functionality in to two parts
- get file - which gets the file
- saving file which saves the file if get file returns something.
try the code like this
const getFile = url => {
return new Promise((resolve, reject) => {
http.get(url, response => {
const {statusCode} = response;
if (statusCode === 200) {
resolve(response);
}
reject(null);
});
});
};
async function save(saveName) {
const result = await getFile(url);
if (result) {
const file = fs.createWriteStream(saveName);
response.pipe(file);
}
}
What you are trying to do is getting / requesting images in some sync fashion.
Possible solutions :
You know the exact number of images you want to get, then go ahead with "request" or "http" module and use promoise chain.
You do not how the exact number of images, but will stop at image no. N-1 if N not found. then go ahed with sync-request module.
your getFile does return a promise, but only because it has async keyword before it, and it's not a kind of promise you want. http.get uses old callback style handling, luckily it's easy to convert it to Promise to suit your needs
const tryToGetFile = (url, saveName) => {
return new Promise((resolve) => {
http.get(url, response => {
if (response.statusCode === 200) {
const stream = fs.createWriteStream(saveName)
response.pipe(stream)
resolve(true);
} else {
// usually it is better to reject promise and propagate errors further
// but the function is called tryToGetFile as it expects that some file will not be available
// and this is not an error. Simply resolve to false
resolve(false);
}
})
})
}
const fileUrls = [
'somesite.file1.jpg',
'somesite.file2.jpg',
'somesite.file3.jpg',
'somesite.file4.jpg',
]
const downloadInSequence = async () => {
// using for..of instead of forEach to be able to pause
// downloadInSequence function execution while getting file
// can also use classic for
for (const fileUrl of fileUrls) {
const success = await tryToGetFile('http://' + fileUrl, fileUrl)
if (!success) {
// file with this name wasn't found
return;
}
}
}
This is a basic setup to show how to wrap http.get in a Promise and run it in sequence. Add error handling wherever you want. Also it's worth noting that it will proceed to the next file as soon as it has received a 200 status code and started downloading it rather than waiting for a full download before proceeding

How to import a value in node js only after it is exported elsewhere sometime later?

I am learning SSO and trying this out without the conventional User class/object. I am new to asynchronous programming and having difficulty in managing the data flow. I am stuck at a point where I have successfully exported a boolean value, but my import (in another module) gets undefined. I suspect it is because import does not wait for the corresponding export statement to execute first. How do I make it and all subsequent code wait?
I don't know what to try in this case.
Module that is exporting usrFlag
const request = require("request");
let usrFlag = false; // assuming user doesn't already exist.
function workDB(usr_id, usr_name, dateTimeStamp) {
//some code excluded - preparing selector query on cloudant db
request(options, function (error, response, body) {
if (error) throw new Error(error);
if (body.docs.length == 0) addUsr(usr_id, usr_name, dateTimeStamp);
else {
xyz(true); //This user already exists in cloudant
console.log('User already exists since', body.docs[0].storageTime);
}
});
}
async function setUsrFlag(val) { usrFlag = val; }
async function xyz(val) {
await setUsrFlag(val);
//module.exports below does not execute until usrFlag has the correct value.
//so value is not exported until usrFlag has been properly set.
console.log(usrFlag);
module.exports.usrFlag = usrFlag;
}
Module that is importing this value
const usrP = require('../config/passport-setup');
const dbProcess = require('../dbOps/dbProcessLogic'); // <-- This is import
router.get('/google/redirect', passport.authenticate('google'), (req, res) => {
dbProcess.workDB(usrP.usrPrf.id, usrP.usrPrf.displayName, new Date());
// Instead of true/false, I see undefined here.
console.log(dbProcess.usrFlag);
});
I expect the require function of import module to wait for export module to send it all the required values. However, I know that is probably not going to happen without me explicitly telling it to do so. My question is, how?
So, I have just modified some of the code, so that I can work on it easily.
Module that is exporting usrFlag
// const request = require("request");
let usrFlag = false; // assuming user doesn't already exist.
function workDB(usr_id, usr_name, dateTimeStamp) {
return new Promise(function (resolve, reject) {
setTimeout(function () {
xyz(true).then(function () {
resolve('done');
})
}, 1000);
});
}
function setUsrFlag(val) { usrFlag = val; }
function xyz(val) {
return new Promise(function (resolve, reject) {
setUsrFlag(val);
module.exports.usrFlag = usrFlag;
resolve('done');
});
}
module.exports = {
usrFlag,
workDB
}
Module that is importing this value
const dbProcess = require('../dbOps/dbProcessLogic'); // <-- This is import
dbProcess.workDB().then(function () {
console.log(dbProcess.usrFlag);
})
Now when you run the second file, you get usrFlag as true.
I have used setTimeout to imitate a request.
Sorry if I butchered up some of your code.

Question about end of request for node/JS request package

I'm trying to understand what .on('end', ...) does in the node package request.
My code:
const fs = require('fs');
const request = require('request');
function downloadAsset(relativeAssetURL, fileName) {
return new Promise((resolve, reject) => {
try {
let writeStream = fs.createWriteStream(fileName);
var remoteImage = request(`https:${relativeAssetURL}`);
remoteImage.on('data', function(chunk) {
writeStream.write(chunk);
});
remoteImage.on('end', function() {
let stats = fs.statSync(fileName);
resolve({ fileName: fileName, stats: stats });
});
} catch (err) {
reject(err);
}
});
}
What I'm trying to do is download a remote image, get some file statistics, and then resolve the promise so my code can do other things.
What I'm finding is that the promise doesn't always resolve after the file has been downloaded; it may resolve a little before then. I thought that's what .on('end', ... ) was for.
What can I do to have this promise resolve after the image has been downloaded in full?
As the docs say:
The writable.write() method writes some data to the stream, and calls the supplied callback once the data has been fully handled.
So, writable.write() is asynchronous. Just because your last writeStream.write has been called does not necessarily mean that all write operations have been completed. You probably want to call the .end method, which means:
Calling the writable.end() method signals that no more data will be written to the Writable. The optional chunk and encoding arguments allow one final additional chunk of data to be written immediately before closing the stream. If provided, the optional callback function is attached as a listener for the 'finish' event.
So, try calling writeStream.end when the remoteImage request ends, and pass a callback to writeStream.end that resolves the Promise once the writing is finished:
function downloadAsset(relativeAssetURL, fileName) {
return new Promise((resolve, reject) => {
try {
const writeStream = fs.createWriteStream(fileName);
const remoteImage = request(`https:${relativeAssetURL}`);
remoteImage.on('data', function(chunk) {
writeStream.write(chunk);
});
remoteImage.on('end', function() {
writeStream.end(() => {
const stats = fs.statSync(fileName);
resolve({ fileName: fileName, stats: stats });
});
});
} catch (err) {
reject(err);
}
});
}
(also try not to mix var and let/const - in an ES6+ environment, prefer const, which is generally easier to read and has fewer problems, like hoisting)

unzip or decompress data from API response in angular2

I'm using unzip function to decompress the zipped response from API in typescript (angular2).
customers:any=[];
zlib.gunzip(Buffer.from(response), function(err, uncompressedMessage)
{
if(err)
{
console.log(err);
}
else
{
resultArray = JSON.parse(uncompressedMessage.toString());
consloe('response After Unzip within fun',resultArray);
this.customers = resultArray; // error undefined
}
});
console('response After Unzip outside fun',resultArray); // undefined
Here the resultArray is only accessible within function but not outside. I tried for declaring global variable and aceess it but not able access the value So, pls suggest me that how can I achieve this.
The issue is function(err, uncompressedMessage) {} is asynchronous. And you are trying to access resultArray before it is set.
What you can do is wrap the uncompress functionality in a function that returns a Promise as follows:
let uncompress = (response) => new Promise((resolve, reject) => {
zlib.gunzip(Buffer.from(response), function(err, uncompressedMessage) {
if(err) {
reject(err);
} else {
let resultArray = JSON.parse(uncompressedMessage.toString());
resolve(resultArray);
}
});
});
Once that is done, when you receive the response, you can get the customers using:
const customers = await uncompress(response); // not undefined anymore
Note that the method which uses await keyword will have have to be declared async. If you don't want to do that, you can anyways use plain Promise like this:
uncompress(response).then(function(customers){ /* logic */ })
Don't forget to use bind(this) to callback like this
uncompress(response).then(function(customers){ /* logic */ }.bind(this))

Resources