Calculate a file hash and save the file - node.js

Users upload files into my Express app. I need to calculate the hash of each uploaded file and then write the file to disk, using the calculated hash as the filename. I tried to do it with the following code:
function storeFileStream(file, next) {
  createFileHash(file, function(err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      return next(err);
    });
    stream.on('finish', function() {
      return next();
    });
    file.pipe(stream);
  });
}
function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function(err) {
    return next(err);
  });
  file.on('end', function() {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem is that after I calculate the file hash, the written file is 0 bytes. What is the best way to solve this?
Update
Following @poke's suggestion, I tried to duplicate my stream. Now my code is:
function storeFileStream(file, next) {
  var s1 = new pass; // pass = require('stream').PassThrough
  var s2 = new pass;
  file.pipe(s1);
  file.pipe(s2);
  createFileHash(s1, function(err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      return next(err);
    });
    stream.on('finish', function() {
      return next();
    });
    s2.pipe(stream);
  });
}
function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function(err) {
    return next(err);
  });
  file.on('end', function() {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem with this code is that the end and finish events are never emitted. If I comment out file.pipe(s2);, the events are emitted, but then I am back to my original problem.

This code fixes the problem:
var s1 = new passThrough, // passThrough = require('stream').PassThrough
    s2 = new passThrough;
file.on('data', function(data) {
  s1.write(data);
  s2.write(data);
});
file.on('end', function() {
  s1.end();
  s2.end();
});

The correct and simple way should be as follows: we need to resume the PassThrough streams.
const { PassThrough } = require('stream');
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

function storeFileStream(file, directory, version, reject, resolve) {
  const fileHashSource = new PassThrough();
  const writeSource = new PassThrough();
  file.pipe(fileHashSource);
  file.pipe(writeSource);
  // this is the key point, see https://nodejs.org/api/stream.html#stream_three_states
  fileHashSource.resume();
  writeSource.resume();
  createFileHash(fileHashSource, function(err, hash) {
    if (err) {
      return reject(err);
    }
    const fileName = path.join(directory, version + '_' + hash.slice(0, 8) + '.zip');
    const writeStream = fs.createWriteStream(fileName);
    writeStream.on('error', function(err) {
      return reject(err);
    });
    writeStream.on('finish', function() {
      return resolve();
    });
    writeSource.pipe(writeStream);
  });
}

function createFileHash(readStream, next) {
  const hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  hash.on('error', function(err) {
    return next(err);
  });
  hash.on('finish', function() {
    return next(null, hash.read());
  });
  readStream.pipe(hash);
}
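For reference, a minimal usage sketch of the function above, wrapping the reject/resolve callbacks in a Promise (the storeFile name and the example arguments are made up for illustration, not part of the answer):

function storeFile(file, directory, version) {
  return new Promise(function(resolve, reject) {
    // note the argument order used by storeFileStream above: reject first, then resolve
    storeFileStream(file, directory, version, reject, resolve);
  });
}

// e.g. inside an upload handler:
// storeFile(uploadStream, config.storagePath, 'v1').then(onSaved).catch(onError);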

You could use the async module (not tested but should work):
async.waterfall([
  function(done) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function(err) {
      done(err);
    });
    file.on('end', function() {
      done(null, hash.read());
    });
    file.pipe(hash);
  },
  function(hash, done) {
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      done(err);
    });
    stream.on('finish', function() {
      done(null);
    });
    file.pipe(stream);
  }
], function (err) {
  console.log("Everything is done!");
});
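As a further alternative (not taken from the answers above, just a sketch): hash the chunks while writing them to a temporary file, then rename the file to its hash once the write finishes. config.storagePath comes from the question; the temporary file name is made up for the example, and the rename assumes the temp directory and the storage path are on the same filesystem:

var crypto = require('crypto');
var fs = require('fs');
var os = require('os');
var path = require('path');

function storeFileStream(file, next) {
  var hash = crypto.createHash('sha1');
  var tmpName = path.join(os.tmpdir(), 'upload-' + Date.now() + '-' + Math.random().toString(36).slice(2));
  var out = fs.createWriteStream(tmpName);
  file.on('data', function(chunk) {
    hash.update(chunk); // hash each chunk as it streams to disk
  });
  file.on('error', next);
  out.on('error', next);
  out.on('finish', function() {
    // the temporary file is complete; move it to its hash-based name
    fs.rename(tmpName, path.join(config.storagePath, hash.digest('hex')), next);
  });
  file.pipe(out);
}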

Related

I have come across a strange error in the Node.js filesystem

I am working with the filesystem module of Node.js. I wrote a file, read the file, and am now trying to rename it. It actually renames the file, but it throws the error below and my localhost stops running.
This is the error:
_http_outgoing.js:690
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type object
file1:
var fs = require('fs');

// write
function write(fileName, content) {
  return new Promise(function (resolve, reject) {
    fs.writeFile(`./test/${fileName}`, content, function (err, done) {
      if (err) {
        reject(err);
        return;
      }
      resolve(done);
      // console.log('file created >>', done);
    });
  })
}

function readFile(fileName, cb) {
  fs.readFile(`./test/${fileName}`, 'utf-8', function (err, done) {
    if (err) {
      cb(err);
      return;
    }
    cb(null, done);
  })
}

function rename(oldname, newname, cb) {
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function (err, done) {
    if (err) {
      cb(err)
      return
    }
    cb(null, done)
  })
}

rename('pinkfloyd.txt', 'tool.txt', function (err, done) {
  if (err) {
    console.log('error in renaming')
  } else {
    console.log('renamed>>')
  }
})

readFile('kisor.txt', function (err, done) {
  if (err) {
    console.log("error in file reading >>", err);
  } else {
    console.log('success >>', done);
  }
})

write('kisor.txt', 'i am javascript')
  .then(function (data) {
    console.log('write success ', data);
  })
  .catch(function (err) {
    console.log('error in write >>', err);
  })

// object shorthand
module.exports = {
  write, readFile, rename
}
I have imported the exported functions from file1 here in this file:
//var { write, readFile } = require('./file'); // object destructuring
var fileOp = require('./file1');
//console.log('file operation >>', fileOp);

fileOp.write('brodway.txt', 'i am infosys of nepal')
  .then(function (data) {
    console.log('done >>>', data);
  })
  .catch(function (err) {
    console.log('err', err);
  })

fileOp.readFile('kisor.txt', function (err, done) {
  if (err) {
    console.log('err', err);
  } else {
    console.log('success >>', done);
  }
})
And lastly, here is the server:
var http = require('http');
var fileOp = require('./file1');

var server = http.createServer(function (request, response) {
  // this function will be executed whenever a client is connected
  // request or 1st argument is the http request object
  // response or 2nd argument is the http response object
  var url = request.url;
  if (url == '/write') {
    fileOp.write('abcd.js', 'hi')
      .then(function (data) {
        response.end('data', data);
      })
      .catch(function (err) {
        response.end(err);
      })
  } else if (url == '/read') {
    fileOp.readFile('abcd.js', function (err, done) {
      if (err) {
        response.end(err);
      } else {
        response.end('done' + done);
      }
    })
  } else if (url == '/rename') {
    fileOp.rename('pinkfloyd.txt', 'tool.txt', function (err, done) {
      if (err) {
        response.end(err)
      } else {
        response.end('done', done)
      }
    })
  } else {
    response.end('form default page');
  }
  console.log('client connected to server');
  console.log('request url >>', request.url);
  // request response cycle must be completed
  // response.end('hi from node server'); response cannot be sent more than once
});

server.listen(8080, function (err, done) {
  if (err) {
    console.log('server listening failed');
  } else {
    console.log('server listening at port 8080');
    console.log('press CTRL + C to exit from server');
  }
});
In this section of code:
fileOp.rename('pinkfloyd.txt', 'tool.txt', function(err, done){
  if(err){
    response.end(err)
  }
  else{
    response.end('done', done)
  }
})
You are calling response.end('done', done). But the fs.rename() callback does not have a done argument; it only has the err argument, because there's no data to communicate back: the rename either succeeded or it didn't. So done will be undefined, which means you're calling:
response.end('done', undefined);
The function signature for response.end() is this:
response.end([data[, encoding]][, callback])
So, you're trying to send undefined for the encoding. That is not correct.
Your rename function should be changed from this:
function rename(oldname, newname, cb){
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function(err, done){
    if(err){
      cb(err)
      return
    }
    cb(null, done)
  })
}
to this:
function rename(oldname, newname, cb){
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, function(err){
    if(err){
      cb(err);
      return
    }
    cb(null); // no 2nd argument here
  })
}
or even simpler:
function rename(oldname, newname, cb){
  fs.rename(`./bands/${oldname}`, `./bands/${newname}`, cb);
}
FYI, it appears you're using .end() improperly in a bunch of places in your code, where you do this:
response.end('done', someData);
That's not how it works. You aren't emitting an event. You're commanding the end of the http response and you need to follow this function signature:
response.end([data[, encoding]][, callback])
So, you'd probably just do response.end(data), if data was a string.
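For example, the rename route in your server could respond along these lines (just a sketch; the response strings are placeholders):

fileOp.rename('pinkfloyd.txt', 'tool.txt', function (err) {
  if (err) {
    response.end('error in renaming'); // end() needs a string or Buffer, not an Error object
  } else {
    response.end('done'); // one data argument, no bogus encoding argument
  }
});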

Streaming data from Oracle DB to browser with Node.js

I am learning Node.js and databases. I am trying to stream heavy data, about 7,700,000 rows and 96 columns, from Oracle to the client, where I later use it for a virtual table. But the client shows only one row, and then the Node console displays the error "Cannot set headers after they are sent to the client". How do I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());

oracledb.outFormat = oracledb.ARRAY;

oracledb.getConnection({
    user: 'user',
    password: 'password',
    connectString: 'some string'
  },
  (err, connection) => {
    if (err) {
      console.error(err.message);
      return;
    }
    var rowsProcessed = 0;
    var startTime = Date.now();
    var dataSize = 0;
    var stream = connection.queryStream(
      'SELECT * FROM table',
    );
    // stream.on('data', function (data) {
    //   rowsProcessed++;
    //   // console.log(JSON.stringify(data));
    //   // console.log(data);
    //   dataSize = dataSize + data.length;
    //   // oracleData.push(data);
    //   // console.log("pushing");
    //   // console.log(oracleData);
    //   // app.get('/data', (req, res) => {
    //   //   res.send(data);
    //   // })
    //   // console.log(data);
    // });
    app.get('/data', (req, res) => {
      stream.on('data', (data) => {
        rowsProcessed++;
        dataSize = dataSize + data.length;
        res.send(JSON.stringify(data));
      })
    })
    stream.on('end', function () {
      var t = ((Date.now() - startTime) / 1000);
      console.log('queryStream(): rows: ' + rowsProcessed +
        ', seconds: ' + t);
      // console.log(dataSize + ' bytes');
      connection.close(
        function (err) {
          if (err) {
            console.error(err.message);
          } else {
            console.log("connection closed")
          }
        }
      )
    })
  }
);

app.listen(5000, () => {
  console.log('Listening at 5000')
})
I tried the above approach, but it is failing. How can I achieve the output I want?
The browser freezes if I send the entire data set at once, which is why I am trying to use streaming; and the Node command prompt shows an out-of-memory error if I load the entire data set at once.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important; you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
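For instance, a minimal connection pool could be set up along these lines (the connection details are placeholders carried over from the question, and the pool sizes are arbitrary, not a recommendation):

const oracledb = require('oracledb');

async function initPool() {
  // create the default pool once at startup; request handlers then borrow connections from it
  await oracledb.createPool({
    user: 'user',
    password: 'password',
    connectString: 'some string',
    poolMin: 1,
    poolMax: 4
  });
}

// later, oracledb.getConnection() with no arguments checks a connection out of the
// default pool, which is what the get() handler below relies on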
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    let firstRow = true;

    stream.on('data', (row) => {
      if (!firstRow) {
        res.write(','); // separate rows with commas, avoiding a trailing comma before ']'
      }
      firstRow = false;
      res.write(JSON.stringify(row));
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});
    this.push('[');
  }

  _transform(row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }
    this._prevRow = row;
    callback(null);
  }

  _flush(done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }
    this.push(']');
    delete this._prevRow;
    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
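A hypothetical wiring sketch, reusing the Express app and port from the question (the employees module name is assumed, not part of the answer):

const express = require('express');
const employees = require('./employees'); // the module that exports get() above

const app = express();
app.get('/data', employees.get);
app.listen(5000, () => console.log('Listening at 5000'));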

Image is not getting uploaded through Node.js

I am uploading an image from Node.js.
Control is not getting into req.on('end'); nothing is printed inside that handler, and I cannot identify where the problem is. I am making an API and calling it from JavaScript to upload an image to a specific location on the server.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function(req, res) {
  console.log("In file Upload..");
  console.log(req.params.widgetId);
  console.log(req.params.choosenFileName);
  res.writeHead(200, { 'Content-Type': 'application/binary' });
  var filedata = '';
  var chunks = [];
  //req.setEncoding('binary');
  req.on('data', function(chunk) {
    //filedata += chunk;
    chunks.push(chunk);
  })
  req.on('end', function(chunk) {
    var dir = 'uploads/' + req.params.widgetId
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
      console.log("directory created..");
    }
    fs.readdir(dir, function(err, filenames) {
      if (err) {
        onError(err);
        return;
      }
      filenames.forEach(function(filename) {
        console.log(filename);
        fs.unlink(dir + '/' + filename, function(err) {
          if (err) {
            return console.error(err);
          }
          console.log("File deleted successfully!");
        });
      });
      //fs.writeFile('uploads/'+req.params.widgetId+'/sanmoy.jpg', chunk, function(err) {
      var fileName = req.params.choosenFileName;
      var widgetId = req.params.widgetId;
      //fs.writeFile('uploads/'+widgetId+'/'+fileName, filedata, 'binary', function(err) {
      var buffer = Buffer.concat(chunks)
      fs.writeFile('uploads/' + widgetId + '/' + fileName, buffer, function(err) {
        if (err) {
          return console.error(err);
        }
        console.log("writing file success!");
      })
    });
  });
  res.end("File is uploaded");
});

How to get a function value on MongoDB collection.find()

When I run collection.find() in MongoDB/Node/Express, I need to return a value for my array like this, but I am stuck in callback hell:
foursquare.getVenues(params, function(error, venues) {
  if (!error) {
    var places = [];
    venues.response.venues.forEach(function(e) {
      places.push(
        {
          obj_id: e.id,
          name: e.name,
          distance: e.distance,
          here_now: req.collection.findById(e.id) // count - I need this value here
        }
      );
    });
    res.send(places);
  }
});
You can try to use the async module: https://github.com/caolan/async#each
var async = require('async');
...
foursquare.getVenues(params, function (error, venues) {
  if (error) {
    throw error;
  }
  var places = [];
  async.each(venues.response.venues, function (e, callback) {
    db.collection.findById(e.id, function (err, res) {
      places.push({
        obj_id: e.id,
        name: e.name,
        distance: e.distance,
        here_now: res
      });
      callback()
    });
  }, function (err) {
    if (err) {
      console.log('A file failed to process');
    } else {
      console.log('All files have been processed successfully');
      res.send(places);
    }
  });
});
Or using async.map:
var async = require('async');

var createArray = function (e, cb) {
  db.collection.findById(e.id, function (err, res) {
    var obj = {
      obj_id: e.id,
      name: e.name,
      distance: e.distance,
      here_now: res
    }
    cb(null, obj);
  });
}

async.map(venues.response.venues, createArray, function (err, places) {
  if (err) {
    throw err;
  }
  console.log(places);
});
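For comparison, a rough Promise-based sketch of the same idea, reusing venues, db, and res from the snippets above and assuming findById follows the usual Node callback convention so it can be promisified:

var util = require('util');
var findById = util.promisify(db.collection.findById.bind(db.collection));

Promise.all(venues.response.venues.map(function (e) {
  return findById(e.id).then(function (count) {
    return { obj_id: e.id, name: e.name, distance: e.distance, here_now: count };
  });
})).then(function (places) {
  res.send(places);
}).catch(function (err) {
  console.log(err);
});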

Wrap a Couchbase access function

Below is my Couchbase Node.js code:
kdatabase.js
var couchbase = require('couchbase');
var db = new couchbase.Connection({
    host: "http://127.0.0.1:8091",
    bucket: "default",
  },
  function(err) {
    if (err) throw err;
    db.get('id1', function(err, result) {
      if (err) throw err;
      console.log(result.value);
      process.exit(0);
    });
  });
It works, but I would like to wrap it in an object that is easy to work with:
module.exports = function(app) {
  return new KDatabase(app);
};

var KDatabase = function(app) {
  this.app = app;
};

//couchbase
KDatabase.prototype.query = function(userName) {
  var couchbase = require('couchbase');
  var db = new couchbase.Connection({
      host: "http://127.0.0.1:8091",
      bucket: "default",
    },
    function(err) {
      if (err) throw err;
      console.log(userName + '!!!!--');
      db.get(userName, function(err, result) {
        if (err) throw err;
        var o = result.value;
        console.log(o['password'] + '***--');
        return o['password'];
      });
    });
};
Then I call:
var db = require('kdatabase.js')();
var s = db.query(msg.username, function(err) {
  if (err) {
    console.log('aaa');
  }
  console.log('bbb');
  return;
});
The lines
console.log(userName + '!!!!--');
console.log(o['password'] + '***--');
display correctly, but
console.log('aaa');
console.log('bbb');
are never executed.
Your query method does not take a callback argument, so the callback you pass is never called.
KDatabase.prototype.query = function(userName, cb) {
  /* snip */
  console.log(o['password'] + '***--');
  cb(err, result);
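Spelled out a bit more, a sketch based on the question's own code, passing the value to the callback instead of returning it:

KDatabase.prototype.query = function(userName, cb) {
  var couchbase = require('couchbase');
  var db = new couchbase.Connection({
      host: "http://127.0.0.1:8091",
      bucket: "default",
    },
    function(err) {
      if (err) return cb(err);
      db.get(userName, function(err, result) {
        if (err) return cb(err);
        cb(null, result.value['password']); // hand the value back via the callback
      });
    });
};

// usage:
// db.query(msg.username, function(err, password) { ... });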
