Extracting initVect from node stream and deciphering the rest - node.js

I have a readableStream with an encrypted file and I would like to decrypt it. So far I have been able to create two readable streams: with the first I extracted the IV, and the second was used to decrypt the rest of the data. However, I would like to pipe everything into a single stream that does both the IV extraction and the decryption. Something like this:
function getDecryptionStream(password, initVectLength = 16) {
  const cipherKey = getCipherKey(password);
  const unzip = zlib.createUnzip();
  return new Transform({
    transform(chunk, encoding, callback) {
      if (!this.initVect) {
        this.initVect = chunk.subarray(0, initVectLength);
        chunk = chunk.subarray(initVectLength, chunk.length);
      }
      const decipher = crypto.createDecipheriv(algorithm, cipherKey, this.initVect);
      callback(null, chunk.pipe(decipher).pipe(unzip));
    }
  });
}

function decrypt({ file, newFile, passphrase }) {
  const readStream = fs.createReadStream(file);
  const writeStream = fs.createWriteStream(newFile);
  const decryptStream = getDecryptionStream(passphrase, IV_LENGTH);
  readStream
    .pipe(decryptStream)
    .pipe(writeStream);
}
However, I cannot figure out how to process the chunk: chunk.pipe(decipher) throws TypeError: chunk.pipe is not a function, because chunk is a Buffer, not a stream.
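One way to make this work (a minimal sketch, assuming the algorithm constant and getCipherKey helper from the question, and that the first chunk is at least initVectLength bytes long): create the decipher lazily once the IV has been sliced off the head of the stream, and feed each Buffer through decipher.update() instead of trying to pipe it:

const crypto = require('crypto');
const { Transform } = require('stream');

function getDecryptionStream(password, initVectLength = 16) {
  const cipherKey = getCipherKey(password);
  let decipher = null;
  return new Transform({
    transform(chunk, encoding, callback) {
      if (!decipher) {
        // Slice the IV off the head of the stream, then create the decipher once
        const initVect = chunk.subarray(0, initVectLength);
        chunk = chunk.subarray(initVectLength);
        decipher = crypto.createDecipheriv(algorithm, cipherKey, initVect);
      }
      // A Buffer is not a stream, so run it through the decipher directly
      callback(null, decipher.update(chunk));
    },
    flush(callback) {
      // Emit the final block (and surface padding errors) when the input ends
      try {
        callback(null, decipher ? decipher.final() : null);
      } catch (err) {
        callback(err);
      }
    }
  });
}

If the plaintext was compressed before encryption, pipe through zlib.createUnzip() after this transform rather than inside it: readStream.pipe(decryptStream).pipe(zlib.createUnzip()).pipe(writeStream).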

Related

Saving object to JSON file, then using it back in a function after reading

I am completely new to node.js but have to use it in my student project, which requires this library: https://github.com/bitchan/eccrypto.
The goal is to encrypt a file using an Ethereum public key, save it as JSON, and then read it back to decrypt it with the private key:
var fs = require('fs');
var crypto = require("crypto");
var eccrypto = require("eccrypto");

var content = fs.readFileSync('pdf_test.pdf');
var importPrivateKey = "c337ded6f56c07205fb7b391654d7d463c9e0c726869523ae6024c9bec878878";
var importPublicKey = "04730a151f545f5dcdb1c6d99fb1251f5c70f216f39ba2681bcf10db16bd582e6720bc881d51f25ffbe961df6a0af24a9d39a4db3d86a7f6b3f9bf4eaac0e4006b";
var privateKey = new Buffer(importPrivateKey, "hex");
var publicKey = new Buffer(importPublicKey, "hex");

// Encrypting the file for B.
eccrypto.encrypt(publicKey, Buffer(content)).then(function (encrypted) {
  //console.log('Encrypted message ' + JSON.stringify(encrypted));
  let data = JSON.stringify(encrypted);
  fs.writeFileSync('encrypted.json', data);
  console.log('encryption done');
  let rawData = fs.readFileSync('encrypted.json');
  let encryptedContent = JSON.parse(rawData);
  //console.log(encryptedContent);
  // B decrypting the file.
  eccrypto.decrypt(privateKey, encryptedContent).then(function (plaintext) {
    //console.log("Decrypted message: ", plaintext.toString());
    fs.writeFile('decrypted.pdf', plaintext, function (err) {
      if (err) return console.log(err);
      console.log('decryption done');
    });
  });
});
I get the following error from this code: "(node:271) UnhandledPromiseRejectionWarning: Error: Bad input."
Everything works when I replace the encryptedContent variable with encrypted in the eccrypto.decrypt call, but I want to let the user store the encrypted object and decrypt it later with this function. How can I do that?
The problem is that the encrypted object is not directly JSON-serializable, so you have to encode the buffers in some JSON-serializable form. Since you used hex for the private and public keys, I used it below as well. (Also, the Buffer() constructor is deprecated and not secure, so I switched it to Buffer.from().)
var fs = require('fs');
var crypto = require("crypto");
var eccrypto = require("eccrypto");

var content = fs.readFileSync('pdf_test.pdf');
var importPrivateKey = "c337ded6f56c07205fb7b391654d7d463c9e0c726869523ae6024c9bec878878";
var importPublicKey = "04730a151f545f5dcdb1c6d99fb1251f5c70f216f39ba2681bcf10db16bd582e6720bc881d51f25ffbe961df6a0af24a9d39a4db3d86a7f6b3f9bf4eaac0e4006b";
let privateKey = Buffer.from(importPrivateKey, 'hex');
let publicKey = Buffer.from(importPublicKey, 'hex');

// Encrypting the file for B.
eccrypto.encrypt(publicKey, Buffer.from(content)).then(function (encrypted) {
  //console.log('Encrypted message ' + JSON.stringify(encrypted));
  // Encode each Buffer as hex so the object survives JSON.stringify
  let data = JSON.stringify({
    iv: encrypted.iv.toString('hex'),
    ciphertext: encrypted.ciphertext.toString('hex'),
    mac: encrypted.mac.toString('hex'),
    ephemPublicKey: encrypted.ephemPublicKey.toString('hex')
  });
  fs.writeFileSync('encrypted.json', data);
  console.log('encryption done');

  let rawData = fs.readFileSync('encrypted.json');
  let encryptedContent = JSON.parse(rawData);
  // Decode the hex fields back into Buffers before decrypting
  encryptedContent = {
    iv: Buffer.from(encryptedContent.iv, 'hex'),
    ciphertext: Buffer.from(encryptedContent.ciphertext, 'hex'),
    mac: Buffer.from(encryptedContent.mac, 'hex'),
    ephemPublicKey: Buffer.from(encryptedContent.ephemPublicKey, 'hex')
  };
  //console.log(encryptedContent);

  // B decrypting the file.
  eccrypto.decrypt(privateKey, encryptedContent).then(function (plaintext) {
    //console.log("Decrypted message: ", plaintext.toString());
    fs.writeFile('decrypted.pdf', plaintext, function (err) {
      if (err) return console.log(err);
      console.log('decryption done');
    });
  });
});
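As an aside (not part of the original answer): Buffers do survive JSON.stringify on their own, just in a verbose form, so the hex round trip above is mainly about keeping the JSON file compact and readable:

const buf = Buffer.from('abcd', 'hex');
const json = JSON.stringify(buf);   // '{"type":"Buffer","data":[171,205]}'
const revived = Buffer.from(JSON.parse(json).data);
console.log(revived.equals(buf));   // true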

Node.js: copy a stream into a file without consuming it

Given a function that parses incoming streams:
async onData(stream, callback) {
  const parsed = await simpleParser(stream)
  // Code handling parsed stream here
  // ...
  return callback()
}
I'm looking for a simple and safe way to 'clone' that stream, so I can save it to a file for debugging purposes without affecting the code. Is this possible?
Same question in fake code: I'm trying to do something like this. Obviously, this is a made-up example and doesn't work.
const fs = require('fs')
const wstream = fs.createWriteStream('debug.log')

async onData(stream, callback) {
  const debugStream = stream.clone(stream) // Fake code
  wstream.write(debugStream)
  const parsed = await simpleParser(stream)
  // Code handling parsed stream here
  // ...
  wstream.end()
  return callback()
}
No, you can't clone a readable stream without consuming it. However, you can pipe it twice: one pipe for creating the file, and the other acting as the 'clone'.
Code is below:
let Readable = require('stream').Readable;
var stream = require('stream');

var s = new Readable();
s.push('beep');
s.push(null);

var stream1 = s.pipe(new stream.PassThrough());
var stream2 = s.pipe(new stream.PassThrough());

// Here use stream1 for creating the file, and stream2 as the clone of s.
// I just print them out for a quick show:
stream1.pipe(process.stdout);
stream2.pipe(process.stdout);
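Applied to the onData handler from the question, the same idea might look like this (a sketch, assuming simpleParser and the callback behave as in the question):

const fs = require('fs');
const { PassThrough } = require('stream');

async function onData(stream, callback) {
  // Attach both pipes before any data flows so neither branch misses chunks
  const toParser = stream.pipe(new PassThrough());
  const toFile = stream.pipe(new PassThrough());
  toFile.pipe(fs.createWriteStream('debug.log'));
  const parsed = await simpleParser(toParser);
  // Code handling parsed stream here
  // ...
  return callback();
}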
I've tried to implement the solution provided by @jiajianrong but was struggling to get it to work with a createReadStream, because the Readable throws an error when I try to push the createReadStream directly. Like:
s.push(createReadStream())
To solve this issue I have used a helper function to transform the stream into a buffer.
function streamToBuffer (stream: any) {
  const chunks: Buffer[] = []
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk: any) => chunks.push(Buffer.from(chunk)))
    stream.on('error', (err: any) => reject(err))
    stream.on('end', () => resolve(Buffer.concat(chunks)))
  })
}
Below is the solution I found, using one pipe to generate a hash of the stream and the other pipe to upload the stream to cloud storage.
import stream from 'stream'
const Readable = require('stream').Readable

const s = new Readable()
s.push(await streamToBuffer(createReadStream()))
s.push(null)

const fileStreamForHash = s.pipe(new stream.PassThrough())
const fileStreamForUpload = s.pipe(new stream.PassThrough())

// Generating file hash
const fileHash = await getHashFromStream(fileStreamForHash)

// Uploading stream to cloud storage
await BlobStorage.upload(fileName, fileStreamForUpload)
My answer is mostly based on the answer of jiajianrong.

NodeJS: Unable to convert stream/buffer to base64 string

I need to create a base64 string that I need to send to a third-party API. I have the stream and the buffer. From the stream I am able to create an image, so there is no way the stream is corrupted. Here are the two variables:
var newJpeg = new Buffer(newData, "binary");
var fs = require('fs');
let Duplex = require('stream').Duplex;
let _updatedFileStream = new Duplex();
_updatedFileStream.push(newJpeg);
_updatedFileStream.push(null);
No matter what I try, I cannot convert either of them into a base64 string.
_updatedFileStream.toString('base64');
Buffer(newJpeg, 'base64');
Buffer(newData, 'base64');
None of the above works. Sometimes I get Uint8Array[arraySize] or a gibberish string. What am I doing wrong?
Example using promises (but could easily be adapted to other approaches):
return new Promise((resolve, reject) => {
  let buffers = [];
  let myStream = <...>;
  myStream.on('data', (chunk) => { buffers.push(chunk); });
  myStream.once('end', () => {
    let buffer = Buffer.concat(buffers);
    resolve(buffer.toString('base64'));
  });
  myStream.once('error', (err) => {
    reject(err);
  });
});
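Worth noting that if the data is already sitting in a Buffer, as newJpeg is in the question, no stream is needed at all:

// newJpeg already holds the raw bytes; a Buffer converts directly
var base64String = newJpeg.toString('base64');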

How to close node decipher stream on error

I'm trying to close a decipher stream created with Crypto.createDecipheriv when the readable stream feeding it errors. I am doing this per the recommendation in the docs: "One important caveat is that if the Readable stream emits an error during processing, the Writable destination is not closed automatically. If an error occurs, it will be necessary to manually close each stream in order to prevent memory leaks." https://nodejs.org/dist/latest-v6.x/docs/api/stream.html#stream_readable_read_size. But when I call the end() function on the decrypt stream, I get an error: Error: error:0606506D:digital envelope routines:EVP_DecryptFinal_ex:wrong final block length. How can I close the stream and avoid this error? Here is a full code sample:
const Fs = require('fs');
const Crypto = require('crypto');

const algorithm = 'aes-256-cbc';
const Iv = Crypto.randomBytes(16);
const key = Crypto.randomBytes(32);

const readStream = Fs.createReadStream('./image1.jpg');
const writeStream = Fs.createWriteStream('./out.file');
const encrypt = Crypto.createCipheriv(algorithm, key, Iv);

writeStream.on('finish', () => {
  const readStream2 = Fs.createReadStream('./out.file');
  const writeStream2 = Fs.createWriteStream('./out2.jpeg');
  const decrypt = Crypto.createDecipheriv(algorithm, key, Iv);

  writeStream2.on('finish', () => {
    console.log('fin');
  });

  readStream2.on('error', (err) => {
    console.error(err.message);
    decrypt.end();
    writeStream2.end();
  });

  readStream2.pipe(decrypt).pipe(writeStream2);
  readStream2.emit('error', new Error('Oh Fuck'));
});

readStream.pipe(encrypt).pipe(writeStream);
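One way out (a sketch, not an accepted answer from the thread): end() flushes the decipher and forces the final-block check, which is exactly what throws on a truncated stream. destroy() tears the stream down without that flush, and on Node 10+ stream.pipeline() wires up this cleanup for every stream in the chain automatically:

readStream2.on('error', (err) => {
  console.error(err.message);
  // destroy() skips the EVP_DecryptFinal_ex check that end() triggers
  decrypt.destroy();
  writeStream2.destroy();
});

// Or, on Node 10+, let pipeline() destroy every stream on error:
const { pipeline } = require('stream');
pipeline(readStream2, decrypt, writeStream2, (err) => {
  if (err) console.error(err.message);
  else console.log('fin');
});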

Node.JS Convert Base64 String into Binary and write to MongoDB GridFS

I have a Base64 string that I am converting to binary like this:
var b64string = req.body.image.substr(23); // strip the data-URL prefix from the base64 image
var buf = new Buffer(b64string, 'base64');
I need to insert this into MongoDB GridFS. The problem I am having is that createReadStream requires a filepath, whereas I already have the file in memory.
This is what I am trying that does not work
var grid = new gfs(db, mongo, 'files');
grid.createWriteStream(options, function (err, ws) {
  fs.createReadStream(buf, {autoClose: true})
    .pipe(ws)
    .on('close', function (f) {
      console.log(f._id)
      res.send(f._id)
    })
    .on('error', function (err) {
      console.log(err)
    })
})
But as I described, it wants a path, whereas what I have is buf.
UPDATE ---
I was overthinking it...
This works:
var b64string = req.body.image.substr(23);
var buf = new Buffer(b64string, 'base64');
var grid = new Grid(db, 'files');
grid.put(buf, {}, function (err, file) {});
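For reference, the original createWriteStream approach can also work if the Buffer is written straight to the writable stream instead of going through fs.createReadStream (a sketch keeping the callback form from the question):

grid.createWriteStream(options, function (err, ws) {
  ws.on('close', function (f) {
    res.send(f._id);
  });
  ws.end(buf); // a Buffer can be written directly to a writable stream
});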
