Using the built-in streamWriter from the xmlbuilder package in Node.js

I want to create XML files, and for this I am using the xmlbuilder package.
The initial code works fine and looks like the following:
const builder = require('xmlbuilder');
const { Readable } = require('stream');

const xmlStructure = {
  // xml structure
};

const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const xml = root.end({ pretty: true });

const readableStream = new Readable();
const writableStream = file.createWriteStream(); // `file` is the target file handle, e.g. from a storage SDK

writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });

// Wrap the serialized XML string in a readable stream and pipe it into the file.
readableStream._read = () => {
  readableStream.push(xml);
  readableStream.push(null);
};
readableStream.pipe(writableStream);
But I saw in the package's wiki that there is a built-in streamWriter.
I would like to use it in the project and remove some of the custom code above.
I tried the following approach:
const xmlStructure = {
  // xml structure
};

const writableStream = file.createWriteStream();
const writer = xmlbuilder.streamWriter(writableStream);

writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });

const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const xml = root.end(writer);

const readableStream = new Readable();
readableStream.pipe(writableStream);
Unfortunately this didn't work. What am I doing wrong or forgetting?

The missing line of code was the following:
writableStream.end();
The streamWriter only writes to the underlying stream; it never ends it, so the 'finish' event could never fire. The leftover Readable from the first version is also no longer needed. So the end result looks like this:
const xmlStructure = {
  // xml structure
};

const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const writableStream = file.createWriteStream();
const writer = xmlbuilder.streamWriter(writableStream);

// Serialize the document straight into the writable stream...
root.end(writer);
// ...then close the stream so that 'finish' can fire.
writableStream.end();

writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });
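If the surrounding function has to wait for the write to complete before doing the DB work, the 'finish' event can also be promisified. A small sketch of that pattern, using the same writableStream:
await new Promise((resolve, reject) => {
  writableStream.on('finish', resolve).on('error', reject);
});
// the DB work can safely happen here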

Related

How can I use the Readable() constructor to read from a text file and write it with a Writable() constructor?

I am trying to read from a text file using the new Readable() constructor, but I don't know how to make the code receive it as a file path. I would like the result to be displayed in the console using the new Writable() constructor.
const fs = require('fs');
const { Readable } = require('stream');
const { Writable } = require('stream');

const userData = __dirname + '/files/data.txt';

const rStream = new Readable({
  read() {}
});

const wStream = new Writable({
  write(chunk, encoding, callback) {
    console.log("Readable data: ", chunk.toString());
    callback();
  }
});

rStream.pipe(wStream);
rStream.push(userData);

rStream.on('close', () => wStream.end());
wStream.on('close', () => console.log('No more data in file...'));
rStream.on('error', (err) => console.log('Readable error!', err.message));
wStream.on('error', (err) => console.log('Writable error!', err.message));

rStream.destroy();
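Note what the snippet above actually streams: rStream.push(userData) pushes the path string itself, not the file's contents, and rStream.destroy() tears the stream down immediately. A minimal sketch of one way to push the file's contents instead, keeping the custom Readable/Writable pair (same data.txt path assumed):
const fs = require('fs');
const { Readable, Writable } = require('stream');

const userData = __dirname + '/files/data.txt';

const rStream = new Readable({
  read() {} // pushing happens from the fs callback below
});

// Read the file asynchronously and feed its *contents* into the stream.
fs.readFile(userData, (err, contents) => {
  if (err) return rStream.destroy(err);
  rStream.push(contents);
  rStream.push(null); // end-of-data, so downstream 'finish'/'close' fire naturally
});

const wStream = new Writable({
  write(chunk, encoding, callback) {
    console.log('Readable data: ', chunk.toString());
    callback();
  }
});

rStream.pipe(wStream);
wStream.on('close', () => console.log('No more data in file...'));
rStream.on('error', (err) => console.log('Readable error!', err.message));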

Issue with unit testing an unzip method (node/jest)

I have a small function designed to unzip a file using 'unzipper' and extract it to a given location.
When unit testing with jest, the function times out.
See the code below:
exports.unzipFile = async (folderPath) => {
  return new Promise((resolve, reject) => {
    fs.createReadStream(folderPath)
      .pipe(unzipper.Extract({ path: tmpPath + path.parse(folderPath).name }))
      .on('close', () => resolve())
      .on('error', (error) => reject(error))
  })
}
The function itself works as expected. I have tried some changes to the function, but they seem to break it. I need this function to execute fully, as the unzipped file is relied on later in the program.
The program is written in Node 16.
Any help would be appreciated, thanks.
EDIT: this is my current unit test (I have tried various things):
const { PassThrough } = require('stream')
const os = require('os');
const unzipper = require("unzipper")
const fs = require("fs")

let tmpdir, mockReadStream

beforeEach(() => {
  tmpdir = os.tmpdir() + "/uploadFolder/";
  if (!fs.existsSync(tmpdir)) {
    fs.mkdirSync(tmpdir);
  }
  fs.writeFileSync(tmpdir + "tempfile.zip", "file to be used")
  mockReadStream = new PassThrough()
})

afterEach(() => {
  // Restore mocks
  jest.clearAllMocks()
})

describe('Test helper.js unzip method', () => {
  test('should be able to unzip file ', async () => {
    jest.isolateModules(() => {
      helper = require('helper')
    })
    const result = await helper.unzipFile(tmpdir + "tempfile.zip")
    console.log(result)
  })
})
You need to make some minor changes in order to test it. (Note that the test above writes a plain string to tempfile.zip; that is not a valid zip archive, so the extraction can never complete, which is why the test times out.)
In helper.js:
Change the signature of the unzipFile function to unzipFile = async (zipFilePath, outputDir).
const unzipper = require("unzipper")
const fs = require("fs")

const unzipFile = async (zipFilePath, outputDir) => {
  return new Promise((resolve, reject) => {
    fs
      .createReadStream(zipFilePath)
      .pipe(unzipper.Extract({ path: outputDir }))
      .on('close', () => resolve())
      .on('error', (error) => reject(error))
  })
}

module.exports = { unzipFile }
Then you need to create a simple zip file and add it locally to your test dir.
In my example I created a simple zip file, 1.txt.zip, that contains a text file with the string hello.
Then your test should look like this:
const path = require("path");
const shelljs = require("shelljs");
const helper = require("./helper")
const fs = require("fs")

const testFilesDir = path.join(__dirname, "test");
const outPutDir = path.join(__dirname, "output")
console.log(testFilesDir)

beforeAll((done) => {
  shelljs.mkdir("-p", testFilesDir);
  shelljs.mkdir("-p", outPutDir);
  shelljs.cp("-r", path.join(__dirname, "*.zip"), testFilesDir);
  done();
});

afterAll((done) => {
  shelljs.rm("-rf", testFilesDir);
  done();
});

describe('Test helper.js unzip method', () => {
  test('should be able to unzip file ', async () => {
    await helper.unzipFile(path.join(testFilesDir, "1.txt.zip"), outPutDir)
    const files = fs.readdirSync(outPutDir)
    expect(files.length).toEqual(1);
    expect(files[0]).toEqual("1.txt");
    const text = fs.readFileSync(path.join(outPutDir, "1.txt"), "utf8")
    expect(text).toEqual("hello");
  })
})

How to Process an Uploaded Image with GraphQL Apollo and Sharp in Node.js?

I have a GraphQL mutation that receives an image from the frontend, which is then processed and optimized on my server.
But I can't figure out how to pass my image to sharp.
Here is my code:
const Mutation = {
  createImage: async (_, { data }) => {
    const { file } = data
    const image = await file
    console.log(image)
    const sharpImage = sharp(image)
  }
}
I know the code doesn't work, and sharp throws an error saying that the input is invalid. So how can I work with createReadStream to create an instance of sharp?
When I console.log(image), here is what I see:
image {
  filename: 'image.png',
  mimetype: 'image/png',
  encoding: '7bit',
  createReadStream: [Function: createReadStream]
}
Thanks a lot in advance!
So I figured out the solution to my question.
First, I found out that I needed to add scalar Upload to my typeDefs.
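For reference, a minimal sketch of what that typeDefs change might look like; the ImageInput shape here is a hypothetical matching the resolver below:
const { gql } = require('apollo-server');

const typeDefs = gql`
  scalar Upload

  type Query {
    _empty: String # placeholder; a schema needs a Query type
  }

  input ImageInput {
    file: Upload!
  }

  type Mutation {
    createImage(data: ImageInput!): Boolean
  }
`;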
Then, I needed to add a resolver for Upload like this:
const { GraphQLUpload } = require('graphql-upload');

const server = new ApolloServer({
  resolvers: {
    Upload: GraphQLUpload,
  }
})
Then, in my resolver, here is what I had to do:
const sharp = require('sharp');

// Utility function that promisifies the stream and collects the image into a buffer,
// which is then passed to sharp
const streamToString = (stream) => {
  const chunks = [];
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
    stream.on('error', (err) => reject(err));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  })
}

const Mutation = {
  createImage: async (_, { data }) => {
    const { file } = data
    const { createReadStream } = await file
    const image = await streamToString(createReadStream())
    const sharpImage = sharp(image)
  }
}
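From there, sharpImage is an ordinary sharp instance, so the optimized output can be produced with the usual chain. A small sketch; the dimensions, quality, and output path are placeholder assumptions:
const optimizedBuffer = await sharp(image)
  .resize(800, 800, { fit: 'inside' }) // placeholder dimensions
  .jpeg({ quality: 80 })
  .toBuffer();

// or write straight to disk instead:
// await sharp(image).resize(800).toFile('uploads/optimized.jpg');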

node javascript file upload doesn't work on remote server

On my local dev machine accessing localhost, the following code works beautifully, even with network settings changed to "Slow 3G." However, when running on my VPS, it fails to process the file on the server. Here are two different code blocks I tried (again, both work without issue on my local dev machine accessing localhost):
profilePicUpload: async (parent, args) => {
  const file = await args.file;
  const fileName = `user-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  file
    .createReadStream()
    .pipe(createWriteStream(tmpFilePath))
    .on('finish', () => {
      jimp
        .read(`tmp/${fileName}`)
        .then(image => {
          image.cover(300, 300).quality(60);
          image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
        })
        .catch(error => {
          throw new Error(error);
        });
    });
}
It seems like this code block doesn't wait long enough for the file upload to finish, since when I check the storage location on the VPS, I see only a partial file.
I also tried the following with no luck:
profilePicUpload: async (parent, args) => {
  const { createReadStream } = await args.file;
  let data = '';
  const fileStream = await createReadStream();
  fileStream.setEncoding('binary');

  // UPDATE: 11-2
  let i = 0;
  fileStream.on('data', chunk => {
    console.log(i);
    i++;
    data += chunk;
  });
  fileStream.on('error', err => {
    console.log(err);
  });
  // END UPDATE

  fileStream.on('end', () => {
    const file = Buffer.from(data, 'binary');
    jimp
      .read(file)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .catch(error => {
        throw new Error(error);
      });
  });
}
With this code, I don't even get a partial file.
jimp is a JS library for image manipulation.
If anyone has any hints to get this working properly, I'd appreciate it very much. Please let me know if I'm missing some info.
I was able to figure out a solution by referring to this article: https://nodesource.com/blog/understanding-streams-in-nodejs/
Here is my final, working code:
const { createWriteStream, unlink } = require('fs');
const path = require('path');
const { once } = require('events');
const { promisify } = require('util');
const stream = require('stream');
const jimp = require('jimp');

profilePicUpload: async (parent, args) => {
  // have to wait while the file is uploaded
  const { createReadStream } = await args.file;
  const fileStream = createReadStream();
  const fileName = `user-${args.uid}-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  const tmpFileStream = createWriteStream(tmpFilePath, {
    encoding: 'binary'
  });
  const finished = promisify(stream.finished);

  fileStream.setEncoding('binary');

  // apparently async iterators are the way to go:
  // write each chunk, pausing whenever the writable's buffer is full
  for await (const chunk of fileStream) {
    if (!tmpFileStream.write(chunk)) {
      await once(tmpFileStream, 'drain');
    }
  }

  tmpFileStream.end(() => {
    jimp
      .read(`tmp/${fileName}`)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .then(() => {
        unlink(tmpFilePath, error => {
          console.log(error);
        });
      })
      .catch(error => {
        console.log(error);
      });
  });

  await finished(tmpFileStream);
}
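As a side note, the manual write/drain loop can also be expressed with Node's stream.pipeline, which handles backpressure and error propagation internally. A minimal sketch under the same setup:
const { pipeline } = require('stream/promises'); // Node 15+; use promisify(stream.pipeline) on older versions

// Copy the upload into the temp file; backpressure is handled for us.
await pipeline(createReadStream(), createWriteStream(tmpFilePath));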

Streaming image data from node server results in corrupted file (gridfs-stream)

I decided to post this after extensive searching here (1, 2, 3) and here (1, 2) and many, many other related posts. I am losing hope, but will not give up that easily :)
I'm using multer to upload a PNG image to a mongo database:
const crypto = require('crypto');
const path = require('path');
const multer = require('multer');
const { GridFsStorage } = require('multer-gridfs-storage');

const storage = new GridFsStorage({
  url: 'mongodb://my_database:thisIsfake@hostName/my_database',
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => { // generate unique names to avoid duplicates
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'media',
          metadata: {
            clientId: req.body.client_id // reference to the client to whom the image belongs
          }
        };
        resolve(fileInfo);
      });
    });
  }
});

const upload = multer({ storage }).single('image');
Then I create a stream and pipe it to the response:
// mongoose and Grid (gridfs-stream) are required at the top of the module
loader: function (req, res) {
  var conn = mongoose.createConnection('mongodb://my_database:thisIsfake@hostName/my_database');
  conn.once('open', function () {
    var gfs = Grid(conn.db, mongoose.mongo);
    gfs.collection('media');
    gfs.files.find({ metadata: { clientId: req.body.id } }).toArray(
      (err, files) => {
        if (err) throw err;
        if (files) {
          const readStream = gfs.createReadStream(files[0].filename); // testing only with the first file in the array
          console.log(readStream);
          res.set('Content-Type', files[0].contentType)
          readStream.pipe(res);
        }
      });
  });
}
A Postman POST request to the endpoint results in the response body being displayed as an image file.
In the front end I pass the response into a File object, read it, and save the result in the src attribute of an img:
findAfile() {
  let Data = {
    id: this.$store.state.StorePatient._id,
  };
  console.log(this.$store.state.StorePatient._id);
  visitAxios.post('http://localhost:3000/client/visits/findfile', Data)
    .then(res => {
      const reader = new FileReader();
      let file = new File([res.data], "image.png", { type: "image/png" });
      console.log('this is file: ', file);
      reader.readAsDataURL(file); // encode as a data-URL string
      reader.onload = function () {
        const img = new Image();
        img.src = reader.result;
        document.getElementById('imgContainer').appendChild(img);
      };
    })
    .catch(err => console.error(err));
}
My File object is similar to the one I get when using a file input, only bigger than the original file.
When inspecting the element, the data URI is where it should be, but it's different from the data URI the original image produces through the file input.
Again, when I want to display the image through an input element:
onFileSelected(event) {
  this.file = event.target.files[0];
  this.fileName = event.target.files[0].name;
  const reader = new FileReader();
  console.log(this.file);
  reader.onload = function () {
    const img = new Image();
    img.src = reader.result;
    document.getElementById('imageContainer').appendChild(img);
  };
  reader.readAsDataURL(this.file);
}
I get the image correctly. But when reading it from the response, it is corrupted.
Postman gets it right, so there must be something wrong with my front-end code, right? How do I pass this gfs stream to my HTML?
I managed to make a POST request to fetch an image from MongoDB and save it in the server dir:
const readStream = gfs.createReadStream(files[0].filename);
const wstream = fs.createWriteStream(path.join(__dirname,"uploads", "fileToGet.jpg"));
readStream.pipe(wstream);
Then I just made a simple GET request that serves the file from an absolute path, and finally deletes the file after a successful response:
app.get('/image', function (req, res) {
  var file = path.join(dir, 'fileToGet.jpg');
  if (file.indexOf(dir + path.sep) !== 0) {
    return res.status(403).end('Forbidden');
  }
  var type = mime[path.extname(file).slice(1)] || 'text/plain'; // `mime` is a lookup map of extensions to content types
  var s = fs.createReadStream(file);
  s.on('open', function () {
    res.set('Content-Type', type);
    s.pipe(res);
  });
  s.on('end', function () {
    fs.unlink(file, () => {
      console.log("file deleted");
    })
  });
  s.on('error', function () {
    res.set('Content-Type', 'text/plain');
    res.status(404).end('Not found');
  });
});
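For what it's worth, the corruption in the front-end path is consistent with axios decoding the binary body as text, which is its default behaviour; asking axios for binary data directly is worth trying. A small sketch against the same endpoint:
visitAxios.post('http://localhost:3000/client/visits/findfile', Data, {
  responseType: 'blob' // receive raw bytes instead of a decoded string
})
  .then(res => {
    const img = new Image();
    img.src = URL.createObjectURL(res.data); // res.data is already a Blob
    document.getElementById('imgContainer').appendChild(img);
  });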
