How to send multiple image objects to the client in Node/Express? - node.js

I am new to Angular. I have a set of images and I want to display them in the client browser when the page loads.
Is this possible?
I am able to send a single file, but not multiple files, and now I am stuck. Can someone help me?
Thanks for your help.
router.get('/getList', function(req, res, next) {
    var fileNames = [];
    fileNames = readDir.readSync('/NodeWorkspace/uploads/output/', ['**.png']);
    var data = {};
    fileNames.forEach(function(filename) {
        filepath = path.join(__dirname, '../../uploads/output') + '/' + filename;
        fs.readFile(path.join(__dirname, '../../uploads/output') + '/' + filename, function(err, content) {
            if (!err) {
                console.log(content);
            }
        });
    });
    //res.sendFile(path.join(__dirname,'../../uploads/output/', fileNames[0]));
    response.data = fileNames;
    res.json(response);
});

Convert the fs callback to a promise, read all the files in parallel, and send the response once they are all done.
const fs = require('fs');
const path = require('path');
const util = require('util');
const readDir = require('readdir'); // same directory-listing module used in the question
const db = require('../db');

const readFile = util.promisify(fs.readFile);

router.get('/getList', function (req, res, next) {
    // NOTE: prefer an async listing function over the sync one in a request handler
    const fileNames = readDir.readSync('/NodeWorkspace/uploads/output/', ['**.png']);

    const files = fileNames.map(function (filename) {
        const filepath = path.join(__dirname, '../../uploads/output', filename);
        return readFile(filepath); // each read returns a promise
    });

    Promise.all(files).then(contents => {
        res.json({ data: contents }); // every file has been read at this point
    }).catch(error => {
        res.status(400).json({ error: error.message });
    });
});
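For reference, here is a sketch of the same idea using fs.promises instead of util.promisify, base64-encoding the buffers so the client can drop them straight into data: URLs (the directory path, router, and field names are assumptions carried over from the question):

const fs = require('fs').promises;
const path = require('path');

router.get('/getList', async function (req, res, next) {
    try {
        const dir = path.join(__dirname, '../../uploads/output');
        // list the directory asynchronously and keep only .png files
        const fileNames = (await fs.readdir(dir)).filter(name => name.endsWith('.png'));
        // read every file in parallel
        const contents = await Promise.all(
            fileNames.map(name => fs.readFile(path.join(dir, name)))
        );
        // base64-encode each buffer so it can be rendered as <img src="data:..."> on the client
        res.json({
            data: fileNames.map((name, i) => ({
                name,
                image: 'data:image/png;base64,' + contents[i].toString('base64')
            }))
        });
    } catch (err) {
        next(err);
    }
});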

Related

How to store the readStream files of Node.js into Redis and also how to retrieve the stored readStream files from Redis?

I tried converting the readStream (image) to a string and then storing it in Redis, then retrieving the string from Redis and converting it back to a readStream, but it didn't work.
function getFile(fileKey) {
    console.log(fileKey);
    const downloadParams = {
        Key: fileKey,
        Bucket: bucketName,
    };
    return s3.getObject(downloadParams).createReadStream();
}

exports.getFile = getFile;
For converting stream to string I'm using stream-to-string. It gets converted and stored in Redis.
const { getFile } = require("../s3");
const redis = require("redis");
const client = redis.createClient();
var toString = require("stream-to-string");

exports.getFileFromS3Controller = async (req, res) => {
    console.log(req.params);
    const path = req.params.path;
    const key = req.params.key;
    const readStream = getFile(path + "/" + key);
    toString(readStream).then(function (msg) {
        // Set data to Redis
        client.setex(key, 3600, msg);
    });
    readStream.pipe(res);
};
When retrieving from Redis, I am not getting the file back.
const redis = require("redis");
const client = redis.createClient(null, null, { detect_buffers: true });
const Readable = require("stream").Readable;

// Cache middleware
function cache(req, res, next) {
    const { path, key } = req.params;
    client.get(key, (err, data) => {
        if (err) throw err;
        if (data !== null) {
            var s = new Readable();
            s.push(data);
            s.push(null);
            s.pipe(res);
        } else {
            next();
        }
    });
}

router.get("/:path/:key", cache, getFileFromS3Controller);
You are not calling next(). Another mistake is that the stream is not being saved anywhere on req, so you cannot access it later from the controller. From what I can see, you are writing it directly to res, which is a problem because after that you cannot use res to send anything else.
Here is the code (not tested):
exports.getFileFromS3Controller = (req, res) => {
    if (req.fileStream) {
        // already served from the Redis cache by the middleware below
        req.fileStream.pipe(res);
        return;
    }
    console.log(req.params);
    const path = req.params.path;
    const key = req.params.key;
    const readStream = getFile(path + "/" + key);
    toString(readStream).then(function (msg) {
        // Set data to Redis
        client.setex(key, 3600, msg);
        // Convert the string back into a readable stream
        const readable = new Readable();
        readable.push(msg);
        readable.push(null);
        readable.pipe(res);
    });
};

function cache(req, res, next) {
    const { path, key } = req.params;
    client.get(key, (err, data) => {
        if (err) throw err;
        if (data !== null) {
            var s = new Readable();
            s.push(data);
            s.push(null);
            req.fileStream = s;
        }
        next();
    });
}
Edit: I fixed a mistake in my answer because a Readable stream cannot be rewound.
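One caveat the answer does not address: image data is binary, so round-tripping it through a plain JavaScript string can corrupt it. A minimal sketch of a binary-safe variant (an assumption, not part of the original answer), reusing the answer's client, key, readStream, req, and next, stores the data base64-encoded instead:

// In the controller, on a cache miss: buffer the S3 stream and store it base64-encoded
const chunks = [];
readStream.on('data', (chunk) => chunks.push(chunk));
readStream.on('end', () => {
    client.setex(key, 3600, Buffer.concat(chunks).toString('base64'));
});
readStream.pipe(res);

// In the cache middleware: decode back into a Buffer before streaming it out
client.get(key, (err, data) => {
    if (data !== null) {
        const s = new Readable();
        s.push(Buffer.from(data, 'base64'));
        s.push(null);
        req.fileStream = s;
    }
    next();
});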

Problem changing a request from GET to POST and changing the upload file destination dynamically in multer (Node.js)

Hi, I am working with the multer package in Node.js to upload files.
I made the destination dynamic, but the request uses GET and I am having trouble changing it to POST.
It only works for me with a GET request.
What does it do now?
It saves the file in a new folder called flow and takes the id from the client.
I need to change it to POST and get the id from the POST request, but I want to move the logic into the route and keep the rest the same. Thanks.
My code:
fileupload service:
const path = require("path");
const multer = require("multer");
const crypto = require("crypto");
const models = require("../models");
const fsex = require("fs-extra");

const today = new Date();
const date =
    today.getFullYear() + "-" + (today.getMonth() + 1) + "-" + today.getDate();

const storage = multer.diskStorage({
    destination: (req, file, callback) => {
        let flowID = req.params.flowID;
        let path = `./uploads/flow/${flowID}`;
        fsex.mkdirsSync(path);
        callback(null, path);
    },
    filename: (req, file, cb) => {
        crypto.pseudoRandomBytes(8, (err, raw) => {
            if (err) return cb(err);
            cb(
                null,
                date + "_" + raw.toString("hex") + "_" + path.extname(file.originalname)
            );
        });
    }
});

const upload = multer({ storage: storage }).single("upload");

module.exports = {
    upload: upload,
};
my route:
router.get("/uploadfile/:flowID", upload, (req, res, next) => {
const file = req.file;
const flowID = req.params.flowID;
if (!file) {
const error = new Error("Please upload a file");
error.httpStatusCode = 400;
return next(error);
} else {
createFileInDB(file.originalname, flowID, file.filename)
.then(() => {
console.log("File Created");
res.json(file);
})
.catch(err => {
res.status(500).send(err);
});
}
});
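A possible fix, sketched but not tested: Express matches the HTTP verb independently of the URL parameters, so the same middleware and handler should keep working if the route is declared with router.post instead of router.get (this assumes the upload middleware and createFileInDB helper from the code above). Multer's destination callback still reads req.params.flowID, so the dynamic folder logic does not need to change; the client just sends the file as multipart/form-data in a POST to /uploadfile/<flowID>.

router.post("/uploadfile/:flowID", upload, (req, res, next) => {
    const file = req.file;
    const flowID = req.params.flowID; // multer's destination callback also reads req.params.flowID
    if (!file) {
        const error = new Error("Please upload a file");
        error.httpStatusCode = 400;
        return next(error);
    }
    createFileInDB(file.originalname, flowID, file.filename)
        .then(() => res.json(file))
        .catch(err => res.status(500).send(err));
});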

Piping a multipart/form-data request to another request while changing some fields names in Node.js

I want to stream a file upload request in multipart/form-data to another server and change some fields name at the same time.
I don't want to store temporarily a file on disk and don't want to store the file completely in memory either.
I tried to use multer, busboy and multiparty. I think I got closer by using custom Transform streams but it is not working yet.
const express = require('express');
const request = require('request');
const { Transform } = require('stream');

const router = express.Router();

class TransformStream extends Transform {
    _transform(chunk, encoding, callback) {
        // here I tried to manipulate the chunk
        this.push(chunk);
        callback();
    }

    _flush(callback) {
        callback();
    }
}

router.post('/', function pipeFile(req, res) {
    const transformStream = new TransformStream();
    req.pipe(transformStream).pipe(request.post('http://somewhere.com'));
    res.sendStatus(204);
});
I tried to manipulate the chunks in _transform without success (EPIPE). It feels quite hacky; are there any better solutions?
Here is a solution using replacestream along with content-disposition.
const replaceStream = require('replacestream');
const contentDisposition = require('content-disposition');

router.post('/', function pipeFile(req, res) {
    let changeFields = replaceStream(/Content-Disposition:\s+(.+)/g, (match, p1) => {
        // Parse header
        let {type, parameters} = contentDisposition.parse(p1);
        // Change the desired field
        parameters.name = "foo";
        // Prepare replacement
        let ret = `Content-Disposition: ${type}`;
        for (let key in parameters) {
            ret += `; ${key}="${parameters[key]}"`;
        }
        return ret;
    });

    req.pipe(changeFields)
        .pipe(request.post('http://somewhere.com'))
        .on('end', () => {
            res.sendStatus(204);
        });
});
This worked for a single file multipart upload using express, multiparty, form-data, pump and got.
const stream = require('stream');
const express = require('express');
const multiparty = require("multiparty");
const got = require("got");
const FormData = require('form-data');
const pump = require('pump');

const app = express();

app.post('/upload', (req, res) => {
    const url = "<<multipart image upload endpoint>>";
    var form = new multiparty.Form();

    form.on("part", function(formPart) {
        var contentType = formPart.headers['content-type'];
        var formData = new FormData();
        formData.append("file", formPart, {
            filename: formPart.filename,
            contentType: contentType,
            knownLength: formPart.byteCount
        });

        const resultStream = new stream.PassThrough();
        try {
            // Pipe the form data to the image upload endpoint stream and the result to the result stream
            pump(formData, got.stream.post(url, {headers: formData.getHeaders(), https: {rejectUnauthorized: false}}), resultStream, (err) => {
                if (err) {
                    res.send(err); // was "error", which is undefined in this callback
                } else {
                    // Pipe the result of the image upload endpoint to the response when there are no errors.
                    resultStream.pipe(res);
                }
                resultStream.destroy();
            });
        } catch (err) {
            resultStream.destroy();
            console.log(err);
        }
    });

    form.on("error", function(error) {
        console.log(error);
    });

    form.parse(req);
});

How to return listObjectsV2 as json

I'm trying to call the AWS S3 bucket and get my list of image URLs, but it's not retrieving them.
I'm using Express for the back end and React for the front end. When I manually enter static data for the JSON array, it does show on the server. I'm using listObjectsV2 to get the list.
const express = require('express');
const app = express();
const aws = require('aws-sdk');

const s3 = new aws.S3({
    accessKeyId: "secretId",
    secretAccessKey: "secretKey",
    region: 'eu-west-2',
    Bucket: 'my-aws-bucket'
});

let params = {
    Bucket: "my-aws-bucket",
    MaxKeys: 6
};

app.get('/api/images', (req, res) => {
    let images = [];
    s3.listObjectsV2(params, function(err, data) {
        if (err) {
            console.log("Error", err);
        } else {
            var href = this.request.httpRequest.endpoint.href;
            var bucketUrl = href + "my-aws-bucket" + '/';
            data.Contents.map(function(photo) {
                var photoKey = photo.Key;
                var photoUrl = bucketUrl + encodeURIComponent(photoKey);
                images.push(photoUrl);
            });
        }
    });
    res.json(images);
});

const port = 5001;
app.listen(port, () => console.log(`Server is on port: ${port}`));
I expected the empty images array that I declared to be filled with image URLs such as
[https://s3.eu-west-2.amazonaws.com/my-aws-bucket/images/image1.png, https://s3.eu-west-2.amazonaws.com/my-aws-bucket/images/image2.png]
However, nothing is being pushed into the array. If I manually add items to the images array, like let images = [1,2,3,4];, then when I go to http://localhost:5001/api/images it does show [1,2,3,4], but nothing shows with the code above, which should work.
I worked it out in the end. I just had to move res.json(images); to just after the closing else {} block, inside the callback, so this is the fixed code:
s3.listObjectsV2(params, function(err, data) {
    if (err) {
        console.log("Error", err);
    } else {
        var href = this.request.httpRequest.endpoint.href;
        var bucketUrl = href + "my-aws-bucket" + '/';
        data.Contents.map(function(photo) {
            var photoKey = photo.Key;
            var photoUrl = bucketUrl + encodeURIComponent(photoKey);
            images.push(photoUrl);
        });
    }
    res.json(images);
});
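The underlying issue is that s3.listObjectsV2 is asynchronous, so the original code sent res.json(images) before the callback had filled the array; moving res.json inside the callback fixes that. For reference, a hedged sketch of the same route using the AWS SDK v2 .promise() helper instead of a callback (the bucket URL is hardcoded here from the expected output above):

app.get('/api/images', async (req, res) => {
    try {
        const data = await s3.listObjectsV2(params).promise();
        const bucketUrl = 'https://s3.eu-west-2.amazonaws.com/my-aws-bucket/';
        // build one public URL per object key returned by the listing
        const images = data.Contents.map(photo => bucketUrl + encodeURIComponent(photo.Key));
        res.json(images);
    } catch (err) {
        console.log("Error", err);
        res.status(500).json({ error: err.message });
    }
});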

How do I render an EJS template file in Node.js?

I'm using Node.js and trying to render an EJS template file. I figured out how to render strings:
var http = require('http');
var ejs = require('ejs');

var server = http.createServer(function(req, res) {
    res.end(ejs.render('Hello World'));
});

server.listen(3000);
How can I render an EJS template file?
There is a function in EJS to render files, you can just do:
ejs.renderFile(__dirname + '/template.ejs', function(err, data) {
    console.log(err || data);
});
Source: Official EJS documentation
var fs = require('fs');
var templateString = fs.readFileSync('template.ejs', 'utf-8');
and then you do your thing:
var server = http.createServer(function(req, res) {
    res.end(ejs.render(templateString));
});
All you have to do is compile the file as a string (with optional local variables), like so:
var fs = require('fs'), ejs = require('ejs'), http = require('http'),
    server, filePath;

filePath = __dirname + '/sample.html'; // this is from your current directory

fs.readFile(filePath, 'utf-8', function(error, content) {
    if (error) { throw error; }
    // start the server once you have the content of the file
    http.createServer(function(req, res) {
        // render the file using some local params
        res.end(ejs.render(content, {
            users: [
                { name: 'tj' },
                { name: 'mape' },
                { name: 'guillermo' }
            ]
        }));
    }).listen(3000);
});
#ksloan's answer is really good. I also had the same use case and did a little bit of digging. The function renderFile() is overloaded. The one you will need most often is:
renderFile(path: string, data, cb)
for example:
ejs.renderFile(__dirname + '/template.ejs', dataForTemplate, function(err, data) {
    console.log(err || data);
});
where dataForTemplate is an object containing values that you need inside the template.
There's a synchronous version of this pattern that tightens it up a little more.
var server = http.createServer(function(req, res) {
    var filePath = __dirname + '/sample.html';
    var template = fs.readFileSync(filePath, 'utf8');
    res.end(ejs.render(template, {}));
});
Note the use of readFileSync(). If you specify the encoding (utf8 here), the function returns a string containing your template.
#ksloan's answer should be the accepted one; it uses the EJS function intended precisely for this purpose.
Here is an example of how to use with Bluebird:
var Promise = require('bluebird');
var path = require('path');
var ejs = Promise.promisifyAll(require('ejs'));

ejs.renderFileAsync(path.join(__dirname, 'template.ejs'), {context: 'my context'})
    .then(function (tpl) {
        console.log(tpl);
    })
    .catch(function (error) {
        console.log(error);
    });
For the sake of completeness here is a promisified version of the currently accepted answer:
var ejs = require('ejs');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');

fs.readFileAsync(path.join(__dirname, 'template.ejs'), 'utf-8')
    .then(function (tpl) {
        console.log(ejs.render(tpl, {context: 'my context'}));
    })
    .catch(function (error) {
        console.log(error);
    });
Use the ejs.renderFile(filename, data) function with async/await.
To render HTML files.
const renderHtmlFile = async () => {
    try {
        // Parameters available inside the HTML template
        let params = {firstName: 'John', lastName: 'Doe'};
        let html = await ejs.renderFile(__dirname + '/template.html', params);
        console.log(html);
    } catch (error) {
        console.log("Error occurred: ", error);
    }
};
To render EJS files.
const renderEjsFile = async () => {
    try {
        // Parameters available inside the EJS template
        let params = {firstName: 'John', lastName: 'Doe'};
        // note: do not name this variable "ejs", or it will shadow the ejs module itself
        let html = await ejs.renderFile(__dirname + '/template.ejs', params);
        console.log(html);
    } catch (error) {
        console.log("Error occurred: ", error);
    }
};
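Not covered by the answers above, but for completeness: when the app is built on Express, EJS can also be registered as the view engine and rendered with res.render. A minimal hedged sketch (the views directory and template name are assumptions):

const express = require('express');
const app = express();

app.set('view engine', 'ejs');          // let res.render use EJS
app.set('views', __dirname + '/views'); // assumed location of template.ejs

app.get('/', (req, res) => {
    // renders views/template.ejs with these locals
    res.render('template', { firstName: 'John', lastName: 'Doe' });
});

app.listen(3000);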
