I am new to Node.js. I know we can stream data to the client by using the pipe() method.
Here is a snippet of the code:
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirarr = decodedURI.split('/');
    var dirpath = path.join(dir, dirarr.slice(2).join("/"));
    console.log("dirpath: " + dirpath);

    var archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.directory(dirpath, 'new-subdir');
    archive.on('error', function (err) {
        throw err;
    });
    archive.pipe(res);
    archive.on('finish', function () {
        console.log("finished zipping");
    });
    archive.finalize();
});
When I make a GET request, the zipped file is downloaded, but without any extension. I know it's because I am piping a write stream into the response. Is there any way to pipe it with a .zip extension? Or how can I send the zip file without building the zip file on disk?
You can use res.attachment() to set both the filename of the download and its MIME type:
router.get('/archive/*', function (req, res) {
    res.attachment('archive.zip');
    ...
});
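Putting that together with the original handler might look roughly like this (a sketch reusing the question's archiver setup and its dir variable; the error handling is simplified):
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirpath = path.join(dir, decodedURI.split('/').slice(2).join('/'));

    // Sets Content-Disposition (with the filename) and a zip Content-Type in one call
    res.attachment('archive.zip');

    var archive = archiver('zip', { zlib: { level: 9 } });
    archive.on('error', function (err) {
        res.status(500).end();
    });
    archive.directory(dirpath, 'new-subdir');
    archive.pipe(res);
    archive.finalize();
});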
Another way is to set the headers before piping:
res.setHeader("Content-Type", "application/zip");
res.setHeader("Content-Disposition", "attachment; filename=download.zip");
For the given code:
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirarr = decodedURI.split('/');
    var dirpath = path.join(dir, dirarr.slice(2).join("/"));

    var archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.directory(dirpath, 'new-subdir');
    archive.on('error', function (err) {
        throw err;
    });

    // No file is written to disk; the archive is streamed straight into the response.
    res.setHeader("Content-Type", "application/zip");
    res.setHeader("Content-Disposition", "attachment; filename=download.zip");
    archive.pipe(res);
    archive.finalize();
});
I am working on an API to store a file uploaded by the user.
function uploadPhoto(req, res) {
    var imagedata = new Buffer('');
    req.body.on('data', function (chunk) {
        imagedata = Buffer.concat([imagedata, chunk]);
    });
    req.body.on('end', function (chunk) {
        fs.writeFile('success.jpeg', imagedata, 'binary', function (err) {
            if (err) throw err;
            console.log('File saved.');
        });
    });
}
There is an error when I use req.body.on('data').
The data from Postman:
When I print out the value of req.body with console.log("message: " + req.body), it is a string with the following value:
I tried to write it to a file using a Buffer like this:
var writeFile = function (type, cb) {
    var data = new Buffer(req.body, type);
    fs.writeFile(type + '.jpeg', data, type, function (err) {
        cb(null, data.length);
    });
}

async.parallel([
    writeFile.bind(null, 'binary'),
    writeFile.bind(null, 'utf8'),
    writeFile.bind(null, 'ascii'),
    writeFile.bind(null, 'ucs2'),
    writeFile.bind(null, 'base64')
], function (err, results) {
    response.status(200).send({});
})
This creates several .jpeg files of different sizes, but none of them can be opened as an image.
How can I store this image from the user?
Thank you very much.
This looks like a good case for streams.
function uploadPhoto(req, res) {
    var file = fs.createWriteStream(__dirname + '/success.jpeg')
    req.pipe(file).on('error', function(err) { console.log(err) })
}
Headers could also help determine what file type and character encoding it has.
var file = fs.createWriteStream(__dirname + '/success.jpeg', {defaultEncoding: req.headers.encoding || 'utf8'})
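As a rough sketch of that idea (my assumptions: a tiny Content-Type mapping and illustrative status codes), the header can drive the file extension and the response can be ended once the write stream finishes:
function uploadPhoto(req, res) {
    // Pick an extension from the Content-Type header (minimal mapping, just for illustration)
    var ext = req.headers['content-type'] === 'image/png' ? '.png' : '.jpeg';
    var file = fs.createWriteStream(__dirname + '/success' + ext);

    req.pipe(file)
        .on('finish', function () { res.status(201).end(); }) // write stream fully flushed
        .on('error', function (err) {
            console.log(err);
            res.status(500).end();
        });
}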
I would like to move a small image from one server to another (both running Node). My searching so far hasn't turned up much, and this post remains unanswered.
As I started experimenting, I wrote the following on the first server:
app.post("/move_img", function(req, res) {
console.log("post handled");
fs.readFile(__dirname + "/img_to_move.jpg", function(err, data) {
if (err) throw err;
console.log(data);
needle.post(server2 + "/post_img", {
data: data,
name : "test.jpg"
}, function(result) {
console.log(result);
res.send("ok");
});
});
});
This part seems to be working, as I could write the data back to a file on the same server (using fs.writeFile) and recreate the image.
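The local check looked roughly like this, inside the fs.readFile callback (a sketch; the output file name is illustrative):
fs.writeFile(__dirname + "/img_copy.jpg", data, function (err) {
    if (err) throw err;
    console.log("image recreated locally");
});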
Now, as I try to handle the POST on the other server, I have a problem.
Server 2:
app.post('/post_img', [ multer({ dest: './uploads/images' }), function(req, res) {
    console.log("body ", req.body)   // form fields
    console.log("files ", req.files) // form files
    res.send("got it");
}]);
This way I get an empty object in req.files and the following in req.body: { 'headers[Content-Type]': 'application/x-www-form-urlencoded', 'headers[Content-Length]': '45009' }
I think I could use busboy as an alternative, but I can't make it work. Any advice or tutorial would be welcome.
I solved my problem by using the following code.
Server 1 (using needle):
app.post("/move_img", function(req, res) {
console.log("post handled")
var data = {
image:{
file: __dirname + "/img_to_move.jpg",
content_type: "image/jpeg"}
}
needle.post(server2 + "/post_img", data, {
multipart: true
}, function(err,result) {
console.log("result", result.body);
});
})
Server 2:
app.use('/post_img', multer({
    dest: './uploads/images',
    rename: function(fieldname, filename) {
        return filename;
    },
    onFileUploadStart: function(file) {
        console.log(file.originalname + ' is starting ...')
    },
    onFileUploadComplete: function(file) {
        console.log(file.fieldname + ' uploaded to ' + file.path)
    }
}));

app.post('/post_img', function(req, res) {
    console.log(req.files);
    res.send("File uploaded.");
});
An alternative for server 1 is the following (using the form-data module):
var FormData = require('form-data');

var form = new FormData();
form.append('name', 'imgTest.jpg');
form.append('my_file', fs.createReadStream(__dirname + "/img_to_move.jpg"));
form.submit(frontend + "/post_img", function(err, result) {
    // result is the response object (http.IncomingMessage)
    console.log(result);
});
I'd simply read your file from the first server with the function readFile() and then write it to the other server with the function writeFile().
Here you can see use of both functions in one of my servers.
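A rough sketch of that readFile()/writeFile() idea (illustrative only: the route names, the raw-body handling, and the use of needle as in the question are my assumptions):
// Server 1: read the image and POST the raw bytes
app.post("/move_img", function (req, res) {
    fs.readFile(__dirname + "/img_to_move.jpg", function (err, data) {
        if (err) return res.status(500).send("read failed");
        needle.post(server2 + "/post_img_raw", data, {
            headers: { "Content-Type": "application/octet-stream" }
        }, function (err, result) {
            res.send("ok");
        });
    });
});

// Server 2: collect the request body and write it back out with writeFile()
app.post("/post_img_raw", function (req, res) {
    var chunks = [];
    req.on("data", function (chunk) { chunks.push(chunk); });
    req.on("end", function () {
        fs.writeFile(__dirname + "/received.jpg", Buffer.concat(chunks), function (err) {
            if (err) return res.status(500).send("write failed");
            res.send("got it");
        });
    });
});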
'use strict';

const express = require('express');
const multer = require('multer');
const concat = require('concat-stream');
const request = require('request');

const router = express.Router();

function HttpRelay (opts) {}

HttpRelay.prototype._handleFile = function _handleFile (req, file, cb) {
    file.stream.pipe(concat({ encoding: 'buffer' }, function (data) {
        const r = request.post('/Endpoint you want to upload file', function (err, resp, body) {
            if (err) return cb(err);
            req.relayresponse = body;
            cb(null, {});
        });
        const form = r.form();
        form.append('uploaded_file', data, {
            filename: file.originalname,
            contentType: file.mimetype
        });
    }))
};

HttpRelay.prototype._removeFile = function _removeFile (req, file, cb) {
    console.log('hello');
    cb(null);
};

const relayUpload = multer({ storage: new HttpRelay() }).any();

router.post('/uploadMsgFile', function(req, res) {
    relayUpload(req, res, function(err) {
        res.send(req.relayresponse);
    });
});

module.exports = router;
See, multer does all the tricks for you.
You just have to make sure you use no middleware but multer for file uploads in your Node entry point (see the sketch below).
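To illustrate the "no middleware but multer" point, a hedged sketch of an entry point where only multer touches the upload route (route and field names are illustrative, multer 1.x API):
var express = require('express');
var multer = require('multer');
var bodyParser = require('body-parser');

var app = express();
var upload = multer({ dest: 'uploads/' });

// The upload route is handled by multer alone; no global body parser runs before it,
// so multer gets to consume the multipart stream itself.
app.post('/uploadMsgFile', upload.any(), function (req, res) {
    res.send(req.files);
});

// Body parsing can still be attached per-route where it is actually needed.
app.post('/api/data', bodyParser.json(), function (req, res) {
    res.send(req.body);
});

app.listen(3000);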
Hope it does the trick for you as well.
I am creating a Node server to upload files using the 'express', 'fs' and 'busboy' modules. The server works as expected, but when I cancel the upload before it completes, the incomplete file is left in the filesystem. How can I remove the incomplete file?
var express = require("express"),
fs = require("fs"),
Busboy = require("busboy");
app = express();
app.listen(7000);
app.get("/", display_form);
app.post("/upload", function(req, res) {
var busboy = new Busboy({
headers: req.headers
});
busboy.on("file", function(fieldname, file, filename, encoding, mime) {
var fstream = fs.createWriteStream("./uploads/" + filename);
file.pipe(fstream);
file.on("data", function(chunk) {
console.log(chunk.length);
});
file.on("end", function() {
console("end");
});
fstream.on("close", function() {
fstream.close();
console("fstream close");
});
fstream.on("error", function() {
console("fstream error ");
});
});
busboy.on("finish", function() {
console.log("uploaded");
res.send("file uploaded");
});
busboy.on("error", function() {
console("error busboy");
});
req.pipe(busboy);
});
Thanks for your help. I finally found a way to solve this problem: I added the code snippet below and it works fine.
req.on("close", function(err) {
fstream.end();
fs.unlink('./uploads/' + name);
console.log("req aborted by client");
});
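The question doesn't show exactly where this snippet goes; presumably it sits inside the busboy "file" handler so that fstream and filename are in scope, roughly like this (my arrangement, not necessarily the original):
busboy.on("file", function (fieldname, file, filename, encoding, mime) {
    var fstream = fs.createWriteStream("./uploads/" + filename);
    file.pipe(fstream);

    // Client aborted: stop writing and delete the partial file
    req.on("close", function () {
        fstream.end();
        fs.unlink("./uploads/" + filename, function () {});
        console.log("req aborted by client");
    });
});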
I don't know busboy well, but you open a stream and never close it.
Why don't you listen for the stream's 'finish' and 'error' events (you have both the stream and the filename) and act accordingly?
Example:
busboy.on('error', function() {
    fs.unlink('./uploads/' + filename, function() {});
    console.log('error busboy');
});
I have a request handler that sends a file from MongoDB (GridFS) to the client as shown below, but it uses a data variable, so the whole content is held in memory. I need to switch to streaming mode and send the file to the client in chunks. I can't figure out how to pipe the buffer to the response. Look at the second snippet: it doesn't work, but it shows roughly what I need.
Maybe this is useful: the data in GridFS is Base64 encoded, but that can be changed if streaming would be more efficient.
In-memory version:
router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            var stream = gridStore.stream(true);
            var data = '';
            stream.on("data", function(chunk) {
                data += chunk;
            });
            stream.on("end", function() {
                res.send(new Buffer(data, 'base64'));
            });
        });
    });
});
Streaming mode version:
router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            var stream = gridStore.stream(true);
            stream.on("data", function(chunk) {
                new Buffer(chunk, 'base64').pipe(res);
            });
            stream.on("end", function() {
                res.end();
            });
        });
    });
});
Update
I think I'm close to resolving this. I found that this works, but it doesn't decode from Base64:
new GridStore(db, fileId, "r").open(function(err, gridStore) {
    res.set('Content-Type', gridStore.contentType);
    gridStore.stream(true).pipe(res);
});
exports.sendFile = function(db, res, fileId) {
    var grid = require('gridfs-stream');
    var gfs = grid(db, mongoose.mongo);

    var on_error = function() {
        res.status(404).end();
    };

    var readstream = gfs.createReadStream({
        filename: fileId,
        root: 'r'
    });

    readstream.on('error', function(err) {
        if (('\'' + err + '\'') === '\'Error: does not exist\'') {
            return on_error && on_error(err);
        }
        throw err;
    });

    return readstream.pipe(res);
}
I found a solution, though I think it could be better. I use the base64-stream module to decode the Base64 stream. Solution below:
var base64 = require('base64-stream');

router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            gridStore.stream(true).pipe(base64.decode()).pipe(res);
        });
    });
});
stream.on("data", function(chunk) {
res.send(chunk.toString('utf8'));
});
I'm quite puzzled with reading files in Node.js.
fs.open('./start.html', 'r', function(err, fileToRead) {
    if (!err) {
        fs.readFile(fileToRead, { encoding: 'utf-8' }, function(err, data) {
            if (!err) {
                console.log('received data: ' + data);
                response.writeHead(200, { 'Content-Type': 'text/html' });
                response.write(data);
                response.end();
            } else {
                console.log(err);
            }
        });
    } else {
        console.log(err);
    }
});
File start.html is in the same directory as the file that tries to open and read it.
However, in the console I get:
{ [Error: ENOENT, open './start.html'] errno: 34, code: 'ENOENT', path: './start.html' }
Any ideas?
Use path.join(__dirname, 'start.html'):
var fs = require('fs'),
    path = require('path'),
    filePath = path.join(__dirname, 'start.html');

fs.readFile(filePath, { encoding: 'utf-8' }, function(err, data) {
    if (!err) {
        console.log('received data: ' + data);
        response.writeHead(200, { 'Content-Type': 'text/html' });
        response.write(data);
        response.end();
    } else {
        console.log(err);
    }
});
Thanks to dc5.
With Node 0.12, it's possible to do this synchronously now:
var fs = require('fs');
var path = require('path');

// Buffer mydata
var BUFFER = bufferFile('../public/mydata.png');

function bufferFile(relPath) {
    return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system module. readFileSync() returns a Buffer, or a string if you ask for one.
fs resolves relative paths against the current working directory, not the script's directory, so joining with __dirname via path is the work-around.
To load the file as a string, specify the encoding:
return fs.readFileSync(path,{ encoding: 'utf8' });
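For instance (a quick sketch with hypothetical file names):
var fs = require('fs');
var path = require('path');

var asBuffer = fs.readFileSync(path.join(__dirname, 'mydata.png'));        // <Buffer 89 50 4e 47 ...>
var asString = fs.readFileSync(path.join(__dirname, 'notes.txt'), 'utf8'); // the file contents as a string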
1) Async:
var fs = require('fs');
fs.readFile(process.cwd() + "\\text.txt", function(err, data) {
    if (err)
        console.log(err);
    else
        console.log(data.toString());
});
2) Sync:
var fs = require('fs');
var path = process.cwd();
var buffer = fs.readFileSync(path + "\\text.txt");
console.log(buffer.toString());
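The "\\" separator ties this to Windows; a cross-platform variant of the same synchronous read uses path.join:
var fs = require('fs');
var path = require('path');

var buffer = fs.readFileSync(path.join(process.cwd(), 'text.txt'));
console.log(buffer.toString());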
A simple synchronous way with Node:
let fs = require('fs')
let filename = "your-file.something"
let content = fs.readFileSync(process.cwd() + "/" + filename).toString()
console.log(content)
Run this code; it will fetch data from the file and display it in the console:
var fs = require("fs"); // file system

function fileread(filename) {
    var contents = fs.readFileSync(filename);
    return contents;
}

var data = fileread("abc.txt");
//module.exports.say = say;
//data.say();
console.log(data.toString());
To read an HTML file from the server, use the http module. This is one way to read a file and serve it; if you just want to see the contents in the console, remove the http server part.
var http = require('http');
var fs = require('fs');

var server = http.createServer(function(req, res) {
    fs.readFile('HTMLPage1.html', function(err, data) {
        if (!err) {
            res.writeHead(200, {
                'Content-Type': 'text/html'
            });
            res.write(data);
            res.end();
        } else {
            console.log('error');
        }
    });
});

server.listen(8000, function(req, res) {
    console.log('server listening to localhost 8000');
});
HTMLPage1.html:
<html>
    <body>
        <h1>My Header</h1>
        <p>My paragraph.</p>
    </body>
</html>
If you want to know how to read a file within a directory and do something with it, here you go. This also shows how to run a command through PowerShell. It's in TypeScript! I had trouble with this, so I hope it helps someone one day. What it did for me was webpack all of my .ts files in each of the directories within a certain folder to get them ready for deployment. Hope you can put it to use!
import * as fs from 'fs';

let path = require('path');
let pathDir = '/path/to/myFolder';
const execSync = require('child_process').execSync;

let readInsideSrc = (error: any, files: any, fromPath: any) => {
    if (error) {
        console.error('Could not list the directory.', error);
        process.exit(1);
    }

    files.forEach((file: any, index: any) => {
        if (file.endsWith('.ts')) {
            // set the path and read the webpack.config.js file as text, replace path
            let config = fs.readFileSync('myFile.js', 'utf8');
            let fileName = file.replace('.ts', '');
            let replacedConfig = config.replace(/__placeholder/g, fileName);

            // write the changes to the file
            fs.writeFileSync('myFile.js', replacedConfig);

            // run the commands wanted
            const output = execSync('npm run scriptName', { encoding: 'utf-8' });
            console.log('OUTPUT:\n', output);

            // rewrite the original file back
            fs.writeFileSync('myFile.js', config);
        }
    });
};

// loop through all files in 'path'
let passToTest = (error: any, files: any) => {
    if (error) {
        console.error('Could not list the directory.', error);
        process.exit(1);
    }

    files.forEach(function (file: any, index: any) {
        let fromPath = path.join(pathDir, file);
        fs.stat(fromPath, function (error2: any, stat: any) {
            if (error2) {
                console.error('Error stating file.', error2);
                return;
            }
            if (stat.isDirectory()) {
                fs.readdir(fromPath, (error3: any, files1: any) => {
                    readInsideSrc(error3, files1, fromPath);
                });
            } else if (stat.isFile()) {
                // do nothing yet
            }
        });
    });
};

// run the bootstrap
fs.readdir(pathDir, passToTest);
var fs = require('fs');
var path = require('path');

exports.testDir = path.dirname(__filename);
exports.fixturesDir = path.join(exports.testDir, 'fixtures');
exports.libDir = path.join(exports.testDir, '../lib');
exports.tmpDir = path.join(exports.testDir, 'tmp');
exports.PORT = +process.env.NODE_COMMON_PORT || 12346;

// Read file
fs.readFile(exports.tmpDir + '/start.html', 'utf-8', function(err, content) {
    if (err) {
        got_error = true;
    } else {
        console.log('cat returned some content: ' + content);
        console.log('this shouldn\'t happen as the file doesn\'t exist...');
        //assert.equal(true, false);
    }
});