Test binary file upload with mocha - node.js

I'm currently working on a small API with Node.js and restify that requires a file upload, done by receiving a binary string.
What I don't know how to do is test it with mocha. I've been searching and found this on Stack Overflow: Unit test file upload with mocha. It's a fine start, but it won't work for me because it sends a multipart form, and what I require the client to send to the API is the file as a raw stream.
Heres my controller:
// fs, restify, and the Video model are assumed to be required elsewhere in the module.
var fs = require('fs'),
    exec = require('child_process').exec;

exports.uploadVideo = function(req, res, next) {
    var videoPath = "public/video/" + req.params.videoId + ".mp4";
    var newFile = fs.createWriteStream("./uploads/" + videoPath);

    // The request body is the raw video stream, so pipe it straight to disk.
    req.pipe(newFile);

    req.on('end', function () {
        var cmd = 'qtfaststart ./uploads/' + videoPath;
        exec(cmd, function(error, stdout, stderr) {
            // qtfaststart exits non-zero (e.g. "atom not found, is this a valid
            // MOV/MP4 file?") when the upload is not a valid video.
            if (error !== null) {
                return next(new restify.ConflictError("Error: " + stdout));
            }
            fs.chmodSync('./uploads/' + videoPath, '644');
            // The query result is named `doc` so it does not shadow the videoPath string.
            Video.findOne({ _id: req.params.videoId }, function(err, doc) {
                if (err) return next(new restify.ConflictError(err));
                if (!doc) {
                    var newVideo = new Video({
                        _id: req.params.videoId,
                        file: videoPath
                    });
                    newVideo.save();
                } else {
                    doc.file = videoPath;
                    doc.increment();
                    doc.save();
                }
            });
        });
    });

    req.on('error', function(err) {
        return next(new restify.NetworkConnectTimeoutError(err));
    });
};
So given this controller, which receives a stream (a binary file) and reassembles it on the backend, how would I test it with mocha?

You could just use http for that:
it('should be possible to upload a file', function(done) {
    var http = require('http');
    var options = require('url').parse(YOUR_URL);
    options.method = 'POST';
    var req = http.request(options, function(response) {
        // TODO: check for errors, correct response, etc...
        response.resume(); // drain the body so 'end' can fire
        response.on('end', done);
    });
    require('fs').createReadStream(YOUR_TEST_FILE).pipe(req);
});

You can also use the request module from within mocha. It supports multipart forms, and it can just as easily pipe a raw file stream into a request, which is what your API expects.
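For example, a minimal sketch of such a test with request, assuming the server from the question is running locally (the URL, port, and fixture path are placeholders):

var fs = require('fs');
var request = require('request');

it('uploads a video as a raw stream', function(done) {
    // Pipe the file straight into the request body -- no multipart wrapper,
    // which matches what the controller's req.pipe(newFile) expects.
    var upload = request.post('http://localhost:8080/videos/123', function(err, response, body) {
        if (err) return done(err);
        if (response.statusCode >= 400) return done(new Error('Upload failed: ' + response.statusCode));
        done();
    });
    fs.createReadStream('test/fixtures/test.mp4').pipe(upload);
});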

Why does this firebase function run recursively?

I'm guessing this is related to not understanding promises and execution order, but I'm currently stumped as to why this Firebase Functions (repackaged Google Cloud Functions) code runs recursively.
Currently the function executes once successfully (fetches data, writes a database entry, writes a file to storage), and then repeats every 15-30 seconds until it reaches the 402 error state. It is intended to execute only once.
Any help would be appreciated.
exports.add = functions.https.onRequest((req, res) => {
    cors(req, res, () => {
        if (req.query.idToken) {
            // there's a query param
            var idToken = req.query.idToken;
            admin.auth().verifyIdToken(idToken)
                .then(function(decodedToken) {
                    var uid = decodedToken.uid;
                    var userRef = database.ref('users/' + uid);
                    var feedCountRef = database.ref('users/' + uid).child('feeds');
                    var plansRef = database.ref('plans')
                    userRef.once('value', function(snapshot) {
                        var feedsCount = snapshot.val().feeds;
                        var currentPlan = snapshot.val().membership;
                        var planRef = database.ref('plans/' + currentPlan);
                        planRef.once('value', function(snapshot) {
                            console.log(snapshot.val());
                            var allowedFeeds = snapshot.val().feeds;
                            if (feedsCount < allowedFeeds) {
                                fetchFeed(req.body.feedSource, function(feedData) {
                                    var defaultFeedName = 'Untitled';
                                    var defaultUpdateFrequency = 'Weekly';
                                    var feedsdatabaseRef = database.ref('feeds/' + uid);
                                    var newFeedDatabaseRef = feedsdatabaseRef.push();
                                    var feedKey = newFeedDatabaseRef.key;
                                    writeFeedStorage(feedKey, feedData, function(response) {
                                        console.log(response);
                                        newFeedDatabaseRef.set({
                                            // write data
                                        })
                                    });
                                    feedCountRef.transaction(function(feeds) {
                                        return (feeds || 0) + 1;
                                    });
                                    return;
                                });
                            } else {
                                console.log('over quota');
                                res.status(402).send({error: 'You are at the maximum number of feeds your plan allows.'});
                            }
                        });
                    })
                }).catch(function(error) {
                    res.status(401);
                });
        } else {
            res.status(401);
        }
    })
})
From your code snippet, a potential reason it runs repeatedly is that you never return an OK status when things work out correctly, e.g.
res.status(200).send('ok');
According to the Firebase documentation, terminating HTTP functions with a response is something you should always do. (Note also that res.status(401) on its own only sets the status code without sending the response, so the error branches never end the request either.)
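For instance, a minimal sketch of the success path with the missing response added; fetchFeed, writeFeedStorage, uid, and feedCountRef are names from the question's code:

fetchFeed(req.body.feedSource, function(feedData) {
    var newFeedDatabaseRef = database.ref('feeds/' + uid).push();
    writeFeedStorage(newFeedDatabaseRef.key, feedData, function(response) {
        newFeedDatabaseRef.set({ /* write data */ });
        feedCountRef.transaction(function(feeds) {
            return (feeds || 0) + 1;
        }, function() {
            // Terminate the HTTP function once all the work is done, so the
            // platform does not keep the connection open or retry the request.
            res.status(200).send('ok');
        });
    });
});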

How to synchronize file writes in Node.js

I am using EJS's compile to create notification templates, and I would like to know how to write the files to the file system in parallel and send the notification once all the files are saved.
Please see the code snippet below:
var fs = require('fs');
var ejs = require('ejs');
var arrayOfData = [someData]; // Prepare data from database

// Iterate through the data
for (var i = 0; i < arrayOfData.length; i++) {
    generateFileFromTemplate(arrayOfData[i], function() {});
}

function generateFileFromTemplate(templateData, callback) {
    var outputFile = templateData.Id + ".html"; // was fileData.Id, which is undefined here
    var compiled = ejs.compile(fs.readFileSync('email-template.ejs', 'utf8'));
    var html = compiled(templateData);
    fs.writeFile(outputFile, html, callback);
}
Please help.
Use async.each for your use case
async.each(arrayOfData,
    function(ele, next) {
        // Pass `next` through so async knows when each write finishes.
        generateFileFromTemplate(ele, next);
    },
    function(err) {
        if (err) console.log('err', err);
        sendNotification();
    }
);
You can use a great utility library called Async, particularly its parallel method: https://github.com/caolan/async#parallel.
Here's an example:
var async = require('async');
/*-------------*/
var tasks = arrayOfData.map(function(data) {
    return function(cb) {
        // Call cb only after the file has actually been written,
        // not synchronously before the write completes.
        generateFileFromTemplate(data, cb);
    };
});

async.parallel(tasks, function(err) {
    if (err) return console.log('err', err);
    console.log('My job is done');
});
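On newer Node versions (10+ for fs.promises) you can get the same fan-out-then-notify behavior without a library; a minimal sketch, reusing arrayOfData and sendNotification from the question:

var fs = require('fs').promises;
var ejs = require('ejs');

function generateFile(templateSource, templateData) {
    var html = ejs.compile(templateSource)(templateData);
    return fs.writeFile(templateData.Id + '.html', html);
}

// Read the template once, write all files in parallel, notify when done.
fs.readFile('email-template.ejs', 'utf8').then(function(templateSource) {
    return Promise.all(arrayOfData.map(function(data) {
        return generateFile(templateSource, data);
    }));
}).then(function() {
    sendNotification();
}).catch(function(err) {
    console.log('err', err);
});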

Why does console.log output all records and not response.end - Node.js

I'm a newbie with Node.js, and I'm trying to output some data to HTML.
My code works when I use console.log but not when I use response.end.
When I use response.end I only see one record, while with console.log I get to see all the records.
See my full code below:
var http = require('http');
var formOutput;
var WooCommerceAPI = require('woocommerce-api');

// Initialize the WooCommerceAPI class
var WooCommerce = new WooCommerceAPI({
    //url: 'http://example.com', // Your store url (required)
});

function handleRequest(response) {
    // GET example
    WooCommerce.get('products', function (err, data, res) {
        //console.log(res);
        //var fs = require('fs');
        //var jsonContent = JSON.parse(JSON.stringify(res, null, 4))
        var jsonContent = JSON.parse(res);
        for (var i = 0; i < jsonContent["products"].length; i++) {
            var name = jsonContent["products"][i];
            // this works well and I can output all records
            //console.log(name['title']);
            //console.log(name['id']);
            //console.log(name['sku']);
            //console.log(name['regular_price']);
            //response.writeHead(200, { 'Content-Type': 'text/html' });
            //res.end(name['id']);
            formOutput = name['regular_price'];
            //formOutput = '<h1>XYZ Repository Commit Monitor</h1>';
            //response.write();
            // Only get one record
            response.end(formOutput);
            //response.write('<html><head></head><body>');
            //response.end("test");
            //response.end('</body></html>');
        }
    });
    //response.end(formOutput);
}

http.createServer(function (req, response) {
    if (req.url === '/favicon.ico') { // the URL is on the request, not the response
        response.writeHead(404);
        response.end();
        return; // don't fall through and handle the favicon like a page request
    } else {
        response.writeHead(200, { 'Content-Type': 'text/html' });
    }
    //code here...
    handleRequest(response);
    // response.end(formOutput);
}).listen(1337, "localhost");

console.log("Server running at http://localhost:1337/");
response.end() closes the communication channel after one call, so only the first element in your loop is ever sent to the user. Don't use end() to send each record; build the full data set first and send it ONCE after the loop. With Express you would use response.json() (or send()):
var dataToSend = [];
for (var i = 0; i < jsonContent["products"].length; i++) {
    // build an array of data to send here
}
response.json(dataToSend);
On a side note, don't use response.end() unless you want to end the communication explicitly. response.json() and response.send() already close the channel when needed.
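Since the question uses the plain http module rather than Express, a minimal sketch of the same idea in that setting (the WooCommerce client and response are the ones from the question):

WooCommerce.get('products', function (err, data, res) {
    var jsonContent = JSON.parse(res);
    var prices = [];
    for (var i = 0; i < jsonContent["products"].length; i++) {
        prices.push(jsonContent["products"][i]['regular_price']);
    }
    // write() can be called many times; end() closes the response, so call it once.
    response.write('<html><head></head><body>');
    response.write(prices.join('<br>'));
    response.end('</body></html>');
});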

Uploading a file in Azure File Storage using node.js

We are trying to create a web service to upload files to Azure File Storage using a Node.js service.
Below is the Node.js server code.
exports.post = function(request, response){
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body;
    var length;

    request.on("data", function(chunk){
        body += chunk;
        console.log("Get data");
    });

    request.on("end", function(){
        try{
            console.log("end");
            var data = body;
            length = data.length;
            console.log(body); // This giving the result as undefined
            console.log(length);
            fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
                if (!error) {
                    // file uploaded
                    response.send(statusCodes.OK, "File Uploaded");
                } else {
                    response.send(statusCodes.OK, "Error!");
                }
            });
        } catch (er) {
            response.statusCode = 400;
            return res.end('error: ' + er.message);
        }
    });
}
Below is our client to upload a file.
private static void sendPOST() throws IOException {
    URL obj = new URL("https://crowdtest-fileservice.azure-mobile.net/api/files_stage/");
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("sharename", "newamactashare");
    con.setRequestProperty("directorypath", "MaheshApp/TestLibrary/");
    con.setRequestProperty("filename", "temp.txt");

    Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
    byte[] data = Files.readAllBytes(path);

    // For POST only - START
    con.setDoOutput(true);
    OutputStream os = con.getOutputStream();
    os.write(data);
    os.flush();
    os.close();
    // For POST only - END

    int responseCode = con.getResponseCode();
    System.out.println("POST Response Code :: " + responseCode);

    if (responseCode == HttpURLConnection.HTTP_OK) { // success
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        StringBuffer response = new StringBuffer();
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
            System.out.println(inputLine);
        }
        in.close();

        // print result
        System.out.println(response.toString());
    } else {
        BufferedReader br = new BufferedReader(new InputStreamReader(con.getErrorStream()));
        String line = "";
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        System.out.println("POST request not worked");
    }
}
It is showing the error
The request 'POST /api/files_stage/' has timed out. This could be
caused by a script that fails to write to the response, or otherwise
fails to return from an asynchronous call in a timely manner.
Updated:
I have also tried the code below.
var body = new Object();
body = request.body;
var length = body.length;
console.log(request.body);
console.log(body);
console.log(length);

try {
    fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
        if (!error) {
            // file uploaded
            response.send(statusCodes.OK, "File Uploaded");
        } else {
            response.send(statusCodes.OK, "Error!");
        }
    });
} catch (ex) {
    response.send(500, { error: ex.message });
}
But I am facing the issue
{"error":"Parameter stream for function createFileFromStream should be
an object"}
I am new to node.js. Please help me to fix this.
There are several issues here. Let us go over them one by one.
1. In your Java client you cannot just dump the binary data into an Azure mobile service connection.
The reason for this is that an Azure mobile service has two body parsers that ensure that no matter what, the request body is parsed for you.
So, while you can work around the Express body parser by specifying an uncommon content type, you will still hit the Azure body parser, which will mess up your data stream by naively assuming that it is a UTF-8 string.
The only option therefore is to skip the Express parser by specifying a content type it cannot handle and then play along with the Azure parser by encoding your binary data with Base64 encoding.
So, in the Java client replace
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
with
con.setRequestProperty("content-type", "binary");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
data = Base64.getEncoder().encode(data);
If you are not on Java 8, replace the java.util.Base64 encoder with any other Base64 encoder you have access to.
2. The createFileFromStream Azure storage API function you are trying to use expects a stream.
At the same time, the best you can get when parsing a request body manually is a byte array. Unfortunately, Azure Mobile Services use Node.js version 0.8, which means there is no easy way to construct a readable stream from a byte array, so you will have to assemble your own stream suitable for the Azure storage API. Some duct tape and stream@0.0.1 should do just fine.
var base64 = require('base64-js'),
    Stream = require('stream'),
    fileService = require('azure-storage')
        .createFileService('yourStorageAccount', 'yourStoragePassword');

exports.post = function (req, res) {
    var data = base64.toByteArray(req.body),
        buffer = new Buffer(data),
        stream = new Stream();

    stream['_ended'] = false;
    stream['pause'] = function() {
        stream['_paused'] = true;
    };
    stream['resume'] = function() {
        if (stream['_paused'] && !stream['_ended']) {
            stream.emit('data', buffer);
            stream['_ended'] = true;
            stream.emit('end');
        }
    };

    try {
        fileService.createFileFromStream(req.headers.sharename, req.headers.directorypath,
            req.headers.filename, stream, data.length, function (error, result, resp) {
                res.statusCode = error ? 500 : 200;
                res.end();
            }
        );
    } catch (e) {
        res.statusCode = 500;
        res.end();
    }
};
These are the dependencies you need for this sample.

"dependencies": {
    "azure-storage": "^0.7.0",
    "base64-js": "^0.0.8",
    "stream": "0.0.1"
}
If specifying them in your service's package.json does not work, you can always go to this link and install them manually via the console.
cd site\wwwroot
npm install azure-storage
npm install base64-js
npm install stream@0.0.1
3. To increase the default upload limit of 1Mb, specify MS_MaxRequestBodySizeKB for your service.
Do keep in mind, though, that since you are transferring your data Base64-encoded, you have to account for this overhead. So, to support uploading files up to 20Mb in size, you have to set MS_MaxRequestBodySizeKB to roughly 20 * 1024 * 4 / 3 = 27307.
I find the easiest way is to use pkgcloud, which abstracts the differences between cloud providers and also provides a clean interface for uploading and downloading files. It uses streams, so the implementation is memory-efficient as well.
var pkgcloud = require('pkgcloud');
var fs = require('fs');

var client = pkgcloud.storage.createClient({
    provider: 'azure',
    storageAccount: 'your-storage-account',
    storageAccessKey: 'your-access-key'
});

var readStream = fs.createReadStream('a-file.txt');
var writeStream = client.upload({
    container: 'your-storage-container',
    remote: 'remote-file-name.txt'
});

writeStream.on('error', function (err) {
    // handle your error case
});

writeStream.on('success', function (file) {
    // success, file will be a File model
});

readStream.pipe(writeStream);
We can leverage this answer from the SO thread How to send an image from Android client to Node.js server via HttpUrlConnection?, which creates a custom middleware to collect the uploaded file content into a buffer; then we can use createFileFromText() to store the file in Azure Storage.
Here is the code snippet:
function rawBody(req, res, next) {
    var chunks = [];

    req.on('data', function (chunk) {
        chunks.push(chunk);
    });

    req.on('end', function () {
        var buffer = Buffer.concat(chunks);
        req.bodyLength = buffer.length;
        req.rawBody = buffer;
        next();
    });

    req.on('error', function (err) {
        console.log(err);
        res.status(500);
    });
}

router.post('/upload', rawBody, function (req, res) {
    fileService.createShareIfNotExists('taskshare', function (error, result, response) {
        if (!error) {
            // if result = true, share was created.
            // if result = false, share already existed.
            fileService.createDirectoryIfNotExists('taskshare', 'taskdirectory', function (error, result, response) {
                if (!error) {
                    // if result = true, directory was created.
                    // if result = false, directory already existed.
                    try {
                        fileService.createFileFromText('taskshare', 'taskdirectory', 'test.txt', req.rawBody, function (error, result, resp) {
                            if (!error) {
                                // file uploaded
                                res.send(200, "File Uploaded");
                            } else {
                                res.send(200, "Error!");
                            }
                        });
                    } catch (ex) {
                        res.send(500, { error: ex.message });
                    }
                }
            });
        }
    });
});

router.get('/getfile', function (req, res) {
    fileService.createReadStream('taskshare', 'taskdirectory', 'test.txt').pipe(res);
});
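The snippet above assumes router and fileService already exist; a minimal sketch of that setup (the account name, key, and port are placeholders):

var express = require('express');
var azure = require('azure-storage');

var router = express.Router();
// Credentials may also be picked up from the AZURE_STORAGE_ACCOUNT and
// AZURE_STORAGE_ACCESS_KEY environment variables if you omit them here.
var fileService = azure.createFileService('your-storage-account', 'your-access-key');

// Mount the routes on an Express app.
var app = express();
app.use('/api', router);
app.listen(3000);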
When the request arrives at the function defined in exports.post, the whole request body is already there, so you don't need to buffer it. You can simplify the handler by writing something along the lines of the code below.
exports.post = function(request, response){
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;

    var body = request.body;
    var length = body.length;
    console.log(length);

    try {
        fileService.createFileFromText(shareName, dirPath, fileName, body, function(error, result, resp) {
            if (!error) {
                // file uploaded
                response.send(statusCodes.OK, "File Uploaded");
            } else {
                response.send(statusCodes.OK, "Error!");
            }
        });
    } catch (ex) {
        response.send(500, { error: ex.message });
    }
}
There are several things:
1. createFileFromText can work with plain text, but it will fail for binary content, as it assumes UTF-8 encoding.
You might want to refer to the similar issue for blob at: Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image
2. The createFileFromStream or createWriteStreamToExistingFile / createWriteStreamToNewFile Azure storage APIs may be the functions that can help.
Please note that these APIs target streams. You need to convert the buffer/string in the request body into a stream. You can refer to How to wrap a buffer as a stream2 Readable stream?
For createFileFromStream :
fileService.createFileFromStream(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    requestStream,
    data.length,
    function (error, result, resp) {
        res.statusCode = error ? 500 : 200;
        res.end();
    }
);
For createWriteStreamToNewFile :
var writeStream = fileService.createWriteStreamToNewFile(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    data.length);

requestStream.pipe(writeStream);
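For reference, a minimal sketch of wrapping a buffer as a Readable stream on a modern Node version (the linked answer covers the older stream2 techniques); req.rawBody here stands for a Buffer obtained, for example, with the rawBody middleware shown earlier:

var Readable = require('stream').Readable;

// Wrap an in-memory Buffer so APIs that expect a stream can consume it.
function bufferToStream(buffer) {
    var stream = new Readable();
    stream._read = function () {
        this.push(buffer); // emit the whole buffer as one chunk
        this.push(null);   // then signal end-of-stream
    };
    return stream;
}

var requestStream = bufferToStream(req.rawBody);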
3. There are several issues in your code
console.log(body); // This giving the result as undefined
The reason is that you declare var body without an initial value, so it starts out as undefined; body += chunk then coerces it to a string beginning with "undefined" instead of accumulating your binary data. Initialize it first (var body = '';), or better, collect Buffer chunks and combine them with Buffer.concat.
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
    if (!error) {
        // file uploaded
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        response.send(statusCodes.OK, "Error!");
    }
});
When an error happens in createFileFromStream, it could also be an error in the network transfer, so you might want to return an error status code instead of statusCodes.OK.
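Putting those fixes together, a minimal sketch of the handler with proper buffering and error codes; it assumes, as in the rawBody snippet above, that createFileFromText accepts a Buffer, and the header names are the ones from the question:

exports.post = function(request, response) {
    var chunks = [];

    request.on('data', function(chunk) {
        chunks.push(chunk); // keep binary data as Buffers, never concatenate strings
    });

    request.on('end', function() {
        var body = Buffer.concat(chunks);
        fileService.createFileFromText(
            request.headers.sharename,
            request.headers.directorypath,
            request.headers.filename,
            body,
            function(error, result, resp) {
                if (!error) {
                    response.send(statusCodes.OK, "File Uploaded");
                } else {
                    response.send(500, "Error: " + error.message);
                }
            });
    });
};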

How to get a JSON file in Express.js and display it in a view

I have a problem getting a .json file in Express and displaying it in a view. Kindly share your examples.
var fs = require("fs"),
    json;

function readJsonFileSync(filepath, encoding) {
    if (typeof (encoding) == 'undefined') {
        encoding = 'utf8';
    }
    var file = fs.readFileSync(filepath, encoding);
    return JSON.parse(file);
}

function getConfig(file) {
    var filepath = __dirname + '/' + file;
    return readJsonFileSync(filepath);
}

// assume that config.json is in application root
json = getConfig('config.json');
Do something like this in your controller.
To get the json file's content:
ES5
var foo = require('./path/to/your/file.json');
ES6
import foo from './path/to/your/file.json';
To send the json to your view:
function getJson(req, res, next) {
    res.send(foo);
}
This will send the JSON content to your view in response to a request.
NOTE
According to BTMPL
While this will work, do take note that require calls are cached and will return the same object on each subsequent call. Any change you make to the .json file when the server is running will not be reflected in subsequent responses from the server.
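If by "display in a view" you mean rendering the data into server-side HTML rather than returning raw JSON, a minimal sketch with a template engine (EJS here; the view name and route are hypothetical):

var express = require('express');
var app = express();
app.set('view engine', 'ejs');

app.get('/config', function(req, res) {
    var config = require('./config.json'); // cached after the first call, as noted above
    // Expose the parsed object to the template as a local variable.
    res.render('config', { config: config });
});

app.listen(3000);

The corresponding views/config.ejs template can then reference the data with <%= %> tags.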
This one worked for me, using the fs module:
var fs = require('fs');

function readJSONFile(filename, callback) {
    fs.readFile(filename, function (err, data) {
        if (err) {
            callback(err);
            return;
        }
        try {
            callback(null, JSON.parse(data));
        } catch (exception) {
            callback(exception);
        }
    });
}
Usage:
readJSONFile('../../data.json', function (err, json) {
    if (err) { throw err; }
    console.log(json);
});