Getting binary files with cradle's getAttachment from a CouchDB? - node.js

I uploaded a png as an attachment to a CouchDB database. When I look at it via Futon it is fine, but if I try to get it back via cradle it is corrupted. I used a snippet from the cradle-test.js shipped with cradle and modified it a bit:
var response = {};
var streamer = db.getAttachment(data.id, filename);

streamer.addListener('response', function (res) {
    response.headers = res.headers;
    response.headers.status = res.statusCode;
    response.body = "";
});
streamer.addListener('data', function (chunk) { response.body += chunk; });
streamer.addListener('end', function () {
    fs.writeFile('new-' + filename, response.body, function (err) {
        if (err) throw err;
        console.log('It\'s saved!');
    });
});
The result is a corrupted png that is bigger than the input. I provided a working example here: http://jsfiddle.net/x8GZc/

The snippet you found is meant for a text document (basically a string).
For binary data (e.g. images), you must write with the correct encoding on the response object:
stream = client.database('images').getAttachment(req.params.id, filename);

// response is your HTTP response object
stream.on('data', function (chunk) {
    return response.write(chunk, "binary");
});
stream.on('end', function () {
    return response.end();
});
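If you want to save the attachment to disk instead, the same principle applies: the corruption in the question comes from response.body += chunk, which coerces each Buffer chunk to a UTF-8 string. A minimal sketch that collects the raw Buffers and concatenates them at the end (assuming cradle emits Buffer chunks when no encoding is set):

var fs = require('fs');

var chunks = [];
var streamer = db.getAttachment(data.id, filename);

streamer.addListener('data', function (chunk) {
    // keep the raw Buffer; do not coerce it to a string
    chunks.push(chunk);
});
streamer.addListener('end', function () {
    fs.writeFile('new-' + filename, Buffer.concat(chunks), function (err) {
        if (err) throw err;
        console.log('saved without corruption');
    });
});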

Related

Node.js in aws callback variable

Hi, I'm trying to use a callback to make a variable available outside the function, but something is wrong; I don't think my callback is set up correctly:
function latitude(callback) {
    var mylat;
    const https = require('https');
    https.get('https://url_of_my_json', (resp) => {
        let data = '';

        // A chunk of data has been received.
        resp.on('data', (chunk) => {
            data += chunk;
        });

        // The whole response has been received. Print out the result.
        resp.on('end', () => {
            mylat = JSON.parse(data).results[0].geometry.location.lat;
            callback(mylat);
        });
    }).on("error", (err) => {
        console.log("Error: " + err.message);
    });
}

var mylat = latitude(); // variable I want to use in the rest of the code
Thanks
The syntax of the callback is incorrect.
Here is an example, for a better understanding of callbacks, kept as simple as possible: save it as newfile.js and run node newfile to try it.
function myNew(next) {
    console.log("I'm the one who initiates the callback");
    next("nope", "success");
}

myNew(function (err, res) {
    console.log("I got back from callback", err, res);
});
happy coding :)
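Applied to the question's code, the fix is to pass a callback into latitude() and use the value inside it, rather than expecting a return value. A sketch (the JSON shape is taken from the question):

latitude(function (mylat) {
    // mylat is only available here, once the HTTPS response has ended
    console.log("latitude:", mylat);
    // ...continue with the rest of the code that needs mylat
});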

Nodejs: How to send a readable stream to the browser

If I query the Box REST API and get back a readable stream, what is the best way to handle it? How do you send it to the browser? (DISCLAIMER: I'm new to streams and buffers, so some of this code is pretty theoretical)
Can you pass the readStream in the response and let the browser handle it? Or do you have to stream the chunks into a buffer and then send the buffer?
export function getFileStream(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);
    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        res.type('application/octet-stream');
        console.log('stream', stream);
        return res.status(200).send(stream);
    });
}
Will ^^ work, or do you need to do something like:
export function downloadFile(req, res) {
    const fileId = req.params.fileId;
    console.log('fileId', fileId);
    req.sdk.files.getReadStream(fileId, null, (err, stream) => {
        if (err) {
            console.log('error', err);
            return res.status(500).send(err);
        }
        const buffers = [];
        console.log('stream', stream);
        stream.on('data', (chunk) => {
            buffers.push(chunk);
        })
        .on('end', function () {
            const finalBuffer = Buffer.concat(buffers);
            return res.status(200).send(finalBuffer);
        });
    });
}
The first example would work if you changed your theoretical line to:
- return res.status(200).send(stream);
+ res.writeHead(200, {header: here})
+ stream.pipe(res);
That's the nicest thing about Node streams. The other case would work too (in essence), but it would accumulate lots of unnecessary memory.
If you'd like to check a working example, here's one I wrote based on scramjet, express and browserify:
https://github.com/MichalCz/scramjet/blob/master/samples/browser/browser.js
Where your streams go from the server to the browser. With minor mods it'll fit your problem.
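For reference, a minimal sketch of the piping approach in the question's own handler (the content type is an assumption; a real filename from the API would let you set Content-Disposition too):

export function getFileStream(req, res) {
    req.sdk.files.getReadStream(req.params.fileId, null, (err, stream) => {
        if (err) {
            return res.status(500).send(err);
        }
        // Set headers first, then let Node handle backpressure for you.
        res.writeHead(200, { 'Content-Type': 'application/octet-stream' });
        stream.pipe(res);
    });
}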

Uploading a file in Azure File Storage using node.js

We are trying to create a web service to upload files to Azure File Storage using a node.js service.
Below is the node.js server code.
exports.post = function (request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body;
    var length;

    request.on("data", function (chunk) {
        body += chunk;
        console.log("Get data");
    });

    request.on("end", function () {
        try {
            console.log("end");
            var data = body;
            length = data.length;
            console.log(body); // This gives the result as undefined
            console.log(length);
            fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
                if (!error) {
                    // file uploaded
                    response.send(statusCodes.OK, "File Uploaded");
                } else {
                    response.send(statusCodes.OK, "Error!");
                }
            });
        } catch (er) {
            response.statusCode = 400;
            return response.end('error: ' + er.message);
        }
    });
}
Below is our client to upload a file.
private static void sendPOST() throws IOException {
    URL obj = new URL("https://crowdtest-fileservice.azure-mobile.net/api/files_stage/");
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("sharename", "newamactashare");
    con.setRequestProperty("directorypath", "MaheshApp/TestLibrary/");
    con.setRequestProperty("filename", "temp.txt");

    Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
    byte[] data = Files.readAllBytes(path);

    // For POST only - START
    con.setDoOutput(true);
    OutputStream os = con.getOutputStream();
    os.write(data);
    os.flush();
    os.close();
    // For POST only - END

    int responseCode = con.getResponseCode();
    System.out.println("POST Response Code :: " + responseCode);

    if (responseCode == HttpURLConnection.HTTP_OK) { // success
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        StringBuffer response = new StringBuffer();
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
            System.out.println(inputLine);
        }
        in.close();

        // print result
        System.out.println(response.toString());
    } else {
        BufferedReader br = new BufferedReader(new InputStreamReader(con.getErrorStream()));
        String line = "";
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        System.out.println("POST request not worked");
    }
}
It is showing the error
The request 'POST /api/files_stage/' has timed out. This could be
caused by a script that fails to write to the response, or otherwise
fails to return from an asynchronous call in a timely manner.
Updated:
I have also tried the code below.
var body = new Object();
body = request.body;
var length = body.length;
console.log(request.body);
console.log(body);
console.log(length);

try {
    fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
        if (!error) {
            // file uploaded
            response.send(statusCodes.OK, "File Uploaded");
        } else {
            response.send(statusCodes.OK, "Error!");
        }
    });
} catch (ex) {
    response.send(500, { error: ex.message });
}
But facing the issue
{"error":"Parameter stream for function createFileFromStream should be
an object"}
I am new to node.js. Please help me to fix this.
There are several issues here. Let us go over them one by one.
1. In your Java client you cannot just dump the binary data into an Azure mobile service connection.
The reason for this is that an Azure mobile service has two body parsers that ensure that no matter what, the request body is parsed for you.
So, while you can work around the Express body parser by specifying an uncommon content type, you will still hit the Azure body parser, which will mangle your data stream by naively assuming that it is a UTF-8 string.
The only option therefore is to skip the Express parser by specifying a content type it cannot handle and then play along with the Azure parser by encoding your binary data with Base64 encoding.
So, in the Java client replace
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
with
con.setRequestProperty("content-type", "binary");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
data = Base64.getEncoder().encode(data);
If you are not on Java 8, replace the java.util.Base64 encoder with any other Base64 encoder you have access to.
2. The createFileFromStream Azure storage API function you are trying to use expects a stream.
At the same time, the best you can get when parsing a request body manually is a byte array. Unfortunately, Azure mobile services use NodeJS version 0.8, which means there is no easy way to construct a readable stream from a byte array, so you will have to assemble your own stream suitable for the Azure storage API. Some duct tape and stream@0.0.1 should do just fine.
var base64 = require('base64-js'),
    Stream = require('stream'),
    fileService = require('azure-storage')
        .createFileService('yourStorageAccount', 'yourStoragePassword');

exports.post = function (req, res) {
    var data = base64.toByteArray(req.body),
        buffer = new Buffer(data),
        stream = new Stream();

    // Minimal hand-rolled readable stream: on resume it emits the whole
    // buffer once, then signals the end of the stream.
    stream['_ended'] = false;
    stream['pause'] = function () {
        stream['_paused'] = true;
    };
    stream['resume'] = function () {
        if (stream['_paused'] && !stream['_ended']) {
            stream.emit('data', buffer);
            stream['_ended'] = true;
            stream.emit('end');
        }
    };

    try {
        fileService.createFileFromStream(req.headers.sharename, req.headers.directorypath,
            req.headers.filename, stream, data.length, function (error, result, resp) {
                res.statusCode = error ? 500 : 200;
                res.end();
            }
        );
    } catch (e) {
        res.statusCode = 500;
        res.end();
    }
};
These are the dependencies you need for this sample.
"dependencies": {
"azure-storage": "^0.7.0",
"base64-js": "^0.0.8",
"stream": "0.0.1"
}
If specifying them in your service's package.json does not work you can always go to this link and install them manually via the console.
cd site\wwwroot
npm install azure-storage
npm install base64-js
npm install stream@0.0.1
3. To increase the default upload limit of 1Mb, specify MS_MaxRequestBodySizeKB for your service.
Do keep in mind, though, that since you are transferring your data Base64-encoded, you have to account for this overhead. So, to support uploading files up to 20Mb in size, you have to set MS_MaxRequestBodySizeKB to roughly 20 * 1024 * 4 / 3 = 27307.
I find the easiest way is to use pkgcloud, which abstracts the differences between cloud providers and also provides a clean interface for uploading and downloading files. It uses streams, so the implementation is memory efficient as well.
var pkgcloud = require('pkgcloud');
var fs = require('fs');

var client = pkgcloud.storage.createClient({
    provider: 'azure',
    storageAccount: 'your-storage-account',
    storageAccessKey: 'your-access-key'
});

var readStream = fs.createReadStream('a-file.txt');
var writeStream = client.upload({
    container: 'your-storage-container',
    remote: 'remote-file-name.txt'
});

writeStream.on('error', function (err) {
    // handle your error case
});

writeStream.on('success', function (file) {
    // success, file will be a File model
});

readStream.pipe(writeStream);
We can leverage this answer from the SO thread How to send an image from Android client to Node.js server via HttpUrlConnection?, which creates a custom middleware to collect the uploaded file content into a buffer; then we can use createFileFromText() to store the file in Azure Storage.
Here is the code snippet:
function rawBody(req, res, next) {
    var chunks = [];
    req.on('data', function (chunk) {
        chunks.push(chunk);
    });
    req.on('end', function () {
        var buffer = Buffer.concat(chunks);
        req.bodyLength = buffer.length;
        req.rawBody = buffer;
        next();
    });
    req.on('error', function (err) {
        console.log(err);
        res.status(500);
    });
}

router.post('/upload', rawBody, function (req, res) {
    fileService.createShareIfNotExists('taskshare', function (error, result, response) {
        if (!error) {
            // if result = true, share was created.
            // if result = false, share already existed.
            fileService.createDirectoryIfNotExists('taskshare', 'taskdirectory', function (error, result, response) {
                if (!error) {
                    // if result = true, directory was created.
                    // if result = false, directory already existed.
                    try {
                        fileService.createFileFromText('taskshare', 'taskdirectory', 'test.txt', req.rawBody, function (error, result, resp) {
                            if (!error) {
                                // file uploaded
                                res.send(200, "File Uploaded");
                            } else {
                                res.send(200, "Error!");
                            }
                        });
                    } catch (ex) {
                        res.send(500, { error: ex.message });
                    }
                }
            });
        }
    });
})

router.get('/getfile', function (req, res) {
    fileService.createReadStream('taskshare', 'taskdirectory', 'test.txt').pipe(res);
})
When the request arrives at the function defined in exports.post, the whole request is already there, so you don't need to buffer it. You can simplify it by writing something along the lines of the code below.
exports.post = function (request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body = request.body;
    var length = body.length;
    console.log(length);

    try {
        fileService.createFileFromText(shareName, dirPath, fileName, body, function (error, result, resp) {
            if (!error) {
                // file uploaded
                response.send(statusCodes.OK, "File Uploaded");
            } else {
                response.send(statusCodes.OK, "Error!");
            }
        });
    } catch (ex) {
        response.send(500, { error: ex.message });
    }
}
There are several things:
1. createFileFromText can work with plain text, but it will fail for binary content, as it uses UTF-8 encoding.
You might want to refer to the similar issue for blob at: Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image
2. The createFileFromStream or createWriteStreamToExistingFile / createWriteStreamToNewFile Azure storage APIs may be the functions that can help.
Please note that these APIs expect streams, so you need to convert the buffer/string in the request body into a stream. You can refer to How to wrap a buffer as a stream2 Readable stream?
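For illustration, a hedged sketch of that conversion on a modern Node, producing the requestStream variable used in the snippets below (it assumes the request body has already been collected into a Buffer, e.g. by the rawBody middleware shown earlier):

var Readable = require('stream').Readable;

// Wrap an in-memory Buffer as a streams2 Readable stream.
function bufferToStream(buffer) {
    var readable = new Readable({ read: function () {} }); // no-op read; data is pushed up front
    readable.push(buffer); // queue the whole buffer
    readable.push(null);   // then signal end-of-stream
    return readable;
}

var data = req.rawBody;                   // Buffer from the rawBody middleware
var requestStream = bufferToStream(data); // what the snippets below call requestStream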
For createFileFromStream:
fileService.createFileFromStream(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    requestStream,
    data.length,
    function (error, result, resp) {
        res.statusCode = error ? 500 : 200;
        res.end();
    }
);
For createWriteStreamToNewFile:
var writeStream = fileService.createWriteStreamToNewFile(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    data.length);

requestStream.pipe(writeStream);
3. There are several issues in your code:
console.log(body); // This gives the result as undefined
The reason is that you declare var body without initializing it, so it starts out undefined; body += chunk then concatenates chunks onto that undefined value, producing a broken string instead of the raw data (and string concatenation would corrupt binary content anyway).
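A minimal sketch of the fix, collecting raw Buffer chunks instead (the same pattern as the rawBody middleware shown earlier):

var chunks = [];
request.on("data", function (chunk) {
    chunks.push(chunk); // keep the raw Buffers; don't coerce to string
});
request.on("end", function () {
    var body = Buffer.concat(chunks);
    console.log(body.length); // a real byte count this time
});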
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
    if (!error) {
        // file uploaded
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        response.send(statusCodes.OK, "Error!");
    }
});
When an error happens in createFileFromStream, it could also be an error in the network transfer, so you might want to return an error status code instead of statusCodes.OK.
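For example, a hedged sketch of the error branch (assuming the mobile-services statusCodes helper also exposes INTERNAL_SERVER_ERROR):

fileService.createFileFromStream(shareName, dirPath, fileName, stream, length, function (error, result, resp) {
    if (!error) {
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        // surface the failure instead of masking it with a 200
        response.send(statusCodes.INTERNAL_SERVER_ERROR, error.message);
    }
});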

How to write an image in Node after request() has returned

This feels like an obvious question but it's perplexing me: I want a Node function that downloads a resource at a URI. I need it to work for several different content types without the user needing to specify which type it is.
I know how to pipe request to fs.createWriteStream when you know it's going to be an image, but not how to handle it when you've already invoked the callback from request. Here's where I am:
var request = require('request'),
    fs = require('graceful-fs');

function cacheURI(uri, cache_path, cb) {
    request(uri, function (err, resp, body) {
        var content_type = resp.headers['content-type'].toLowerCase().split("; ")[0],
            type = content_type.split("/")[0],
            sub_type = content_type.split("/")[1];

        if (sub_type == "json") {
            body = JSON.parse(body);
        }

        if (type == "image") {
            // this is where the trouble starts
            var ws = fs.createWriteStream(cache_path);
            ws.write(body);
            ws.on('close', function () {
                console.log('image done');
                console.log(resp.socket.bytesRead);
                ws.end();
                cb()
            });
        } else {
            // this works fine for text resources
            fs.writeFile(cache_path, body, cb);
        }
    });
}
This answer to a previous question suggests the following:
request.get({url: 'https://someurl/somefile.torrent', encoding: 'binary'}, function (err, response, body) {
    fs.writeFile("/tmp/test.torrent", body, 'binary', function (err) {
        if (err)
            console.log(err);
        else
            console.log("The file was saved!");
    });
});
But I can't pass "binary" to request if I don't yet know the type of response I'll get.
UPDATE
Per the suggested answer, changing "close" to "finish" in the event handler does fire the callback:
if (opts.image) {
    var ws = fs.createWriteStream(opts.path);
    ws.on('finish', function () {
        console.log('image done');
        console.log(resp.socket.bytesRead);
    });
    //tried as buffer as well
    //ws.write(new Buffer(body));
    ws.write(body);
    ws.end();
}
This does write the image file, but not correctly.
As suggested here, try using the finish event (if you have Node >= v0.10):
ws.on('finish', function () {
    console.log('image done');
    console.log(resp.socket.bytesRead);
    ws.end();
    cb()
});
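The remaining corruption comes from request decoding the body as a string by default. One way around "I don't know the type in advance" (not mentioned in the answer above, just a sketch) is to always ask request for the raw bytes: with encoding: null, body arrives as a Buffer, which is safe for images and text alike:

var request = require('request'),
    fs = require('graceful-fs');

function cacheURI(uri, cache_path, cb) {
    // encoding: null => body is a raw Buffer for any content type
    request({ url: uri, encoding: null }, function (err, resp, body) {
        if (err) return cb(err);
        var ws = fs.createWriteStream(cache_path);
        ws.on('finish', cb); // fires once the data is flushed
        ws.write(body);
        ws.end();
    });
}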

"Unhandled stream error in pipe" - Too many file downloads in Node.js / request?

I am trying to download many (around 2,000) images from a JSON feed using Node (specifically, the request module). When I try to do this (looping through the JSON) I get
throw er; // Unhandled stream error in pipe.
I checked ulimit -n and it was set at 256, so I increased that to 4,000 and I still get the same error (although after I am downloading a much higher number of images).
I have two questions:
1. Why am I still getting an error if I raised the file-descriptor limit well in excess of the number of simultaneous downloads I actually have?
2. What is the best way to "queue" or pause the downloads so as not to overwhelm my system? (See the sketch after the code below.)
Here is my code.
var fs = require('fs')
  , http = require('http')
  , request = require('request')
  , url = 'http://www.urlOfJsonFeed'

function get() {
    http.get(url, function (res) {
        var body = '';
        res.on('data', function (chunk) {
            body += chunk;
        });
        res.on('end', function () {
            jParse(body);
        });
    }).on('error', function (e) {
        console.log("Got error: ", e);
    });
}

function jParse(info) {
    data = JSON.parse(info)
    entries = data.entries
    numToDownload = entries.length;
    for (var i = numToDownload - 1; i >= 0; i -= 1) {
        link = entries[i]['imgUrl']
        download(link, 'images/' + mov + '.mp4', function () {
            console.log('Downloaded ' + dMov + ' movies')
            dMov++
        })
    }
}

var download = function (uri, filename, callback) {
    request.head(uri, function (err, res, body) {
        if (err) {
            console.log('header error');
        }
        if (!err && res.statusCode == 200) {
            // Download the image
            request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
        }
    });
};

get()
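The thread carries no answer here, but the usual fix for the second question is to cap the number of downloads in flight instead of starting all 2,000 at once. A minimal hand-rolled sketch (reusing the download helper above; the limit of 10 is an arbitrary assumption):

var active = 0, queued = [], MAX_CONCURRENT = 10;

function enqueue(uri, filename, callback) {
    queued.push([uri, filename, callback]);
    next();
}

function next() {
    if (active >= MAX_CONCURRENT || queued.length === 0) return;
    active++;
    var job = queued.shift();
    download(job[0], job[1], function () {
        active--;
        job[2]();
        next(); // start the following job, if any
    });
}

In jParse, call enqueue(...) instead of download(...) and the loop can stay exactly as it is; only MAX_CONCURRENT sockets and file descriptors are ever open at once.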
