How to consume response from Knox module? - node.js

I am using the Knox S3 module, but when I eventually get the file, the resulting file is corrupt. Am I using Knox incorrectly?
var data;
client.getFile(path, function(err, file) {
  file.on('data', function(chunk) { data += chunk; });
  file.on('end', function() {
    // Here I end up sending the response with new Buffer(data), but that produces a bad file.
  });
});

Try using a write stream:
var fs = require('fs');
var file = fs.createWriteStream(path);
client.getFile(path, function(err, stream) {
  stream.on('data', function(chunk) { file.write(chunk); });
  stream.on('end', function() { file.end(); });
});
and make sure to take a look at https://github.com/aws/aws-sdk-js
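For reference, the corruption in the question's snippet comes from two things: var data; starts out as undefined, and += coerces every binary chunk to a string. A minimal buffer-safe sketch, assuming the same Knox client and path as above:

client.getFile(path, function(err, file) {
  if (err) return console.error(err);
  var chunks = [];
  file.on('data', function(chunk) {
    chunks.push(chunk); // keep the raw Buffer, no string coercion
  });
  file.on('end', function() {
    var data = Buffer.concat(chunks); // one binary-safe Buffer
    // ...send `data` in the response here
  });
});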

Related

NodeJs Decode to readable text

PROBLEM
I want to receive data from a device, addressed by IP, via Node.js. But I received the following unreadable data:
What I've Tried
This is the code I have so far; it still produces the problem described above.
var app = require('http').createServer(handler);
var url = require('url');
var fs = require('fs');
var statusCode = 200;
app.listen(6565);

function handler(req, res) {
  var data = '';
  req.on('data', function(chunk) {
    data += chunk;
  });
  req.on('end', function() {
    console.log(data.toString());
    fs.appendFile('helloworld.txt', data.toString(), function(err) {
      if (err) return console.log(err);
    });
  });
  res.writeHead(statusCode, {'Content-Type': 'text/plain'});
  res.end();
}
And below is the result I received for console.log(req.headers)
So my question is: how do I decode the data? And does anyone know what kind of data it is?
Use Buffers to handle octet streams.
function handler(req, res) {
  let body = [];
  req.on('data', function(chunk) {
    body.push(chunk);
  });
  req.on('end', function() {
    body = Buffer.concat(body).toString('utf8');
    ...
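Filled out under the same assumptions (the append to helloworld.txt mirrors the question's code), the handler might look like this:

var fs = require('fs');

function handler(req, res) {
  var body = [];
  req.on('data', function(chunk) {
    body.push(chunk); // collect raw Buffers
  });
  req.on('end', function() {
    var text = Buffer.concat(body).toString('utf8');
    fs.appendFile('helloworld.txt', text, function(err) {
      if (err) console.log(err);
    });
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end();
  });
}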

OTF files retrieved from AWS S3 are broken

I can successfully retrieve my OTF file from AWS using Knox, but when I go to use the file it is broken. I believe this is an encoding issue, but honestly I'm not sure.
The resulting file is larger than the file's actual size.
Below is a simplified example:
var client = knox.createClient({
  key: '************',
  secret: '************',
  bucket: '************'
});

client.get(otfFile).on('response', function(res) {
  var file = "";
  res.setEncoding("binary");
  res.on('data', function(chunk) {
    file += chunk;
  });
  res.on('end', function() {
    // Save file
    fs.writeFile(filepath, file, function(err) {
      if (err) console.error(err);
    });
  });
}).end();
Do you know how to fix it or have an idea as to what is wrong?
The short answer: OTF files require ISO-8859-1 encoding. :)
It seems the issue is that OTF files are encoded in ISO-8859-1, but Node doesn't provide that encoding out of the box. You can fetch the file via GET and convert it afterwards with a package like node-iconv:
https://github.com/bnoordhuis/node-iconv
var Iconv = require('iconv').Iconv;

var client = knox.createClient({
  key: '************',
  secret: '************',
  bucket: '************'
});

client.get(otfFile).on('response', function(res) {
  var file = "";
  res.setEncoding("utf8");
  res.on('data', function(chunk) {
    file += chunk;
  });
  res.on('end', function() {
    // Convert to ISO-8859-1
    var iconv = new Iconv('UTF-8', 'ISO-8859-1');
    file = iconv.convert(file);
    // Save file
    fs.writeFile(filepath, file, function(err) {
      if (err) console.error(err);
    });
  });
}).end();
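An alternative worth trying: don't call setEncoding at all, so the chunks stay as raw Buffers, and pipe them straight to disk. Any string round-trip risks mangling font bytes, and this sidesteps the question of which encoding the file "is". A sketch with the same client (Node 0.10+ for the 'finish' event):

client.get(otfFile).on('response', function(res) {
  // No setEncoding: chunks arrive as raw Buffers.
  var out = fs.createWriteStream(filepath);
  res.pipe(out);
  out.on('finish', function() {
    console.log('Wrote ' + filepath);
  });
}).end();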

Stream buffer to client in Express

I have a request handler that sends a file from MongoDB (GridFS) to the client, shown below, but it accumulates everything into a data variable, so the whole content sits in memory. I need to do this in streaming mode and send the file to the client in chunks, but I can't figure out how to pipe the buffer to the response. Look at the second snippet: it doesn't work, but it shows roughly what I need.
Possibly useful: the data in GridFS is Base64-encoded, but that can be changed if streaming would be more efficient.
In-Memory version
router.get('/get/:id', function(req, res) {
  getById(req.params.id, function(err, fileId) {
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      var stream = gridStore.stream(true);
      var data = '';
      stream.on("data", function(chunk) {
        data += chunk;
      });
      stream.on("end", function() {
        res.send(new Buffer(data, 'base64'));
      });
    });
  });
});
Streaming mode version
router.get('/get/:id', function(req, res) {
  getById(req.params.id, function(err, fileId) {
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      var stream = gridStore.stream(true);
      stream.on("data", function(chunk) {
        new Buffer(chunk, 'base64').pipe(res);
      });
      stream.on("end", function() {
        res.end();
      });
    });
  });
});
Update
I think I'm close to resolving this. I found that this works, but it doesn't decode from Base64:
new GridStore(db, fileId, "r").open(function(err, gridStore) {
  res.set('Content-Type', gridStore.contentType);
  gridStore.stream(true).pipe(res);
});
exports.sendFile = function(db, res, fileId) {
  var grid = require('gridfs-stream');
  var gfs = grid(db, mongoose.mongo);

  var on_error = function() {
    res.status(404).end();
  };

  var readstream = gfs.createReadStream({
    filename: fileId,
    root: 'r'
  });

  readstream.on('error', function(err) {
    if (String(err) === 'Error: does not exist') {
      return on_error && on_error(err);
    }
    throw err;
  });

  return readstream.pipe(res);
};
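For context, wiring this helper into a route might look like the following (a hypothetical sketch: getById and db as in the question, and the module path is assumed):

// Hypothetical usage of the sendFile helper above in an Express route.
var files = require('./files'); // the module exporting sendFile

router.get('/get/:id', function(req, res) {
  getById(req.params.id, function(err, fileId) {
    files.sendFile(db, res, fileId);
  });
});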
I found a solution, but I think it can be improved. I use the base64-stream module to decode the Base64 stream. Solution below:
var base64 = require('base64-stream');

router.get('/get/:id', function(req, res) {
  getById(req.params.id, function(err, fileId) {
    new GridStore(db, fileId, "r").open(function(err, gridStore) {
      res.set('Content-Type', gridStore.contentType);
      gridStore.stream(true).pipe(base64.decode()).pipe(res);
    });
  });
});
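If you'd rather avoid the extra dependency, the decode can also be done with a built-in Transform stream. The subtlety is that Base64 decodes in 4-character groups, so a partial group at a chunk boundary must be carried over to the next chunk. A minimal sketch, assuming Node v4+ (for simplified stream construction and Buffer.from) and no whitespace inside the stored Base64:

var stream = require('stream');

function base64Decoder() {
  var carry = ''; // partial 4-char group left over from the previous chunk
  return new stream.Transform({
    transform: function(chunk, encoding, done) {
      var text = carry + chunk.toString('ascii');
      var usable = text.length - (text.length % 4); // longest decodable prefix
      carry = text.slice(usable);
      this.push(Buffer.from(text.slice(0, usable), 'base64'));
      done();
    },
    flush: function(done) {
      if (carry) this.push(Buffer.from(carry, 'base64'));
      done();
    }
  });
}

// Drop-in replacement for base64.decode() above:
// gridStore.stream(true).pipe(base64Decoder()).pipe(res);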
stream.on("data", function(chunk) {
res.send(chunk.toString('utf8'));
});

I am trying to read an image from a different server and write it to the response

function media(req, res) {
  console.log(req.query.image);
  var collectionName = 'imageTable';
  var selector = MongoHelper.idSelector(req.query.image);
  MongoHelper.findOne(selector, collectionName, function(err, image) {
    console.log(image.picture);
    var url_parts = url.parse(image.picture);
    var options = {host: url_parts.hostname, path: url_parts.pathname};
    http.get(options).on('response', function(response) {
      var body = '';
      var i = 0;
      response.on('data', function(chunk) {
        i++;
        body += chunk;
        console.log('BODY Part: ' + i);
      });
      response.on('end', function() {
        console.log('Finished');
        res.writeHead(200, {'Content-Type': 'image/JPEG'});
        res.write(body);
        res.end();
      });
    });
  });
}
I am fetching an image from a different server; I have the URL of that image, and I write it into the response. But the response image comes out corrupted. Any idea how to write a JPEG image to the response?
function media(req, res) {
  console.log(req.query.image);
  var collectionName = 'facebook';
  var selector = MongoHelper.idSelector(req.query.image);
  MongoHelper.findOne(selector, collectionName, function(err, image) {
    var url_parts = url.parse(image.picture);
    var options = {host: url_parts.hostname, path: url_parts.pathname};
    http.get(options).on('response', function(response) {
      res.writeHead(200, {'Content-Type': 'image/JPEG'});
      response.on('data', function(chunk) {
        res.write(chunk);
      });
      response.on('end', function() {
        res.end();
      });
    });
  });
}
Here is the solution I got: instead of writing all the data at the end, write each chunk as it arrives and end the response when you reach the end of the file. Still, if anyone has a better idea, please write it here.
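A still simpler variant of the same idea: since the incoming response and the outgoing res are both streams, you can pipe one into the other and let Node handle the chunking and the final end(). A sketch of the inner part, with the same options as above:

http.get(options).on('response', function(response) {
  res.writeHead(200, {'Content-Type': 'image/jpeg'});
  response.pipe(res); // forwards each chunk and ends res when the source ends
});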

Read remote file with node.js (http.get)

What's the best way to read a remote file? I want to get the whole file at once (not in chunks).
I started with the following example
var get = http.get(options).on('response', function(response) {
  response.on('data', function(chunk) {
    console.log('BODY: ' + chunk);
  });
});
I want to parse the file as CSV, but for that I need the whole file rather than chunked data.
I'd use request for this:
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
Or if you don't need to save to a file first, and you just need to read the CSV into memory, you can do the following:
var request = require('request');

request.get('http://www.whatever.com/my.csv', function(error, response, body) {
  if (!error && response.statusCode == 200) {
    var csv = body;
    // Continue with your processing here.
  }
});
etc.
You can do something like this, without using any external libraries.
const fs = require("fs");
const https = require("https");

const file = fs.createWriteStream("data.txt");
https.get("https://www.w3.org/TR/PNG/iso_8859-1.txt", response => {
  const stream = response.pipe(file);
  stream.on("finish", function() {
    console.log("done");
  });
});
http.get(options).on('response', function(response) {
  var body = '';
  var i = 0;
  response.on('data', function(chunk) {
    i++;
    body += chunk;
    console.log('BODY Part: ' + i);
  });
  response.on('end', function() {
    console.log(body);
    console.log('Finished');
  });
});
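If you want the whole body handed to a single callback once the download finishes (for example, to feed a CSV parser), the buffering above can be wrapped into a small helper. A sketch; getWholeBody is a hypothetical name:

var http = require('http');

// Hypothetical helper: buffers the entire response, then calls back once.
function getWholeBody(options, callback) {
  http.get(options, function(response) {
    var chunks = [];
    response.on('data', function(chunk) { chunks.push(chunk); });
    response.on('end', function() {
      callback(null, Buffer.concat(chunks).toString('utf8'));
    });
  }).on('error', callback);
}

// getWholeBody(options, function(err, body) { /* parse body as CSV here */ });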
I changed it to this, which works. Any comments?
function(url, callback) {
  var body = '';
  request(url)
    .on('data', (data) => {
      body += data; // accumulate; a JSON payload may span several chunks
    })
    .on('end', () => {
      try {
        callback(JSON.parse(body));
      } catch (error) {
        callback(""); // body was not valid JSON (e.g. an error page)
      }
    });
}
You can also use this in an async flow. The error case occurs when the response is not JSON, which also covers a 404 status code.
