How can I get binary data from an image using node ftp?

I want to get the binary data of an image so that I can rotate it with sharp.rotate().
I tried content += chunk; but it doesn't work.
let Client = require('ftp');
let fs = require('fs');
let sharp = require('sharp');

let path = 'users/' + userId + '/headerImage/header';
let Ftp = new Client(); // create new instance of Ftp

// Start. Here we get the image from the server
await Ftp.on('ready', function(){
    Ftp.get(path, async function(err, stream){
        if(err){
            res.status(400).send(err);
        };
        var content = '';
        await stream.on('data', async (chunk) => {
            content += chunk;
        });
        await stream.on('end', async function(){
            console.log(content);
            let image = await sharp(content);
            await image
                .rotate(90)
                .toBuffer()
                .then(async data => {
                    console.log(data);
                })
                .catch(error => {
                    console.log(error);
                });
            Ftp.end();
        });
    });
});

await Ftp.connect({
    host: fileTransferProtocol.host,
    port: fileTransferProtocol.port,
    user: fileTransferProtocol.user,
    password: fileTransferProtocol.pass
});
Console output: Error: [Error: Input file is missing]

I believe the problem you are having is that you are not handling the incoming data as a Buffer. The stream variable inside the Ftp.get callback is a ReadableStream. By default, stream data is returned as Buffer objects unless you specify an encoding with the readable.setEncoding() method. Appending chunks to a string with content += chunk converts the binary image data to a UTF-8 string and corrupts it.
For your specific purpose, you want to handle the data as a Buffer object, since that is what the sharp function expects. To store the incoming data in a Buffer, modify what happens on the data event.
var content = Buffer.alloc(0);
stream.on("data", chunk => {
    content = Buffer.concat([content, chunk]);
});
Also, I don't think you are using async/await correctly. The ftp module works with callbacks and events, not promises, so prefixing those calls with await won't make them run synchronously.
Please check the following link to find more information about this feature:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
If you want to use async/await to handle your ftp requests, try this module:
https://www.npmjs.com/package/promise-ftp
It provides an asynchronous interface for communicating with an FTP server.
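For example, a minimal sketch of the same flow using promise-ftp together with sharp might look like the following (userId, fileTransferProtocol and the remote path are taken from the question; the exact promise-ftp method names and return values should be checked against its documentation):
const PromiseFtp = require('promise-ftp');
const sharp = require('sharp');

async function rotateHeaderImage() {
    const ftp = new PromiseFtp();
    await ftp.connect({
        host: fileTransferProtocol.host,
        port: fileTransferProtocol.port,
        user: fileTransferProtocol.user,
        password: fileTransferProtocol.pass
    });

    // get() resolves with a readable stream for the remote file
    const stream = await ftp.get('users/' + userId + '/headerImage/header');

    // collect the stream into a single Buffer before closing the connection
    const chunks = [];
    const content = await new Promise((resolve, reject) => {
        stream.on('data', chunk => chunks.push(chunk));
        stream.on('end', () => resolve(Buffer.concat(chunks)));
        stream.on('error', reject);
    });

    await ftp.end();

    // sharp accepts a Buffer as input
    const rotated = await sharp(content).rotate(90).toBuffer();
    return rotated;
}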

Related

Save file to disk and then download in Node

I'm downloading a file from a remote server using an axios POST request and then saving it to disk.
Once the file is saved to disk, I need to send it to the client in the response.
let response = await axios.post(url, body, {headers :headers, responseType:'stream'})
let filePath = "reports/" + response.headers['content-disposition'].split("filename=")[1].replace(/\"/g, "");
await response.data.pipe(fs.createWriteStream(filePath))
res.download(filePath);
The problem I'm facing is that the response is sent while the file is still being written.
If there is an alternative approach, please suggest one.
EDIT:-
The problem was with the write stream.
Solution:-
let response = await axios.post(url, body, {headers: headers, responseType: 'stream'});
let filePath = "reports/" + response.headers['content-disposition'].split("filename=")[1].replace(/\"/g, "");
let ws = fs.createWriteStream(filePath);
new Promise((resolve, reject) => {
    ws.on('open', () => {
        response.data.pipe(ws);
    }).on('error', function (err) {
        reject(res.send({error: "message"}));
    }).on('finish', function () {
        ws.end();
        resolve(res.download(filePath));
    });
});
Link:- fs.createWriteStream does not immediately create file?
I think you should use Promise.all to resolve all promises before moving on.
As far as I know, response.data.pipe does not return a promise, so await will not work on it.
Try :-
response.data.pipe(fs.createWriteStream(filePath), {end: false})
response.data.on('end', ()=> res.download(filePath))
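Another option, assuming Node 15 or later where the promise-based stream.pipeline helper is available, is a sketch along these lines; pipeline resolves only after the write stream has finished, so res.download is not called too early:
const fs = require('fs');
const axios = require('axios');
const { pipeline } = require('stream/promises');

async function downloadReport(req, res) {
    // url, body and headers are assumed to be defined as in the question
    const response = await axios.post(url, body, { headers: headers, responseType: 'stream' });
    const filePath = 'reports/' + response.headers['content-disposition']
        .split('filename=')[1]
        .replace(/"/g, '');

    // resolves once the file has been fully written to disk
    await pipeline(response.data, fs.createWriteStream(filePath));

    res.download(filePath);
}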

No data after piping response stream

Using Electron's net module, the aim is to fetch a resource and, once the response is received, to pipe it to a writeable stream like so:
const stream = await fetchResource('someUrl');
stream.pipe(fs.createWriteStream('./someFilepath'));
A simplified implementation of fetchResource is as follows:
import { net } from 'electron';

async function fetchResource(url) {
    return new Promise((resolve, reject) => {
        const data = [];
        const request = net.request(url);
        request.on('response', response => {
            response.on('data', chunk => {
                data.push(chunk);
            });
            response.on('end', () => {
                // Maybe do some other stuff with data...
            });
            // Return the response to then pipe...
            resolve(response);
        });
        request.end();
    });
}
The response ends up being an instance of IncomingMessage, which implements a Readable Stream interface according to the Node docs, so it should be possible to pipe it to a write stream.
The primary issue is that there ends up being no data in the stream that gets piped through 😕
Answering my own question, but the issue is reading from multiple sources: the resolved promise and the 'data' event. The event listener source was flushing out all the data before the resolved promise could get to it.
A solution is to fork the stream into a new one that won't compete with the original if more than one source tries to read from it.
import stream from 'stream';
// ...make a request and get a response stream, then fork the stream...
const streamToResolve = response.pipe(new stream.PassThrough());
// Listen to events on response and pipe from it
// ...
// Resolve streamToResolve and separately pipe from it
// ...
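Filling in the elided parts, a sketch of fetchResource with the fork in place could look like this (everything except the PassThrough fork is taken from the code in the question):
import { net } from 'electron';
import stream from 'stream';

async function fetchResource(url) {
    return new Promise((resolve, reject) => {
        const data = [];
        const request = net.request(url);
        request.on('response', response => {
            // Fork the response so the 'data' listener below does not
            // drain the stream that is handed back to the caller
            const streamToResolve = response.pipe(new stream.PassThrough());

            response.on('data', chunk => {
                data.push(chunk);
            });
            response.on('end', () => {
                // Maybe do some other stuff with data...
            });

            // Resolve with the forked stream, which can still be piped
            resolve(streamToResolve);
        });
        request.on('error', reject);
        request.end();
    });
}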

How do I call a third party Rest API from Firebase function for Actions on Google

I am trying to call a REST API from a Firebase function which serves as the fulfillment for Actions on Google.
I tried the following approach:
const { dialogflow } = require('actions-on-google');
const functions = require('firebase-functions');
const http = require('https');
const host = 'wwws.example.com';
const app = dialogflow({debug: true});

app.intent('my_intent_1', (conv, {param1}) => {
    // Call the rate API
    callApi(param1).then((output) => {
        console.log(output);
        conv.close(`I found ${output.length} items!`);
    }).catch(() => {
        conv.close('Error occurred while trying to get vehicles. Please try again later.');
    });
});

function callApi(param1) {
    return new Promise((resolve, reject) => {
        // Create the path for the HTTP request to get the vehicle
        let path = '/api/' + encodeURIComponent(param1);
        console.log('API Request: ' + host + path);
        // Make the HTTP request to get the vehicle
        http.get({host: host, path: path}, (res) => {
            let body = ''; // var to store the response chunks
            res.on('data', (d) => { body += d; }); // store each response chunk
            res.on('end', () => {
                // After all the data has been received parse the JSON for desired data
                let response = JSON.parse(body);
                let output = {};
                // copy required response attributes to output here
                console.log(response.length.toString());
                resolve(output);
            });
            res.on('error', (error) => {
                console.log(`Error calling the API: ${error}`);
                reject();
            });
        }); // http.get
    }); // promise
}

exports.myFunction = functions.https.onRequest(app);
This is almost working. The API is called and I get the data back. The problem is that without async/await the function does not wait for callApi to complete, and I get an error from Actions on Google that there was no response. After the error I can see the console.log output in the Firebase log, so everything works; it is just out of sync.
I tried using async/await but got an error, which I think is because Firebase uses an old version of Node.js that does not support async.
How can I get around this?
Your function callApi returns a promise, but you don't return a promise in your intent handler. You should make sure you add the return so that the handler knows to wait for the response.
app.intent('my_intent_1', (conv, {param1}) => {
    // Call the rate API
    return callApi(param1).then((output) => {
        console.log(output);
        conv.close(`I found ${output.length} items!`);
    }).catch(() => {
        conv.close('Error occurred while trying to get vehicles. Please try again later.');
    });
});
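If the runtime does support async/await (Node 8 or later on Cloud Functions), an equivalent handler could be sketched like this; it behaves the same as returning the promise chain above:
app.intent('my_intent_1', async (conv, {param1}) => {
    try {
        // await the API call so the handler's returned promise settles after it
        const output = await callApi(param1);
        console.log(output);
        conv.close(`I found ${output.length} items!`);
    } catch (e) {
        conv.close('Error occurred while trying to get vehicles. Please try again later.');
    }
});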

Upload file to servlet from node without saving it

On my node express server, I am receiving a pdf file. I am using the below code to get the pdf contents from the request
var data = new Buffer('');
request.on('data', function (chunk) {
    data = Buffer.concat([data, chunk]);
});
request.on('end', function() {
    console.log('PDF data is ' + JSON.stringify(data));
});
Now that the PDF content is available on node, I need to send it as-is to a J2EE server. In order to do that, I first save the PDF file on the node server, read it back, and then pipe it to request.post (https://github.com/request/request):
var req = require('request');
fs.writeFile('abc.pdf', data, 'binary', function(err) {
    if (err) {
        console.log('Error ' + JSON.stringify(err));
        throw err;
    }
    var source = fs.createReadStream('abc.pdf');
    // send our data via POST request
    source.pipe(req.post('http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload'));
});
This works fine. However, I feel that saving the PDF file on the node server and then reading it back before posting it to the J2EE server with the request module is completely unnecessary, as I am not making any changes to the file.
Once I have the PDF contents in the data variable, I would like to post them directly to the J2EE server. However, I have not been able to find a way to use the request module to post file contents directly. The examples I have seen for POSTing with the request module use formData, but in my case I don't have formData; I am reading the file from the request and posting it directly to the J2EE server.
Is there a way to achieve this and avoid the file write and read?
EDIT
Below is my complete code
function upload(request, response) {
    var data = new Buffer('');
    request.on('data', function (chunk) {
        data = Buffer.concat([data, chunk]);
    });
    request.on('end', function () {
        fs.writeFile('abc.pdf', data, 'binary', function(err) {
            if (err) {
                console.log('Error ' + JSON.stringify(err));
                throw err;
            }
            var source = fs.createReadStream('abc.pdf');
            source.pipe(req.post('http://' + j2ee_host + ':' + j2ee_port + '/myj2eeapp/Upload'));
        });
    });
}
You can pipe the incoming request directly to the servlet:
var req = require('request');

function upload(request, response) {
    var target = req.post('http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload');
    request.pipe(target);
    target.on('finish', function () {
        console.log('All done!');
        // send the response or make a completed callback here...
    });
}
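If you do need the whole file buffered in memory first, another sketch (assuming an Express-style response object and that the servlet accepts a raw PDF body; the Content-Type header is a guess) is to pass the Buffer directly as the request body, which avoids the temporary file entirely:
var req = require('request');

function upload(request, response) {
    var data = Buffer.alloc(0);
    request.on('data', function (chunk) {
        data = Buffer.concat([data, chunk]);
    });
    request.on('end', function () {
        // request accepts a Buffer as the body, so no temp file is needed
        req.post({
            url: 'http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload',
            body: data,
            headers: { 'Content-Type': 'application/pdf' }
        }, function (err, httpResponse) {
            if (err) {
                return response.status(500).send(err);
            }
            response.status(200).send('Uploaded');
        });
    });
}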

POSTing RAW body with restify client

I'm trying to POST a raw body with restify. I have the receive side correct: when using Postman I can send a raw zip file, and the file is correctly created on the server's file system. However, I'm struggling to write my test in mocha. Here is the code I have; any help would be greatly appreciated.
I've tried this approach.
const should = require('should');
const restify = require('restify');
const fs = require('fs');

const port = 8080;
const url = 'http://localhost:' + port;
const client = restify.createJsonClient({
    url: url,
    version: '~1.0'
});

const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

client.post('/v1/deploy', readStream, function(err, req, res, data) {
    if (err) {
        throw new Error(err);
    }
    should(res).not.null();
    should(res.statusCode).not.null();
    should(res.statusCode).not.undefined();
    res.statusCode.should.equal(200);
    should(data).not.null();
    should(data.endpoint).not.undefined();
    data.endpoint.should.equal('http://endpointyouhit:8080');
    done();
});
Yet the file size on the file system is always 0. I'm not using my readStream correctly, but I'm not sure how to fix it. Any help would be greatly appreciated.
Note that I want to stream the file, not load it into memory on transmit and receive, since the file could be too large for an in-memory operation.
Thanks,
Todd
One thing is that you would need to specify a content type of multipart/form-data. However, it looks like restify doesn't support that content type, so you're probably out of luck using the restify client to post a file.
To answer my own question, it doesn't appear to be possible to do this with the restify client. I also tried the request module, which claims to have this capability. However, when using their streaming examples, I always had a file size of 0 on the server. Below is a functional mocha integration test.
const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

var options = {
    host: 'localhost',
    port: port,
    path: '/v1/deploy/testvalue',
    method: 'PUT'
};

var req = http.request(options, function (res) {
    // this feels a bit backwards, but these are evaluated AFTER the read stream has closed
    var buffer = '';
    // pipe body to a buffer
    res.on('data', function(data) {
        buffer += data;
    });
    res.on('end', function () {
        should(res).not.null();
        should(res.statusCode).not.null();
        should(res.statusCode).not.undefined();
        res.statusCode.should.equal(200);
        const json = JSON.parse(buffer);
        should(json).not.null();
        should(json.endpoint).not.undefined();
        json.endpoint.should.equal('http://endpointyouhit:8080');
        done();
    });
});

req.on('error', function (err) {
    if (err) {
        throw new Error(err);
    }
});

// pipe the readstream into the request
readStream.pipe(req);

/**
 * Close the request on the close of the read stream
 */
readStream.on('close', function () {
    req.end();
    console.log('I finished.');
});

// note that if we end up with larger files, we may want to support the continue, much as S3 does
// https://nodejs.org/api/http.html#http_event_continue
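As a side note on the design: by default readable.pipe() calls end() on the destination when the source ends, so the explicit close handler is only strictly needed if {end: false} is passed to pipe(). A minimal form of the same upload would be:
// pipe() ends the request automatically once the file has been read,
// unless {end: false} is passed as an option
readStream.pipe(req);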
