How to download a file from an SFTP server using Node

I'm looking to let a user download a file directly from an SFTP server, in the browser. For example, the user wants to find an audio file called 'noise'; they enter the parameters and a button appears to download it. I am using Express as my web application framework, along with EJS.
I've found methods to download via SFTP, like the code below, but this saves the file into my application's folder rather than to the user's disk.
sftp.connect(config).then(() => {
  sftp.get('file.wav').then((data) => {
    var outFile = fs.createWriteStream('file.wav');
    data.on('data', function(response) {
      outFile.write(response);
    });
    data.on('close', function() {
      outFile.close();
    });
  });
});
How can you download directly from SFTP to the user's disk, giving the user the option through a button?

You pipe the stream to your res. For example:
router.get('/download', (req, res) => {
  sftp.connect(config).then(() => {
    sftp.get('file.wav').then((data) => {
      data.pipe(res);
    });
  });
});
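If you also want the browser to show a "Save as" prompt instead of trying to play the audio inline, set download headers before piping. A minimal sketch, assuming a version of ssh2-sftp-client where get() resolves to a readable stream (newer versions resolve to a Buffer unless you pass a destination stream):
router.get('/download', (req, res) => {
  sftp.connect(config)
    .then(() => sftp.get('file.wav'))
    .then((data) => {
      // These headers tell the browser to download rather than render inline.
      res.setHeader('Content-Disposition', 'attachment; filename="file.wav"');
      res.setHeader('Content-Type', 'audio/wav');
      data.pipe(res);
    })
    .catch((err) => res.status(500).send(err.message));
});
A plain link or button pointing at the route (for example <a href="/download">Download</a>) is then enough on the EJS side.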

Related

Drive Api - large file stream in nodejs

Hello, I want to stream a large file from Google Drive to my website, but I have an issue...
app.get("/blog.mkv", (req, ress) => {
const p38290token = new google.auth.OAuth2(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI);
p38290token.setCredentials({ refresh_token: token.acc });
const p38290Id = google.drive({
version: "v3",
auth: p38290token,
});
try {
p38290Id.files.get({
fileId: "1OU3BXc4FmyRD0rCW9S4XFfVxIl48vy3v",
alt: "media",
// arraybuffer , stream , blob
}, { responseType: "stream" },
(err, res) => {
if (err) {
console.log(err.message)
if (err.message === "invalid_grant") {
// fatchToken(exportFile)
}
} else {
res.data
.on("end", () => {
console.log("Done");
})
.on("error", err => {
console.log("Error", err);
})
.pipe(ress);
}
}
)
} catch (error) {
}
})
When a user goes to /blog.mkv the video starts streaming, but the user can't skip around (can't seek forward or backward). What should I do?
Check this repo for streaming and downloading files from Google Drive.
Google-drive-video-streaming-nodejs
This is a small script in Node.js that allows you to watch a video stored in your Google Drive directly in your video player.
Install
You only need to install the dependencies by typing this command:
npm install
Usage
Just type this command to startup the script.
node ./app.js
Now that the server is started you can start watching your video or download it.
Streaming
Paste this link into your player to start streaming the video.
http://127.0.0.1:9001/
Download
To download it, type this URL in a new browser tab.
http://127.0.0.1:9001/download
If you want, you can specify the parameter p, which indicates as a percentage what portion of the video will be skipped. For example, to start downloading the video from the halfway point you would use this link:
http://127.0.0.1:9001/download?p=50
You can also use the parameter c, which indicates the chunk from which the download must start. To stop the download process, use this URL:
http://127.0.0.1:9001/download_stop
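The underlying reason seeking fails in the question's code is that the route ignores the browser's Range header, so the player can never request the middle of the file. Here is a minimal sketch of honoring Range requests with a 206 response, assuming drive is an authenticated googleapis client (the question's p38290Id); FILE_ID is a placeholder, and forwarding the Range header through the request options is an assumption I have not tested:
app.get("/blog.mkv", async (req, res) => {
  // Fetch the file size first so we can build Content-Range headers.
  const meta = await drive.files.get({ fileId: FILE_ID, fields: "size" });
  const fileSize = Number(meta.data.size);
  const range = req.headers.range; // e.g. "bytes=1048576-"
  if (range) {
    const [startStr, endStr] = range.replace(/bytes=/, "").split("-");
    const start = parseInt(startStr, 10);
    const end = endStr ? parseInt(endStr, 10) : fileSize - 1;
    res.writeHead(206, {
      "Content-Range": `bytes ${start}-${end}/${fileSize}`,
      "Accept-Ranges": "bytes",
      "Content-Length": end - start + 1,
      "Content-Type": "video/x-matroska",
    });
    // Ask Drive for only the requested byte range and pipe it through.
    const media = await drive.files.get(
      { fileId: FILE_ID, alt: "media" },
      { responseType: "stream", headers: { Range: `bytes=${start}-${end}` } }
    );
    media.data.pipe(res);
  } else {
    res.writeHead(200, {
      "Content-Length": fileSize,
      "Content-Type": "video/x-matroska",
    });
    const media = await drive.files.get(
      { fileId: FILE_ID, alt: "media" },
      { responseType: "stream" }
    );
    media.data.pipe(res);
  }
});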

How to download files in /tmp folder of Google Cloud Function and then upload it in Google Cloud Storage

So I need to deploy a Google Cloud Function that allows me to do two things.
The first is to DOWNLOAD any file from an SFTP/FTP server into the /tmp local directory of the Cloud Function. The second step is to UPLOAD this file to a bucket on Google Cloud Storage.
I know how to upload, but I don't get how to DOWNLOAD files from the FTP server to my local /tmp directory.
So I have written a GCF that receives as parameters (in the body) the configuration (config) used to connect to the FTP server, the filename, and the path.
For my test I used one of the public SFTP test servers listed at https://www.sftp.net/public-online-sftp-servers with this configuration:
{
  config:
  {
    hostname: 'test.rebex.net',
    username: 'demo',
    port: 22,
    password: 'password'
  },
  filename: 'FtpDownloader.png',
  path: '/pub/example'
}
After my DOWNLOAD, I start my UPLOAD. For that, I check whether the downloaded file exists at '/tmp/filename' before uploading, but the file is never there.
See the following code:
exports.transferSFTP = (req, res) =>
{
  let body = req.body;
  if (body.config)
  {
    if (body.filename)
    {
      // DOWNLOAD
      const Client = require('ssh2-sftp-client');
      const fs = require('fs');
      const client = new Client();
      let remotePath;
      if (body.path)
        remotePath = body.path + "/" + body.filename;
      else
        remotePath = "/" + body.filename;
      let dst = fs.createWriteStream('/tmp/' + body.filename);
      client.connect(body.config)
        .then(() => {
          console.log("Client is connected !");
          return client.get(remotePath, dst);
        })
        .catch(err =>
        {
          res.status(500);
          res.send(err.message);
        })
        .finally(() => client.end());
      // UPLOAD
      const {Storage} = require('@google-cloud/storage');
      const storage = new Storage({projectId: 'my-project-id'});
      const bucket = storage.bucket('my-bucket-name');
      const file = bucket.file(body.filename);
      fs.stat('/tmp/' + body.filename, (err, stats) =>
      {
        if (stats.isDirectory())
        {
          fs.createReadStream('/tmp/' + body.filename)
            .pipe(file.createWriteStream())
            .on('error', (err) => console.error(err))
            .on('finish', () => console.log('The file upload is completed !!!'));
          console.log("File exist in tmp directory");
          res.status(200).send('Successfully executed !!!');
        }
        else
        {
          console.log("File is not on the tmp Google directory");
          res.status(500).send('File is not loaded in tmp Google directory');
        }
      });
    }
    else res.status(500).send('Error: no filename on the body (filename)');
  }
  else res.status(500).send('Error: no configuration elements on the body (config)');
}
So I receive the following message: "File is not loaded in tmp Google directory", because after the fs.stat() call, stats.isDirectory() is false. Before I added the fs.stat() check, the function just wrote files with the right filenames but without content.
So I conclude that my upload works, but without the DOWNLOADED files it is really hard to copy anything to Google Cloud Storage.
Thanks for your time and I hope I will find a solution.
The problem is that you're not waiting for the download to complete before your code which performs the upload starts running. While you do have a catch() statement, that is not sufficient.
Think of the first part (the download) as a separate block of code. You have told JavaScript to go off and do that block asynchronously. As soon as your script has done that, it immediately goes on to the rest of your script; it does not wait for the 'block' to complete. As a result, your upload code runs before the download has completed.
There are two things you can do. The first would be to move all the code which does the uploading into a 'then' block following the get() call (BTW, you could simplify things by using fastGet()). e.g.
client.connect(body.config)
  .then(() => {
    console.log("Client is connected !");
    return client.fastGet(remotePath, localPath);
  })
  .then(() => {
    // do the upload
  })
  .catch(err => {
    res.status(500);
    res.send(err.message);
  })
  .finally(() => client.end());
The other alternative would be to use async/await, which will make your code look a little more 'synchronous'. Something along the lines of (untested)
async function doTransfer(remotePath, localPath) {
  try {
    let client = new Client();
    await client.connect(config);
    await client.fastGet(remotePath, localPath);
    await client.end();
    uploadFile(localPath);
  } catch (err) {
    // ...
  }
}
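For completeness, here is a minimal sketch of what uploadFile could look like with @google-cloud/storage (the project ID and bucket name below are the illustrative values from the question; bucket.upload() streams the local file to GCS and resolves when the upload finishes):
const { Storage } = require('@google-cloud/storage');
const path = require('path');

async function uploadFile(localPath) {
  const storage = new Storage({ projectId: 'my-project-id' });
  const bucket = storage.bucket('my-bucket-name');
  // Upload the file under its basename, e.g. /tmp/foo.png -> foo.png
  await bucket.upload(localPath, { destination: path.basename(localPath) });
  console.log(`${localPath} uploaded to my-bucket-name`);
}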
Here is a GitHub project that addresses a similar issue to yours.
There, they deploy a Cloud Function that downloads the file from the FTP server and uploads it directly to the bucket, skipping the step of having the temporary file.
The code works, but the deployment instructions in that repository are not up to date, so here are the deploy steps I suggest and have verified:
Activate Cloud Shell and run:
1. Clone the repository from GitHub: git clone https://github.com/RealKinetic/ftp-bucket.git
2. Change into the directory: cd ftp-bucket
3. Adapt the code as needed
4. Create a GCS bucket if you don't have one already: gsutil mb -p [PROJECT_ID] gs://[BUCKET_NAME]
5. Deploy: gcloud functions deploy importFTP --stage-bucket [BUCKET_NAME] --trigger-http --runtime nodejs8
In my personal experience this is more efficient than splitting it into two functions, unless you need to do some file editing within the same Cloud Function.

Display PDF file in ReactJS that is received from a Node.js server?

I am trying to build a system where a user can store PDF files on a server, and another user can view those PDF files with a simple click on a file link.
I am trying to store a file in a MySQL database and retrieve it using app.get(). I have successfully stored the file in the database as a BLOB, but when I try to retrieve it, it is in some other format.
I have also tried storing the file in the local folder ./uploads using 'express-fileupload', but that also doesn't seem to work when I try to retrieve the file location. After receiving the file location I send it back to my React app and then try to open it using embed and iframe tags.
I have also tried 'react-pdf' and 'simple-react-pdf', but nothing seems to work.
Below is the server-side code that sends the PDF file. I have also tried sending the location of the PDF file stored at the path shown in the code below, but that doesn't work either.
app.get('/getFile', (req, res) => {
  const { email, courseid, filename } = req.query;
  console.log(email);
  console.log(courseid);
  console.log(filename);
  var filePath = `${__dirname}` + '/uploads/' + `${filename}`;
  fs.readFile(filePath, function(err, data) {
    if (err) {
      return res.sendStatus(404); // file missing or unreadable
    }
    console.log(data);
    res.contentType("application/pdf");
    res.send(data);
  });
});
This worked for me:
Node:
app.get("/getFile", function(req, res) {
res.sendFile(__dirname + "/test.pdf");
});
React:
axios(`http://localhost:5000/getFile`, {
  method: "GET",
  responseType: "blob" // force to receive data in a Blob format
})
  .then(response => {
    // create a Blob from the PDF stream
    const file = new Blob([response.data], {
      type: "application/pdf"
    });
    // build a URL from the file
    const fileURL = URL.createObjectURL(file);
    // open the URL in a new window
    window.open(fileURL);
  })
  .catch(error => {
    console.log(error);
  });
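Since the question mentions embed and iframe tags, here is a sketch of rendering the same Blob URL inline instead of opening a new window (the component and state names are illustrative, not from the original post):
import React, { useState } from "react";
import axios from "axios";

function PdfViewer() {
  const [fileURL, setFileURL] = useState(null);

  const loadPdf = () => {
    axios("http://localhost:5000/getFile", { method: "GET", responseType: "blob" })
      .then(response => {
        const file = new Blob([response.data], { type: "application/pdf" });
        setFileURL(URL.createObjectURL(file));
      })
      .catch(error => console.log(error));
  };

  return (
    <div>
      <button onClick={loadPdf}>Open PDF</button>
      {fileURL && <iframe src={fileURL} title="pdf" width="100%" height="600" />}
    </div>
  );
}

export default PdfViewer;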

Transferring from Windows to Linux in node js with smbget

How do you retrieve a file if your Node/Express web service is on a Linux server (Ubuntu) and you need to download files from a Windows server?
Two options considered:
Windows servers do not support SFTP/SSH out of the box. There are ways to do this (with https://winscp.net/eng/docs/guide_windows_openssh_server and https://www.npmjs.com/package/ssh2), but this requires our server administrator to install software on Windows, and it isn't fully supported by Microsoft.
You could use FTP/FTPS. There's this package: https://github.com/Atinux/node-ftps. I just wasn't able to get it to work, but it should be a viable option.
How can you do this from the operating system directly instead of relying on third-party Node packages?
You can use smbget (a Linux utility that ships with Samba; see https://linux.die.net/man/1/smbget) and then use Node's child_process spawn to call it.
Just replace [workgroup], [username], [password], [serveraddress], and [path] with your own information here.
function getFile(file) {
  return new Promise(function(resolve, reject) {
    var tempFilePath = `/tmp/${file}`; // path on the Linux machine
    var remoteFile = require('child_process').spawn('smbget', [
      '--outputfile', tempFilePath,
      '--workgroup=[workgroup]',
      '-u', '[username]',
      '-p', '[password]',
      `smb://[serveraddress]/c$/[path]/[path]/${file}`
    ]);
    remoteFile.stdout.on('data', function(chunk) {
      // handle chunk of data
    });
    remoteFile.on('exit', function() {
      // file loaded completely, continue doing stuff
      // TODO: make sure the file exists on the local drive before resolving;
      // an error should be returned if the file does not exist on the Windows machine
      resolve(tempFilePath);
    });
    remoteFile.on('error', function(err) {
      reject(err);
    });
  });
}
The code snippet above returns a promise. So in node you could send the response to a route like this:
var express = require('express'),
  fs = require('fs'),
  router = express.Router(),
  retrieveFile = require('../[filename-where-above-function-is]');

router.route('/download/:file').get(function(req, res) {
  retrieveFile.getFile(req.params.file)
    .then(file => {
      res.status(200);
      res.download(file, function(err) {
        if (err) {
          // handle error, but keep in mind the response may be partially sent,
          // so check res.headersSent
        } else {
          // remove the temp file from this server
          fs.unlinkSync(file); // this deletes the file!
        }
      });
    })
    .catch(err => {
      console.log(err);
      res.status(500).json(err);
    });
});
The response will be the actual binary for the file to download. Since this file was retrieved from a remote server, we also need to make sure we delete the local file using fs.unlinkSync().
Use res.download to send the proper headers with the response so that most modern web browsers will know to prompt the user to download the file.

Create a PDF from HTML and send it to the user from a buffer

I'm trying to generate a PDF from an HTML file sent from the frontend, and the user should be able to download the PDF (it must never be stored on the server).
For this I am using the module: html5-to-pdf
My code is like this:
var pdf = require('html5-to-pdf');
var fs = require('fs');
router.post('/pdf', function(req, res) {
  var html = req.body.html;
  if (!html) {
    return res.sendStatus(400);
  }
  pdf().from.string(html).to.buffer({
    renderDelay: 1000
  }, function(err, data) {
    if (err) {
      return res.sendStatus(500);
    }
    var file = fs.createWriteStream('myDocument.pdf');
    file.write(data, function(err) {
      if (err) {
        res.sendStatus(500);
      }
      res.download('myDocument');
    });
  });
});
Whenever I download it, the file is 0 bytes, and the file also gets created on the server.
Could someone help me?
Maybe it sends the file before the write has finished. Try downloading only once the write stream has finished (note that fs write streams emit 'finish', not 'end', and that the file was written as 'myDocument.pdf'):
file.on('finish', function() {
  res.download('myDocument.pdf');
});
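Since the requirement is that the PDF is never stored on the server, here is a sketch that skips the filesystem entirely and sends the generated buffer straight to the client, using the same html5-to-pdf call as the question (the header values are illustrative assumptions):
pdf().from.string(html).to.buffer({ renderDelay: 1000 }, function(err, data) {
  if (err) {
    return res.sendStatus(500);
  }
  // Send the buffer directly; nothing is written to disk.
  res.set({
    'Content-Type': 'application/pdf',
    'Content-Disposition': 'attachment; filename="myDocument.pdf"',
    'Content-Length': data.length
  });
  res.send(data);
});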
The problem is that html5-to-pdf uses phantom to generate the PDF, and phantom deploys a little server on "localhost" at port 0. The issue is that OpenShift does not recognize "localhost"[1]; if process.env.OPENSHIFT_NODEJS_IP is used instead of "localhost", the application works correctly.
[1] https://github.com/amir20/phantomjs-node/tree/v1#use-it-in-restricted-enviroments
