I am trying to use ssh2-sftp-client in NodeJS to send a file from an S3 bucket in AWS to my server.
I have seen many examples and I have tried them all with no success. I feel that this one is close but still is not working for me:
async function sendFileViaSftp(srcEvent) {
  let Client = require('ssh2-sftp-client');
  let Path = '/';
  let sftp = new Client();
  await sftp.connect({
    host: '73.XXX.XX.XXX',
    port: 22,
    username: 'username',
    password: 'mypassword'
  }).then(() => {
    console.log("Connected...");
    return sftp.list(Path);
  }).then((list) => {
    console.log("It worked");
    console.log("file to transfer: " + srcEvent.file);
    var fs = require('fs');
    var path = require('path');
    var params = {
      Bucket: srcEvent.bucket,
      Key: srcEvent.key
    };
    var tempFileName = path.join('/tmp/', srcEvent.file);
    var tempFile = fs.createWriteStream(tempFileName);
    s3.getObject(params).createReadStream().pipe(tempFile);
    console.log("file is in tmp");
    let data = fs.createReadStream(tempFileName);
    let remote = '/';
    sftp.put(data, remote);
    console.log("the code makes it to here and prints this");
    return list;
  }).catch((err) => {
    console.log('Catch Error: ', err);
    throw new Error(err);
  });
}
I am calling this function like this:
if (folder === 'something') {
  await sendFileViaSftp(srcEvent);
  return {
    statusCode: 200,
    body: srcEvent
  };
}
I do not get any error message; it looks like it just times out, which I don't understand because I am using async/await. In my example, I am attempting to pull the file from the S3 bucket, store it in /tmp/, and then send /tmp/test.xls. srcEvent.file is test.xls in this case.
Even if I can simply send a blank txt file to my server from this function, that would be helpful.
Thanks!
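As for sending a simple blank or test file first: a minimal sketch that uploads a small in-memory text file, reusing the connection options from the snippet above, could look like the following. ssh2-sftp-client's put accepts a Buffer as the source, so no temp file is needed; the remote path is just an illustration and should point at a directory the SFTP user can write to.

const Client = require('ssh2-sftp-client');

async function sendTestFile() {
  const sftp = new Client();
  await sftp.connect({
    host: '73.XXX.XX.XXX',
    port: 22,
    username: 'username',
    password: 'mypassword'
  });
  // Upload a tiny in-memory buffer as a text file to prove the connection and permissions work
  await sftp.put(Buffer.from('hello from lambda'), '/test.txt');
  await sftp.end();
}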
I figured it out. I am not sure where exactly the problem was, but this will successfully pull a file from S3 and then upload it to the SFTP server:
async function sendFileViaSftp(srcEvent) {
  let Client = require('ssh2-sftp-client');
  var path = require('path');
  var localFolder = "/tmp";
  var remoteFolder = "/complete";
  var localfile = path.join(localFolder, srcEvent.file);
  var remotePath = path.join(remoteFolder, srcEvent.file);
  getFileFromS3(srcEvent, localFolder);
  let sftp = new Client();
  // sftpCredentials (host, port, username, password) is assumed to be defined elsewhere
  await sftp.connect(sftpCredentials).then(() => {
    try {
      return sftp.fastPut(localfile, remotePath);
    } catch (err) {
      console.log("Could not upload file: " + err);
    }
  })
    .then(() => {
      console.log("ending connections");
      sftp.end();
    })
    .catch(err => {
      console.error(err.message);
    });
}

async function getFileFromS3(srcEvent, localFolder) {
  // fs and path are required here for clarity; the s3 client is assumed to be configured elsewhere
  var fs = require('fs');
  var path = require('path');
  var params = {
    Bucket: srcEvent.bucket,
    Key: srcEvent.key
  };
  var tempFileName = path.join(localFolder, srcEvent.file);
  var tempFile = fs.createWriteStream(tempFileName);
  s3.getObject(params).createReadStream().pipe(tempFile);
  console.log("Put file in temp");
}
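One thing worth noting about the working version: getFileFromS3 is async but is never awaited, so the upload can in principle still race the S3 download. A minimal sketch of the same flow that waits for the temp file to finish writing before calling fastPut, assuming the same s3 client and sftpCredentials as above, could look like:

const fs = require('fs');
const path = require('path');

async function getFileFromS3(srcEvent, localFolder) {
  const params = { Bucket: srcEvent.bucket, Key: srcEvent.key };
  const tempFileName = path.join(localFolder, srcEvent.file);
  // Resolve only once the whole object has been flushed to /tmp
  await new Promise((resolve, reject) => {
    s3.getObject(params).createReadStream()
      .pipe(fs.createWriteStream(tempFileName))
      .on('finish', resolve)
      .on('error', reject);
  });
  return tempFileName;
}

// ...and in sendFileViaSftp:
// const localfile = await getFileFromS3(srcEvent, localFolder);
// await sftp.fastPut(localfile, remotePath);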
I need to download a file from a private GitLab server, and I need the method to be synchronous. This was my previous async code, and it works fine because I was using promises. But I'm having trouble converting it to synchronous. The other posts I've seen on SO either ended up using async code or didn't have options for headers.
const https = require('https');
const fs = require('fs');

const gitlabUrl = 'https://gitlab.custom.private.com';
const gitlabAccessToken = 'xmyPrivateTokenx';
const gLfilePath = '/api/v4/projects/1234/repository/files/FolderOne%2Ftest.txt/raw?ref=main';
const gLfileName = 'test.txt';

function downloadFileFromGitlab(filePath, fileName) {
  return new Promise((resolve, reject) => {
    var options = {
      path: filePath,
      headers: {
        'PRIVATE-TOKEN': gitlabAccessToken
      }
    };
    var url = gitlabUrl;
    var file = fs.createWriteStream(fileName);
    const request = https.get(url, options, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        resolve();
      });
      file.on('error', (err) => {
        file.close();
        reject(err);
      });
    });
    request.on('error', error => {
      reject(error);
    });
  });
}

downloadFileFromGitlab(gLfilePath, gLfileName);
I was able to figure it out using curl:
const child_process = require('child_process');
const fse = require('fs-extra'); // assumption: fse refers to fs-extra; plain fs.writeFileSync would work too

function downloadFileFromGitlab(filePath, fileName) {
  let curlCommand = "curl -s " + gitlabUrl + filePath + " -H 'PRIVATE-TOKEN:" + gitlabAccessToken + "'";
  let file = child_process.execSync(curlCommand);
  fse.writeFileSync(fileName, file);
}
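For what it's worth, building the command as a single shell string can break if the token or path ever contains quotes or spaces; the same synchronous idea with execFileSync avoids shell quoting entirely. A sketch, assuming the same gitlabUrl and gitlabAccessToken constants as above:

const { execFileSync } = require('child_process');
const fs = require('fs');

function downloadFileFromGitlabSync(filePath, fileName) {
  // -s: silent, -f: fail on HTTP errors instead of saving the error page to disk
  const body = execFileSync('curl', [
    '-sf',
    gitlabUrl + filePath,
    '-H', 'PRIVATE-TOKEN: ' + gitlabAccessToken
  ]);
  fs.writeFileSync(fileName, body);
}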
I have built an AWS Lambda function with a custom container image. I am trying to convert an Excel file to PDF with LibreOffice: getting the file from S3, saving it to a local file, converting it to PDF, and then uploading it back to S3.
Here is the code.
const fs = require('fs');
const getStream = require('get-stream');
const { Readable } = require('stream');
const { S3Client, GetObjectCommand, PutObjectCommand } = require("@aws-sdk/client-s3");
const libre = require('libreoffice-convert');
const path = require('path');

exports.handler = async (event) => {
  const bucket = event.queryStringParameters.bucket;
  const file = event.queryStringParameters.file;
  const convertedFile = event.queryStringParameters.convertedFile;
  if (event.queryStringParameters['warmup'] !== undefined) {
    return {
      result: true,
      message: 'warmed up'
    };
  }
  const client = new S3Client({ region: "ap-south-1" });
  const command = new GetObjectCommand({ Bucket: bucket, Key: file });
  const response = await client.send(command);
  const objectData = response.Body;
  const writeStream = fs.createWriteStream("/tmp/sample.xlsx");
  objectData.pipe(writeStream);
  var end = new Promise((resolve, reject) => {
    objectData.on('close', resolve(true));
    objectData.on('end', resolve(true));
    objectData.on('error', reject(false));
  });
  let completed = await end;
  if (completed) {
    const extend = '.pdf';
    const outputPath = `/tmp/sample${extend}`;
    const enterPath = '/tmp/sample.xlsx';
    var readingFile = new Promise((resolve, reject) => {
      fs.readFile(enterPath, (err, data) => {
        if (err) {
          reject(false);
        }
        resolve(data);
      });
    });
    var fileData = await readingFile;
    var converting = new Promise((resolve, reject) => {
      libre.convert(fileData, extend, undefined, (err, done) => {
        if (err) {
          reject(false);
        }
        fs.writeFileSync(outputPath, done);
        resolve(true);
      });
    });
    var converted = await converting;
    if (converted) {
      var convertedFileStream = fs.createReadStream(outputPath);
      const uploadCommand = new PutObjectCommand({ Bucket: bucket, Key: convertedFile, Body: convertedFileStream });
      const lastResponse = await client.send(uploadCommand);
      const returnResponse = {
        result: true,
        message: 'success',
        bucket: event.queryStringParameters.bucket,
        file: event.queryStringParameters.file,
        convertedFile: event.queryStringParameters.convertedFile
      };
      if (event.queryStringParameters['returnEvent'] !== undefined) {
        returnResponse['returnEvent'] = event;
      }
      return returnResponse;
    }
  }
  return completed;
};
However, I am getting this error at times. Sometimes it succeeds, but sometimes it throws this error.
{
"errorType": "Error",
"errorMessage": "false",
"stack": [
"Error: false",
" at _homogeneousError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:56:16)",
" at postError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:72:34)",
" at done (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:99:13)",
" at fail (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:113:13)",
" at /function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:148:24",
" at processTicksAndRejections (internal/process/task_queues.js:97:5)"
]
}
I don't know Node.js in great detail, so I suspect the code is not written the correct way. Any ideas what I am doing wrong here?
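For what it's worth, two details in the snippet line up with that error: objectData.on('close', resolve(true)) calls resolve(true) immediately instead of passing a listener, so the handler keeps going before the download has been flushed to /tmp, and the later reject(false) calls reject with the boolean false when the partially written file fails to read or convert, which is why the runtime reports the unhelpful "Error: false" instead of the real error. A minimal correction of just that promise, listening on the write stream so it resolves only after the file is actually on disk, would be:

// Register listener functions instead of calling resolve/reject immediately,
// and reject with the real error so the stack trace is meaningful
var end = new Promise((resolve, reject) => {
  writeStream.on('close', () => resolve(true));
  writeStream.on('error', (err) => reject(err));
  objectData.on('error', (err) => reject(err));
});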
As @hoangdv suggested, when I logged the errors I found that the file was not being saved to disk correctly. So I changed the part of the code where it saves the file to the following, and then it worked.
const client = new S3Client({ region: "ap-south-1" });
const command = new GetObjectCommand({ Bucket: bucket, Key: file });
const { Body } = await client.send(command);

// filePath is assumed to be the local /tmp path used earlier (e.g. /tmp/sample.xlsx)
await new Promise((resolve, reject) => {
  Body.pipe(fs.createWriteStream(filePath))
    .on('error', err => reject(err))
    .on('close', () => resolve())
});

const excelFile = fs.readFileSync(filePath);
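The same download-and-save step can be written a little more compactly with stream.promises.pipeline (available since Node.js 15), which rejects if either side of the pipe errors. A sketch assuming the same S3 client, GetObjectCommand import, bucket, file, and filePath as above:

const { pipeline } = require('stream/promises');
const fs = require('fs');

const { Body } = await client.send(new GetObjectCommand({ Bucket: bucket, Key: file }));
// pipeline resolves only when the object has been fully written to filePath
await pipeline(Body, fs.createWriteStream(filePath));
const excelFile = fs.readFileSync(filePath);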
I am trying to upload an image to an AWS S3 bucket using NodeJS. The issue I am facing is that while the image is getting saved, the API is returning 404 (Not Found). Here is my controller code:
async UploadProfileImage(ctx) {
  try {
    var file = ctx.request.files.profileImage;
    if (file) {
      fs.readFile(file.path, (err, fileData) => {
        var resp = s3Utility.UploadProfileImageToS3(file.name, fileData);
        // Not reaching here, although the ETag is printed in the console.
        console.log(resp);
        ctx.status = 200;
        ctx.body = { response: 'file Uploaded!' };
      });
    }
    else {
      ctx.status = 400;
      ctx.body = { response: 'File not found!' };
    }
  } catch (error) {
    ctx.status = 500;
    ctx.body = { response: 'There was an error. Please try again later!' };
  }
}
The utility class I am using is:
const AWS = require('aws-sdk');
const crypto = require("crypto");
var fs = require('fs');
const mime = require('mime-types');

export class S3Utility {
  constructor() { }

  async UploadProfileImageToS3(fileName, data) {
    let randomId = crypto.randomBytes(16).toString("hex");
    AWS.config.update({ region: "Region", accessKeyId: "KeyID", secretAccessKey: "SecretAccessKey" });
    var s3 = new AWS.S3();
    var imageName = randomId + fileName;
    var params = {
      Bucket: "BucketName",
      Key: imageName,
      Body: data,
      ContentType: mime.lookup(fileName)
    };
    return new Promise((resolve, reject) => {
      s3.putObject(params, function (err, data) {
        if (err) {
          console.log('Error: ', err);
          reject(new Error(err.message));
        } else {
          console.log(data);
          resolve({
            response: data,
            uploadedFileName: imageName
          });
        }
      });
    });
  }
}

const s3Utility: S3Utility = new S3Utility();
export default s3Utility;
The code is uploading the file to S3, but it is not returning a proper response. Upon testing this endpoint in Postman, I get a "Not Found" message. However, I can see the ETag getting logged in the console. I don't know what is going wrong here. I am sure it has something to do with promises. Can someone please point out or fix the mistake?
Edit:
Using async fs.readFile does the trick.
const fs = require('fs').promises;
const fileData = await fs.readFile(file.path, "binary");
var resp = await s3Utility.UploadProfileImageToS3(file.name, fileData);
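The underlying issue is worth spelling out: with callback-style fs.readFile, the try/catch block returns (and Koa sends its default 404, because neither ctx.status nor ctx.body has been set yet) before the callback ever runs, even though the upload itself eventually succeeds. A sketch of the whole controller with everything awaited, assuming the same s3Utility as above:

const fs = require('fs').promises; // promise-based fs, as in the edit above

async function UploadProfileImage(ctx) {
  try {
    const file = ctx.request.files.profileImage;
    if (!file) {
      ctx.status = 400;
      ctx.body = { response: 'File not found!' };
      return;
    }
    // Wait for both the file read and the S3 upload before Koa sends the response
    const fileData = await fs.readFile(file.path);
    const resp = await s3Utility.UploadProfileImageToS3(file.name, fileData);
    ctx.status = 200;
    ctx.body = { response: 'file Uploaded!', uploadedFileName: resp.uploadedFileName };
  } catch (error) {
    ctx.status = 500;
    ctx.body = { response: 'There was an error. Please try again later!' };
  }
}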
Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded, and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
  Kuzzle,
  WebSocket
} = require('kuzzle-sdk');

var start = new Date();

const webSocketOptionsObject = {
  "autoReconnect": true,
  "ssl": true,
  "port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};

// check file exists
fs.access(path, fs.F_OK, (err) => {
  if (err) {
    console.error(err)
    return
  }
  fileData = new fileObject.LocalFileData(path);
  // Adds a listener to detect connection problems
  kuzzle.on('networkError', error => {
    console.error('Network Error:', error);
  });
});

const connectToKuzzle = async () => {
  // Connects to the Kuzzle server
  await kuzzle.connect();
  return await kuzzle.auth.login('local', credentials);
  // console.log('jwt auth token: ', jwt);
}

const disConnectFromKuzzle = async () => {
  console.log('Disconnected from Kuzzle');
  kuzzle.disconnect();
  var time = new Date() - start;
  // sec = Math.floor((time/1000) % 60);
  console.log('Execution time in milliseconds: ', time);
}

const presignedURL = async () => {
  // Get a Presigned URL
  const result = await kuzzle.query({
    controller: 's3/upload',
    action: 'getUrl',
    uploadDir: 'proxybucket', // directory name inside the Bucket specified in the s3 plugin bucket name
    filename: fileData.name
  });
  console.log("result: ", result);
  return result;
}

const loadFileStream = async () => {
  console.log('getting file: ', path);
  let targetFile = null;
  await fs.promises.readFile(path)
    .then(function (result) {
      console.log("file loaded------", result.length);
      targetFile = result;
    })
    .catch(function (error) {
      console.log(error);
      return;
    });
  return targetFile;
}

const kuzzleValidate = async (kuzzleResource) => {
  // console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
  // validate
  // Validate and persist a previously uploaded file.
  // https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
  const Presult = await kuzzle.query({
    // Kuzzle API params
    "controller": "s3/upload",
    "action": "validate",
    // File key in S3 bucket
    "fileKey": kuzzleResource.result.fileKey
  });
  console.log('validate: ', Presult.result.fileUrl);
}

const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
  // options at https://github.com/axios/axios
  const axiosOptions = {
    headers: {
      'Content-Type': fileData.type
    },
    maxBodyLength: 200000000 // 200,000,000 bytes 200 Mb
  };
  // PUT the fileBuffer to the Kuzzle S3 endpoint
  // https://github.com/axios/axios
  axios.defaults.headers.common['Authorization'] = jwt;
  const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
    .then((response) => {
      console.log('file uploaded......');
    })
    .catch(function (error) {
      console.log("File upload error: ", error);
      return;
    });
  return "Upload successful";
}

if (fileData) {
  connectToKuzzle().then((jwt) => {
    console.log(jwt);
    // upload(jwt);
    presignedURL().then((kuzzleResource) => {
      loadFileStream().then((fileBuffer) => {
        uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
          console.log("doneMessage: ", doneMessage);
        }).then(() => {
          kuzzleValidate(kuzzleResource).then(() => {
            disConnectFromKuzzle();
          });
        });
      });
    });
  });
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to point at a different S3-compatible endpoint, which can be a Minio one.
This configuration can be changed under the plugins.s3.endpoint key. You should also force path-style access instead of the default virtual-hosted style, since Minio buckets are usually addressed by path.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);
I decided to post this after extensive searching here (1, 2, 3) and here (1, 2) and many, many other related posts. I am losing hope, but will not give up that easily :)
I'm using multer to upload a PNG image to a MongoDB database:
const storage = new GridFsStorage({
  url: 'mongodb://my_database:thisIsfake@hostName/my_database',
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => { // generating unique names to avoid duplicates
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'media',
          metadata: {
            clientId: req.body.client_id // added metadata to have a reference to the client to whom the image belongs
          }
        };
        resolve(fileInfo);
      });
    });
  }
});

const upload = multer({ storage }).single('image');
Then I create a stream and pipe it to the response:
loader: function (req, res) {
  var conn = mongoose.createConnection('mongodb://my_database:thisIsfake@hostName/my_database');
  conn.once('open', function () {
    var gfs = Grid(conn.db, mongoose.mongo);
    gfs.collection('media');
    gfs.files.find({ metadata: { clientId: req.body.id } }).toArray(
      (err, files) => {
        if (err) throw err;
        if (files) {
          const readStream = gfs.createReadStream(files[0].filename); // testing only with the first file in the array
          console.log(readStream);
          res.set('Content-Type', files[0].contentType);
          readStream.pipe(res);
        }
      });
  });
}
A Postman POST request to the endpoint shows the response body rendered as an image file (screenshot not included here).
In the front end I put the response into a File object, read it, and save the result in the src attribute of an img:
findAfile() {
  let Data = {
    id: this.$store.state.StorePatient._id,
  };
  console.log(this.$store.state.StorePatient._id);
  visitAxios.post('http://localhost:3000/client/visits/findfile', Data)
    .then(res => {
      const reader = new FileReader();
      let file = new File([res.data], "image.png", { type: "image/png" });
      console.log('this is file: ', file);
      reader.readAsDataURL(file); // encode a string
      reader.onload = function () {
        const img = new Image();
        img.src = reader.result;
        document.getElementById('imgContainer').appendChild(img);
      };
    })
    .catch(err => console.error(err));
}
The resulting File object is similar to the one I get from a plain file input, only bigger (screenshots of the original and reconstructed files not included here). When inspecting the element, the data URI is where it should be, but it differs from the one produced for the original image via the file input. For comparison, this is how I display the image through an input element:
onFileSelected(event) {
  this.file = event.target.files[0];
  this.fileName = event.target.files[0].name;
  const reader = new FileReader();
  console.log(this.file);
  reader.onload = function () {
    const img = new Image();
    img.src = reader.result;
    document.getElementById('imageContainer').appendChild(img);
  };
  reader.readAsDataURL(this.file);
}
Displayed via the file input the image renders fine, but when it is built from the response it comes out corrupted (screenshots not included here). Postman gets it right, so there must be something wrong with my front-end code, right? How do I pass this gfs stream to my HTML?
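One likely culprit on the front end: axios treats the response body as text/JSON by default, and forcing binary data through that conversion corrupts it. Requesting a blob keeps the bytes intact and avoids the FileReader round-trip entirely; a sketch assuming the same endpoint and Data payload as above:

visitAxios.post('http://localhost:3000/client/visits/findfile', Data, { responseType: 'blob' })
  .then(res => {
    // res.data is already a Blob, so it can be turned into an object URL directly
    const img = new Image();
    img.src = URL.createObjectURL(res.data);
    document.getElementById('imgContainer').appendChild(img);
  })
  .catch(err => console.error(err));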
I managed to make a POST request to fetch an image from MongoDB and save it in the server dir:
const readStream = gfs.createReadStream(files[0].filename);
const wstream = fs.createWriteStream(path.join(__dirname,"uploads", "fileToGet.jpg"));
readStream.pipe(wstream);
Then I added a simple GET route that serves the file from an absolute path, and finally deletes the file after a successful response:
app.get('/image', function (req, res) {
  // dir and mime are assumed to be defined elsewhere (the uploads directory and a map of extensions to MIME types)
  var file = path.join(dir, 'fileToGet.jpg');
  if (file.indexOf(dir + path.sep) !== 0) {
    return res.status(403).end('Forbidden');
  }
  var type = mime[path.extname(file).slice(1)] || 'text/plain';
  var s = fs.createReadStream(file);
  s.on('open', function () {
    res.set('Content-Type', type);
    s.pipe(res);
  });
  s.on('end', function () {
    fs.unlink(file, () => {
      console.log("file deleted");
    })
  });
  s.on('error', function () {
    res.set('Content-Type', 'text/plain');
    res.status(404).end('Not found');