I need to upload large files (2GB+) using Filepond to a NodeJS server running Express. These files need to be streamed (because of their size) to two locations...a local folder and a remote folder.
My research led me to a solution using tus-node-server https://github.com/pqina/filepond/issues/48
I've implemented the solution as seen below but I don't know how to:
1. Change the name of the file on upload (it uses a UID, I'd like to use the filename from the "metadata" property)
2. Execute a function on the server during or after the upload.
Any help?
Client.js
// Configure FilePond to stream uploads through tus (resumable upload protocol).
Filepond.setOptions({
  server: {
    process: (fieldName, file, metadata, load, error, progress, abort) => {
      // Create a resumable tus upload for the selected file.
      const upload = new tus.Upload(file, {
        endpoint: `/upload`,
        // Back-off delays (ms) between retry attempts.
        retryDelays: [0, 1000, 3000, 5000],
        metadata: {
          filename: file.name,
          filetype: file.type
        },
        onError: (err) => {
          console.log("Failed because: " + err);
          error(err)
        },
        onProgress: (bytesUploaded, bytesTotal) => {
          progress(true, bytesUploaded, bytesTotal)
        },
        onSuccess: () => {
          // Report the server-assigned upload id back to FilePond.
          load(upload.url.split('/').pop())
        }
      });
      // Start the upload
      upload.start();
      // Allow FilePond to cancel the transfer when the user aborts.
      return {
        abort: () => {
          upload.abort()
          abort()
        }
      };
    }
  }
})
Server.js
// Express entry point: serves static assets and mounts a tus upload endpoint.
const express = require('express');
const path = require('path');
const tus = require('tus-node-server');
const Client = require('ftp');

const app = express();

// Dedicated sub-app so the tus server handles every HTTP verb under /upload.
const uploadApp = express();
const tusServer = new tus.Server();
tusServer.datastore = new tus.FileStore({
  path: '/public/files'
});
uploadApp.all('*', tusServer.handle.bind(tusServer));

app.use('/public', express.static(path.join(__dirname, "/public")));
app.use('/upload', uploadApp);
app.listen(3000);
Before trying this method I was using app.post to receive the upload and perform further action.
// Receives a multipart upload (express-fileupload style), saves it under
// public/files, then mirrors the saved file to the remote FTP location.
app.post('/upload', (req, res, next) => {
  const uploadFile = req.files.file;
  const fileName = req.files.file.name;
  const localPath = `${__dirname}/public/files/${fileName}`;
  const ftp = new Client();

  uploadFile.mv(localPath, err => {
    if (err) { return res.status(500).send(err); }
    res.json({ file: `public/${fileName}` });

    // Start the FTP transfer only once the file is fully on disk.
    // Previously ftp.connect() raced the local move, and ftp.put() read
    // the bare file name relative to the process cwd instead of the
    // saved path.
    ftp.on('ready', () => {
      ftp.put(localPath, `/ftppath/${fileName}`, putErr => {
        // The HTTP response has already been sent; log mirror failures
        // instead of throwing inside the callback (which crashed the
        // process in the original).
        if (putErr) { console.error(putErr); }
        ftp.end();
      });
    });
    ftp.on('error', console.error);
    ftp.connect(ftpOptions);
  });
});
Related
I want to upload the excel file on the MySQL database and then import it back,
I have uses the technologies :
express
multer
mysql2
read-excel-file
sequelize
But when I upload the Excel file, req.file shows up as undefined in the Excel controller.
I have checked the multer twice but it seems to be right.
I don't know what is the problem ...
Your answer will help me.
Thanks
Server-side code:
const multer = require("multer");

// Accept only Excel uploads (xls/xlsx MIME types); reject everything else.
const excelFilter = (req, file, cb) => {
  const isExcel =
    file.mimetype.includes("excel") || file.mimetype.includes("spreadsheetml");
  if (isExcel) {
    cb(null, true);
  } else {
    cb("Please upload only excel file.", false);
  }
  console.log("done");
};

// Store uploads on disk under <app root>/uploads with a timestamped name.
var storage = multer.diskStorage({
  destination: (req, file, cb) => cb(null, __basedir + "/uploads"),
  filename: (req, file, cb) => {
    console.log(file.originalname);
    cb(null, `${Date.now()}-bestfile-${file.originalname}`);
  },
});

// Multer instance shared by the routers (field parsing + Excel filtering).
var uploadFile = multer({ storage: storage, fileFilter: excelFilter });
module.exports = uploadFile;
Excel controller
const db = require("../../models");
const Branch = db.bestdata;
const readXlsxFile = require("read-excel-file/node");
// POST handler: parses the uploaded Excel file (already saved to disk by the
// multer middleware) and bulk-inserts its rows into the Branch table.
const upload = async (req, res) => {
  try {
    if (req.file == undefined) {
      return res.status(400).send({ msg: "No, Excel File Uploaded" });
    }
    const path = __basedir + "/uploads/" + req.file.filename;

    // Await the parse so read failures are caught by the catch below.
    // The original chained .then() without a .catch(), so a bad file
    // produced an unhandled promise rejection instead of a 500.
    const rows = await readXlsxFile(path);
    // skip header
    rows.shift();

    // Map spreadsheet columns onto the Branch model's fields.
    const branchArray = rows.map((row) => ({
      id: row[0],
      branch_name: row[1],
      mgr_id: row[2],
      mgr_start_date: row[3],
    }));

    try {
      await Branch.bulkCreate(branchArray);
      res.status(200).send({
        message: "Uploaded the file successfully: " + req.file.originalname,
      });
    } catch (error) {
      res.status(500).send({
        message: "Fail to import data into database!",
        error: error.message,
      });
    }
  } catch (error) {
    console.log(error);
    res.status(500).send({
      message: "Could not upload the file: " + req.file.originalname,
    });
  }
};
const GetImport = (req, res) => {
Branch.findAll()
.then((data) => {
res.send(data);
})
.catch((err) => {
res.status(500).send({
message:
err.message || "Some error occurred while retrieving tutorials.",
});
});
};
module.exports = {
upload,
GetImport,
};
Router:
const express = require("express");
const excelController = require("../controller/BestData/excel.controller.js");
const uploadFile = require("../middlewares/upload.js");

const router = express.Router();

// Mounts the Excel routes on the given app under /excel.
const routes = (app) => {
  // Multer parses the "file" field before the controller runs.
  router.post("/upload", uploadFile.single("file"), excelController.upload);
  router.get("/import", excelController.GetImport);
  app.use("/excel", router);
};

module.exports = routes;
Snapshot of postman test
enter image description here
Excel File uploading
enter image description here
The answer is simple: the destination and the path should be the same.
I am trying to implement a feature to my web app where you can upload CSV files and insert data into Postgresql. I have made my app endpoint and written some code
const router = require('express').Router()
const uploadMid = require('./fileUpMid')
const pool = require('./db')
const fs = require("fs");
const fastcsv = require("fast-csv");
// POST handler: saves an uploaded CSV to ./uploads, then streams it into
// Postgres one row per INSERT.
const upload = async (req, res) => {
  if (req.files === null) {
    return res.status(400).json({ msg: 'No file uploaded' });
  }
  const file = req.files.file;
  const path = __dirname + "/uploads/" + file.name;

  file.mv(path, err => {
    if (err) {
      console.error(err);
      return res.status(500).send(err);
    }
    res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });

    // Parse only after mv() has finished writing the file. The original
    // opened the read stream in parallel with the move, which caused the
    // intermittent "ENOENT: no such file or directory" errors.
    const persons = [];
    fs.createReadStream(path)
      .pipe(fastcsv.parse({ headers: true }))
      .on("error", (error) => {
        console.error(error.message);
      })
      .on("data", (row) => {
        persons.push(row);
      })
      .on("end", () => {
        //remove head
        persons.shift();
        const q = "some query here";
        pool.connect((err, client, done) => {
          if (err) throw err;
          try {
            persons.forEach(row => {
              const values = Object.values(row);
              // Renamed the callback's result parameter: the original
              // shadowed the express `res` with the query result.
              client.query(q, values, (qErr, qRes) => {
                if (qErr) {
                  console.log(qErr.stack);
                } else {
                  console.log("inserted " + qRes.rowCount + " row:", row);
                }
              });
            });
          } finally {
            done();
          }
        });
      });
  });
  // fs.unlinkSync(path)
};
// "file" is the multipart field name the upload middleware expects.
router.post('/file', uploadMid.single("file") ,upload)
module.exports = router
Everything seemed to work fine, but when I try to upload a second file I always get an error in the terminal:
Error: ENOENT: no such file or directory, open 'filename here with full path'
>- Emitted 'error' event on ReadStream instance at:
>- at internal/fs/streams.js:126:14
>- at FSReqCallback.oncomplete (fs.js:180:23) {
>- errno: -4058,
>- code: 'ENOENT',
>- syscall: 'open',
>- path: 'filename here with full path'}
I know this is not a safe nor secure way to upload data, but this app is intended to be run only locally. Even when the first file is uploaded successfully, the DevTools console logs
GET http://localhost:3000/uploads/filename [HTTP/1.1 404 Not Found 8ms]
But the file is created with all its content on uploads directory.
Any tip for what to look for ?
Thank you in advance :)
Judging by the error (Error: ENOENT: no such file or directory, open 'filename here with full path'), here is the suggested way of defining paths in NodeJS apps using the path module.
const path = require('path');
// Inside`upload` middleware
// path.join builds a platform-correct path instead of manual string concat.
const filePath = path.join(__dirname, 'uploads', file.name);
I'm trying to do the following in Node.js using express router, Multer-S3, Multer, AWS and Mongodb.
I want to:
1: Check if filetype is image, price is number etc (some kind of quality check)
2: If above true, upload image to S3 to get Image url
3: If Image Url was generated, upload to Mongodb, including the generated image url..
Trying with below code but can only get one of these to work at same time..
const express = require("express");
const router = express.Router();
const shopController = require("../controllers/shop");
// creatingShop runs first (S3 upload), then createShopItem (validation + DB).
router.post(
"/shop/create/:shopId",
shopController.creatingShop,
shopController.createShopItem
);
const ShopItem = require("../models/shopitem"); //Mongoose Schema
const multer = require("multer");
// Bare multer() keeps the file in memory so it can be inspected before S3.
const fileview = multer().single("file1"); //Trying to use this to view file before uploading to S3
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1"); //Using this to upload to S3
exports.createShopItem = (req, res, next) => {
fileview(req, res, function (err) {
const file = req.file;
const title = req.body.title;
const price = req.body.price;
const description = req.body.description;
const location = req.body.location;
const user = "OrreSnorre";
if (
file.mimetype != "image/jpeg" &&
file.mimetype != "image/jpg" &&
file.mimetype != "image/png"
) {
return next(new Error("invalid file type"));
}
if (file.size > 2500000) {
return next(new Error("Your image is to big. Maximum 2.5mb"));
}
next();
console.log(
"Here I want to add upload text to mongoDb... including URL from S3 after it is generated"
);
});
exports.creatingShop = (req, res, next) => {
singleUpload(req, res, function (err) {
console.log(req.file);
// res.json({ "image-url": req.file.location });
});
next();
};
Anyone got ideas? Or examples that work?
Best regards,
Oscar
There are 2 ways to do this, either you can use only multer or multer-s3.
For simplicity, I will show you the way using only multer.
Flow of processing as follow:
Multer process and save to local
You read from local, and upload to s3 using s3 SDK (You should explore how to remove the file after upload as well, but I wont clutter you with this logic here)
If upload is successful, you retrieve the URL and pass it to your MongoDB.
// Make "temp" directory as multer.diskStorage wont create folder
fs.mkdir('./temp', { recursive: true }, (err) => {
if (err) throw err;
});
const PORT = parseInt(process.argv[2]) || parseInt(process.env.PORT) || 3000;
// Multer
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './temp');
},
filename: function (req, file, cb) {
let extArray = file.mimetype.split('/');
let extension = extArray[extArray.length - 1];
cb(null, new Date().getTime() + '.' + extension);
},
});
const upload = multer({ storage: storage });
const endpoint = new AWS.Endpoint(AWS_S3_HOSTNAME);
const s3 = new AWS.S3({
endpoint,
accessKeyId: AWS_S3_ACCESSKEY_ID,
secretAccessKey: AWS_S3_SECRET_ACCESSKEY,
});
// Get the uploaded file in local here
// Read the uploaded file from local disk as a Buffer.
// Fixed: the original misplaced the closing parenthesis of this Promise
// after putObject (former L364), which made the whole file a syntax error.
const readFile = (path) =>
  new Promise((resolve, reject) =>
    fs.readFile(path, (err, buff) => {
      if (null != err) reject(err);
      else resolve(buff);
    })
  );

// Upload a buffer to AWS S3 under the multer-generated filename.
// Resolves with the S3 object key on success.
const putObject = (file, buff, s3) =>
  new Promise((resolve, reject) => {
    const params = {
      Bucket: AWS_S3_BUCKET_NAME,
      Key: file.filename,
      Body: buff,
      ACL: 'public-read',
      ContentType: file.mimetype,
      ContentLength: file.size,
    };
    s3.putObject(params, (err, result) => {
      if (null != err) reject(err);
      else resolve(file.filename);
    });
  });
// Shared MongoDB client; the connection itself is established elsewhere.
const mongoClient = new MongoClient(MONGO_URL, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
// POST /api/post: accept one image ("imageFile"), push it to S3, then store a
// document containing the S3 URL in MongoDB; the temp file is removed on
// success.
app.post('/api/post', upload.single('imageFile'), async (req, res) => {
  // Form fields accompanying the upload. Fixed: the original referenced
  // `comments` and `title` without ever declaring them.
  const { comments, title } = req.body;

  readFile(req.file.path)
    .then((buff) =>
      // Insert Image to S3 upon succesful read
      putObject(req.file, buff, s3)
    )
    .then((results) => {
      // build url of the resource upon successful insertion
      const resourceURL = `https://${AWS_S3_BUCKET_NAME}.${AWS_S3_HOSTNAME}/${results}`;
      const doc = {
        comments,
        title,
        ts: new Date(),
        image: resourceURL, // Your URL reference to image here
      };
      // Insert to your mongoDB
      mongoClient
        .db(MONGO_DB)
        .collection(MONGO_COLLECTION)
        .insertOne(doc)
        .then((results) => {
          // delete the temp file when no error from MONGO & AWS S3
          fs.unlink(req.file.path, () => {});
          // return the inserted object
          res.status(200).json(results.ops[0]);
        })
        .catch((error) => {
          console.error('Mongo insert error: ', error);
          res.status(500);
          res.json({ error });
        });
    })
    .catch((error) => {
      console.error('insert error: ', error);
      res.status(500);
      res.json({ error });
    });
}); // fixed: the route registration was missing its closing ")"
I'm using Node JS / Express and would like to download a file from url to local system, and in the next step upload it Google Cloud Storage.
This is my router with middlewares:
router.post("", fileFromUrl, uploadFromUrl, scrapeController.scrapeCreateOne);
this is a fileFromUrl middleware that is just saving a file from url to local disk
module.exports = (req, res, next) => {
try {
console.log('Image: ', req.body.image);
const url = req.body.image ? req.body.image : '';
console.log(typeof url);
if(url === '') {
//no image url provided
console.log('image parsing skipped');
next()
}
else {
// image url ok then
const pathToImage = path.resolve(__dirname, '..', 'images', Date.now() + '_file.jpg');
const localPath = fs.createWriteStream(pathToImage);
const saveFile = https.get(url, (response) => {
console.log(response.headers['content-type']);
response.pipe(localPath);
})
req.body.customImageUrl = pathToImage;
req.body.customImageName = path.basename(pathToImage);
next();
}
}
catch (error) {
console.log(error)
}
}
this is uploadFromUrl middleware that should upload the file from local path to the Google Cloud Storage
module.exports = (req, res, next) => {
try {
console.log(req.body.customImageUrl);
console.log(req.body.customImageName);
//storage.getBuckets().then(x => console.log(x));
//storage.createBucket(`testbucket_${Date.now()}`); / // it does work
storage.bucket(bucketName).upload(req.body.customImageUrl, {
gzip: true,
metadata: {
cacheControl: 'public, max-age=31536000',
},
}).then(
req.body.customData = `https://storage.googleapis.com/${bucketName}/${req.body.customImageName}`
);
next();
}
catch (error) {
res.send(error).json({
message: 'Error upload middleware' + error,
});
}
}
What it does right now is upload an almost empty 20 kB file to the Google Cloud Platform, not the full image. I feel that I'm not providing a proper file object to the uploadFromUrl middleware. On the other hand, the GCP upload API only asks for the path to the file, which is being provided. Any ideas?
The issue was that I was trying to upload image to GCP, even though the image was not yet fully saved on the server. The solution was to wait for 'finish' event in the request I made to save it locally
// Build a unique local destination for the downloaded image.
const pathToImage = path.resolve(__dirname, '..', 'images', Date.now() + '_file.jpg');
const localPath = fs.createWriteStream(pathToImage);
const fileRelativePath = "images/" + path.basename(pathToImage);
// Proceed only after the write stream emits 'finish', i.e. the image is
// fully flushed to disk.
const request = https.get(url, (response) => {
//console.log(response.headers['content-type']);
response.pipe(localPath).on('finish', () => {
console.log('image saved on the server');
req.body.customImagePath = fileRelativePath;
req.body.customImageName = path.basename(pathToImage);
// now you can go and upload the image to GCP in the next moddleware
next();
});
});
I would like to move a small image from one server to another (both running node). As I search, I haven't found enough. This post remains unanswered.
As I started experimenting I wrote the following to the first server :
// POST /move_img: read a local image and relay its bytes to server2.
app.post("/move_img", function(req, res) {
  console.log("post handled");
  fs.readFile(__dirname + "/img_to_move.jpg", function(err, data) {
    if (err) throw err;
    console.log(data);
    const payload = {
      data: data,
      name : "test.jpg"
    };
    needle.post(server2 + "/post_img", payload, function(result) {
      console.log(result);
      res.send("ok");
    });
  });
});
This part seems to be working as I could be writing the data in the same server (using fs.writeFile) recreate the img.
Now as I am trying to handle the post in the other server I have a problem.
Server2:
// Receives the relayed image on server2.
// NOTE(review): the bare multer({...}) instance is passed directly as
// middleware; with multer 1.x you must call .single()/.array()/.any() on it —
// presumably why req.files arrives empty here. TODO confirm against the
// installed multer version.
app.post('/post_img', [ multer({ dest: './uploads/images'}), function(req, res) {
console.log("body ",req.body) // form fields
console.log("files ",req.files) // form files
res.send("got it");
}]);
This way i get an empty object in the files and the following in the body: { 'headers[Content-Type]': 'application/x-www-form-urlencoded', 'headers[Content-Length]': '45009' }
I think I could use busboy as an alternative but I can't make it to work. Any advice, tutorial would be welcome.
I solved my problem by using the following code,
server1 (using needle) :
// POST /move_img: send the local jpeg to server2 as a multipart upload,
// letting needle stream the file from disk.
app.post("/move_img", function(req, res) {
  console.log("post handled")
  const payload = {
    image: {
      file: __dirname + "/img_to_move.jpg",
      content_type: "image/jpeg"
    }
  };
  needle.post(server2 + "/post_img", payload, {
    multipart: true
  }, function(err,result) {
    console.log("result", result.body);
  });
})
Server 2:
// Multer mounted as app-level middleware for /post_img. The rename /
// onFileUploadStart / onFileUploadComplete options are the legacy multer 0.x
// API and were removed in multer 1.x.
// NOTE(review): dest is '.uploads/images' (no separator after the dot) —
// presumably './uploads/images' was intended; verify where files land.
app.use('/post_img',multer({
dest: '.uploads/images',
rename: function(fieldname, filename) {
return filename;
},
onFileUploadStart: function(file) {
console.log(file.originalname + ' is starting ...')
},
onFileUploadComplete: function(file) {
console.log(file.fieldname + ' uploaded to ' + file.path)
}
}));
// By the time this handler runs, multer has already stored the file(s).
app.post('/post_img', function(req, res) {
console.log(req.files);
res.send("File uploaded.");
});
An alternative for the server 1 is the following (using form-data module):
// Alternative sender: build the multipart request manually with form-data.
var form = new FormData();
form.append('name', 'imgTest.jpg');
// Stream the image from disk instead of buffering it in memory.
form.append('my_file', fs.createReadStream(__dirname + "/img_to_move.jpg"));
form.submit(frontend + "/post_img", function(err, result) {
// res – response object (http.IncomingMessage) //
console.log(result);
});
I'd simply read your file from the first server with the function readFile() and then write it to the other server with the function writeFile().
Here you can see use of both functions in one of my servers.
'use strict';
const express = require('express');
const multer= require('multer');
const concat = require('concat-stream');
const request = require('request');
const router = express.Router();
// Custom multer storage engine: relays each uploaded file to a remote
// endpoint instead of writing it to local disk.
function HttpRelay (opts) {}
// Called by multer per file: buffer the incoming stream with concat-stream,
// then forward the bytes to the remote endpoint as a multipart POST.
HttpRelay.prototype._handleFile = function _handleFile (req, file, cb) {
file.stream.pipe(concat({ encoding: 'buffer' }, function (data) {
// NOTE(review): replace the placeholder string below with the real URL.
const r = request.post('/Endpoint you want to upload file', function (err, resp, body) {
if (err) return cb(err);
// Stash the remote server's reply so the route can echo it back.
req.relayresponse=body;
cb(null, {});
});
const form = r.form();
form.append('uploaded_file', data, {
filename: file.originalname,
contentType: file.mimetype
});
}))
};
// Nothing was written locally, so there is nothing to clean up on error.
HttpRelay.prototype._removeFile = function _removeFile (req, file, cb) {
console.log('hello');
cb(null);
};
// Accept any field name; every file goes through the relay storage above.
const relayUpload = multer({ storage: new HttpRelay() }).any();
router.post('/uploadMsgFile', function(req, res) {
relayUpload(req, res, function(err) {
res.send(req.relayresponse);
});
});
module.exports = router;
see multer does all the tricks for you.
you just have to make sure you use no middle-ware but multer to upload files in your node starting point.
Hope it does the tricks for you also.