How to upload S3 files in KeystoneJS - node.js

I have a list called Style which has two attributes: one holding raw CSS text and another holding an S3File.
Style.add({
  ...
  css: { type: Types.Code, language: 'css' },
  cssFile: {
    type: Types.S3File,
    s3path: 'uploads/assets',
  },
  ...
});
I want to update the S3File with the contents of the css text.
function uploadCSStoAmazon(style) {
  // Store the CSS code in a temporary file (with a random hex name)
  var rndm = crypto.randomBytes(20).toString('hex'),
      file_path = '/tmp/css_temp_' + rndm + '.css';
  fs.writeFile(file_path, style.css, function(err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
    // style.cssFile = new Types.S3File();
    // TODO upload file to amazon
    style.cssFile._.uploadFile(file_path, true, function(err, fileData) {
      // TODO erase css file
    });
  });
}
...
var aStyle = new Style.model({
  ...
  css: 'Some css string',
  ...
});
...
uploadCSStoAmazon(aStyle);
The cssFile attribute is undefined at this point, I understand, but how can I create a new file, assign it to this attribute, and also upload the file?

I found out how: you can use the updateHandler that comes with Keystone. Note that it still expects req.files from Express 3.x.
// An Express-style file descriptor generator
function writeToFile(fileName, txt, ext, callback) {
  var rndm = crypto.randomBytes(20).toString('hex'),
      file_path = '/tmp/css_temp_' + rndm + '.' + ext,
      the_file = {};
  fs.writeFile(file_path, txt, function(err) {
    if (err) {
      return callback(null, err);
    }
    var stats = fs.statSync(file_path);
    var fileSizeInBytes = stats["size"];
    the_file.path = file_path;
    the_file.name = fileName + '.' + ext;
    the_file.type = 'text/' + ext;
    the_file.size = fileSizeInBytes;
    console.log("The file was cached!");
    callback(the_file, err);
  });
}
...
/**
 * Update Style by ID
 */
exports.update = function(req, res) {
  var data = (req.method == 'POST') ? req.body : req.query;
  Style.model.findById(data._id).exec(function(err, item) {
    if (err) return res.apiError('database error', err);
    if (!item) return res.apiError('not found');
    writeToFile(item.slug, data.css, 'css', function(req_file, err) {
      if (err) return res.apiError('update error during file cache', err);
      req.files['cssFile_upload'] = req_file;
      item.getUpdateHandler(req).process(data, function(err) {
        if (err) return res.apiError('update error', err);
        res.apiResponse({
          success: true
        });
      }); // end process
    }); // end writeToFile
  });
};
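The cached temp file from writeToFile is never removed. One way to handle that (a sketch, not part of the original answer) is to unlink it once the update handler finishes:

item.getUpdateHandler(req).process(data, function(err) {
  if (err) return res.apiError('update error', err);
  // clean up the cached file; a failed unlink shouldn't fail the request
  fs.unlink(req_file.path, function(unlinkErr) {
    if (unlinkErr) console.log('could not remove temp file:', unlinkErr);
    res.apiResponse({ success: true });
  });
});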

Related

Formidable couldn't parse large file

The code below works with small files, but it can't upload files larger than 50 KB. I think there is something I should set with maxFileSize. I'm using Uppy on the client. After adding console.log on the server, I can see the request does arrive. I haven't found any clue on Stack Overflow and really need help.
const upload = (req, res) => {
  // formidable: to parse html form data
  const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
  const d = new Date();
  // I have console.log here and everything seems fine
  form.parse(req, (err, fields, files) => {
    console.log('err', err)       // returns nothing
    console.log('files', files)   // returns nothing
    console.log('fields', fields) // returns nothing
    if (err) {
      console.log("Error parsing the files");
      console.log(err);
      return res.status(400).json({
        message: "There was an error parsing the files",
        status: "Fail",
        error: err
      })
    }
    for (let file in files) {
      try {
        if (files[file]) {
          let oldPath = files[file]['path']
          let rawData = fs.readFileSync(oldPath)
          const month = parseInt(d.getMonth() + 1) < 10 ? '0' + parseInt(d.getMonth() + 1) : parseInt(d.getMonth() + 1)
          let today = `${d.getFullYear()}_${month}_${d.getDate()}`
          let folderPath = __basedir + `\\media\\uploads\\storage\\${today}\\`;
          // folderPath = ..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\
          if (!fs.existsSync(folderPath)) {
            fs.mkdirSync(folderPath, {
              recursive: true
            });
          }
          // newPath = ..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\WIN.jpg
          let newPath = folderPath + files[file]['name']
          let databasePath = `storage/${today}/${files[file]['name']}`;
          let filename = files[file]['name'] // example_files.zip
          if (fs.existsSync(newPath)) {
            // if the file already exists, append Date.now()
            let time = Date.now()
            let filenameSplit = filename.split('.')
            filename = filenameSplit[0] + '_' + time + '.' + filenameSplit[1]
            // filename = WIN_1626750408096.jpg
            newPath = folderPath + filename
            databasePath = `storage/${today}/${filename}`;
          }
          fs.writeFile(newPath, rawData, async (err) => {
            if (err) {
              console.log(err);
              return res.status(400).send({ "err": err })
            }
            const userToken = jwt.verify(fields.user, config.TOKEN_SECRET)
            const newFiles = {
              filename: filename,
              user_id: ObjectId(userToken.id),
              filepath: databasePath,
              added_time: Date.now(),
            }
            const result = await db.collection("ate_files").insertOne(newFiles)
            console.log(`Created with the following id: ${result.insertedId}`)
            console.log(`Successfull upload ${newPath}`);
          })
        }
      } catch (err) {
        console.log(`Error: ${err}`);
        return res.status(409).send({ "error": `${err}` })
      }
    }
  })
  return res.status(200).send({ "message": "Successfully uploadded the files" })
}
Your return res.status(200).send({ "message": "Successfully uploadded the files" }) fires too soon; it should be inside the callback.
That is especially problematic for large files: the beginning of the big file is still being received while the client has already gotten a response, which can logically cut the HTTP connection.
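A minimal sketch of that fix, assuming one file per field and Node's fs.promises API; newPathFor is a hypothetical helper standing in for the date/duplicate-name logic above:

form.parse(req, async (err, fields, files) => {
  if (err) {
    return res.status(400).json({ message: "There was an error parsing the files", error: err })
  }
  try {
    // wait until every file is fully written before responding
    await Promise.all(Object.values(files).map(async (file) => {
      const rawData = await fs.promises.readFile(file.path)
      await fs.promises.writeFile(newPathFor(file), rawData) // newPathFor: hypothetical helper wrapping the folder/duplicate logic
    }))
    return res.status(200).send({ message: "Successfully uploaded the files" })
  } catch (e) {
    return res.status(409).send({ error: `${e}` })
  }
})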

Video Upload using formidable in Nodejs, Error found: post 404 / 502

I'm uploading a video file from local to the server, and then I'll be uploading it to a CDN. The issue I'm facing is that my code runs well locally, but it doesn't work when I push it to the server.
Here is my code:
commonJs
$("#uploadVideo").click(function (e) {
var reader = new FileReader();
var fileInput = document.getElementById('Videofile');
var previewUrl = window.URL.createObjectURL(fileInput.files[0]);
$(".video").attr("src", previewUrl);
var videotype = "video/mp4";
var file_data = $("#Videofile").prop("files")[0];
if (!file_data.type.match(videotype)) {
return "alert('Please upload mp4 files')"
} else {
var metadata = {
'content-type': 'video/mp4',
'size': file_data.size,
'uploaded': new Date(),
}
reader.onload = function (e) {
$("file_data").text("File Content: " + reader.result); // Show the file content
}
reader.readAsBinaryString(file_data);
file_data.onloadedmetadata = function () {
alert("Meta data for audio loaded");
};
};
var form_data = new FormData();
form_data.append("file", file_data)
form_data.append("metdata", metadata)
for (var key of form_data.entries()) {
console.log(key[0] + ', ' + key[1]);
}
if (form_data != undefined) {
$.ajax({
type: "post",
contentType: false,
processData: false,
url: "/api/recordvideo",
data: form_data,
dataType: 'json',
success: function (result) {
if (result) {
$(".video").attr("src", result.videolink);
alert("Successfully Uploaded Video");
console.log("Successfully Uploaded Video");
} else {
alert("Error on Uploading Video");
console.log("Error on Uploading Video");
}
},
error: function (err) {
console.log("error");
}
});
}
e.preventDefault();
e.stopPropagation();
});
ServerSide
app.post('/api/recordvideo', Api.recordvideo);

var Upload = require('gcs-resumable-upload');
ApiService.recordvideo = function (req, res) {
  var db = req.db;
  console.log("came in cloudupload");
  var form = new formidable.IncomingForm();
  var filesdata;
  form.keepExtensions = true;
  form.multiples = false;
  form.on('fileBegin', function (name, file) {
    file.path = 'public/demo/' + file.name;
    console.log("fileBegin: " + JSON.stringify(file));
  });
  form.on('file', function (name, file) {
    console.log('Uploaded ' + JSON.stringify(file));
    var path = file.path;
    console.log("came in cloud3 :" + JSON.stringify(path));
  });
  form.parse(req, function (err, fields, files) {
    console.log("came in cloud0" + JSON.stringify(files));
    filesdata = files;
  });
  console.log("came in cloud2");
  form.on('end', function (fields, files) {
    var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
    var path = this.openedFiles[0].path;
    console.log("came in cloud3 :" + JSON.stringify(path));
    fs.createReadStream(path)
      .pipe(Upload.upload({ bucket: '******', file: path, metadata: { contentType: this.openedFiles[0].type } }))
      .on('finish', function (response) {
        console.log("Successfully Uploaded Video :" + JSON.stringify(response));
        res.send({ "status": false, "videolink": "https://****/****/" + filesdata.file.name });
      });
  });
  //res.send({ "status": false, "err": null });
}
At first it was at least uploading to the server folder, and in Chrome developer tools it used to give a response like {readystate: 4, ...}.
Now, after I made some changes, it doesn't even hit my API; after a few seconds Chrome developer tools shows a 404 or 502 error.
Well, I got the solution. Previously I was using the gcs-resumable-upload module to upload, but then I tried the '@google-cloud/storage' module, through which I was able to upload up to 9 MB.
const Storage = require('@google-cloud/storage');

var db = req.db;
console.log("came in cloudupload");
var form = new formidable.IncomingForm();
var filesdata;
form.keepExtensions = true;
form.multiples = false;
form.parse(req, function (err, fields, files) {
  filesdata = files;
});
form.on('end', function (fields, files) {
  var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
  var path = this.openedFiles[0].path;
  const storage = new Storage({
    keyFilename: 'gcloudcred.json'
  });
  const myBucket = storage.bucket('onfvideo');
  myBucket.upload(path).then((resp) => {
    console.log('uploaded to ' + resp);
    res.send({ "status": true, "err": null });
  }).catch(err => {
    console.error('ERROR:', err);
    res.send({ "status": false, "err": null });
  });
});
The 9 MB limitation I was facing was due to the .NET Framework data-transfer limit, which I resolved with the following (note that maxRequestLength is given in kilobytes):
<system.web>
  <customErrors mode="Off"/>
  <httpRuntime targetFramework="4.5" maxRequestLength="7483648" />
</system.web>
Method 2: Using XHR to call the REST API
1. Generate an access token using the google-auto-auth module
2. XMLHttpRequest:
var fileInput = $("#Videofile").prop("files")[0];
var url = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o?uploadType=media&name=" + fileInput.name;
var http = new XMLHttpRequest();
http.open('POST', url, true);
http.setRequestHeader('Content-type', 'video/mp4');
http.setRequestHeader("Authorization", "Bearer " + token);
http.send(fileInput);
http.onprogress = function (ev) {
  if (ev.lengthComputable) {
    var percentage = Math.round((ev.loaded / ev.total) * 100);
    console.log("percent " + percentage + '%');
  } else {
    console.log("Unable to compute progress information since the total size is unknown");
  }
}
http.onloadstart = function (ev) { console.log("start") }
http.onloadend = function (ev) { }
http.onreadystatechange = function () {
  if (http.readyState == 4 && http.status == 200) {
    var response = JSON.parse(http.responseText);
    alert("Successfully Uploaded Video");
  }
}
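For larger videos, the same REST API also supports resumable uploads (uploadType=resumable), which avoids single-request size limits. A rough sketch, assuming a valid token as above, the bucketname placeholder, and a bucket CORS configuration that exposes the Location header:

// Step 1: initiate a resumable session; the session URI comes back in the Location header
var initUrl = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o?uploadType=resumable&name=" + fileInput.name;
var init = new XMLHttpRequest();
init.open('POST', initUrl, true);
init.setRequestHeader("Authorization", "Bearer " + token);
init.setRequestHeader("X-Upload-Content-Type", "video/mp4");
init.onreadystatechange = function () {
  if (init.readyState == 4 && init.status == 200) {
    var sessionUri = init.getResponseHeader("Location");
    // Step 2: upload the bytes to the session URI (the upload can be resumed on failure)
    var put = new XMLHttpRequest();
    put.open('PUT', sessionUri, true);
    put.setRequestHeader('Content-type', 'video/mp4');
    put.onreadystatechange = function () {
      if (put.readyState == 4 && put.status == 200) {
        console.log("Resumable upload finished");
      }
    };
    put.send(fileInput);
  }
};
init.send();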

local variable is not changing

I'm using node.js to convert text files to CSV. It works for one file, but when I try to process more files the fileDestination variable doesn't change. Why? Input files are named like this: r10_1C_BP1-11_e41-81_10_5X1x9_05_train2.res
I get the following console output:
./1_train.csv has been written successufully! r10_1C_BP1-11_e41-81_10_5X1x9_05_train2.res
./1_train.csv has been written successufully! r10_1C_BP1-11_e41-81_1_5X1x9_05_train2.res
/*
 * reads *.dat files and converts them to *.csv
 */
const fs = require('fs');
const inputDir = './';
const outputDir = './';

function readFiles(inputDir, onError) {
  fs.readdir(inputDir, function(err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    filenames.forEach(function(inputFile) {
      // first we are looking for the "right" file name
      if (inputFile.search(/res/) != -1) {
        console.log('Starting processing ' + inputFile);
        convert2csv(inputFile, function(error) {
          throw error;
        });
      }
    });
  });
}
function convert2csv(filename, onError) {
  arrayFromFilename = filename.split('_');
  epoca = arrayFromFilename[4];
  trainORval = arrayFromFilename[7].replace('2.res', '');
  console.log("from convert " + filename + " " + epoca);
  fs.readFile(inputDir + filename, 'utf-8', function(err, content) {
    if (err) {
      onError(err);
      return;
    }
    content = content.replace(/^[^0].*\n/mg, '');
    arr = content.split('\n');
    pares = arr.filter(function(d, i) {
      return i % 2 == 1;
    });
    content = pares.join('\n');
    content = content.replace(/(^[\d.]*) ([\d.]*)/gm, '$1,$2');
    fileDestination = outputDir + epoca + '_' + trainORval + '.csv';
    console.log("filedestination :" + fileDestination);
    fs.writeFile(fileDestination, 'y,x\n', function(err) {
      if (err) {
        return console.error(err);
      }
      fs.appendFile(fileDestination, content, function(err) {
        if (err) {
          return console.error(err);
        }
        console.log(fileDestination + " has been written successufully!", filename);
      });
    });
  });
}
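No accepted answer appears here, but a likely culprit (my assumption from the code above) is that epoca, trainORval, and fileDestination are never declared, so they become implicit globals shared by every in-flight convert2csv call; by the time the async readFile callbacks run, the last call has already overwritten them. A minimal sketch of the fix is to declare them locally:

function convert2csv(filename, onError) {
  // var keeps these private to each call instead of implicit globals
  var arrayFromFilename = filename.split('_');
  var epoca = arrayFromFilename[4];
  var trainORval = arrayFromFilename[7].replace('2.res', '');
  fs.readFile(inputDir + filename, 'utf-8', function(err, content) {
    if (err) return onError(err);
    // ... same transformations as above ...
    var fileDestination = outputDir + epoca + '_' + trainORval + '.csv';
    // each call now writes to its own fileDestination
  });
}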

Video from Mongo Grid fs is not playing on Safari browser (also on Cordova app)

I am using MongoDB to store video files in GridFS. It surprised me today to learn that video is not playing on the Safari browser, although video read from GridFS plays fine on Chrome and Firefox. Following are two approaches to read video files back from GridFS; both approaches have the same problem. I do see that the correct MIME type is getting set.
Approach 1:
exports.previewFile = function (req, res) {
  var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
  log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
  // Read metadata details from fs.files
  var query = {_id: contentId};
  documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
    if (!Utilities.isEmptyList(files)) {
      var fileObj = files[0];
      var gridStore = DBModule.db.gridStore(contentId, 'r');
      gridStore.open(function (err, gridStore) {
        var stream = gridStore.stream(true);
        if (!Utilities.isEmptyObject(fileObj.metadata)) {
          res.setHeader('Content-Type', fileObj.metadata.contentType);
        }
        stream.on("data", function (chunk) {
          log.debug("Chunk of file data");
          res.write(chunk);
        });
        stream.on("end", function () {
          log.debug("EOF of file");
          res.end();
        });
        stream.on("close", function () {
          log.debug("Finished reading the file");
        });
      });
    } else {
      log.error({err: err}, 'Failed to read the content for id ' + contentId);
      res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
      res.json({error: contentId + " not found"});
    }
  });
};
Approach 2:
exports.previewFile = function (req, res) {
  var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
  log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
  // Read metadata details from fs.files
  var query = {_id: contentId};
  documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
    if (!Utilities.isEmptyList(files)) {
      var fileObj = files[0];
      var gridStore = DBModule.db.gridStore(contentId, 'r');
      gridStore.read(function (err, data) {
        if (!err) {
          if (!Utilities.isEmptyObject(fileObj.metadata)) {
            res.setHeader('Content-Type', fileObj.metadata.contentType);
          }
          res.end(data);
        } else {
          log.error({err: err}, 'Failed to read the content for id ' + contentId);
          res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
          res.json({error: err});
        }
      });
    } else {
      log.error({err: err}, 'Failed to read the content for id ' + contentId);
      res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
      res.json({error: contentId + " not found"});
    }
  });
};
A screenshot of Safari was attached for reference.
Please help.
Try this gist (by https://gist.github.com/psi-4ward).
It makes use of the byte-range header:
https://gist.github.com/psi-4ward/7099001
Although it does not work for me with Safari, it makes sure that the correct headers are set and the correct content is delivered, which could narrow down your problem.
EDIT
I've updated the gist. It now works fine with Safari for me:
https://gist.github.com/derMani/218bd18cc926d85a57a1
This should solve your problem.
function StreamGridFile(req, res, GridFile) {
  if (req.headers['range']) {
    // Range request, partially stream the file
    console.log('Range Request');
    var parts = req.headers['range'].replace(/bytes=/, "").split("-");
    var partialstart = parts[0];
    var partialend = parts[1];
    var start = parseInt(partialstart, 10);
    var end = partialend ? parseInt(partialend, 10) : GridFile.length - 1;
    var chunksize = (end - start) + 1;
    res.writeHead(206, {
      'Content-disposition': 'filename=xyz',
      'Accept-Ranges': 'bytes',
      'Content-Type': GridFile.contentType,
      'Content-Range': 'bytes ' + start + '-' + end + '/' + GridFile.length,
      'Content-Length': chunksize
    });
    // Set filepointer
    GridFile.seek(start, function() {
      // get GridFile stream
      var stream = GridFile.stream(true);
      // write to response
      stream.on('data', function(buff) {
        // count data to abort streaming if range-end is reached
        // perhaps there's a better way?
        if (start >= end) {
          // enough data sent, abort
          GridFile.close();
          res.end();
        } else {
          res.write(buff);
          start += buff.length; // track how many bytes have been sent
        }
      });
    });
  } else {
    // stream back the whole file
    console.log('No Range Request');
    res.header('Content-Type', GridFile.contentType);
    res.header('Content-Length', GridFile.length);
    var stream = GridFile.stream(true);
    stream.pipe(res);
  }
}
Regards
Rolf
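
For reference, a sketch (my assumption, reusing the question's DBModule/gridStore calls) of wiring StreamGridFile into the previewFile handler above:

exports.previewFile = function (req, res) {
  var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
  var gridStore = DBModule.db.gridStore(contentId, 'r');
  gridStore.open(function (err, GridFile) {
    if (err) {
      res.status(500);
      return res.json({ error: 'could not open ' + contentId });
    }
    // delegate range handling to the gist's helper
    StreamGridFile(req, res, GridFile);
  });
};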

form uploading through formidable

I'm receiving the following file on the server:
{"file":{"size":6818,"path":"/tmp/a451340156a9986cd9d208678bdc40cf","name":"test.pdf","type":"application/pdf","mtime":"2014-09-03T15:26:25.733Z"}}
I have file upload handling as follows:
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
  console.log(JSON.stringify(files));
  // `file` is the name of the <input> field of type `file`
  var old_path = files.file.path,
      file_size = files.file.size,
      file_ext = files.file.name.split('.').pop(),
      index = old_path.lastIndexOf('/') + 1,
      file_name = old_path.substr(index),
      new_path = path.join(process.env.PWD, '/uploads/', file_name + '.' + file_ext);
  fs.readFile(old_path, function(err, data) {
    fs.writeFile(new_path, data, function(err) {
      fs.unlink(old_path, function(err) {
        if (err) {
          res.status(500);
          res.json({'success': false});
        } else {
          res.status(200);
          res.json({'success': true});
        }
      });
    });
  });
});
This gives 200 OK, but the file is not uploaded to the desired directory, i.e. uploads/.
new_path is returned as /home/abc/myapp/uploads/0bc49fa19d15fb5bdf779c02d3cbc1d5.pdf,
however it should just be /uploads/test.pdf.
Is it the path or the filename that's causing the issue?
I'd start by simplifying the code by using the rename function. It would look something like this.
var newFilePath = path.join(process.env.PWD, 'uploads', files.file.name);
fs.rename(files.file.path, newFilePath, function(err) {
  if (err) {
    //handle error
  }
  res.json({success: 'true'});
});
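One caveat, not from the original answer: fs.rename fails with an EXDEV error when /tmp and the uploads directory sit on different filesystems, which is one reason the copy-then-unlink approach from the question exists. A sketch of a fallback (fs.copyFile requires Node 8.5+):

fs.rename(files.file.path, newFilePath, function(err) {
  if (err && err.code === 'EXDEV') {
    // cross-device move: fall back to copy + unlink
    fs.copyFile(files.file.path, newFilePath, function(copyErr) {
      if (copyErr) return res.status(500).json({success: false});
      fs.unlink(files.file.path, function() {
        res.json({success: true});
      });
    });
  } else if (err) {
    res.status(500).json({success: false});
  } else {
    res.json({success: true});
  }
});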
