Weirdest thing ever: I was trying out one of my endpoints in Lambda; everything was working a few days back and nothing has changed.
Basically the function runs fine up until the point where it needs to return a status code; then, for some reason, it just returns a 502, and API Gateway reports "Malformed Lambda proxy response".
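For reference, API Gateway returns "Malformed Lambda proxy response" when the integration response isn't in the shape it expects from a Lambda proxy integration. A minimal sketch of that shape (a generic illustration, not my handler):
exports.handler = async (event) => {
    return {
        statusCode: 200,
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ code: 100 }) // body must be a string
    };
};
My route, which goes through Express, is below: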
app.post("/api/v1/documents/create", async (req, res) => {
res.setHeader('Content-Type', 'application/json');
const filename = req.body.filename
const partner = req.body.partner
const payload = req.body
const uid = req.body.uid
console.log(payload)
try {
// Initial setup, create credentials instance.
const credentials = PDFServicesSdk.Credentials
.serviceAccountCredentialsBuilder()
.fromFile("./pdfservices-api-credentials.json")
.build();
// Setup input data for the document merge process.
const jsonString = payload,
jsonDataForMerge = jsonString;
// Create an ExecutionContext using credentials.
const executionContext = PDFServicesSdk.ExecutionContext.create(credentials);
// Create a new DocumentMerge options instance.
const documentMerge = PDFServicesSdk.DocumentMerge,
documentMergeOptions = documentMerge.options,
options = new documentMergeOptions.DocumentMergeOptions(jsonDataForMerge, documentMergeOptions.OutputFormat.PDF);
// Create a new operation instance using the options instance.
const documentMergeOperation = documentMerge.Operation.createNew(options);
// Set operation input document template from a source file.
const input = PDFServicesSdk.FileRef.createFromLocalFile('./darbo_sutartis.docx');
documentMergeOperation.setInput(input);
// Execute the operation and Save the result to the specified location.
documentMergeOperation.execute(executionContext)
.then(result => {
console.log("saving File to TMP?")
result.saveAsFile('/tmp/' + uid + '_' + partner + '.pdf')
const checkTime = 1000;
const timerId = setInterval(() => {
const isExists = fs.existsSync('/tmp/' + uid + '_' + partner + '.pdf', 'utf8')
if (isExists) {
console.log("\nFile written -> creating AWS Bucket")
const params1 = {
Bucket: "darbo-manija-docs",
Key: "employment_documents/" + uid + "/" + partner + "/",
};
s3.putObject(params1, (err, data) => {
if (err) {
console.log(err)
} else {
console.log(data)
}
});
console.log("\nAWS Bucket directory created...")
// do something here
console.log("\nUploading file to AWS\n")
fs.readFile('/tmp/' + uid + '_' + partner + '.pdf', function (err, data) {
if (err) throw err;
const pdf = data.toString('base64'); //PDF WORKS
const pdfNew = Buffer.from(pdf, 'base64')
const params = {
Bucket: 'darbo-manija-docs/employment_documents/' + uid + "/" + partner,
Key: uid + '_' + partner + '.pdf', // File name you want to save as in S3
Body: pdfNew, // <---------
ContentType: 'application/pdf'
};
// Uploading files to the bucket
s3.upload(params, function (err, data) {
if (err) {
res.status(400).send(JSON.stringify({
message: "ERR",
code: 0
}));
}
console.log(`\nFile uploaded successfully. ${data.Location}`);
console.log("\nCreating entry in Firebase")
var fb_ref = admin.database().ref('/documents');
fb_ref.push({
documentBucket: params.Bucket,
documentKey: params.Key,
candidate: partner,
employer: uid
})
.then(function (fb_ref) {
admin.database().ref('/documents').child(fb_ref.key).update({
documentID: fb_ref.key
})
});
console.log("\nFirebase entry created");
console.log("\nRemoving temp file...")
fs.unlinkSync('/tmp/' + uid + '_' + partner + '.pdf')
res.status(200).send(JSON.stringify({
result: pdf,
code: 100
}));
});
});
clearInterval(timerId)
}
}, checkTime)
})
.catch(err => {
if (err instanceof PDFServicesSdk.Error.ServiceApiError ||
err instanceof PDFServicesSdk.Error.ServiceUsageError) {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 400
}));
} else {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 401
}));
}
});
} catch (err) {
console.log('Exception encountered while executing operation', err);
}
});
No idea what is happening; I've read many posts about the same error, but none of them seem to have the same setup. Any suggestions? Thanks
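One guess I can't confirm: the route returns the entire merged PDF as a base64 string in the response body, and Lambda caps synchronous response payloads at roughly 6 MB, so a large document would make the response oversized and API Gateway surfaces that as a 502. A sketch of returning just the S3 location instead (params is the upload params object from the code above):
res.status(200).send(JSON.stringify({
    bucket: params.Bucket, // where the PDF was uploaded
    key: params.Key,       // client can fetch it later, e.g. via a pre-signed URL
    code: 100
}));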
I have a Node.js + Express application running on an EC2 instance. Part of that application is to send mass email to all of its users (by the admin), with the ability to attach files (max 5 files allowed).
Recently we tested it by attaching three PDF files of roughly 2.5 MB each. When the send button is pressed, the application keeps spinning before culminating in a 504 Gateway Timeout error. I suspect the SendGrid code is unable to process the attachments and the Node server crashes, taking the EC2 instance down with it. When this happens, the only way out for me is to stop the EC2 instance and then start it again; rebooting does not help.
Here is the code
router.js
var fs = require('fs');
var multer = require('multer');
const uploadsDir = './uploads';
// SET STORAGE
var storage = multer.diskStorage({
destination: function (req, file, cb) {
if (!fs.existsSync(uploadsDir)){
fs.mkdirSync(uploadsDir);
}
cb(null, uploadsDir);
},
filename: function (req, file, cb) {
cb(null, file.originalname);
}
});
var upload = multer({ storage: storage });
router.post('/send', upload.array('files', 5), async (req, res) => {
let subject = req.body.subject;
let message = req.body.message;
let result = message.replace(/(\r\n|\r|\n)/g, '<br>');
let bccReceiverList = [];
let whereCondition = {
isActive: true
};
let attachments = [];
if(req.files && req.files.length > 0) {
req.files.forEach(file => {
attachments.push({
filename: file.originalname,
type: file.mimetype,
uploadPath: req.app.get('uploads')
});
});
}
let receiverUsers = await User.findAll({});
//find to whom we are sending the email to
for (let index = 0; index < receiverUsers.length; index++) {
const user = receiverUsers[index];
emailHandler.sendEmail(
{
receiver: user.email,
bccReceiver: bccReceiverList,
templateId: process.env.EMAIL_BLAST_TEMPLATE,
attachments: attachments.length > 0 ? attachments : []
},
{
subject: subject,
message: result
},
data => {}
);
}
if(req.files && req.files.length > 0) {
req.files.forEach(file => {
fs.unlink(req.app.get('uploads') + '/' + file.originalname, function (err) {
if (err) {
console.error(err);
}
console.log('File has been Deleted');
res.send('file was deleted');
});
});
}
res.redirect('back');
});
Then, in the actual email handler function:
var sg = require('@sendgrid/mail');
var fs = require('fs');
sg.setApiKey(process.env.SENDGRID_API_KEY);
exports.sendEmail = async function(email, payload, callback) {
let msg = {
to: email.receiver,
from: {email: 'admin@myapp.com', name: 'My App'},
subject: email.subject,
templateId: email.templateId,
dynamic_template_data: payload
};
//Buffer.from(fileContent).toString('base64')
if(email.attachments != null && email.attachments.length > 0) {
try {
let attachmentObjects = [];
for (let index = 0; index < email.attachments.length; index++) {
const attachment = email.attachments[index];
const fileContent = fs.readFileSync(attachment.uploadPath + '/' + attachment.filename);
attachmentObjects.push({
content: Buffer.from(fileContent).toString('base64'),
filename: attachment.filename,
type: attachment.mimetype,
disposition: "attachment"
});
}
msg.attachments = attachmentObjects;
} catch (error) {
console.log(error);
callback({status: 500, message: 'Error while attaching files to email: ' + error.message});
}
}
if(email.hasOwnProperty('ccReceiver')) {
if(email.ccReceiver != null) {
msg.cc = email.ccReceiver;
}
}
if(email.hasOwnProperty('bccReceiver')) {
if(email.bccReceiver.length > 0) {
msg.bcc = email.bccReceiver;
}
}
sg.send(msg).then(() => {
console.log('---- email sent successfully');
// delete the attachment files from the uploads folder
if(email.attachments != null && email.attachments.length > 0) {
for (let index = 0; index < email.attachments.length; index++) {
const attachment = email.attachments[index];
fs.unlink(attachment.uploadPath + '/' + attachment.filename, function (err) {
if (err) {
console.error(err);
}
console.log('File has been Deleted');
});
}
}
callback({status: 200, message: 'Email Sent Successfully'});
}).catch(error => {
//Log friendly error
console.error('error-------- : ' + error.toString());
//Extract error msg
const {
message,
code,
response
} = error;
//Extract response msg
const {
headers,
body
} = response;
callback({status: 500, message: 'Error while sending email: ' + error.message});
});
};
I even tried attaching just one PDF file (2.5 MB) to the email and it still failed. When I perform the same test with smaller files, it works smoothly. I am not really sure how to solve this problem.
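A few things stand out in the code above (observations, not a verified fix): router.js stores the MIME type under type while the handler reads attachment.mimetype; the sendEmail calls are never awaited, so the route can delete the uploads and respond while the handler is still reading the files; and the route both calls res.send() inside the unlink callbacks and res.redirect() right after. A sketch of waiting for all sends before cleanup and responding exactly once, assuming sendEmail's callback is used to signal completion:
// Sketch: wrap sendEmail's callback in a promise so the route can wait for all
// sends before deleting the uploads and responding once.
const sendOne = (user) =>
    new Promise((resolve) =>
        emailHandler.sendEmail(
            {
                receiver: user.email,
                bccReceiver: bccReceiverList,
                templateId: process.env.EMAIL_BLAST_TEMPLATE,
                attachments: attachments
            },
            { subject: subject, message: result },
            resolve // sendEmail's callback fires on success or failure
        )
    );
await Promise.all(receiverUsers.map(sendOne));
// Only now remove the uploaded files, then respond exactly once.
if (req.files && req.files.length > 0) {
    req.files.forEach(file =>
        fs.unlink(req.app.get('uploads') + '/' + file.originalname, err => {
            if (err) console.error(err);
        })
    );
}
res.redirect('back');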
The code below works with small files, but it can't upload files that are more than about 50 KB. I think there is something I should be setting with maxFileSize. I'm using Uppy on the client. After adding console.log on the server, I can see the request does come through. I couldn't find any clue on Stack Overflow; I really need help.
const upload = (req, res) => {
// formidable : to parse html form data
const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
const d = new Date();
// I have console.log here and everything seems fine
form.parse(req, (err, fields, files) => {
console.log('err', err) // returns nothing
console.log('files', files) // returns nothing
console.log('fields', fields) // returns nothing
if (err) {
console.log("Error parsing the files");
console.log(err);
return res.status(400).json({
message: "There was an error parsing the files",
status: "Fail",
error: err
})
}
for (let file in files) {
try {
if (files[file]) {
let oldPath = files[file]['path']
let rawData = fs.readFileSync(oldPath)
const month = parseInt(d.getMonth() + 1) < 10 ? '0' + parseInt(d.getMonth() + 1) : parseInt(d.getMonth() + 1)
let today = `${d.getFullYear()}_${month}_${d.getDate()}`
let folderPath = __basedir + `\\media\\uploads\\storage\\${today}\\`;
// folderPath = ..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\
if (!fs.existsSync(folderPath)) {
fs.mkdirSync(folderPath, {
recursive: true
});
}
// newPath =..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\WIN.jpg
let newPath = folderPath + files[file]['name']
let databasePath = `storage/${today}/${files[file]['name']}`;
let filename = files[file]['name'] // example_files.zip
if (fs.existsSync(newPath)){
// if file is existed then add Date.now()
let time = Date.now()
let filenameSplit = filename.split('.')
filename = filenameSplit[0] + '_' + time + '.' + filenameSplit[1]
// filename = WIN_1626750408096.jpg
newPath = folderPath + filename
databasePath = `storage/${today}/${filename}`;
}
fs.writeFile(newPath, rawData, async (err) => {
if (err) {
console.log(err);
return res.status(400).send({ "err": err })
}
const userToken = jwt.verify(fields.user, config.TOKEN_SECRET)
const newFiles = {
filename: filename,
user_id: ObjectId(userToken.id),
filepath: databasePath,
added_time: Date.now(),
}
const result = await db.collection("ate_files").insertOne(newFiles)
console.log(`Created with the following id: ${result.insertedId}`)
console.log(`Successfull upload ${newPath}`);
})
}
} catch (err) {
console.log(`Error: ${err}`);
return res.status(409).send({ "error": `${err}` })
}
}
})
return res.status(200).send({ "message": "Successfully uploadded the files" })
}
Your return res.status(200).send({ "message": "Successfully uploadded the files" }) is sent too soon; it should be inside the callback.
This is especially problematic with large files: only the beginning of the big file has been received when the client already gets a response, which can logically cut the HTTP connection.
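A sketch of that restructuring, keeping the per-file logic from the question and responding only from inside the parse callback:
const upload = (req, res) => {
    const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
    form.parse(req, (err, fields, files) => {
        if (err) {
            return res.status(400).json({
                message: "There was an error parsing the files",
                status: "Fail",
                error: err
            })
        }
        // ... the existing for (let file in files) { ... } processing goes here ...
        // Respond once, after the parse callback has run. (The fs.writeFile callbacks
        // inside the loop are still asynchronous; a fuller fix would await those too.)
        return res.status(200).send({ "message": "Successfully uploaded the files" })
    })
    // no final res.status(200) out here -- it would run before parsing completes
}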
Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
Kuzzle,
WebSocket
} = require('kuzzle-sdk');
var start = new Date();
const webSocketOptionsObject = {
"autoReconnect": true,
"ssl": true,
"port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};
// check file exists
fs.access(path, fs.F_OK, (err) => {
if (err) {
console.error(err)
return
}
fileData = new fileObject.LocalFileData(path);
// Adds a listener to detect connection problems
kuzzle.on('networkError', error => {
console.error('Network Error:', error);
});
});
const connectToKuzzle = async () => {
// Connects to the Kuzzle server
await kuzzle.connect();
return await kuzzle.auth.login('local', credentials);
// console.log('jwt auth token: ', jwt);
}
const disConnectFromKuzzle = async () => {
console.log('Disconnected from Kuzzle');
kuzzle.disconnect();
var time = new Date() - start;
// sec = Math.floor((time/1000) % 60);
console.log('Execution time in milliseconds: ', time);
}
const presignedURL = async () => {
// Get a Presigned URL
const result = await kuzzle.query({
controller: 's3/upload',
action: 'getUrl',
uploadDir: 'proxybucket', // directory name inside the Bucket specified in the s3 plugin bucket name
filename: fileData.name
});
console.log("result: ", result);
return result;
}
const loadFileStream = async () => {
console.log('getting file: ', path);
targetFile = null;
await fs.promises.readFile(path)
.then(function (result) {
console.log("file loaded------", result.length);
targetFile = result;
})
.catch(function (error) {
console.log(error);
return;
});
return targetFile;
}
const kuzzleValidate = async (kuzzleResource) => {
// console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
// validate
// Validate and persist a previously uploaded file.
// https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
const Presult = await kuzzle.query({
// Kuzzle API params
"controller": "s3/upload",
"action": "validate",
// File key in S3 bucket
"fileKey": kuzzleResource.result.fileKey
});
console.log('validate: ', Presult.result.fileUrl);
}
const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
// options at https://github.com/axios/axios
const axiosOptions = {
headers: {
'Content-Type': fileData.type
},
maxBodyLength: 200000000 // 200,000,000 bytes 200 Mb
};
// PUT the fileBuffer to the Kuzzle S3 endpoint
// https://github.com/axios/axios
axios.defaults.headers.common['Authorization'] = jwt;
const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
.then((response) => {
console.log('file uploaded......');
})
.catch(function (error) {
console.log("File upload error: ", error);
return;
});
return "Upload successful";
}
if (fileData) {
connectToKuzzle().then((jwt) => {
console.log(jwt);
// upload(jwt);
presignedURL().then((kuzzleResource) => {
loadFileStream().then((fileBuffer) => {
uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
console.log("doneMessage: ", doneMessage);
}).then(() => {
kuzzleValidate(kuzzleResource).then(() => {
disConnectFromKuzzle();
});
});
});
});
});
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to point to a different S3-compatible endpoint, which can be a Minio one.
This configuration can be changed under the plugins.s3.endpoint key. You should also disable the usage of the default S3 path.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', false);
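If the new endpoint is picked up, the uploadUrl returned by the s3/upload getUrl action should point at the Minio host rather than the default your-s3-bucket.s3.eu-west-3.amazonaws.com address from the question.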
We have an application that sometimes serves an MP4 file stored on S3. Since only specific people should be able to see each file, the file is private, and inside our service we will only show it to authorised people.
The movie starts playing correctly (in the browser's built-in video tag). However, if we seek to a point in the movie that hasn't been buffered yet, the player buffers for a bit, then stops playing. Afterwards, clicking Play causes the movie to start from the beginning. If I make the file public and access it directly from S3, seeking to an unbuffered point works correctly.
I created a standalone Node program that reproduces this problem. I tried to make the response headers identical to those that S3 sends, but the problem remains.
const http = require("http");
const AWS = require("aws-sdk");
const proxy = require("proxy-agent");
Object.assign(process.env, {
AWS_ACCESS_KEY_ID: "REDACTED",
AWS_SECRET_ACCESS_KEY: "REDACTED",
AWS_EC2_REGION: "us-west-2"
});
const s3 = new AWS.S3({
s3ForcePathStyle: 'true',
signatureVersion: 'v4',
httpOptions: { timeout: 300000 },
endpoint: 'https://s3.us-west-2.amazonaws.com',
region: 'us-west-2'
});
const objectParams = {
Bucket: 'REDACTED',
Key: 'some-movie.mp4'
};
let request = 0;
function serve(req, res) {
console.log("Handling request", ++request, req.url);
s3.headObject(objectParams, (err, data) => {
if (err)
throw err;
const { ContentType: type, ContentLength: length} = data;
console.log("Got", data);
if (data.ETag)
res.setHeader("ETag", data.ETag);
const range = req.headers.range;
if (range) {
console.log("Serving range", range);
const parts = range.replace("bytes=", "").split("-");
const start = parseInt(parts[0], 10);
const end = parts[1]? parseInt(parts[1], 10): length -1;
let headers = {
"Content-Range": `bytes ${start}-${end}/${length}`,
"Accept-Ranges": "bytes",
"Content-Type": type,
"Content-Length": end - start + 1,
"Last-Modified": data.LastModified,
};
if (req.headers["if-range"]) {
console.log("Setting if-range to", req.headers["if-range"]);
headers["If-Range"] = req.headers["if-range"];
}
res.writeHead(206, headers);
}
else {
console.log("Whole file");
res.setHeader("Accept-Ranges", "bytes");
res.setHeader("Content-Type", type);
res.setHeader("Content-Length", length);
res.setHeader("Last-Modified", data.LastModified);
}
const stream = s3.getObject(objectParams).createReadStream();
stream.on("error", err => console.error("stream error:", err));
stream.pipe(res).on("finish", data => {
console.log("Finished streaming");
});
});
}
http.createServer(serve).listen(1234);
What am I missing?
Here is code with the seek bar working just fine. You can test it by integrating the code below and just opening the API URL in the browser.
import mime from 'mime-types';
const key = 'S3_BUCKET KEY';
const params = { Key: key, Bucket: AWS_BUCKET };
//s3 here refers to AWS.S3 object.
s3.headObject(params, function (err, data) {
if (err) {
console.error(err);
return next(err);
}
if (req.headers.range) {
const range = req.headers.range;
const bytes = range.replace(/bytes=/, '').split('-');
const start = parseInt(bytes[0], 10);
const total = data.ContentLength;
const end = bytes[1] ? parseInt(bytes[1], 10) : total - 1;
const chunkSize = end - start + 1;
res.set('Content-Range', 'bytes ' + start + '-' + end + '/' + total);
res.set('Accept-Ranges', 'bytes');
res.set('Content-Length', chunkSize.toString());
params['Range'] = range;
console.log('video buffering - range, total, start, end ,params', range, total, start, end, params);
} else {
res.set('Content-Length', data.ContentLength.toString());
console.log('video buffering - ,params', params);
}
res.status(206);
res.set('Content-Type', mime.lookup(key));
res.set('Last-Modified', data.LastModified.toString());
res.set('ETag', data.ETag);
const stream = s3.getObject(params).createReadStream();
stream.on('error', function error(err) {
return next(err);
});
stream.on('end', () => {
console.log('Served by Amazon S3: ' + key);
});
stream.pipe(res);
});
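The key difference from the code in the question appears to be params['Range'] = range: the browser's Range header is forwarded to S3, so getObject streams only the requested slice rather than the whole object while the response headers advertise a partial range. In the standalone reproduction above, the equivalent change would be something like (a sketch, not tested against that exact setup):
// Forward the requested byte range to S3 so the streamed body matches the 206 headers
const stream = s3
    .getObject({ ...objectParams, Range: req.headers.range })
    .createReadStream();
stream.pipe(res);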
I'm uploading a video file from local to the server, and then I'll be uploading it to a CDN.
The issue I'm facing is that my code runs fine locally but doesn't work when I deploy it to the server.
Here is my code
commonJs
$("#uploadVideo").click(function (e) {
var reader = new FileReader();
var fileInput = document.getElementById('Videofile');
var previewUrl = window.URL.createObjectURL(fileInput.files[0]);
$(".video").attr("src", previewUrl);
var videotype = "video/mp4";
var file_data = $("#Videofile").prop("files")[0];
if (!file_data.type.match(videotype)) {
return "alert('Please upload mp4 files')"
} else {
var metadata = {
'content-type': 'video/mp4',
'size': file_data.size,
'uploaded': new Date(),
}
reader.onload = function (e) {
$("file_data").text("File Content: " + reader.result); // Show the file content
}
reader.readAsBinaryString(file_data);
file_data.onloadedmetadata = function () {
alert("Meta data for audio loaded");
};
};
var form_data = new FormData();
form_data.append("file", file_data)
form_data.append("metdata", metadata)
for (var key of form_data.entries()) {
console.log(key[0] + ', ' + key[1]);
}
if (form_data != undefined) {
$.ajax({
type: "post",
contentType: false,
processData: false,
url: "/api/recordvideo",
data: form_data,
dataType: 'json',
success: function (result) {
if (result) {
$(".video").attr("src", result.videolink);
alert("Successfully Uploaded Video");
console.log("Successfully Uploaded Video");
} else {
alert("Error on Uploading Video");
console.log("Error on Uploading Video");
}
},
error: function (err) {
console.log("error");
}
});
}
e.preventDefault();
e.stopPropagation();
});
ServerSide
app.post('/api/recordvideo',Api.recordvideo);
var Upload = require('gcs-resumable-upload');
ApiService.recordvideo = function (req, res) {
var db = req.db;
console.log("came in cloudupload");
var form = new formidable.IncomingForm();
var filesdata;
form.keepExtensions = true;
form.multiples = false;
form.on('fileBegin', function (name, file){
file.path = 'public/demo/' + file.name;
console.log("fileBegin: " + JSON.stringify(file));
});
form.on('file', function (name, file){
console.log('Uploaded ' + JSON.stringify(file));
var path = file.path;
console.log("came in cloud3 :" + JSON.stringify(path));
});
form.parse(req, function (err, fields, files) {
console.log("came in cloud0" + JSON.stringify(files));
filesdata = files;
});
console.log("came in cloud2");
form.on('end', function (fields, files) {
var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
var path = this.openedFiles[0].path;
console.log("came in cloud3 :" + JSON.stringify(path));
fs.createReadStream(path)
.pipe(Upload.upload({ bucket: '******', file: path, metadata: { contentType: this.openedFiles[0].type } }))
.on('finish', function (response) {
console.log("Successfully Uploaded Video :" + JSON.stringify(response));
res.send({ "status": false, "videolink": "https://****/****/" + filesdata.file.name });
});
});
//res.send({ "status": false, "err": null });
}
At the start it was at least uploading to the server folder, and in Chrome developer tools it used to give a response: {readyState: 4, . . . }
Now, after I made some changes, it doesn't even hit my API; after a few seconds Chrome developer tools shows a 404 () / 502 () error.
Well, I got the solution. Previously I was using the gcs-resumable-upload module to upload, but now I tried the '@google-cloud/storage' module, with which I was able to upload up to 9 MB.
const Storage = require('@google-cloud/storage');
var db = req.db;
console.log("came in cloudupload");
var form = new formidable.IncomingForm();
var filesdata;
form.keepExtensions = true;
form.multiples = false;
form.parse(req, function (err, fields, files) {
filesdata = files;
});
form.on('end', function (fields, files) {
var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
var path = this.openedFiles[0].path;
const storage = new Storage({
keyFilename: 'gcloudcred.json'
});
const myBucket = storage.bucket('onfvideo');
myBucket.upload(path).then((resp) => {
console.log('uploaded to' + resp);
res.send({ "status": true, "err": null });
}).catch(err => {
console.error('ERROR:', err);
res.send({ "status": false, "err": null });
});
});
};
The 9 MB limitation I was facing was due to the .NET Framework data-transfer limit, which I was able to resolve using the following (note that maxRequestLength is specified in kilobytes):
<system.web>
<customErrors mode="Off"/>
<httpRuntime targetFramework="4.5" maxRequestLength="7483648" />
</system.web>
Method 2: Using XHR to call the REST API
1. Generate an access token using the google-auto-auth module.
2. Use XMLHttpRequest:
var fileInput = $("#Videofile").prop("files")[0];
var url = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o?uploadType=media&name=" + fileInput.name;
var http = new XMLHttpRequest();
http.open('POST', url, true);
http.setRequestHeader('Content-type', 'video/mp4');
http.setRequestHeader("Authorization", "Bearer " + token);
http.send(fileInput);
http.onprogress = function (ev) {
if (ev.lengthComputable) {
var percentage = Math.round((ev.loaded / ev.total) * 100);
console.log("percent " + percentage + '%');
}else {
console.log("Unable to compute progress information since the total size is unknown");
}
}
http.onloadstart = function (ev) {console.log("start")}
http.onloadend = function (ev) {}
http.onreadystatechange = function () {
if (http.readyState == 4 && http.status == 200) {
var response = JSON.parse(http.responseText);
alert("Successfully Uploaded Video");
}
}
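A side note on Method 2 (not part of the original answer): uploadType=media is the JSON API's simple single-request upload. For larger videos the same API also supports resumable uploads, where you first request an upload session and then PUT the bytes to the returned session URI. A rough sketch using the same bucketname, token and fileInput placeholders as above:
// 1. Initiate a resumable session; GCS returns the session URI in the Location header.
var initUrl = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o" +
    "?uploadType=resumable&name=" + fileInput.name;
var init = new XMLHttpRequest();
init.open('POST', initUrl, true);
init.setRequestHeader('Authorization', 'Bearer ' + token);
init.setRequestHeader('X-Upload-Content-Type', 'video/mp4');
init.onload = function () {
    if (init.status == 200) {
        // 2. PUT the file bytes to the session URI. (Reading Location cross-origin
        // may require the bucket's CORS config to expose that header.)
        var sessionUri = init.getResponseHeader('Location');
        var put = new XMLHttpRequest();
        put.open('PUT', sessionUri, true);
        put.onload = function () { console.log("resumable upload finished: " + put.status); };
        put.send(fileInput);
    }
};
init.send();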