I am creating an application that takes file uploads and sends them straight up to S3. I would prefer not to even have a tmp file on my server, so I am using the Knox module and would like to take the raw stream from Formidable and send it over Knox to S3. I have done something similar using Knox to download a file with this code:
knox.downloads.get(widget.download).on('response',function(sres){
res.writeHead(200, {
'Content-Type':'application/zip',
'Content-Length': sres.headers['content-length'],
'Content-Disposition':'attachment; filename=' + widget.download
});
util.pump(sres, res);
}).end();
Now I would like to do something similar in the opposite direction (a file upload from the browser to S3).
So far I have written an event handler to capture each piece of data from the file as it's being uploaded:
var form = new formidable.IncomingForm();
form.onPart = function(part){
if(!part.filename){
form.handlePart(part);
}else{
if(part.name == 'download'){
// Upload to download bucket
controller.putDownload(part);
}else{
// Upload to the image bucket
controller.putImage(part);
}
//res.send(sys.inspect(part));
}
}
form.parse(req, function(err, fields, files){
if(err){
res.json(err);
}else{
res.send(sys.inspect({fields:fields, files:files}), {'content-type':'text/plain'});
//controller.createWidget(res,fields,files);
}
});
controller.putDownload = function(part){
part.addListener('data', function(buffer){
knox.download.putStream(data,part.filename, function(err,s3res){
if(err)throwError(err);
else{
console.log(s3res);
}
});
})
knox.downloads.putStream(part, part.filename, function(err,s3res){
if(err)throwError(err);
else{
console.log(s3res);
}
});
}
But the data event only gives me a buffer. So is it possible to capture the stream itself and push it to S3?
What you want to do is override the Form.onPart method:
IncomingForm.prototype.onPart = function(part) {
// this method can be overwritten by the user
this.handlePart(part);
};
Formidable's default behavior is to write the part to a file. You don't want that. You want to handle the 'part' events to write to the knox download. Start with this:
form.onPart = function(part) {
if (!part.filename) {
// let formidable handle all non-file parts
form.handlePart(part);
return;
}
Then open the knox request and handle the raw part events yourself (req in the snippet below is the Knox PUT request you opened, not the Express request):
part.on('data', function(data) {
req.write(data);
});
part.on('end', function() {
req.end();
});
part.on('error', function(err) {
// handle this too
});
As a bonus, if req.write(data) returns false, that means the send buffer is full and you should pause the Formidable parser. When you get a drain event from the Knox request you should resume Formidable.
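A minimal sketch of how those pieces could fit together, assuming a Knox client for the downloads bucket and that the part's size is known up front (S3 needs a Content-Length for a plain PUT); knoxClient, knoxReq and fileSize are hypothetical names, and knoxReq is what the snippet above simply calls req:
var knoxReq = knoxClient.put(part.filename, {
    'Content-Length': fileSize,               // S3 requires the length for a plain PUT
    'Content-Type': 'application/octet-stream'
});
part.on('data', function(data) {
    if (knoxReq.write(data) === false) {
        form.pause();                         // Knox send buffer is full, stop parsing
    }
});
knoxReq.on('drain', function() {
    form.resume();                            // buffer flushed, keep parsing
});
part.on('end', function() {
    knoxReq.end();
});
knoxReq.on('response', function(s3res) {
    console.log('S3 responded with status ' + s3res.statusCode);
});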
Use multiparty instead. It supports the kind of streaming you want. It even has an example of streaming directly to S3: https://github.com/superjoe30/node-multiparty/blob/master/examples/s3.js
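The gist of that example, condensed into a rough sketch (the credentials, bucket, content type and the surrounding request handler with req in scope are assumptions):
var multiparty = require('multiparty');
var knox = require('knox');

var client = knox.createClient({ key: 'KEY', secret: 'SECRET', bucket: 'BUCKET' });

var form = new multiparty.Form();
form.on('part', function(part) {
    if (!part.filename) { part.resume(); return; }     // drain non-file fields
    var headers = {
        'Content-Length': part.byteCount,               // multiparty knows the part size up front
        'Content-Type': 'application/octet-stream'
    };
    client.putStream(part, '/' + part.filename, headers, function(err, s3res) {
        if (err) return console.error(err);
        console.log('Uploaded with status ' + s3res.statusCode);
    });
});
form.parse(req);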
In an Express middleware, I use formidable together with PassThrough to stream-upload a file to S3 (in my case to Minio, which is S3 compatible, through the Minio SDK; I believe it works for AWS S3 too with the same SDK).
Here is the sample code.
const formidable = require('formidable')
const { PassThrough } = require('stream')
const form = new formidable.IncomingForm()
const pass = new PassThrough()
const fileMeta = {}
form.onPart = part => {
if (!part.filename) {
form.handlePart(part)
return
}
fileMeta.name = part.filename
fileMeta.type = part.mime
part.on('data', function (buffer) {
pass.write(buffer)
})
part.on('end', function () {
pass.end()
})
}
form.parse(req, err => {
if (err) {
req.minio = { error: err }
next()
} else {
handlePostStream(req, next, fileMeta, pass)
}
})
And handlePostStream looks like below, for your reference:
const uuidv1 = require('uuid/v1')
const handlePostStream = async (req, next, fileMeta, fileStream) => {
let filename = uuidv1()
try {
const metaData = {
'content-type': fileMeta.type,
'file-name': Buffer.from(fileMeta.name).toString('base64')
}
const minioClient = /* Get Minio Client*/
await minioClient.putObject(MINIO_BUCKET, filename, fileStream, metaData)
req.minio = { post: { filename: `${filename}` } }
} catch (error) {
req.minio = { error }
}
next()
}
You can find the source code on GitHub, and its unit tests too.
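As a usage sketch (the middleware name and route below are assumptions, not part of the original code), the middleware could be mounted like this:
// Hypothetical wiring: assume the formidable/PassThrough code above is wrapped
// in an Express middleware function named uploadToMinio(req, res, next)
const express = require('express')
const app = express()

app.post('/upload', uploadToMinio, (req, res) => {
  if (req.minio.error) {
    return res.status(500).json({ error: req.minio.error.message })
  }
  res.status(200).json({ filename: req.minio.post.filename })
})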
There is no way for you to capture the stream, because the data has to be translated by Formidable. The buffer you're given is the file contents in chunks of buffer.length. This might be a problem: looking at Formidable's docs, it appears that until the file is completely uploaded it can't reliably report the file size, and Knox's put method might need that.
Never used Knox this way before, but you might have some luck with something like this:
controller.putDownload = function(part){
    var req = knox.download.put(part.filename, {
        'Content-Type': 'text/plain'
    });
    part.addListener('data', function(buffer){
        req.write(buffer);
    });
    part.addListener('end', function(){
        req.end();  // finish the S3 request only after the last chunk has arrived
    });
    req.on('response', function(res){
        // error checking
    });
}
A little unsure about the response checking bits, but see if you can whip that into shape. Also, "Streaming an octet stream from request to S3 with knox on node.js" has a writeup that may be useful to you.
I'm trying to upload a large (8.3GB) video to my Node.js (Express) server by chunking using busboy. How do I receive each chunk (busboy is doing this part) and piece it together as one whole video?
I have been looking into readable and writable streams, but I'm never getting the whole video. I keep overwriting parts of it, ending up with only about 1 GB.
Here's my code:
req.busboy.on('file', (fieldname, file, filename) => {
logger.info(`Upload of '${filename}' started`);
const video = fs.createReadStream(path.join(`${process.cwd()}/uploads`, filename));
const fstream = fs.createWriteStream(path.join(`${process.cwd()}/uploads`, filename));
if (video) {
video.pipe(fstream);
}
file.pipe(fstream);
fstream.on('close', () => {
logger.info(`Upload of '${filename}' finished`);
res.status(200).send(`Upload of '${filename}' finished`);
});
});
After 12+ hours, I figured it out using pieces from this article that was given to me. I came up with this code:
//busboy is middleware on my index.js
const fs = require('fs-extra');
const path = require('path');
const streamToBuffer = require('fast-stream-to-buffer');
//API function called first
uploadVideoChunks(req, res) {
req.pipe(req.busboy);
req.busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const fileNameBase = filename.replace(/\.[^/.]+$/, '');
//save all the chunks to a temp folder with .tmp extensions
streamToBuffer(file, function (error, buffer) {
const chunkDir = `${process.cwd()}/uploads/${fileNameBase}`;
fs.outputFileSync(path.join(chunkDir, `${Date.now()}-${fileNameBase}.tmp`), buffer);
});
});
req.busboy.on('finish', () => {
res.status(200).send(`Finished uploading chunk`);
});
}
//API function called once all chunks are uploaded
saveToFile(req, res) {
const { filename, profileId, movieId } = req.body;
const uploadDir = `${process.cwd()}/uploads`;
const fileNameBase = filename.replace(/\.[^/.]+$/, '');
const chunkDir = `${uploadDir}/${fileNameBase}`;
let outputFile = fs.createWriteStream(path.join(uploadDir, filename));
fs.readdir(chunkDir, function(error, filenames) {
if (error) {
throw new Error('Cannot get upload chunks!');
}
//loop through the temp dir and write to the stream to create a new file
filenames.forEach(function(tempName) {
const data = fs.readFileSync(`${chunkDir}/${tempName}`);
outputFile.write(data);
//delete the chunk we just handled
fs.removeSync(`${chunkDir}/${tempName}`);
});
outputFile.end();
});
outputFile.on('finish', function () {
//delete the temp folder once the file is written
fs.removeSync(chunkDir);
});
}
Use streams.
multer lets you easily handle file uploads as part of an Express route. This works great for small files that don't leave a significant memory footprint, but the problem with loading a large file into memory is that you can actually run out of memory and cause your application to crash.
Use a multipart/form-data request and stream it: this can be handled by assigning a readStream to that field in your request options instead of a buffered body.
Streams are extremely valuable for optimizing performance; a sketch of that idea applied here follows below.
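As a hedged sketch applied to the original problem (the route, paths and chunk-ordering guarantees are assumptions; it presumes the client sends chunks sequentially), opening the write stream with the 'a' flag appends each chunk instead of overwriting the file:
const fs = require('fs');
const path = require('path');

req.busboy.on('file', (fieldname, file, filename) => {
  // 'a' opens the target in append mode, so each uploaded chunk is added
  // to the end instead of truncating what is already there
  const target = path.join(process.cwd(), 'uploads', filename);
  const fstream = fs.createWriteStream(target, { flags: 'a' });
  file.pipe(fstream);
  fstream.on('close', () => {
    res.status(200).send(`Chunk of '${filename}' appended`);
  });
});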
Try this code sample; I think it will work for you.
busboy.on("file", function(fieldName, file, filename, encoding, mimetype){
const writeStream = fs.createWriteStream(writePath);
file.pipe(writeStream);
file.on("data", data => {
totalSize += data.length;
cb(totalSize);
});
file.on("end", () => {
console.log("File "+ fieldName +" finished");
});
});
You can also refer to this link to resolve the problem:
https://github.com/mscdex/busboy/issues/143
I think multer is good for this; did you try multer?
We are trying to create a web service to upload files to Azure File storage using a Node.js service.
Below is the Node.js server code.
exports.post = function(request, response){
var shareName = request.headers.sharename;
var dirPath = request.headers.directorypath;
var fileName = request.headers.filename;
var body;
var length;
request.on("data", function(chunk){
body += chunk;
console.log("Get data");
});
request.on("end", function(){
try{
console.log("end");
var data = body;
length = data.length;
console.log(body); // This giving the result as undefined
console.log(length);
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
if (!error) {
// file uploaded
response.send(statusCodes.OK, "File Uploaded");
}else{
response.send(statusCodes.OK, "Error!");
}
});
}catch (er) {
response.statusCode = 400;
return res.end('error: ' + er.message);
}
});
}
Below is our client to upload a file.
private static void sendPOST() throws IOException {
URL obj = new URL("https://crowdtest-fileservice.azure-mobile.net/api/files_stage/");
HttpURLConnection con = (HttpURLConnection) obj.openConnection();
con.setRequestMethod("POST");
con.setRequestProperty("sharename", "newamactashare");
con.setRequestProperty("directorypath", "MaheshApp/TestLibrary/");
con.setRequestProperty("filename", "temp.txt");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
// For POST only - START
con.setDoOutput(true);
OutputStream os = con.getOutputStream();
os.write(data);
os.flush();
os.close();
// For POST only - END
int responseCode = con.getResponseCode();
System.out.println("POST Response Code :: " + responseCode);
if (responseCode == HttpURLConnection.HTTP_OK) { // success
BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
String inputLine;
StringBuffer response = new StringBuffer();
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
System.out.println(inputLine);
}
in.close();
// print result
System.out.println(response.toString());
} else {
BufferedReader br = new BufferedReader(new InputStreamReader(con.getErrorStream()));
String line = "";
while ((line = br.readLine()) != null) {
System.out.println(line);
}
System.out.println("POST request not worked");
}
}
It is showing the error
The request 'POST /api/files_stage/' has timed out. This could be
caused by a script that fails to write to the response, or otherwise
fails to return from an asynchronous call in a timely manner.
Updated:
I have also tried the code below.
var body = new Object();
body = request.body;
var length = body.length;
console.log(request.body);
console.log(body);
console.log(length);
try {
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
if (!error) {
// file uploaded
response.send(statusCodes.OK, "File Uploaded");
}else{
response.send(statusCodes.OK, "Error!");
}
});
} catch (ex) {
response.send(500, { error: ex.message });
}
But I am facing this issue:
{"error":"Parameter stream for function createFileFromStream should be
an object"}
I am new to node.js. Please help me to fix this.
There are several issues here. Let us go over them one by one.
1. In your Java client you cannot just dump the binary data into an Azure mobile service connection.
The reason for this is that an Azure mobile service has two body parsers that ensure that no matter what, the request body is parsed for you.
So, while you can work around the Express body parser by specifying an uncommon content type, you will still hit the Azure body parser, which will mess up your data stream by naively assuming that it is a UTF-8 string.
The only option therefore is to skip the Express parser by specifying a content type it cannot handle and then play along with the Azure parser by encoding your binary data with Base64 encoding.
So, in the Java client replace
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
with
con.setRequestProperty("content-type", "binary");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
data = Base64.getEncoder().encode(data);
If you are not on Java 8, replace the java.util.Base64 encoder with any other Base64 encoder you have access to.
2. The createFileFromStream Azure Storage API function you are trying to use expects a stream.
At the same time, the best you can get when parsing a request body manually is a byte array. Unfortunately, Azure mobile services use NodeJS version 0.8, which means there is no easy way to construct a readable stream from a byte array, and you will have to assemble your own stream suitable for the Azure Storage API. Some duct tape and stream#0.0.1 should do just fine.
var base64 = require('base64-js'),
Stream = require('stream'),
fileService = require('azure-storage')
.createFileService('yourStorageAccount', 'yourStoragePassword');
exports.post = function (req, res) {
var data = base64.toByteArray(req.body),
buffer = new Buffer(data),
stream = new Stream();
stream['_ended'] = false;
stream['pause'] = function() {
stream['_paused'] = true;
};
stream['resume'] = function() {
if(stream['_paused'] && !stream['_ended']) {
stream.emit('data', buffer);
stream['_ended'] = true;
stream.emit('end');
}
};
try {
fileService.createFileFromStream(req.headers.sharename, req.headers.directorypath,
req.headers.filename, stream, data.length, function (error, result, resp) {
res.statusCode = error ? 500 : 200;
res.end();
}
);
} catch (e) {
res.statusCode = 500;
res.end();
}
};
These are the dependencies you need for this sample.
"dependencies": {
"azure-storage": "^0.7.0",
"base64-js": "^0.0.8",
"stream": "0.0.1"
}
If specifying them in your service's package.json does not work you can always go to this link and install them manually via the console.
cd site\wwwroot
npm install azure-storage
npm install base64-js
npm install stream#0.0.1
3. To increase the default upload limit of 1Mb, specify MS_MaxRequestBodySizeKB for your service.
Do keep in mind though that since you are transferring your data Base64-encoded, you have to account for this overhead. So, to support uploading files up to 20Mb in size, you have to set MS_MaxRequestBodySizeKB to roughly 20 * 1024 * 4 / 3 = 27307.
I find the easiest way is to use pkgcloud which abstracts the differences between cloud providers and also provides a clean interface for uploading and downloading files. It uses streams so the implementation is memory efficient as well.
var pkgcloud = require('pkgcloud')
var fs = require('fs')
var client = pkgcloud.storage.createClient({
provider: 'azure',
storageAccount: 'your-storage-account',
storageAccessKey: 'your-access-key'
});
var readStream = fs.createReadStream('a-file.txt');
var writeStream = client.upload({
container: 'your-storage-container',
remote: 'remote-file-name.txt'
});
writeStream.on('error', function (err) {
// handle your error case
});
writeStream.on('success', function (file) {
// success, file will be a File model
});
readStream.pipe(writeStream);
We can leverage this answer from the SO thread "How to send an image from Android client to Node.js server via HttpUrlConnection?", which creates a custom middleware to read the uploaded file content into a buffer, and then use createFileFromText() to store the file in Azure Storage.
Here is the code snippet:
function rawBody(req, res, next) {
var chunks = [];
req.on('data', function (chunk) {
chunks.push(chunk);
});
req.on('end', function () {
var buffer = Buffer.concat(chunks);
req.bodyLength = buffer.length;
req.rawBody = buffer;
next();
});
req.on('error', function (err) {
console.log(err);
res.status(500);
});
}
router.post('/upload', rawBody,function (req, res){
fileService.createShareIfNotExists('taskshare', function (error, result, response) {
if (!error) {
// if result = true, share was created.
// if result = false, share already existed.
fileService.createDirectoryIfNotExists('taskshare', 'taskdirectory', function (error, result, response) {
if (!error) {
// if result = true, directory was created.
// if result = false, directory already existed.
try {
fileService.createFileFromText('taskshare', 'taskdirectory', 'test.txt', req.rawBody, function (error, result, resp) {
if (!error) {
// file uploaded
res.send(200, "File Uploaded");
} else {
res.send(200, "Error!");
}
});
} catch (ex) {
res.send(500, { error: ex.message });
}
}
});
}
});
})
router.get('/getfile', function (req, res){
fileService.createReadStream('taskshare', 'taskdirectory', 'test.txt').pipe(res);
})
When the request arrives at the function defined in exports.post, the whole request is already there, so you don't need to buffer it. You can simplify it by writing something along the lines of the code below.
exports.post = function(request, response){
var shareName = request.headers.sharename;
var dirPath = request.headers.directorypath;
var fileName = request.headers.filename;
var body = request.body;
var length = body.length;
console.log(length);
try {
fileService.createFileFromText(shareName, dirPath, fileName, body, function(error, result, resp) {
if (!error) {
// file uploaded
response.send(statusCodes.OK, "File Uploaded");
} else {
response.send(statusCodes.OK, "Error!");
}
});
} catch (ex) {
response.send(500, { error: ex.message });
}
}
There are several things:
1. createFileFromText can work with plain text, but it will fail for binary content, as it uses UTF-8 encoding.
You might want to refer to the similar issue for blob at: Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image
2. The createFileFromStream or createWriteStreamToExistingFile / createWriteStreamToNewFile Azure Storage APIs may be the functions that can help.
Please note that these APIs expect streams. You need to convert the buffer/string in the request body to a stream. You can refer to "How to wrap a buffer as a stream2 Readable stream?"
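One hedged way to do that conversion, assuming a Node version with streams2 and that the raw body buffer was collected by a middleware like the rawBody one shown earlier, is to push the buffer through a PassThrough stream; the requestStream and data variables used below refer to this:
var PassThrough = require('stream').PassThrough;

var data = req.rawBody;                  // the Buffer collected by the rawBody middleware
var requestStream = new PassThrough();
requestStream.end(data);                 // write the whole buffer and close the stream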
For createFileFromStream :
fileService.createFileFromStream(req.headers.sharename,
req.headers.directorypath,
req.headers.filename,
requestStream,
data.length,
function (error, result, resp) {
res.statusCode = error ? 500 : 200;
res.end();
}
);
For createWriteStreamToNewFile :
var writeStream = fileService.createWriteStreamToNewFile(req.headers.sharename,
req.headers.directorypath,
req.headers.filename,
data.length);
requestStream.pipe(writeStream);
3. There are several issues in your code
console.log(body); // This giving the result as undefined
The reason is that you declare var body but never initialize it, so it starts out undefined; body += chunk then concatenates onto the string "undefined" and converts the binary chunks to text, corrupting the data.
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
if (!error) {
// file uploaded
response.send(statusCodes.OK, "File Uploaded");
}else{
response.send(statusCodes.OK, "Error!");
}
});
When an error happens in createFileFromStream (it could also be an error in the network transfer), you might also want to return an error status code instead of statusCodes.OK.
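A small sketch of that, reusing the 500 status already used elsewhere in this thread (requestStream and data are assumed to be the stream and buffer prepared as above):
fileService.createFileFromStream(shareName, dirPath, fileName, requestStream, data.length, function(error, result, resp) {
    if (error) {
        console.error(error);                       // log the storage or network error
        return response.send(500, { error: error.message });
    }
    response.send(statusCodes.OK, "File Uploaded");
});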
I'm trying to implement a file upload in Node.js 4.x using Express with busboy.
I'm already able to upload files and store them in an Azure Blob Storage.
Now I'd like to verify the file type before storing it to Azure, and reject any file which is not valid.
I'd like to do the validation using magic numbers. I found
const fileType = require('file-type'); which determines the file type for me.
Now I'm trying to get this to work as efficiently as possible, but here is where I'm struggling:
I want to directly pipe the file stream to azure. But before that, I need to read the first 5 bytes from the stream into a buffer which is processed by file-type.
Reading from the stream and then piping to Azure surely does not work. After some research I found a solution: piping the file into 2 PassThrough streams. But now I'm struggling to handle those 2 streams correctly.
const fileType = require('file-type');
const pass = require('stream').PassThrough;
//...
req.busboy.on('file', function (fieldname, file, filename) {
console.log("Uploading: " + filename);
var b = new pass;
var c = new pass;
file.pipe(b);
file.pipe(c);
var type = null;
b.on('readable', function() {
b.pause();
if(type === null) {
var chunk = b.read(5);
type = fileType(chunk) || false;
b.end();
}
});
b.on('finish', function() {
if(type && ['jpg', 'png', 'gif'].indexOf(type.ext) !== -1) {
var blobStream = blobSvc.createWriteStreamToBlockBlob(storageName,
blobName,
function (error) {
if (error) console.log('blob upload error', error);
else console.log('blob upload complete')
});
c.pipe(blobStream);
}
else {
console.error("Rejected file of type " + type);
}
});
});
This solution sometimes works - and sometimes there is some "write after end" error.
Also, I think the streams are not properly closed, because normally, after a request, express logs something like this on console:
POST /path - - ms - -
But this log message now comes like 30s-60s after "blob upload complete", probably due to some timeout.
Any idea how to fix this?
You don't need to add additional streams into the mix. Just unshift() the consumed portion back onto the stream. For example:
const fileType = require('file-type');
req.busboy.on('file', function (fieldname, file, filename) {
function readFirstBytes() {
var chunk = file.read(5);
if (!chunk)
return file.once('readable', readFirstBytes);
var type = fileType(chunk);
if (type.ext === 'jpg' || type.ext === 'png' || type.ext === 'gif') {
const blobStream = blobSvc.createWriteStreamToBlockBlob(
storageName,
blobName,
function (error) {
if (error)
console.log('blob upload error', error);
else
console.log('blob upload complete');
}
);
file.unshift(chunk);
file.pipe(blobStream);
} else {
console.error('Rejected file of type ' + type);
file.resume(); // Drain file stream to continue processing form
}
}
readFirstBytes();
});
I am working on an application that sends an audio file to a server powered by sails.js.
I need to convert this audio file on-the-fly and send the converted data to amazon S3 using node streams.
I don't want to store data on the server but directly stream the upload file to S3 after it has been converted.
Do you know a way to do that?
I tried using formidable, but I couldn't get it working. Did anyone succeed in implementing such a thing?
Thanks
EDIT
As jibsales noticed, it will probably be better if I show you a piece of what I've tried so far. So basically my strategy is to use formidable, fluent-ffmpeg and knox with streams.
I plan to receive the file stream with formidable and write chunks of received data in a first stream (stream 1) that will be the entry point for the conversion with fluent-ffmpeg. Then fluent-ffmpeg writes the output stream into stream2 which is the entry point of Knox.
The first problem I have to face is the fact that formidable doesn't seem to work. However, I'm not sure my strategy is good...
The code so far looks like this:
upload : function(req,res){
//two streams to transfer file data
var stream1 = new stream.Stream(); //stream for the incoming file data
var stream2 = new stream.Stream(); //stream for the converted file data
var client = knox.createClient({
key: 'APIKEY'
, secret: 'SECRET'
, bucket: 'bucket'
});
//Using formidable to acces data chunks
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files){ //form.parse is not called
if(err){
return res.json(err);
}else{
return res.send('ok');
}
});
//overriding form.onPart to get the file data chunk
form.onPart = function(part) {
sails.log('getting part...');
if (!part.filename) {
form.handlePart(part);
return;
}
//we put the data chunk in stream1 to convert it
part.on('data', function(chunk) {
stream1.write(chunk[1]);
});
}
form.on('error',function(err){
return sails.log(err);
});
form.on('progress', function(bytesReceived, bytesExpected) {
sails.log(bytesReceived);
});
//conversion process
var proc = new ffmpeg({ source : stream1})
.withAudioCodec('libfdk_aac')
.toFormat('mp3')
.writeToStream(stream2, {end:true}, function(retcode, error){
console.log('file has been converted succesfully');
});
client.putStream(stream2, '/file.mp3', headers, function(err, response){
return res.send(response);
});
},
The reason formidable isn't working is that the default Sails body parser is parsing the request before formidable can get to it. In order to get this to work, you'll have to bypass the Sails body parser for multi-part form uploads. So, in config/express.js:
var express = require('sails/node_modules/express');
module.exports.express = {
bodyParser: function(options) {
return function (req, res, next) {
if (!req.headers['content-type'] || req.headers['content-type'].indexOf('multipart/form-data') === -1) {
return express.bodyParser()(req, res, next);
} else {
return next();
}
}
}
}
This just skips the body parser entirely if the content-type header includes multipart/form-data. Otherwise, it executes the default express body parser. Note that the default Sails body parser is slightly fancier than what comes with Express (if it can't parse the request, it fakes an application/json header and retries), so if you want the extra goodness you'll have to copy/paste the code from the core into the bodyParser function above. But in most cases you won't miss it ;-)
We're working on a better file parser for Sails that will hopefully take care of some of this for you, but in the mean time this is your best bet!
I've figured out a way to convert files on-the-fly with fluent-ffmpeg and formidable. However, it seems impossible for the moment to stream the converted chunks coming out of the ffmpeg conversion directly to Amazon, as you must specify the "Content-Length" header, which is unknown during the conversion...
For the first part (client upload) I first had to disable express bodyParser on the upload route in a config/express.js file :
var express = require('sails/node_modules/express');
module.exports.express = {
bodyParser: function() {
return function (req, res, next){
console.log(req.path);
if (!(req.path === '/upload' && req.method === 'POST')) {
return express.bodyParser()(req, res, next);
} else {
return next();
}
}
}
}
For the implementation I used a transform stream that does basically nothing. It just gets the right parts of the uploaded data (the ones related to file data) and links the formidable parser to fluent-ffmpeg. For now I can only save the converted file to disk before sending it to Amazon...
upload : function(req,res){
var Transform = Stream.Transform; //stream for the incoming file data
var client = knox.createClient({
key: 'KEY'
, secret: 'SECRET'
, bucket: 'BUCKET',
region : 'eu-west-1' //don't forget the region (My bucket is in Europe)
});
function InputStream(options)
{
if(!(this instanceof InputStream))
{
return new InputStream(options);
}
Transform.call(this,options);
return;
};
util.inherits(InputStream, Transform);
var inputDataStream = new InputStream;
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files)
{
if(err){
return res.send(err);
}else{
return;
}
});
form.onPart = function(part)
{
if (!part.filename)
{
form.handlePart(part);
return;
}
//we put the data chunk in stream1 to convert it
part.on('data', function (chunk)
{
if(!inputDataStream.write(chunk)){
    form.pause();
    inputDataStream.once('drain', function(){ form.resume(); });
}
});
part.on('end', function (chunk){
inputDataStream.end(chunk);
});
}
InputStream.prototype._transform = function (chunk, enc, cb)
{
this.push(chunk);
cb();
}
var proc = new ffmpeg({ source : inputDataStream})
.withAudioBitrate('64k')
.withAudioCodec('libmp3lame')
.toFormat('mp3')
.saveToFile('file.mp3', function (retcode, error){
console.log('file has been converted successfully');
res.send('ok');
var upload = new MultiPartUpload(
{
client : client,
objectName: 'file.mp3',
file: 'file.mp3'
}, function(err,body){
if(err) {
console.log(err);
return;
}
console.log(body);
return;
});
});
},
EDIT
Using knox-mpu you can actually stream data to Amazon S3 directly! You just have to create another transform stream that will be the source of your upload, and knox-mpu does the magic. Thank you all!
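A rough sketch of that variant, assuming knox-mpu's stream option and reusing the InputStream Transform, the knox client and the formidable wiring from the code above (the second stream and its name are assumptions, not the original code):
var MultiPartUpload = require('knox-mpu');

var outputDataStream = new InputStream();          // a second no-op Transform, same as above

new ffmpeg({ source : inputDataStream })
    .withAudioBitrate('64k')
    .withAudioCodec('libmp3lame')
    .toFormat('mp3')
    .writeToStream(outputDataStream, { end : true }, function(retcode, error){
        console.log('conversion finished');
    });

var upload = new MultiPartUpload({
    client     : client,
    objectName : 'file.mp3',
    stream     : outputDataStream                   // stream source instead of a temp file on disk
}, function(err, body){
    if(err){
        console.log(err);
        return;
    }
    console.log(body);
    res.send('ok');
});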
I've tried to save an image to a specified directory with Node.js using Express.js and socket.io, but it doesn't work.
On the client-side:
var reader = new FileReader();
function drop(e) {
e.stopPropagation();
e.preventDefault();
var dt = e.dataTransfer;
var files = dt.files;
jQuery.each(files, function(){
reader.onload = function(e) {
socket.emit('sendfile', e.target.result);
};
});
return false;
}
The image should be uploaded by a drag and drop function.
Then on the server-side:
io.sockets.on('connection', function (socket) {
[...]
socket.on('sendfile', function (data) {
var fs = require('fs');
app.use(express.bodyParser({ keepExtensions: true, uploadDir: '/uploaded' }));
io.sockets.emit('updatechat', socket.username, data); //test
});
I have also tried
socket.on('sendfile', function (data) {
var fs = require('fs');
fs.writeFile('/uploaded/test.png', data, "binary" , function (err) {
if (err) throw err;
console.log('It\'s saved!');
});
io.sockets.emit('updatechat', socket.username, data); //data test
});
but it doesn't save anything.
The "data test" shows me, that the data are already were arrived on the server, so I don't think, that the problem comes from the client-side, but on the server-side I have no idea what I doing wrong
I made a simple example to illustrate the usage of file upload via socket!
The steps are as follows:
Create the send-file socket.io event to receive the file in app.js. The received file is binary;
In the jade/HTML page, put a file input and a button to send it. NOTE: you don't have to use a multipart POST here; we are sending the file over the socket, not as an HTTP request/response;
Initialize HTML5 File API support and prepare the listeners watching your file input component;
The remaining routines read the file and send its content forward.
Now first step (app.js):
var io = require('socket.io').listen(8000, {log: false});
io.sockets.on('connection', function(socket) {
socket.on('send-file', function(name, buffer) {
var fs = require('fs');
//path to store uploaded files (NOTE: presumed you have created the folders)
var fileName = __dirname + '/tmp/uploads/' + name;
fs.open(fileName, 'a', 0755, function(err, fd) {
if (err) throw err;
fs.write(fd, buffer, null, 'Binary', function(err, written, buff) {
fs.close(fd, function() {
console.log('File saved successful!');
});
})
});
});
});
Second step (in my case I've used jade rather html)
extends layout
block content
h1 Tiny Uploader
p Save an Image to the Server
input#input-files(type='file', name='files[]', data-url='/upload', multiple)
button#send-file(onclick='javascript:sendFile();') Send
script(src='http://127.0.0.1:8000/socket.io/socket.io.js')
script(src='/javascripts/uploader.js')
Third and Fourth steps (coding uploader.js to send the file to server)
//variable declaration
var filesUpload = null;
var file = null;
var socket = io.connect('http://localhost:8000');
var send = false;
if (window.File && window.FileReader && window.FileList) {
//HTML5 File API ready
init();
} else {
//browser has no support for HTML5 File API
//send a error message or something like that
//TODO
}
/**
* Initialize the listeners and send the file if there is one.
*/
function init() {
filesUpload = document.getElementById('input-files');
filesUpload.addEventListener('change', fileHandler, false);
}
/**
* Handle the file change event to send its content.
* @param e
*/
function fileHandler(e) {
var files = e.target.files || e.dataTransfer.files;
if (files) {
//send only the first one
file = files[0];
}
}
function sendFile() {
if (file) {
//read the file content and prepare to send it
var reader = new FileReader();
reader.onload = function(e) {
console.log('Sending file...');
//get all content
var buffer = e.target.result;
//send the content via socket
socket.emit('send-file', file.name, buffer);
};
reader.readAsBinaryString(file);
}
}
Some important considerations:
This is a tiny sample of a socket file uploader. I don't cover some important things here: sending the file in chunks instead of all the content in one go, as sketched below; updating the status of the sent file (error message, success message, progress bar or percentage, etc.). So this is a sample of the initial steps for coding your own file uploader. In this case we don't need a form to send files; it is a completely asynchronous transaction via socket.io.
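For completeness, a minimal client-side sketch of the chunked variant (the chunk size, event names and server-side handling are assumptions, not part of the sample above):
// Hypothetical: slice the File and emit it piece by piece over the socket
function sendFileInChunks() {
    var CHUNK_SIZE = 64 * 1024;
    var offset = 0;
    function readNext() {
        if (offset >= file.size) {
            socket.emit('send-file-done', file.name);   // tell the server we are finished
            return;
        }
        var slice = file.slice(offset, offset + CHUNK_SIZE);
        var reader = new FileReader();
        reader.onload = function (e) {
            socket.emit('send-file-chunk', file.name, e.target.result);
            offset += CHUNK_SIZE;
            readNext();                                 // read the next slice
        };
        reader.readAsBinaryString(slice);
    }
    readNext();
}
On the server you would then append each chunk to the file (for example with fs.appendFile) instead of writing the whole content in one go.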
I hope this post is helpful.
This tutorial goes a little bit further because you can pause/resume your upload, but you will also find how to upload a file through socket.io :)