Streaming a file from server to client with socket.io-stream - node.js

I've managed to upload files in chunks from a client to a server, but now I want to achieve the opposite. Unfortunately, the documentation on the official module page lacks this part.
I want to do the following:
emit a stream and 'download'-event with the filename to the server
the server should create a readstream and pipe it to the stream emitted from the client
when the stream reaches the client, a download popup should appear and ask where to save the file
The reason I don't want to use simple file hyperlinks is obfuscation: the files on the server are encrypted and renamed, so I have to decrypt and rename them for each download request.
Any code snippets around to get me started with this?
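For the decrypt-and-rename requirement, here is a minimal sketch of what the server side could look like. The event name 'download', the cipher parameters (key, iv), and the storagePathFor() helper are assumptions for illustration, not part of the module's API:
var fs = require('fs');
var crypto = require('crypto');
var ss = require('socket.io-stream');

// Inside your io.on('connection', ...) handler:
ss(socket).on('download', function (stream, filename) {
    // storagePathFor() is a hypothetical helper that maps the requested
    // name to the encrypted file on disk; key and iv are placeholders.
    var decipher = crypto.createDecipheriv('aes-256-cbc', key, iv);
    fs.createReadStream(storagePathFor(filename))
        .pipe(decipher)
        .pipe(stream);
});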

This is a working example I'm using. But somehow (maybe only in my case) this can be very slow.
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    callback({
        name: "filename",
        size: 500
    });
    var MyFileStream = fs.createReadStream(name);
    MyFileStream.pipe(stream);
});
//== Client Side
/** Download a file from the object store
 * @param {string} name Name of the file to download
 * @param {string} originalFilename Overrules the file's originalFilename
 * @returns {$.Deferred}
 */
function downloadFile(name, originalFilename) {
    var deferred = $.Deferred();
    //== Create stream for file to be streamed to and buffer to save chunks
    var stream = ss.createStream(),
        fileBuffer = [],
        fileLength = 0;
    //== Emit/Request
    ss(mysocket).emit('filedownload', stream, name, function (fileError, fileInfo) {
        if (fileError) {
            deferred.reject(fileError);
        } else {
            console.log(['File Found!', fileInfo]);
            //== Receive data
            stream.on('data', function (chunk) {
                fileLength += chunk.length;
                var progress = Math.floor((fileLength / fileInfo.size) * 100);
                progress = Math.max(progress - 2, 1);
                deferred.notify(progress);
                fileBuffer.push(chunk);
            });
            stream.on('end', function () {
                var filedata = new Uint8Array(fileLength),
                    i = 0;
                //== Loop to fill the final array
                fileBuffer.forEach(function (buff) {
                    for (var j = 0; j < buff.length; j++) {
                        filedata[i] = buff[j];
                        i++;
                    }
                });
                deferred.notify(100);
                //== Download file in browser
                downloadFileFromBlob([filedata], originalFilename);
                deferred.resolve();
            });
        }
    });
    //== Return
    return deferred;
}

var downloadFileFromBlob = (function () {
    var a = document.createElement("a");
    document.body.appendChild(a);
    a.style = "display: none";
    return function (data, fileName) {
        var blob = new Blob(data, {
                type: "octet/stream"
            }),
            url = window.URL.createObjectURL(blob);
        a.href = url;
        a.download = fileName;
        a.click();
        window.URL.revokeObjectURL(url);
    };
}());
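For reference, a minimal usage sketch of downloadFile(); the file name and handlers here are hypothetical:
// Hypothetical usage: jQuery Deferred exposes progress/done/fail callbacks
downloadFile('abc123.enc', 'report.pdf')
    .progress(function (percent) {
        console.log('Download progress: ' + percent + '%');
    })
    .done(function () {
        console.log('Download complete, save dialog triggered.');
    })
    .fail(function (err) {
        console.error('Download failed:', err);
    });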

Answer: My dear friend Jeffrey van Norden, that's right, it worked for me. But there was a small bug (the callback didn't pass the error flag the client expects as its first argument), so I changed the server-side code this way:
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    try {
        let stats = fs.statSync(name);
        let size = stats.size;
        callback(false, {
            name: name,
            size: size
        });
        let MyFileStream = fs.createReadStream(name);
        MyFileStream.pipe(stream);
    } catch (e) {
        callback(true, {});
    }
});
The client-side code is unchanged from the answer above.

Related

Amazon Transcribe streaming with Node.js using websocket

I am working on a WhatsApp chatbot where I receive an audio file (ogg format) URL from WhatsApp; I fetch the buffer and upload the file to S3 (sample.ogg). Now I want to use AWS Transcribe Streaming, so I create a readStream of the file and send it to AWS Transcribe over a WebSocket, but I receive an empty response, or sometimes an "Mhm mm mm" response. Can anyone tell me what I am doing wrong in my code?
const express = require('express')
const app = express()
const fs = require('fs');
const crypto = require('crypto'); // to sign our pre-signed URL
const v4 = require('./aws-signature-v4'); // to generate our pre-signed URL
const marshaller = require("@aws-sdk/eventstream-marshaller"); // for converting binary event stream messages to and from JSON
const util_utf8_node = require("@aws-sdk/util-utf8-node");
var WebSocket = require('ws') // for opening a web socket

// our converter between binary event stream messages and JSON
const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);

// our global variables for managing state
let languageCode;
let region = 'ap-south-1';
let sampleRate;
let inputSampleRate;
let transcription = "";
let socket;
let micStream;
let socketError = false;
let transcribeException = false;
// let languageCode = 'en-us'

app.listen(8081, (error, data) => {
    if (!error) {
        console.log(`running at 8081----->>>>`)
    }
})

let handleEventStreamMessage = function (messageJson) {
    let results = messageJson.Transcript.Results;
    if (results.length > 0) {
        if (results[0].Alternatives.length > 0) {
            let transcript = results[0].Alternatives[0].Transcript;
            // fix encoding for accented characters
            transcript = decodeURIComponent(escape(transcript));
            console.log(`Transcript is----->>${transcript}`)
        }
    }
}
function downsampleBuffer(buffer, inputSampleRate = 44100, outputSampleRate = 16000) {
    if (outputSampleRate === inputSampleRate) {
        return buffer;
    }
    var sampleRateRatio = inputSampleRate / outputSampleRate;
    var newLength = Math.round(buffer.length / sampleRateRatio);
    var result = new Float32Array(newLength);
    var offsetResult = 0;
    var offsetBuffer = 0;
    while (offsetResult < result.length) {
        var nextOffsetBuffer = Math.round((offsetResult + 1) * sampleRateRatio);
        var accum = 0,
            count = 0;
        for (var i = offsetBuffer; i < nextOffsetBuffer && i < buffer.length; i++) {
            accum += buffer[i];
            count++;
        }
        result[offsetResult] = accum / count;
        offsetResult++;
        offsetBuffer = nextOffsetBuffer;
    }
    return result;
}
function pcmEncode(input) {
    var offset = 0;
    var buffer = new ArrayBuffer(input.length * 2);
    var view = new DataView(buffer);
    for (var i = 0; i < input.length; i++, offset += 2) {
        var s = Math.max(-1, Math.min(1, input[i]));
        view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }
    return buffer;
}

function getAudioEventMessage(buffer) {
    // wrap the audio data in a JSON envelope
    return {
        headers: {
            ':message-type': {
                type: 'string',
                value: 'event'
            },
            ':event-type': {
                type: 'string',
                value: 'AudioEvent'
            }
        },
        body: buffer
    };
}
function convertAudioToBinaryMessage(raw) {
    if (raw == null)
        return;
    // downsample and convert the raw audio bytes to PCM
    let downsampledBuffer = downsampleBuffer(raw, inputSampleRate);
    let pcmEncodedBuffer = pcmEncode(downsampledBuffer);
    setTimeout(function () {}, 1);
    // add the right JSON headers and structure to the message
    let audioEventMessage = getAudioEventMessage(Buffer.from(pcmEncodedBuffer));
    // convert the JSON object + headers into a binary event stream message
    let binary = eventStreamMarshaller.marshall(audioEventMessage);
    return binary;
}
function createPresignedUrl() {
    let endpoint = "transcribestreaming." + "us-east-1" + ".amazonaws.com:8443";
    // get a preauthenticated URL that we can use to establish our WebSocket
    return v4.createPresignedURL(
        'GET',
        endpoint,
        '/stream-transcription-websocket',
        'transcribe',
        crypto.createHash('sha256').update('', 'utf8').digest('hex'), {
            'key': <AWS_KEY>,
            'secret': <AWS_SECRET_KEY>,
            'protocol': 'wss',
            'expires': 15,
            'region': 'us-east-1',
            'query': "language-code=" + 'en-US' + "&media-encoding=pcm&sample-rate=" + 8000
        }
    );
}

function showError(message) {
    console.log("Error: ", message)
}
app.get('/convert', (req, res) => {
    var file = 'recorded.mp3'
    const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);
    let url = createPresignedUrl();
    let socket = new WebSocket(url);
    socket.binaryType = "arraybuffer";
    let output = '';
    const readStream = fs.createReadStream(file, { highWaterMark: 32 * 256 })
    readStream.setEncoding('binary')
    //let sampleRate = 0;
    let inputSampleRate = 44100
    readStream.on('end', function () {
        console.log('finished reading----->>>>');
        // Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
        let emptyMessage = getAudioEventMessage(Buffer.from([]));
        let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
        socket.send(emptyBuffer);
    })
    // when we get audio data from the stream, send it to the WebSocket if possible
    socket.onopen = function () {
        readStream.on('data', function (chunk) {
            let binary = convertAudioToBinaryMessage(chunk);
            if (socket.readyState === socket.OPEN) {
                console.log(`sending to streaming API------->>>>`)
                socket.send(binary);
            }
        });
    }
    // the audio stream is raw audio bytes. Transcribe expects PCM with additional metadata, encoded as binary
    socket.onerror = function () {
        socketError = true;
        showError('WebSocket connection error. Try again.');
    };
    // handle inbound messages from Amazon Transcribe
    socket.onmessage = function (message) {
        // convert the binary event stream message to JSON
        let messageWrapper = eventStreamMarshaller.unmarshall(Buffer.from(message.data));
        //console.log(`message -->>${JSON.stringify(messageWrapper)}`)
        let messageBody = JSON.parse(String.fromCharCode.apply(String, messageWrapper.body));
        console.log("results:.. ", JSON.stringify(messageBody))
        if (messageWrapper.headers[":message-type"].value === "event") {
            handleEventStreamMessage(messageBody);
        } else {
            transcribeException = true;
            showError(messageBody.Message);
        }
    }
    let closeSocket = function () {
        if (socket.readyState === socket.OPEN) {
            // Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
            let emptyMessage = getAudioEventMessage(Buffer.from([]));
            let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
            socket.send(emptyBuffer);
        }
    }
})

Does Azure blockBlobURL.download() have a limit to file size?

I'm using Azure's blockBlobURL.download() to download an image but am only receiving the top portion of the image. Is there a limit to how much I can download from an Azure blob to a readable stream? The content length is 172628 and there is a property highWaterMark: 16384. Are these two related?
async function compareToBaseline(imageData, blobName, metadata) {
    const baselineBlobName = "MacOSX10.12/chrome/initial"
    const containerURL = ContainerURL.fromServiceURL(serviceURL, "baselines")
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, baselineBlobName);
    let baseLineImage = await blockBlobURL.download(aborter, 0)
    baseLineImage = baseLineImage.originalResponse.readableStreamBody.read()
    console.log(baseLineImage.length);
    baseLineImage = new Buffer(baseLineImage, 'base64');
    await fs.writeFile('./newest.png', baseLineImage, 'binary', function (err) {
        console.log('written');
    })
}
The result is only the top portion of an image.
There's a 4-MB limit for each call to the Azure Storage service. If your file is larger than 4 MB, you must break it into chunks. For more information, see Azure Storage scalability and performance targets.
Here is sample C# code to download very large files in 1-MB chunks; it's performance-oriented too.
private static void DownloadLargeFile()
{
    string connectionString = "connString"; //ConfigurationSettings.AppSettings["StorageConnectionString"]; //blob connection string
    string sourceContainerName = "quickstartblob"; //ConfigurationSettings.AppSettings["sourcecontainerName"]; //source blob container name
    string sourceBlobFileName = "QuickStart1.txt"; //source blob name
    CloudStorageAccount account = CloudStorageAccount.Parse(connectionString);
    var blobClient = account.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(sourceContainerName);
    var file = sourceBlobFileName;
    var blob = container.GetBlockBlobReference(file);
    //First fetch the size of the blob. We use this to create an empty file with size = blob's size
    blob.FetchAttributes();
    var blobSize = blob.Properties.Length;
    long blockSize = (1 * 1024 * 1024); //1 MB chunk
    blockSize = Math.Min(blobSize, blockSize);
    //Create an empty file of blob size
    using (FileStream fs = new FileStream(file, FileMode.Create)) //Create empty file.
    {
        fs.SetLength(blobSize); //Set its size
    }
    var blobRequestOptions = new BlobRequestOptions
    {
        RetryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(5), 3),
        MaximumExecutionTime = TimeSpan.FromMinutes(60),
        ServerTimeout = TimeSpan.FromMinutes(60)
    };
    long currentPointer = 0;
    long bytesRemaining = blobSize;
    do
    {
        var bytesToFetch = Math.Min(blockSize, bytesRemaining);
        using (MemoryStream ms = new MemoryStream())
        {
            //Download range (by default 1 MB)
            blob.DownloadRangeToStream(ms, currentPointer, bytesToFetch, null, blobRequestOptions);
            ms.Position = 0;
            var contents = ms.ToArray();
            using (var fs = new FileStream(file, FileMode.Open)) //Open that file
            {
                fs.Position = currentPointer; //Move the cursor to the current write position
                fs.Write(contents, 0, contents.Length); //Write the contents at that position
            }
            currentPointer += contents.Length; //Update pointer
            bytesRemaining -= contents.Length; //Update bytes remaining
        }
    }
    while (bytesRemaining > 0);
}
Here is something similar in Node.js:
var azure = require('azure-storage');
var fs = require('fs');
module.exports = function (context, input) {
    context.done();
    var accessKey = 'myaccesskey';
    var storageAccount = 'mystorageaccount';
    var containerName = 'mycontainer';
    var blobService = azure.createBlobService(storageAccount, accessKey);
    var recordName = "a_large_movie.mov";
    var blobName = "standard/mov/" + recordName;
    var blobSize;
    var chunkSize = (1024 * 512) * 8; // I'm experimenting with this variable
    var startPos = 0;
    var fullPath = "D:/home/site/wwwroot/myAzureFunction/input/";
    var blobProperties = blobService.getBlobProperties(containerName, blobName, null, function (error, blob) {
        if (error) {
            throw error;
        } else {
            blobSize = blob.contentLength;
            context.log('Registered length: ' + blobSize);
            fullPath = fullPath + recordName;
            console.log(fullPath);
            doDownload();
        }
    });
    function doDownload() {
        var stream = fs.createWriteStream(fullPath, { flags: 'a' });
        var endPos = startPos + chunkSize;
        if (endPos > blobSize) {
            endPos = blobSize;
            context.log('Reached end of file endPos: ' + endPos);
        }
        context.log("Downloading " + (endPos - startPos) + " bytes starting from " + startPos + " marker.");
        blobService.getBlobToStream(
            containerName,
            blobName,
            stream,
            {
                "rangeStart": startPos,
                "rangeEnd": endPos - 1
            },
            function (error) {
                if (error) {
                    throw error;
                } else if (!error) {
                    startPos = endPos;
                    if (startPos <= blobSize - 1) {
                        doDownload();
                    }
                }
            }
        );
    }
};
Hope it helps.
blockBlobURL.download() doesn't have a file-size limit. But read() returning null doesn't mean there is no more data in the stream; you need to follow Node.js practice and collect all the data by listening to the data or readable event.
For example, the data event approach posted by Peter Pan, or the readable event approach from the Node.js official documentation:
readable.on('readable', () => {
    let chunk;
    while (null !== (chunk = readable.read())) {
        console.log(`Received ${chunk.length} bytes of data.`);
    }
});
Please always call read() inside the readable event callback.
It seems that this issue is similar to your other thread, Unable to read readableStreamBody from downloaded blob.
Here is my function to help save baseLineImage.readableStreamBody to a file:
function streamToFs(filename, readableStream) {
    // Return a promise so callers can await until the file is fully written
    return new Promise((resolve, reject) => {
        const ws = fs.createWriteStream(filename);
        readableStream.on("data", data => {
            ws.write(data);
        }).on("end", () => {
            ws.end();
            console.log('written');
            resolve();
        }).on("error", reject);
    });
}
And change your code as below.
async function compareToBaseline(imageData, blobName, metadata) {
    const baselineBlobName = "MacOSX10.12/chrome/initial"
    const containerURL = ContainerURL.fromServiceURL(serviceURL, "baselines");
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, baselineBlobName);
    let baseLineImage = await blockBlobURL.download(aborter, 0);
    await streamToFs('./newest.png', baseLineImage.readableStreamBody);
}
It works. Hope it helps.

How to download excel file from nodejs terminal

I am new to Node.js and need your help. From the Node.js terminal, I want to download an Excel file and convert it to CSV (say, via mocha online.js). Note: I don't want to do this via a browser.
Below is the script I am working on to download the file and convert it to CSV. There is no error, but also not the expected result:
online.js
if (typeof require !== 'undefined') XLSX = require('xlsx');
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
/* set up XMLHttpRequest */
var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
var xhr = new XMLHttpRequest();
xhr.open("GET", url, true);
xhr.responseType = "arraybuffer";
describe('suite', function () {
    it('case', function () {
        var arraybuffer = xhr.response;
        /* convert data to binary string */
        var data = new Uint8Array(arraybuffer);
        var arr = new Array();
        for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
        var bstr = arr.join("");
        /* Call XLSX */
        var sheetName = 'Database';
        var workbook = XLSX.read(bstr, { type: "binary" });
        var worksheet = workbook.Sheets[sheetName];
        var csv = XLSX.utils.sheet_to_csv(worksheet);
        console.log(csv);
        xhr.send();
        //.... perform validations here using the csv data
    });
});
I tried this myself, and it seems to be working. The only thing is that I spent 15 minutes trying to understand why my OpenOffice would not open the file; I eventually understood that they were sending a zip file. Here is the full code; the documentation of the http.get function is at http.get.
You could also have used the request module, which is easier, but it isn't native.
Enjoy!
const url = 'http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx'
const http = require('http')
const fs = require('fs')
http.get(url, (res) => {
    debugger
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    console.log(`The type of the file is : ${contentType}`)
    let error;
    if (statusCode !== 200) {
        error = new Error(`Request Failed.\n` +
            `Status Code: ${statusCode}`);
    }
    if (error) {
        console.error(error.message);
        // consume response data to free up memory
        res.resume();
        return;
    }
    res.setEncoding('binary');
    let rawData = '';
    res.on('data', (chunk) => {
        rawData += chunk;
    });
    res.on('end', () => {
        try {
            const parsedData = xlsxToCSVFunction(rawData);
            // And / Or just put it in a file
            fs.writeFileSync('fileName.zip', rawData, 'binary')
            // console.log(parsedData);
        } catch (e) {
            console.error(e.message);
        }
    });
}).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
});

function xlsxToCSVFunction(rawData) {
    return rawData // you should return the csv file here, whatever your tools are
}
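For instance, the stub could be filled in with the xlsx module the question already uses; a sketch, assuming rawData is the binary string read above and using the sheet name 'Database' from the question:
// Sketch: convert the downloaded binary string to CSV with the xlsx module
function xlsxToCSVFunction(rawData) {
    var XLSX = require('xlsx');
    var workbook = XLSX.read(rawData, { type: 'binary' }); // rawData is a binary string
    var worksheet = workbook.Sheets['Database']; // sheet name from the question
    return XLSX.utils.sheet_to_csv(worksheet);
}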
I actually encountered the same problem 3 months ago; here is what I did.
I did not find any Node.js module that worked exactly as I wanted, so I used in2csv (a Python command-line program) to transform the data; the -t option uses tabulation as the delimiter.
Step 1: transforming the xlsx files into csv using in2csv
This code takes all the xlsx files in the current directory, transforms them into csv files and puts them in another directory.
var shelljs = require('shelljs/global')
var dir = pwd().stdout.split('/')
dir = dir[dir.length - 1].replace(/\s/g, '\\ ')
mkdir('../' + dir + 'CSV')
ls('*.xlsx').forEach(function (file) {
    // below are the two lines you need
    let string = 'in2csv -t ' + file.replace(/\s/g, '\\ ') + ' > ../' + dir + 'CSV/' + file.replace('xlsx', 'csv').replace(/\s/g, '\\ ')
    exec(string, { silent: true }, function (code, stdout, stderr) {
        console.log('new file : ' + file.replace('xlsx', 'csv'))
        if (stderr) {
            console.log(string)
            console.log('Program stderr:', stderr)
        }
    })
});
Step 2: loading the data in a Node.js program
My script is very long, but the main two lines are:
const args = fileContent.split('\n')[0].split(',')
const content = fileContent.split('\n').slice(1).map(e => e.split(','))
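Put together, a minimal sketch of that loading step (the file name data.csv is hypothetical):
// Read one generated CSV file and split it into a header row and data rows
const fs = require('fs')
const fileContent = fs.readFileSync('data.csv', 'utf8')
const args = fileContent.split('\n')[0].split(',')
const content = fileContent.split('\n').slice(1).map(e => e.split(','))
console.log(args, content.length)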
And for the benefit of seekers like me, here is a solution using mocha, request and xlsx:
var request = require('request');
var XLSX = require('xlsx');
describe('suite', function () {
    it('case', function (done) {
        var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
        var options = {
            url: url,
            headers: {
                'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
            },
            encoding: null
        };
        request.get(options, function (err, res, body) {
            var arraybuffer = body;
            /* convert data to binary string */
            var data = arraybuffer;
            //var data = new Uint8Array(arraybuffer);
            var arr = new Array();
            for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
            var bstr = arr.join("");
            /* Call XLSX */
            var sheetName = 'Database';
            var workbook = XLSX.read(bstr, { type: "binary" });
            var worksheet = workbook.Sheets[sheetName];
            var csv = XLSX.utils.sheet_to_csv(worksheet);
            console.log(csv);
            done();
        });
    });
});

RecordRTC upload video to node js server

I am using RecordRTC to record a WebRTC meeting. After implementing recording, when I test the application with both clients on the same system, it works fine. When I test it with the clients on different systems, the meeting is not recorded.
Here is my client-side code for stopping the recording:
recordRTC.stopRecording(function (videoURL) {
    console.log('recordRTC.stopRecording Function inside');
    SelectedFile = recordRTC.getBlob();
    $('#uploadForm').append('#attachmentFileId', recordRTC.getBlob());
    StartUpload();
});
var FReader;
var Name = "Meeting" + "_" + Date.now() + ".webm";
function StartUpload() {
    FReader = new FileReader();
    FReader.onload = function (evnt) {
        socket.emit('Upload', { 'Name': Name, Data: evnt.target.result });
    }
    socket.emit('Start', { 'Name': Name, 'Size': SelectedFile.size });
}
socket.on('MoreData', function (data) {
    var Place = data['Place'] * 524288; //The Next Block's Starting Position
    var NewFile; //The Variable that will hold the new Block of Data
    if (SelectedFile.webkitSlice)
        NewFile = SelectedFile.webkitSlice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
    else
        NewFile = SelectedFile.slice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
    FReader.readAsBinaryString(NewFile);
});
Server-Side Code
I got this from here.
socket.on('Start', function (data) { //data contains the variables that we passed through in the html file
    var Name = data['Name'];
    Files[Name] = { //Create a new Entry in The Files Variable
        FileSize: data['Size'],
        Data: "",
        Downloaded: 0
    }
    var Place = 0;
    try {
        var Stat = fs.statSync('Temp/' + Name);
        if (Stat.isFile()) {
            Files[Name]['Downloaded'] = Stat.size;
            Place = Stat.size / 524288;
        }
    }
    catch (er) {} //It's a New File
    fs.open("Temp/" + Name, 'a', 0755, function (err, fd) {
        if (err) {
            console.log(err);
        }
        else {
            Files[Name]['Handler'] = fd; //We store the file handler so we can write to it later
            socket.emit('MoreData', { 'Place': Place, Percent: 0 });
        }
    });
});
socket.on('Upload', function (data) {
    var Name = data['Name'];
    Files[Name]['Downloaded'] += data['Data'].length;
    Files[Name]['Data'] += data['Data'];
    if (Files[Name]['Downloaded'] == Files[Name]['FileSize']) { //If File is Fully Uploaded
        fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function (err, Writen) {
            var input = fs.createReadStream("Temp/" + Name);
            var output = fs.createWriteStream("Video/" + Name);
            //util.pump(readableStream, writableStream, [callback])
            //Deprecated: Use readableStream.pipe(writableStream)
            input.pipe(output);
            input.on("end", function () {
                console.log("end");
                fs.unlink("Temp/" + Name, function () { //This Deletes The Temporary File
                    console.log("unlink this file:", Name);
                    //socket.emit('Done', {'Image' : 'Video/' + Name + '.jpg'});
                });
            });
        });
    }
    else if (Files[Name]['Data'].length > 10485760) { //If the Data Buffer reaches 10MB
        fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function (err, Writen) {
            Files[Name]['Data'] = ""; //Reset The Buffer
            var Place = Files[Name]['Downloaded'] / 524288;
            var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
            socket.emit('MoreData', { 'Place': Place, 'Percent': Percent });
        });
    }
    else {
        var Place = Files[Name]['Downloaded'] / 524288;
        var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
        socket.emit('MoreData', { 'Place': Place, 'Percent': Percent });
    }
});
If both clients are on the same machine it works fine, but if the clients are on different systems the meeting is not recorded.

How to get binary data from ng-file-upload file object?

I'm attempting to use the ng-file-upload directive to provide file upload functionality in my angular app.
I've got it working for the most part - I can select multiple files and loop through to grab the file name and file types. I just can't seem to figure out where the actual binary data of each file is stored in the file object.
I tried using the approach outlined in this post - AngularJS Upload a file and send it to a DB - but that results in an error: "$q is not defined".
function create_blob(file) {
    var deferred = $q.defer();
    var reader = new FileReader();
    reader.onload = function () {
        deferred.resolve(reader.result);
    };
    reader.readAsDataURL(file);
    return deferred.promise;
}
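As an aside, "$q is not defined" usually just means $q was never injected; a sketch, with hypothetical module and controller names:
// Hypothetical controller: $q (and $http) must be listed as injected
// dependencies before create_blob() can reference $q.
angular.module('myApp').controller('UploadCtrl',
    ['$scope', '$q', '$http', function ($scope, $q, $http) {
        // create_blob and uploadFiles live here and can now use $q
    }]);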
So then I tried the approach outlined in this post - Send an uploaded image to the server and save it in the server, but again I'm running into an error reading "dataURI.split is not a function".
function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {
        type: mimeString
    });
}
The code I'm using is as follows:
// create_blob() and dataURItoBlob() as defined above, plus:
$scope.uploadFiles = function (files) {
    $scope.files = files;
    angular.forEach(files, function (file) {
        if (file && !file.$error) {
            //var reader = new FileReader();
            //console.log(reader.readAsDataURL(file));
            //var binary = create_blob(file);
            var fileBinary = dataURItoBlob(file);
            $http({
                url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
                method: "POST",
                //headers: { 'caseId': id, 'fileName': file.name, fileContent: $.base64.encode(file) }
                headers: { 'caseId': id, 'fileName': file.name, fileContent: fileBinary }
            }).
            success(function (data, status, headers, config) {
                //if (data == true) {
                //    getCase();
                //    $scope.newMessage = "";
                //    //toaster.pop('success', "", "Message succesfully submitted.", 0);
                //}
            }).
            error(function (data, status, headers, config) {
            });
            file.upload.progress(function (evt) {
                file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
            });
        }
    });
}
What am I overlooking?
It depends on what format your DB accepts for file upload. If it supports multipart form data, then you can just use
Upload.upload({file: file, url: my/db/url}).then(...);
If it accepts POST requests with the file's binary data as the content of the request (like CouchDB, imgur, ...), then you can do
Upload.http({data: file, url: my/db/url, headers: {'Content-Type': file.type}})...;
If your DB only accepts JSON objects and you want to store the file as a base64 data URL in the database, like this question, then you can do
Upload.dataUrl(file, true).then(function (dataUrl) {
    $http.post(url, {
        fileBase64DataUrl: dataUrl,
        fileName: file.name,
        id: uniqueId
    });
})
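Applied to the asker's loop, the multipart option might look like this sketch (the URL and header names are taken from the question, not a definitive implementation):
// Sketch: replace the manual $http call with Upload.upload (multipart)
Upload.upload({
    url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
    data: { file: file },
    headers: { 'caseId': id, 'fileName': file.name }
}).then(function (resp) {
    console.log('uploaded', resp.status);
});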
