Does Azure blockBlobURL.download() have a limit to file size? - node.js

I'm using Azure's blockBlobURL.download() to download an image but am only receiving the top portion of the image. Is there a limit to how much I can download from an Azure blob to a readable stream? The content length is 172628 and there is a property highWaterMark: 16384. Are these two related?
async function compareToBaseline(imageData, blobName, metadata){
    const baselineBlobName = "MacOSX10.12/chrome/initial"
    const containerURL = ContainerURL.fromServiceURL(serviceURL, "baselines")
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, baselineBlobName );
    let baseLineImage = await blockBlobURL.download(aborter, 0)
    baseLineImage = baseLineImage.originalResponse.readableStreamBody.read()
    console.log(baseLineImage.length);
    baseLineImage = new Buffer(baseLineImage, 'base64');
    await fs.writeFile('./newest.png', baseLineImage, 'binary', function(err){
        console.log('written');
    })
}
The result is only the top portion of an image.

There's a 4-MB limit for each call to the Azure Storage service. If your file is larger than 4 MB, you must break it into chunks. For more information, see Azure Storage scalability and performance targets.
Here is sample C# code to download very large files in 1 MB chunks; it's performance-oriented too.
private static void DownloadLargeFile()
{
#pragma warning disable CS0618 // Type or member is obsolete
    string connectionString = "connString"; //ConfigurationSettings.AppSettings["StorageConnectionString"]; //blob connection string
    string sourceContainerName = "quickstartblob"; //ConfigurationSettings.AppSettings["sourcecontainerName"]; //source blob container name
#pragma warning restore CS0618 // Type or member is obsolete
    string sourceBlobFileName = "QuickStart1.txt"; //source blob name
    CloudStorageAccount account = CloudStorageAccount.Parse(connectionString);
    var blobClient = account.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(sourceContainerName);
    var file = sourceBlobFileName;
    var blob = container.GetBlockBlobReference(file);
    //First fetch the size of the blob. We use this to create an empty file with size = blob's size
    blob.FetchAttributes();
    var blobSize = blob.Properties.Length;
    long blockSize = (1 * 1024 * 1024); //1 MB chunk
    blockSize = Math.Min(blobSize, blockSize);
    //Create an empty file of blob size
    using (FileStream fs = new FileStream(file, FileMode.Create)) //Create empty file
    {
        fs.SetLength(blobSize); //Set its size
    }
    var blobRequestOptions = new BlobRequestOptions
    {
        RetryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(5), 3),
        MaximumExecutionTime = TimeSpan.FromMinutes(60),
        ServerTimeout = TimeSpan.FromMinutes(60)
    };
    long currentPointer = 0;
    long bytesRemaining = blobSize;
    do
    {
        var bytesToFetch = Math.Min(blockSize, bytesRemaining);
        using (MemoryStream ms = new MemoryStream())
        {
            //Download range (by default 1 MB)
            blob.DownloadRangeToStream(ms, currentPointer, bytesToFetch, null, blobRequestOptions);
            ms.Position = 0;
            var contents = ms.ToArray();
            using (var fs = new FileStream(file, FileMode.Open)) //Open that file
            {
                fs.Position = currentPointer; //Move the cursor to the current offset
                fs.Write(contents, 0, contents.Length); //Write the downloaded range at that offset
            }
            currentPointer += contents.Length; //Update pointer
            bytesRemaining -= contents.Length; //Update bytes remaining
        }
    }
    while (bytesRemaining > 0);
}
Something like below in Node.js:
var azure = require('azure-storage');
var fs = require('fs');
module.exports = function (context, input) {
    var accessKey = 'myaccesskey';
    var storageAccount = 'mystorageaccount';
    var containerName = 'mycontainer';
    var blobService = azure.createBlobService(storageAccount, accessKey);
    var recordName = "a_large_movie.mov";
    var blobName = "standard/mov/" + recordName;
    var blobSize;
    var chunkSize = (1024 * 512) * 8; // I'm experimenting with this variable
    var startPos = 0;
    var fullPath = "D:/home/site/wwwroot/myAzureFunction/input/";
    blobService.getBlobProperties(containerName, blobName, null, function (error, blob) {
        if (error) {
            throw error;
        }
        else {
            blobSize = blob.contentLength;
            context.log('Registered length: ' + blobSize);
            fullPath = fullPath + recordName;
            console.log(fullPath);
            doDownload();
        }
    });
    function doDownload() {
        var stream = fs.createWriteStream(fullPath, { flags: 'a' });
        var endPos = startPos + chunkSize;
        if (endPos > blobSize) {
            endPos = blobSize;
            context.log('Reached end of file endPos: ' + endPos);
        }
        context.log("Downloading " + (endPos - startPos) + " bytes starting from " + startPos + " marker.");
        blobService.getBlobToStream(
            containerName,
            blobName,
            stream,
            {
                "rangeStart": startPos,
                "rangeEnd": endPos - 1
            },
            function (error) {
                if (error) {
                    throw error;
                }
                else {
                    startPos = endPos;
                    if (startPos <= blobSize - 1) {
                        doDownload();
                    }
                    else {
                        context.done(); // signal completion only after the last chunk is written
                    }
                }
            }
        );
    }
};
Hope it helps.
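For reference, the same chunked-range idea with the newer @azure/storage-blob v12 SDK could look like the sketch below. This is a sketch, not the poster's code: the connection string, container name, blob name, and destination path are placeholders, and the 4 MB chunk size is just an example.
const { BlobServiceClient } = require("@azure/storage-blob");
const fs = require("fs");
// Chunked range download with the v12 SDK; all names passed in are placeholders.
async function downloadInChunks(connectionString, containerName, blobName, destPath) {
    const blobClient = BlobServiceClient.fromConnectionString(connectionString)
        .getContainerClient(containerName)
        .getBlobClient(blobName);
    const { contentLength } = await blobClient.getProperties();
    const chunkSize = 4 * 1024 * 1024; // 4 MB per range request
    const ws = fs.createWriteStream(destPath);
    for (let offset = 0; offset < contentLength; offset += chunkSize) {
        const count = Math.min(chunkSize, contentLength - offset);
        // download(offset, count) issues a ranged GET for just this slice
        const response = await blobClient.download(offset, count);
        const chunks = [];
        for await (const data of response.readableStreamBody) {
            chunks.push(data);
        }
        ws.write(Buffer.concat(chunks));
    }
    ws.end();
}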

blockBlobURL.download() doesn't have a limit on file size. But read() returning null doesn't mean there is no more data in the stream; you need to follow Node.js practices and get all the data by listening to the data or readable events.
For example, the data event approach posted by Peter Pan, or the readable event approach from the Node.js official documentation:
readable.on('readable', () => {
    let chunk;
    while (null !== (chunk = readable.read())) {
        console.log(`Received ${chunk.length} bytes of data.`);
    }
});
Please always call read() inside the readable event callback.
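For reference, here is a minimal sketch of the data-event approach mentioned above, which collects the whole stream into a single Buffer (the helper name streamToBuffer is my own, not part of the SDK):
function streamToBuffer(readableStream) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        readableStream.on("data", (chunk) => chunks.push(chunk));
        readableStream.on("end", () => resolve(Buffer.concat(chunks)));
        readableStream.on("error", reject);
    });
}
With the download response from the question, usage would then be, for example, let buffer = await streamToBuffer(baseLineImage.readableStreamBody);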

It seems that this issue is similar to your other thread Unable to read readableStreamBody from downloaded blob.
Here is my function to help save baseLineImage.readableStreamBody to a file, as below.
function streamToFs(filename, readableStream) {
    // resolve only after the write stream has flushed, so callers can await completion
    return new Promise((resolve, reject) => {
        const ws = fs.createWriteStream(filename);
        readableStream.pipe(ws);
        ws.on("finish", () => {
            console.log('written');
            resolve();
        }).on("error", reject);
    });
}
And change your code as below.
async function compareToBaseline(imageData, blobName, metadata){
    const baselineBlobName = "MacOSX10.12/chrome/initial"
    const containerURL = ContainerURL.fromServiceURL(serviceURL, "baselines");
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, baselineBlobName );
    let baseLineImage = await blockBlobURL.download(aborter, 0);
    await streamToFs('./newest.png', baseLineImage.readableStreamBody);
}
It works. Hope it helps.

Related

download subdirectory/directory from blob storage using nodejs and blob service?

I have implemented file downloads using the getBlobProperties() and createReadStream(containerName, fullPath, options) methods of the blob service. Now I am trying to download a directory/subdirectory inside my containers using the same methods, but it is not working and throws the error: specified blob does not exist. Though I know the reason for this error, how can I overcome this issue? I don't want to loop over the blobs and download each file separately; I want a complete folder to be downloaded.
Here is the API:
exports.getBlobChunk = function (req, res) {
    var userrole = utils.sanitizeStr(req.body.userrole);
    var srcFilePath = utils.sanitizeStr(req.body.srcfilePath);
    var fileName = utils.sanitizeStr(req.body.srcfileName);
    var fullPath = srcFilePath + "/" + fileName;
    var startRange = req.headers['startrange'];
    var endRange = req.headers['endrange'];
    genericHandler.getUserSubscMapping().then(function (results) {
        if (results != undefined && results != null) {
            var item = results[0].mapping.find(item => item.name == userrole);
            var sasurl = item.sasurl;
            if (sasurl == null) {
                res.status(500).send("Subscription mapping not configured");
                return;
            }
            var host = sasurl.substring(0, sasurl.lastIndexOf("/"));
            var containerName = sasurl.substring(sasurl.lastIndexOf("/"), sasurl.indexOf("?")).split("/")[1];
            var saskey = sasurl.substring(sasurl.indexOf("?"), sasurl.length);
            var download = item.download; // download usage
            var blobService = storage.createBlobServiceWithSas(host, saskey);
            blobService.getBlobProperties(containerName, fullPath, function (err, properties, status) {
                if (err) {
                    res.send(502, "Error fetching file: %s", err.message);
                } else if (!status.isSuccessful) {
                    res.send(404, "The file %s does not exist", fullPath);
                } else {
                    var contentLength = properties.contentLength / 1024; // bytes to KB
                    res.header('Content-Type', "application/zip");
                    res.attachment(fileName);
                    var options = {
                        rangeStart: startRange,
                        rangeEnd: endRange
                    };
                    if (startRange == 0) { // update download size on first chunk
                        exports.updateStorageDownload(userrole, contentLength, download);
                    }
                    blobService.createReadStream(containerName, fullPath, options).pipe(res);
                }
            });
        }
    });
};
Azure Blob storage does not have a concept of folders; everything inside the container is considered a blob, including the "folders". So you can't download a directory/subdirectory by folder name.
For example, a container structure like this:
hello.txt
...
test
    test.txt
    test1
        data.json
You need to download the blob files from the directory one by one.
const {
    BlobServiceClient,
    StorageSharedKeyCredential,
} = require("@azure/storage-blob");
// Enter your storage account name and shared key
const account = "";
const accountKey = "";
const containerName = "";
const filePath = "D:/downloads/"
// Use StorageSharedKeyCredential with storage account and account key
// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    sharedKeyCredential,
);
async function listBlobs() {
    const containerClient = blobServiceClient.getContainerClient(containerName);
    console.log("list blobs with method listBlobsFlat");
    let iter = containerClient.listBlobsFlat({ prefix: "test/" });
    for await (const item of iter) {
        console.log(`\tBlobItem: name - ${item.name}`);
        await downloadBlobToLocal(containerClient, item.name, filePath);
    }
    console.log("list blobs with method listBlobsByHierarchy");
    let iter1 = containerClient.listBlobsByHierarchy("/", { prefix: "test/" });
    for await (const item of iter1) {
        if (item.kind === "prefix") {
            console.log(`\tBlobPrefix: ${item.name}`);
            await listblob(containerClient, item.name);
        } else {
            console.log(`\tBlobItem: name - ${item.name}`);
            await downloadBlobToLocal(containerClient, item.name, filePath);
        }
    }
}
async function listblob(containerClient, prefix) {
    let iter1 = containerClient.listBlobsByHierarchy("/", { prefix: prefix });
    for await (const item of iter1) {
        if (item.kind === "prefix") {
            console.log(`\tBlobPrefix: ${item.name}`);
        } else {
            console.log(`\tBlobItem: name - ${item.name}`);
            await downloadBlobToLocal(containerClient, item.name, filePath);
        }
    }
}
async function downloadBlobToLocal(containerClient, blobName, filePath) {
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    await blockBlobClient.downloadToFile(filePath + blobName);
}
listBlobs().catch((err) => {
    console.error("Error running sample:", err.message);
});
I've written my own implementation based on this great article:
public async Task<List<BlobDto>> ListWithPrefixAsync(string folder)
{
    // Get a reference to a container named in appsettings.json
    BlobContainerClient container = new BlobContainerClient(_storageConnectionString, _storageContainerName);
    // Create a new list object to hold the file details
    List<BlobDto> files = new List<BlobDto>();
    await foreach (BlobItem file in container.GetBlobsAsync(prefix: folder))
    {
        // Add each file retrieved from the storage container to the files list by creating a BlobDto object
        string uri = container.Uri.ToString();
        var name = file.Name;
        var fullUri = $"{uri}/{name}";
        files.Add(new BlobDto
        {
            Uri = fullUri,
            Name = name,
            ContentType = file.Properties.ContentType
        });
    }
    // Return all files to the requesting method
    return files;
}
Getting the list of blob files in a folder is then very simple:
// Get all files at the Azure Storage Location and return them
List<BlobDto>? files = await _storage.ListWithPrefixAsync(prefix);
Hope this helps.
Happy coding!!

Copy Azure File Share to Blob with node.js

Is there an example (in node.js) of copying an Azure File Share to Blob storage?
Server to server, without downloading the file first.
Thanks in advance.
My code:
var arrFolders = [];
arrFolders.push("");
do
{
    let directoryName = arrFolders.pop();
    console.log(`List directories and files under directory ${directoryName}`);
    let i = 1;
    const directoryClient = shareClient.getDirectoryClient(directoryName);
    for await (const entity of directoryClient.listFilesAndDirectories())
    {
        if (entity.kind === "directory")
        {
            console.log(`${i++} - directory\t: ${entity.name}`);
            arrFolders.push((directoryName=="") ? entity.name : directoryName + "\\" + entity.name);
        }
        else
        {
            console.log(`${i++} - file\t: ${entity.name}`);
            // Copy Files to Blob....
            // ?????
        }
    }
} while (arrFolders.length > 0);
console.log("End list directories and files");
If you want to copy an Azure file to an Azure blob, we can use the REST API Copy Blob.
In the Node SDK, we can use the method BlobClient.beginCopyFromURL in the package @azure/storage-blob to implement it.
For example:
const { BlobServiceClient } = require("@azure/storage-blob");
const {
    ShareClient,
    generateAccountSASQueryParameters,
    StorageSharedKeyCredential,
    AccountSASResourceTypes,
    AccountSASPermissions,
    AccountSASServices,
} = require("@azure/storage-file-share");
const fileAccountName = "";
const fileAccountKey = "";
const blobConStr = "";
async function copy() {
    // create account sas token for file service
    var fileCreds = new StorageSharedKeyCredential(
        fileAccountName,
        fileAccountKey
    );
    var accountSas = generateAccountSASQueryParameters(
        {
            startsOn: new Date(new Date().valueOf() - 8640),
            expiresOn: new Date(new Date().valueOf() + 86400000),
            resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
            permissions: AccountSASPermissions.parse("rwdlc").toString(),
            services: AccountSASServices.parse("f").toString(),
        },
        fileCreds
    ).toString();
    // get file share client
    var shareClient = new ShareClient(
        `https://${fileAccountName}.file.core.windows.net/<shareName>`,
        fileCreds
    );
    // get blob container client
    var blobServiceClient = BlobServiceClient.fromConnectionString(blobConStr);
    var containerClient = blobServiceClient.getContainerClient("<containerName>");
    await containerClient.createIfNotExists();
    // list files and copy them to azure blob
    var arrFolders = [];
    arrFolders.push("input");
    do {
        let directoryName = arrFolders.pop();
        console.log(`List directories and files under directory ${directoryName}`);
        let i = 1;
        const directoryClient = shareClient.getDirectoryClient(directoryName);
        for await (const entity of directoryClient.listFilesAndDirectories()) {
            if (entity.kind === "directory") {
                console.log(`${i++} - directory\t: ${entity.name}`);
                arrFolders.push(
                    directoryName == "" ? entity.name : directoryName + "\\" + entity.name
                );
            } else {
                console.log(`${i++} - file\t: ${entity.name}`);
                var fileClient = directoryClient.getFileClient(entity.name);
                var sourceUrl = fileClient.url + "?" + accountSas;
                var res = await (
                    await containerClient
                        .getBlobClient(entity.name)
                        .beginCopyFromURL(sourceUrl)
                ).pollUntilDone();
                console.log(res.copyStatus);
            }
        }
    } while (arrFolders.length > 0);
}
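A quick usage sketch, assuming the placeholders above (account names, keys, share name, container name) are filled in. The account SAS appended to the file URL is what lets the blob service read the source during the server-side copy:
copy()
    .then(() => console.log("copy finished"))
    .catch((err) => console.error("copy failed:", err.message));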

Amazon Transcribe streaming with Node.js using websocket

I am working on a WhatsApp chatbot where I receive an audio file (ogg format) URL from WhatsApp; I get the buffer and upload the file to S3 (sample.ogg). Now I want to use AWS Transcribe streaming, so I create a read stream of the file and send it to AWS Transcribe over a WebSocket. But I receive an empty response, or sometimes an "Mhm mm mm" response. Can anyone please tell me what I am doing wrong in my code?
const express = require('express')
const app = express()
const fs = require('fs');
const crypto = require('crypto'); // to sign our pre-signed URL
const v4 = require('./aws-signature-v4'); // to generate our pre-signed URL
const marshaller = require("@aws-sdk/eventstream-marshaller"); // for converting binary event stream messages to and from JSON
const util_utf8_node = require("@aws-sdk/util-utf8-node");
var WebSocket = require('ws') // for opening a web socket
// our converter between binary event stream messages and JSON
const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);
// our global variables for managing state
let languageCode;
let region = 'ap-south-1';
let sampleRate;
let inputSampleRate;
let transcription = "";
let socket;
let micStream;
let socketError = false;
let transcribeException = false;
// let languageCode = 'en-us'
app.listen(8081, (error, data) => {
    if(!error) {
        console.log(`running at 8081----->>>>`)
    }
})
let handleEventStreamMessage = function (messageJson) {
    let results = messageJson.Transcript.Results;
    if (results.length > 0) {
        if (results[0].Alternatives.length > 0) {
            let transcript = results[0].Alternatives[0].Transcript;
            // fix encoding for accented characters
            transcript = decodeURIComponent(escape(transcript));
            console.log(`Transcript is----->>${transcript}`)
        }
    }
}
function downsampleBuffer(buffer, inputSampleRate = 44100, outputSampleRate = 16000) {
    if (outputSampleRate === inputSampleRate) {
        return buffer;
    }
    var sampleRateRatio = inputSampleRate / outputSampleRate;
    var newLength = Math.round(buffer.length / sampleRateRatio);
    var result = new Float32Array(newLength);
    var offsetResult = 0;
    var offsetBuffer = 0;
    while (offsetResult < result.length) {
        var nextOffsetBuffer = Math.round((offsetResult + 1) * sampleRateRatio);
        var accum = 0,
            count = 0;
        for (var i = offsetBuffer; i < nextOffsetBuffer && i < buffer.length; i++) {
            accum += buffer[i];
            count++;
        }
        result[offsetResult] = accum / count;
        offsetResult++;
        offsetBuffer = nextOffsetBuffer;
    }
    return result;
}
function pcmEncode(input) {
    var offset = 0;
    var buffer = new ArrayBuffer(input.length * 2);
    var view = new DataView(buffer);
    for (var i = 0; i < input.length; i++, offset += 2) {
        var s = Math.max(-1, Math.min(1, input[i]));
        view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }
    return buffer;
}
function getAudioEventMessage(buffer) {
    // wrap the audio data in a JSON envelope
    return {
        headers: {
            ':message-type': {
                type: 'string',
                value: 'event'
            },
            ':event-type': {
                type: 'string',
                value: 'AudioEvent'
            }
        },
        body: buffer
    };
}
function convertAudioToBinaryMessage(raw) {
    if (raw == null)
        return;
    // downsample and convert the raw audio bytes to PCM
    let downsampledBuffer = downsampleBuffer(raw, inputSampleRate);
    let pcmEncodedBuffer = pcmEncode(downsampledBuffer);
    setTimeout(function() {}, 1);
    // add the right JSON headers and structure to the message
    let audioEventMessage = getAudioEventMessage(Buffer.from(pcmEncodedBuffer));
    // convert the JSON object + headers into a binary event stream message
    let binary = eventStreamMarshaller.marshall(audioEventMessage);
    return binary;
}
function createPresignedUrl() {
    let endpoint = "transcribestreaming." + "us-east-1" + ".amazonaws.com:8443";
    // get a preauthenticated URL that we can use to establish our WebSocket
    return v4.createPresignedURL(
        'GET',
        endpoint,
        '/stream-transcription-websocket',
        'transcribe',
        crypto.createHash('sha256').update('', 'utf8').digest('hex'), {
            'key': <AWS_KEY>,
            'secret': <AWS_SECRET_KEY>,
            'protocol': 'wss',
            'expires': 15,
            'region': 'us-east-1',
            'query': "language-code=" + 'en-US' + "&media-encoding=pcm&sample-rate=" + 8000
        }
    );
}
function showError(message) {
    console.log("Error: ", message)
}
app.get('/convert', (req, res) => {
    var file = 'recorded.mp3'
    const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);
    let url = createPresignedUrl();
    let socket = new WebSocket(url);
    socket.binaryType = "arraybuffer";
    let output = '';
    const readStream = fs.createReadStream(file, { highWaterMark: 32 * 256 })
    readStream.setEncoding('binary')
    //let sampleRate = 0;
    let inputSampleRate = 44100
    readStream.on('end', function() {
        console.log('finished reading----->>>>');
        // Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
        let emptyMessage = getAudioEventMessage(Buffer.from(new Buffer([])));
        let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
        socket.send(emptyBuffer);
    })
    // when we get audio data from the mic, send it to the WebSocket if possible
    socket.onopen = function() {
        readStream.on('data', function(chunk) {
            let binary = convertAudioToBinaryMessage(chunk);
            if (socket.readyState === socket.OPEN) {
                console.log(`sending to streaming API------->>>>`)
                socket.send(binary);
            }
        });
    }
    // the audio stream is raw audio bytes. Transcribe expects PCM with additional metadata, encoded as binary
    socket.onerror = function () {
        socketError = true;
        showError('WebSocket connection error. Try again.');
    };
    // handle inbound messages from Amazon Transcribe
    socket.onmessage = function (message) {
        // convert the binary event stream message to JSON
        let messageWrapper = eventStreamMarshaller.unmarshall(Buffer(message.data));
        //console.log(`message -->>${JSON.stringify(messageWrapper)}`)
        let messageBody = JSON.parse(String.fromCharCode.apply(String, messageWrapper.body));
        console.log("results:.. ", JSON.stringify(messageBody))
        if (messageWrapper.headers[":message-type"].value === "event") {
            handleEventStreamMessage(messageBody);
        }
        else {
            transcribeException = true;
            showError(messageBody.Message);
        }
    }
    let closeSocket = function () {
        if (socket.OPEN) {
            // Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
            let emptyMessage = getAudioEventMessage(Buffer.from(new Buffer([])));
            let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
            socket.send(emptyBuffer);
        }
    }
})

nodejs base64 to blob conversion

I am capturing a webcam screenshot in reactjs (react-webcam). The screenshot is in the form of a base64-encoded string. I am sending the base64 string to nodejs, and I want to convert the base64 string to a .jpeg file so that I can save it in Azure Blob Storage.
Is there any method to convert a base64 string to a .jpeg file?
You can convert your Base64 string to a Buffer and then try storing it to Azure.
var base64String = "....."; // your base64 string
var bufferValue = Buffer.from(base64String,"base64");
I used this and it worked. Below is the server-side code (NodeJS):
var contentType = 'image/jpeg';
let base64String = req.body.img;
let base64Image = base64String.split(';base64,').pop();
let date = Date.now();
fs.writeFile(`./uploads/${date}.jpeg`, base64Image, {encoding: 'base64'}, function(err) {
    console.log('File created');
    sourceFilePath = path.resolve(`./uploads/${date}.jpeg`);
    blobName = path.basename(sourceFilePath, path.extname(sourceFilePath));
    //console.log(sourceFilePath);
    blobService.createBlockBlobFromLocalFile(containerName, blobName, sourceFilePath, err => {
        if (err) {
            console.log(err);
        }
        else {
            //resolve({ message: `Upload of '${blobName}' complete` });
            console.log("UPLOADED")
        }
    });
});
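For comparison, with the newer @azure/storage-blob v12 SDK you can skip the temporary file and upload the decoded Buffer directly. A minimal sketch; the connection string environment variable and the container name 'images' are assumptions, not from the code above:
const { BlobServiceClient } = require("@azure/storage-blob");
async function uploadBase64Image(base64String, blobName) {
    // strip a possible data-URL prefix, then decode to binary
    const base64Image = base64String.split(';base64,').pop();
    const buffer = Buffer.from(base64Image, 'base64');
    // connection string env var and container name are assumptions for this sketch
    const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
    const containerClient = blobServiceClient.getContainerClient('images');
    // uploadData accepts a Buffer directly, so no intermediate file is needed
    await containerClient.getBlockBlobClient(blobName).uploadData(buffer, {
        blobHTTPHeaders: { blobContentType: 'image/jpeg' }
    });
}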
Try this:
based-blob
(async function() {
    const b = require('based-blob');
    const base64String = 'some base64 data...';
    const blob = b.toBlob(base64String);
    const b64s = await b.toBase64(blob);
    console.log(b64s == base64String); // true
})();
Hi, I use this function:
public b64toBlob = (b64Data: string = '', sliceSize?: number) => {
    sliceSize = sliceSize || 512;
    if (b64Data !== null) {
        let block = b64Data.split(';');
        let dataType = block[0].split(':')[1];
        let realData = block[1].split(',')[1];
        let filename = this.makeid() + '.' + dataType.split('/')[1];
        let byteCharacters = atob(realData);
        let byteArrays = [];
        for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
            let slice = byteCharacters.slice(offset, offset + sliceSize);
            let byteNumbers = new Array(slice.length);
            for (let i = 0; i < slice.length; i++) {
                byteNumbers[i] = slice.charCodeAt(i);
            }
            let byteArray = new Uint8Array(byteNumbers);
            byteArrays.push(byteArray);
        }
        let blob = new Blob(byteArrays, {type: dataType});
        return blob;
    } else {
        return '';
    }
}
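Note that b64toBlob relies on browser APIs (atob and Blob), so it belongs in client-side code. In Node.js, decoding the same realData slice is a one-liner, for example:
const buffer = Buffer.from(realData, 'base64'); // realData split out as in the function above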

Streaming a file from server to client with socket.io-stream

I've managed to upload files in chunks from a client to a server, but now I want to achieve the opposite. Unfortunately the documentation on the official module page lacks this part.
I want to do the following:
emit a stream and 'download' event with the filename to the server
the server should create a read stream and pipe it to the stream emitted from the client
when the stream reaches the client, a download popup should appear and ask where to save the file
The reason why I don't want to use simple file hyperlinks is obfuscation: the files on the server are encrypted and renamed, so I have to decrypt and rename them for each download request.
Any code snippets around to get me started with this?
This is a working example I'm using. But somehow (maybe only in my case) this can be very slow.
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    callback({
        name : "filename",
        size : 500
    });
    var MyFileStream = fs.createReadStream(name);
    MyFileStream.pipe(stream);
});
//== Client Side
/** Download a file from the object store
 * @param {string} name Name of the file to download
 * @param {string} originalFilename Overrules the file's originalFilename
 * @returns {$.Deferred}
 */
function downloadFile(name, originalFilename) {
    var deferred = $.Deferred();
    //== Create stream for file to be streamed to and buffer to save chunks
    var stream = ss.createStream(),
        fileBuffer = [],
        fileLength = 0;
    //== Emit/Request
    ss(mysocket).emit('filedownload', stream, name, function (fileError, fileInfo) {
        if (fileError) {
            deferred.reject(fileError);
        } else {
            console.log(['File Found!', fileInfo]);
            //== Receive data
            stream.on('data', function (chunk) {
                fileLength += chunk.length;
                var progress = Math.floor((fileLength / fileInfo.size) * 100);
                progress = Math.max(progress - 2, 1);
                deferred.notify(progress);
                fileBuffer.push(chunk);
            });
            stream.on('end', function () {
                var filedata = new Uint8Array(fileLength),
                    i = 0;
                //== Loop to fill the final array
                fileBuffer.forEach(function (buff) {
                    for (var j = 0; j < buff.length; j++) {
                        filedata[i] = buff[j];
                        i++;
                    }
                });
                deferred.notify(100);
                //== Download file in browser
                downloadFileFromBlob([filedata], originalFilename);
                deferred.resolve();
            });
        }
    });
    //== Return
    return deferred;
}
var downloadFileFromBlob = (function () {
    var a = document.createElement("a");
    document.body.appendChild(a);
    a.style = "display: none";
    return function (data, fileName) {
        var blob = new Blob(data, { type: "octet/stream" }),
            url = window.URL.createObjectURL(blob);
        a.href = url;
        a.download = fileName;
        a.click();
        window.URL.revokeObjectURL(url);
    };
}());
Answer: My dear friend Jeffrey van Norden, that's right, it worked for me. But there was a small bug, so I changed the server-side code this way:
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    try {
        let stats = fs.statSync(name);
        let size = stats.size;
        callback(false, {
            name: name,
            size: size
        });
        let MyFileStream = fs.createReadStream(name);
        MyFileStream.pipe(stream);
    }
    catch (e) {
        callback(true, {});
    }
});
The client-side code stays the same as in the answer above.
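For clarity, the small bug was the callback signature: the first answer's server invoked callback({ name, size }) with a single argument, while the client reads the arguments as (fileError, fileInfo). The fixed server honors the contract the client expects:
//== success: no error flag, then the file info the client handler uses
callback(false, { name: name, size: size });
//== failure: error flag set, empty info object
callback(true, {});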
