Content-Length error in Upload File to Blob Storage - azure

I am trying to upload a file stored in my local system to the Azure blob storage account using Azure Client Library.
Using following code:
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob')
const fs = require('fs')
const account = '<account>'
const accountKey = '<SharedKey>'
// Authenticate against the storage account with its shared key.
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey)
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,
sharedKeyCredential
)
const containerClient = blobServiceClient.getContainerClient('stream-test-container')
const blockBlobClient = containerClient.getBlockBlobClient('path1/path2/file.xml')
// NOTE(review): upload(body, contentLength) requires an explicit length as its
// second argument — only the body is passed here, which is exactly what
// produces the "contentLength cannot be null or undefined" error below.
const uploadBlobResponse = blockBlobClient.upload(fs.readFileSync('demo.xml'))
console.log(uploadBlobResponse)
However, I am getting an error that
contentLength cannot be null or undefined.
Can anyone help?

I believe the reason you're getting this error is that you're using the wrong upload method. The upload method expects the body as an HttpRequestBody along with a contentLength parameter. Since you're not providing a value for the contentLength parameter, you're getting this error.
Instead of the upload method, you should use the uploadData method. It only expects a data buffer, which you get when you read the file. I just tried your code with the uploadData method and it worked well for me.
So your code would be:
// Fixed version: uploadData infers the content length from the buffer, so no
// explicit contentLength is needed. Note the package name is
// '@azure/storage-blob' (the '#' in the question is a paste artifact).
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob')
const fs = require('fs')

const account = '<account>'
const accountKey = '<SharedKey>'

// Shared-key authentication for the storage account.
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey)
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
)

const containerClient = blobServiceClient.getContainerClient('stream-test-container')
const blockBlobClient = containerClient.getBlockBlobClient('path1/path2/file.xml')

// uploadData returns a Promise — resolve it so we log the actual response
// object instead of a pending Promise, and surface failures.
blockBlobClient
  .uploadData(fs.readFileSync('demo.xml'))
  .then((uploadBlobResponse) => console.log(uploadBlobResponse))
  .catch((err) => console.error('Upload failed:', err))

Related

Azure Blob Storage .uploadStream() not uploading more than 42 bytes

I am trying to upload a video from my Azure VM to Azure Blob Storage with @azure/storage-blob using .uploadStream(), but it is not uploading more than 42 bytes
progress { loadedBytes: 42 }
even though the file is 5.7 kb
-rw-r--r-- 1 root root 5.7K Oct 11 10:33 345009243
My upload code is like this
// NOTE(review): the package name should be '@azure/storage-blob'; the '#'
// here is a paste artifact and would fail to resolve.
const { BlobServiceClient, StorageSharedKeyCredential } = require("#azure/storage-blob");
const dotenv = require('dotenv');
const getStream = require('into-stream');
//import getStream from 'into-stream'
dotenv.config();
// Storage account name/key are read from the environment (.env file).
const account = process.env.STORAGE_ACCOUNT;
const accountKey = process.env.ACCOUNT_KEY;
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,sharedKeyCredential
);
const containerName = process.env.CONTAINER_NAME;
// Uploads a file to blob storage as a stream.
// filePath: path of the file on disk; filename: object carrying mimeType.
const uploadFile = async(filePath,filename) => {
const containerClient = blobServiceClient.getContainerClient(containerName);
// Timestamp-based blob name to avoid collisions.
const blobName = "vimeoVedio" + new Date().getTime();
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// BUG (the subject of this question): this buffers the *path string* itself
// (e.g. "/tmp/345009243" — 42 characters), not the file's contents, so only
// 42 bytes are ever uploaded. The file must be read from disk instead.
const buffer = Buffer.from(filePath);
const stream = getStream(buffer);
// Internal buffer size for uploadStream (512 KiB); 400 below is the maximum
// concurrency.
const streamLenght = (1 * 1024 * 1024) / 2;
const uploadBlobResponse = await blockBlobClient.uploadStream(stream,streamLenght, 400, {
// abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
onProgress: (ev) => {console.log("progress", ev)},
blobHTTPHeaders: {blobContentType: filename.mimeType}
});
// NOTE(review): Promise.resolve takes a single value; the second argument
// (requestId) is silently ignored.
return Promise.resolve(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId);
}
module.exports = {uploadFile}
I tried this upload with multiple different files with different file sizes but it still is only uploading 42 bytes
I believe the issue is in this line of code:
const buffer = Buffer.from(filePath);
What you are doing here is creating a buffer from a string representing the file path (e.g. C:\temp\myfile.txt) instead of the actual file contents.
You would want to use the fs module and read the contents of the file using something like fs.readFile(path[, options], callback) or fs.readFileSync(path[, options]). Once you do that, you should be able to upload the file properly.

Copy file from Azure Storage blob (Containers) to Azure File shares using Nodejs

Is there a way to copy files from Azure Containers (blobs) to Azure File shares?
I was able to copy files from one container to another - see below.
But I wanted to copy files from Blob to File Shares
const {
  BlobServiceClient,
  StorageSharedKeyCredential
} = require("@azure/storage-blob"); // note: '@azure', not '#azure'

/**
 * Copies "file1.png" from the "documents" container to the "copy" container
 * in the same storage account using a server-side copy.
 */
async function copy() {
  const account = "<account-name>";
  const accountKey = "<account-key>";
  const cert = new StorageSharedKeyCredential(account, accountKey);
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    cert
  );
  const sourceContainer = blobServiceClient.getContainerClient("documents");
  const desContainer = blobServiceClient.getContainerClient("copy");
  // If the destination container does not exist, create it first:
  // await desContainer.create();

  const sourceBlob = sourceContainer.getBlobClient("file1.png");
  console.log(sourceBlob, sourceBlob.name);
  const desBlob = desContainer.getBlobClient(sourceBlob.name);
  // beginCopyFromURL starts an asynchronous server-side copy;
  // pollUntilDone waits for it to finish.
  const response = await desBlob.beginCopyFromURL(sourceBlob.url);
  const result = await response.pollUntilDone();
  console.log(result._response.status);
  console.log(result.copyStatus);
}

// Surface failures instead of leaving the promise floating.
copy().catch((err) => console.error(err));
I have tested in my environment.
To copy a file from Azure File Share to Azure Blob Storage, you can use the below code:
const {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");
const {
  ShareServiceClient
} = require("@azure/storage-file-share");

/**
 * Copies a file from an Azure File Share into a blob container by starting a
 * server-side copy from the file's URL.
 */
async function copy() {
  const account = "<account-name>";
  const accountKey = "<account-key>";
  const cert = new StorageSharedKeyCredential(account, accountKey);
  // The SAS on the file endpoint lets the blob service read the source file.
  const accountSas = "<account-sas>";
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    cert
  );
  const serviceClient = new ShareServiceClient(
    `https://${account}.file.core.windows.net${accountSas}`,
    cert
  );
  const sourceContainer = blobServiceClient.getContainerClient("containerName");
  const shareClient = serviceClient.getShareClient("fileShareName");
  const directoryClient = shareClient.getDirectoryClient("directoryName");
  const fileClient = directoryClient.getFileClient("fileName");
  // If the destination container does not exist, create it first:
  // await sourceContainer.create();

  const sourceBlob = sourceContainer.getBlobClient("blobFileName");
  // Start the copy and wait for completion so failures surface here.
  const poller = await sourceBlob.beginCopyFromURL(fileClient.url);
  await poller.pollUntilDone();
}

copy().catch((err) => console.error(err));
To copy files from Azure Blob Storage to an Azure File Share, we can download the blob to a local file first and then upload that local file to the Azure File Share.
You can use below code to download the blob file to local:
const {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

/**
 * Downloads a blob from Azure Blob Storage to a local file with the same name.
 * (The original snippet also declared unused module-level duplicates of the
 * constants below; they have been removed.)
 */
async function download() {
  const account = "<account-name>";
  const accountKey = "<account-key>";
  const cert = new StorageSharedKeyCredential(account, accountKey);
  const container = "containerName";
  const blobFileName = "blobFileName";
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    cert
  );
  const sourceContainer = blobServiceClient.getContainerClient(container);
  const sourceBlob = sourceContainer.getBlobClient(blobFileName);
  // downloadToFile returns a Promise — await it so errors are not lost.
  await sourceBlob.downloadToFile(blobFileName);
}

download().catch((err) => console.error(err));
You can use the below code to upload the file from local to Azure File Share:
const {
  ShareServiceClient,
  // Fix: StorageSharedKeyCredential was referenced below but never imported
  // in the original snippet, which would throw a ReferenceError. It is
  // exported by @azure/storage-file-share as well.
  StorageSharedKeyCredential
} = require("@azure/storage-file-share");

/**
 * Uploads a local file to a directory in an Azure File Share.
 */
async function upload() {
  const account = "<account-name>";
  const accountKey = "<account-key>";
  const cert = new StorageSharedKeyCredential(account, accountKey);
  const accountSas = "<account-sas>";
  const serviceClient = new ShareServiceClient(
    `https://${account}.file.core.windows.net${accountSas}`,
    cert
  );
  const shareClient = serviceClient.getShareClient("fileShareName");
  const directoryClient = shareClient.getDirectoryClient("directoryName");
  const fileClient = directoryClient.getFileClient("FileName");
  // uploadFile returns a Promise — await it so errors surface.
  await fileClient.uploadFile("localFilePath");
}

upload().catch((err) => console.error(err));

How to generate an azure blob url with SAS signature in nodejs sdk v12?

previously (in older sdk like v2) you can generate a sas url (a signed shareable url for a blob) like following :
// Legacy azure-storage (v2) SDK: create a read-only SAS valid from 100
// minutes in the past to 100 minutes in the future (the back-dated start
// cushions against clock skew between client and service).
var azure = require('azure-storage');
var blobService = azure.createBlobService();
var startDate = new Date();
var expiryDate = new Date(startDate);
expiryDate.setMinutes(startDate.getMinutes() + 100);
startDate.setMinutes(startDate.getMinutes() - 100);
var sharedAccessPolicy = {
AccessPolicy: {
Permissions: azure.BlobUtilities.SharedAccessPermissions.READ,
Start: startDate,
Expiry: expiryDate
}
};
// Sign the policy for one blob, then combine token + blob URL into a SAS URL.
var token = blobService.generateSharedAccessSignature(containerName, blobName, sharedAccessPolicy);
var sasUrl = blobService.getUrl(containerName, blobName, token);
I'm wondering how we can generate that url in sdk v12?
I could not find any documentation for Sas URL in v12.
BlobUtilities and getUrl() methods are also not available in v12 (in v12 there are separate packages for every module; in my case I'm using require("@azure/storage-blob"))
Thanks.
Regarding the issue, please refer to the following code
const storage = require("@azure/storage-blob"); // '@azure', not '#azure'

const accountname = "blobstorage0516";
const key = "";
// Shared-key credential, used both for the client and to sign the SAS.
const creds = new storage.StorageSharedKeyCredential(accountname, key);
const blobServiceClient = new storage.BlobServiceClient(
  `https://${accountname}.blob.core.windows.net`,
  creds
);
const containerName = "test";
const client = blobServiceClient.getContainerClient(containerName);
const blobName = "help.txt";
const blobClient = client.getBlobClient(blobName);
// Sign a SAS token scoped to this one blob (read/add/create/write/delete).
const blobSAS = storage.generateBlobSASQueryParameters(
  {
    containerName,
    blobName,
    permissions: storage.BlobSASPermissions.parse("racwd"),
    startsOn: new Date(),
    // Date math is in milliseconds: a bare 86400 would be ~86 seconds;
    // 86400 * 1000 gives the intended one-day validity.
    expiresOn: new Date(new Date().valueOf() + 86400 * 1000)
  },
  creds
).toString();
// Append the token to the blob URL to get a shareable SAS URL.
const sasUrl = blobClient.url + "?" + blobSAS;
console.log(sasUrl);
You can do so by using generateBlobSASQueryParameters. For example, see the code below:
const AZURE_STORAGE_ACCOUNT = 'account-name';
const AZURE_STORAGE_ACCESS_KEY = 'account-key';

const { StorageSharedKeyCredential, BlobServiceClient, generateBlobSASQueryParameters, BlobSASPermissions } = require("@azure/storage-blob");

const sharedKeyCredential = new StorageSharedKeyCredential(AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_ACCESS_KEY);
const blobServiceClient = new BlobServiceClient(
  `https://${AZURE_STORAGE_ACCOUNT}.blob.core.windows.net`,
  sharedKeyCredential
);
const containerName = 'container-name';
const blobName = 'blob-name';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);

// Sign a SAS token scoped to this one blob (read/add/create/write/delete).
const sasToken = generateBlobSASQueryParameters({
  containerName: containerName,
  blobName: blobName,
  // Date math is in milliseconds: a bare 86400 would expire in ~86 seconds;
  // 86400 * 1000 gives the intended one-day validity.
  expiresOn: new Date(new Date().valueOf() + 86400 * 1000),
  permissions: BlobSASPermissions.parse("racwd")
}, sharedKeyCredential);

// Append the token to the blob URL to get a shareable SAS URL.
const sasUrl = `${blockBlobClient.url}?${sasToken}`;
console.log(sasUrl);

SharedKeyCredential is not a constructor - Azure Blob Storage + Nodejs

I'm trying to delete an image in my auctionImages container, but when I execute the function from Postman, I get "SharedKeyCredential is not a constructor". I've been following the documentation and I think I have everything set up, but I don't see what's different in my code from the docs. I appreciate any help!
app.delete("/api/removeauctionimages", upload, async (req, res, next) => {
// NOTE(review): in SDK v12+, SharedKeyCredential was renamed to
// StorageSharedKeyCredential, so this destructuring yields undefined and
// `new SharedKeyCredential(...)` throws "is not a constructor".
const { ContainerURL, ServiceURL, StorageURL, SharedKeyCredential } = require("#azure/storage-blob");
const credentials = new SharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
const pipeline = StorageURL.newPipeline(credentials);
// NOTE(review): STORAGE_ACCOUNT_NAME is not defined anywhere in this handler.
const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
// NOTE(review): container names must be lowercase — "auctionImages" would be
// rejected by the service.
const containerName = "auctionImages";
const blobName = "myimage.png";
const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
// NOTE(review): BlockBlobURL is used but never imported, and `aborter` is
// undefined in this scope.
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
await blockBlobURL.delete(aborter)
console.log(`Block blob "${blobName}" is deleted`);
});
Based on the SDK Version 12.1.0 documentation here, looks like Microsoft changed SharedKeyCredential to StorageSharedKeyCredential.
Can you try with that?
Also, please see the samples for this version of SDK here: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/storage/storage-blob/samples/javascript.
Here's the code I wrote using v12.1.0 of Node SDK:
// v12 SDK equivalent: SharedKeyCredential was renamed to
// StorageSharedKeyCredential, and the *URL classes were replaced by client
// classes. Package name is '@azure/storage-blob' ('#' was a paste artifact).
const { StorageSharedKeyCredential, BlobServiceClient } = require("@azure/storage-blob");
const sharedKeyCredential = new StorageSharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
const blobServiceClient = new BlobServiceClient(
  `https://${process.env.AZURE_STORAGE_ACCOUNT}.blob.core.windows.net`,
  sharedKeyCredential
);
const containerName = `temp`; // container names must be lowercase
const blobName = 'test.png';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// Requires an async context (e.g. inside the async Express route handler).
await blockBlobClient.delete();

I want to upload files to Blob Storage NOT AS STREAMS (Buffer, base64) BUT AS original files (jpg, png, jpeg)

I am unable to find a way to upload a file not as a stream (buffer, base64) but as a file (png, jpeg, jpg) to Azure Storage Blob.
MY Stream Code is
// Client setup (from the question). NOTE(review): BlobServiceClient takes
// (url, credentialOrPipeline, options) — passing both sharedKeyCredential and
// defaultAzureCredentials means the third argument is treated as options.
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,
sharedKeyCredential, defaultAzureCredentials
);
// Uploads blob.buffer (e.g. a multer file's buffer) to the 'blue' container
// under blobName, after base64-encoding the bytes.
createBlob = (blobName,blob)=>{
try{
async function main() {
const containerClient = blobServiceClient.getContainerClient('blue');
// NOTE(review): base64-encoding stores the *encoded text*, not the original
// binary image, and inflates the payload by ~33% — this is the "not as
// original file" problem the question describes.
const content = base64_encode(blob.buffer);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId);
return uploadBlobResponse.requestId;
}
// NOTE(review): main() is not awaited, so the try/catch cannot catch async
// failures and createBlob returns before the upload completes.
main();
}
catch(err){
res.send(err)
}
}
// Converts a Buffer to a base64-encoded string.
function base64_encode(file) {
// read binary data
//var bitmap = fs.readFileSync(file);
// convert binary data to base64 encoded string
return file.toString('base64');
}
It seems that you were using @azure/storage-blob and your code was inspired by the "Create a blob by uploading data" sample.
There is a function uploadFile of BlockBlobClient that can directly upload a local file to Azure Blob Storage, as shown below.
Here is my sample code.
const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// Enter your storage account name and shared key
const account = "<your account name>";
const accountKey = "<your account key>";

// Use StorageSharedKeyCredential with storage account and account key.
// StorageSharedKeyCredential is only available in the Node.js runtime,
// not in browsers.
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
);

const containerName = '<your container name>';
const blobName = '<your blob name>';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);

const filePath = '<your local file path>';
// uploadFile reads the file from disk and uploads it directly as binary — no
// base64 round-trip. It returns a Promise, so handle completion explicitly.
blockBlobClient
  .uploadFile(filePath)
  .then((res) => console.log(`Upload block blob ${blobName} successfully`, res.requestId))
  .catch((err) => console.error('Upload failed:', err));
You can specify the content type in the options
// Fragment: `stream`, `bufferSize`, `maxConcurrency`, and `blockBlobClient`
// come from the surrounding scope (see the uploadStream call in the earlier
// question). blobHTTPHeaders sets the stored Content-Type so the blob is
// later served as an image instead of application/octet-stream.
await blockBlobClient.uploadStream(stream, bufferSize, maxConcurrency, {
blobHTTPHeaders: {
blobContentType: "image/jpeg"
}
})

Resources