Due to deprecation, I am trying to update my code to the following two packages:
https://www.npmjs.com/package/@azure/storage-blob
https://www.npmjs.com/package/@azure/cosmos
The issue I am having is that my code currently uses packages that predate async/await, whereas all the documentation for the newer packages assumes the use of async functions.
I would like to update my code's functions but without restructuring it with async functions. Is there any documentation out there on how to do that? Or any clear and easy examples?
For example, I am using this example to upload a blob:
const { DefaultAzureCredential } = require("@azure/identity");
const { BlobServiceClient } = require("@azure/storage-blob");
const account = "<account>";
const defaultAzureCredential = new DefaultAzureCredential();
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,
defaultAzureCredential
);
const containerName = "<container name>";
async function main() {
const containerClient = blobServiceClient.getContainerClient(containerName);
const content = "Hello world!";
const blobName = "newblob" + new Date().getTime();
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId);
}
main();
I would like to do this without async/await, and for blockBlobClient.upload keep the function(err, result) { /* do stuff */ } callback style my code already uses.
Similarly, I would like to do the same with the @azure/cosmos functions.
My current packages are:
https://www.npmjs.com/package/documentdb
https://www.npmjs.com/package/azure-storage
Is this possible?
I would really prefer not to restructure all my code...
Thanks
For any method that returns a promise, you can still use a callback-style approach with .then():
blockBlobClient.upload(content, content.length).then((response) => {
  // do something with the response...
});
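To keep the rest of the original example unchanged, here is a minimal sketch of the same upload without async/await, showing plain promise chaining and an err-first callback via Node's util.callbackify (the account and container names are the placeholders from the question; either style on its own is enough):

const { DefaultAzureCredential } = require("@azure/identity");
const { BlobServiceClient } = require("@azure/storage-blob");
const util = require("util");

const account = "<account>";
const containerName = "<container name>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential()
);
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient("newblob" + Date.now());
const content = "Hello world!";

// Plain promise chaining, no async/await
blockBlobClient.upload(content, content.length)
  .then((uploadBlobResponse) => {
    console.log("Upload block blob succeeded", uploadBlobResponse.requestId);
  })
  .catch((err) => {
    console.error(err);
  });

// Or wrap the promise-returning call so existing function(err, result) handlers keep working
const uploadWithCallback = util.callbackify(() => blockBlobClient.upload(content, content.length));
uploadWithCallback(function (err, result) {
  if (err) { console.error(err); return; }
  // do stuff with result
});

The same pattern applies to @azure/cosmos, since its methods also return promises: something like client.database(dbId).container(containerId).items.create(doc).then(...) (dbId, containerId and doc being placeholders) works without restructuring the surrounding code as async.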
I was trying to use a user-assigned managed identity with the DefaultAzureCredential, but am getting the 403 permissions mismatch error. I'm following the code example found in MS docs and it still fails. However, replacing the DefaultAzureCredential with the explicit ManagedIdentityCredential works just fine.
This is my code:
const { BlobServiceClient } = require('@azure/storage-blob');
const { DefaultAzureCredential } = require('@azure/identity');
const {url, clientId} = require('./config');
const cred = new DefaultAzureCredential({managedIdentityClientId: clientId});
const containerClient = new BlobServiceClient(url, cred).getContainerClient('container-name');
(async () => {
const exists = await containerClient.exists();
console.log({exists});
})();
This looks like it should work, but it does not. Any thoughts?
versions:
"#azure/identity": "^1.1.0",
"#azure/storage-blob": "^12.12.0",
node v16.18.1
I tried to reproduce the same code in my environment and got the results below.
Code:
const { BlobServiceClient } = require('@azure/storage-blob');
const { DefaultAzureCredential } = require('@azure/identity');
const url="https://venkat123.blob.core.windows.net";
const clientId="<client-id>";
const cred = new DefaultAzureCredential({managedIdentityClientId: clientId});
const Client = new BlobServiceClient(url, cred);
const containerClient=Client.getContainerClient("test");
(async () => {
const exists = await containerClient.exists();
console.log({exists});
})();
Console:
403, This request is not authorized to perform this operation using this permission.
If you are accessing a storage account with an identity, that identity needs a role such as Storage Blob Data Contributor or Storage Blob Data Owner.
Go to portal -> your storage account -> Access Control (IAM) -> Add -> Add role assignment -> Storage Blob Data Contributor or Storage Blob Data Owner -> assign access to your user-assigned managed identity.
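If you prefer the CLI, a role assignment along these lines should also work (the principal id, subscription, resource group and account names here are placeholders, not values from the question):

az role assignment create \
  --assignee <managed-identity-principal-id> \
  --role "Storage Blob Data Contributor" \
  --scope "/subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/Microsoft.Storage/storageAccounts/<storage-account>"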
Also, I tried the user-assigned managed identity with DefaultAzureCredential, and it worked perfectly.
Code:
const { BlobServiceClient } = require('@azure/storage-blob');
const { DefaultAzureCredential } = require('@azure/identity');
const url="https://venkat123.blob.core.windows.net";
const cred = new DefaultAzureCredential();
const Client = new BlobServiceClient(url, cred);
const containerClient=Client.getContainerClient("test");
(async () => {
const exists = await containerClient.exists();
console.log({exists});
})();
Trying to upload a file to Azure Blob Storage using the @azure/storage-blob SDK in Node.js:
module.exports.createBlob = (blobName, containerName, blobContent) => {
return new Promise(async (resolve, reject) => {
try {
const sharedKeyCredential = await this.createSharedAccessToken(blobName, 'c')
const blobServiceClient = new BlobServiceClient(
`https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net`,
sharedKeyCredential
)
const containerClient = blobServiceClient.getContainerClient(containerName)
const blockBlobClient = containerClient.getBlockBlobClient(blobName)
const blob = await blockBlobClient.upload(blobContent, blobContent.length) // implement later
resolve(blob)
} catch (err) {
console.log(err)
reject(err)
}
})
}
module.exports.createSharedAccessToken = (blobName, permission) => {
return new Promise(async (resolve, reject) => {
const sharedKeyCredential = new StorageSharedKeyCredential(process.env.AZURE_BLOB_ACCOUNT, process.env.AZURE_BLOB_KEY)
const containerName = process.env.AZURE_CONTAINER_NAME
const startsOn = new Date()
const expiresOn = new Date(startsOn) // end time for the SAS window
expiresOn.setMinutes(expiresOn.getMinutes() + parseInt(autoLogoutDuration.KeyValue))
const blobSAS = generateBlobSASQueryParameters({
containerName, // Required
blobName, // Required
permissions: BlobSASPermissions.parse(permission), // Required
startsOn: startsOn, // Required
},
sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`
).toString()
resolve(decodeURI(blobSAS))
})
}
It keeps throwing a "NoAuthenticationInformation" error. The same creds work for downloading an existing blob but uploading is not working no matter what I try. Any help would be appreciated.
Following this MS DOC, I tried to reproduce your issue but was able to upload files into my Azure blob container using Node.js without any authentication error.
Since you are using a shared key credential, make sure the storage account key has all the required permissions enabled in the portal.
I am also using the @azure/storage-blob SDK in Node.js, declared in my package.json and required in my testupload.js file.
I then added the code below to testupload.js (the container already existed, so the container-creation step is omitted).
const account="testa";
const sharedKeyCredential = new StorageSharedKeyCredential("yourstorageaccountname", "your storage key connection string");
const blobServiceClient1 = new BlobServiceClient(
// When using AnonymousCredential, following url should include a valid SAS or support public access
`https://${account}.blob.core.windows.net`,
sharedKeyCredential
);
const blobnewname="example2.txt"
blobContent="hi hello";
const containerClient1 = blobServiceClient.getContainerClient("test")
const blockBlobClient1= containerClient.getBlockBlobClient(blobnewname)
const blob = blockBlobClient1.upload(blobContent, blobContent.length)
Then I was able to test and upload my files to the Azure blob container.
For more information, please refer to this MS DOC: Azure Storage samples using v12 JavaScript client libraries.
UPDATE:-
As suggested by @Setanjan Roy
Alternate way:-
To get rid of this we can use new BlobServiceClient(`https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net${sharedKeyCredential}`)
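A minimal sketch of that alternate way, assuming createSharedAccessToken resolves to a SAS query string rather than a credential object (a '?' separator is needed if the token does not already start with one):

const { BlobServiceClient } = require("@azure/storage-blob");

// inside createBlob, instead of passing the SAS string as a credential:
const sasToken = await this.createSharedAccessToken(blobName, 'c');
const blobServiceClient = new BlobServiceClient(
  `https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net?${sasToken}`
);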
I have implemented **azure blob/storage** in my project. It works fine on smaller files, but when I try to download a file larger than 40 MB, it does not return from blob.downloadToFile().
Here is my code.
API call
exports.downloadSingle = function(req,res){
downloadService.downloadSingleFile(req.params.id).then(function (result) {
res.send(result);
})
}
Download service
var filesLocalLinks = [];
const request = require('request-promise');
const fs = require('fs');
var download = require('download-file')
module.exports.downloadSingleFile = async function (id) {
let promise = new Promise((resolve, reject) => {
filemodel.findOne({_id: id,cancelled: false}).exec(function(error,result){
if(error){
resolve(error);
}else{
blobDownload.downloadFile(result.blobName,result.containerName).then(function(blobResponse){
var filename = path.resolve(__dirname+'/../services/uploads/'+result.fileName);
filename = filename.replace('/myProject','');
filename = process.env.BASE_URL+'/myProject/services'+filename.split('/services')[1];
resolve({file: filename, filename: result.originalname})
}).catch(function(error){
reject(error);
})
}
})
})
let result =await promise;
return {file: result.file,filename: result.filename };
}
Blob service
module.exports.downloadFile = async function (blobName, containerName) {
let promise = new Promise(async (resolve, reject) => {
const account = process.env.BLOB_ACCOUNT;
const accountKey = process.env.BLOB_ACCOUNT_KEY;
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
`https://${account}.blob.core.windows.net`,
sharedKeyCredential
);
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const blobClient = containerClient.getBlobClient(blobName);
var blobResponse = await blobClient.downloadToFile(blobName);
resolve(blobResponse)
})
let result =await promise;
return result;
}
In the blob service, this line takes so long that the page eventually fails with a "page not working" error: var blobResponse = await blobClient.downloadToFile(blobName);
Can anyone help me?
Basically, the flow of the code is: first it downloads the file to our server directory, and then the user can download it. It turned out the code was working fine; the issue was our server's memory. Once we increased the memory, the issue was resolved, but it took me two days to figure that out.
I came across this by chance, but I noticed that you pass blobName as the parameter to the downloadToFile call instead of the target file's path. Also, the line const blockBlobClient = containerClient.getBlockBlobClient(blobName); appears to be unnecessary.
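For reference, a minimal sketch of the two usual fixes, reusing blobClient and result.fileName from the question (the local destination path is an assumption):

const fs = require('fs');
const path = require('path');

// downloadToFile expects a destination file path, not the blob name
const destination = path.join(__dirname, 'uploads', result.fileName);
await blobClient.downloadToFile(destination);

// Or stream the blob to disk, which keeps memory usage flat for large files
const downloadResponse = await blobClient.download(0);
await new Promise((resolve, reject) => {
  downloadResponse.readableStreamBody
    .pipe(fs.createWriteStream(destination))
    .on('finish', resolve)
    .on('error', reject);
});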
I'm trying to delete an image in my auctionImages container, but when I execute the function from Postman, I get "SharedKeyCredential is not a constructor". I've been following the documentation and I think I have everything set up, but I don't see what's different in my code from the docs. I appreciate any help!
app.delete("/api/removeauctionimages", upload, async (req, res, next) => {
const { ContainerURL, ServiceURL, StorageURL, SharedKeyCredential } = require("@azure/storage-blob");
const credentials = new SharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
const pipeline = StorageURL.newPipeline(credentials);
const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
const containerName = "auctionImages";
const blobName = "myimage.png";
const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
await blockBlobURL.delete(aborter)
console.log(`Block blob "${blobName}" is deleted`);
});
Based on the SDK version 12.1.0 documentation here, it looks like Microsoft changed SharedKeyCredential to StorageSharedKeyCredential.
Can you try with that?
Also, please see the samples for this version of SDK here: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/storage/storage-blob/samples/javascript.
Here's the code I wrote using v12.1.0 of Node SDK:
const { StorageSharedKeyCredential, BlobServiceClient } = require("@azure/storage-blob");
const sharedKeyCredential = new StorageSharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
const blobServiceClient = new BlobServiceClient(
`https://${process.env.AZURE_STORAGE_ACCOUNT}.blob.core.windows.net`,
sharedKeyCredential
);
const containerName = `temp`;
const blobName = 'test.png';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
await blockBlobClient.delete();
How can I delete an Azure blob through Node.js? I am using the Azure library v12 SDK for Node.js (https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-nodejs).
I could not find a delete blob method; I want to delete a blob by name.
Just as @Georage said in the comment, you can use the delete method to delete a blob.
Here is my demo:
const { BlobServiceClient, ContainerClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
// Load the .env file if it exists
require("dotenv").config();
async function streamToString(readableStream) {
return new Promise((resolve, reject) => {
const chunks = [];
readableStream.on("data", (data) => {
chunks.push(data.toString());
});
readableStream.on("end", () => {
resolve(chunks.join(""));
});
readableStream.on("error", reject);
});
}
async function main() {
const AZURE_STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
const blobServiceClient = await BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
const containerClient = await blobServiceClient.getContainerClient("test");
const blockBlobClient = containerClient.getBlockBlobClient("test.txt")
const downloadBlockBlobResponse = await blockBlobClient.download(0);
console.log(await streamToString(downloadBlockBlobResponse.readableStreamBody));
const blobDeleteResponse = await blockBlobClient.delete();
console.log(blobDeleteResponse.clientRequestId);
}
main().catch((err) => {
console.error("Error running sample:", err.message);
});
After running this sample, the test.txt file was removed from the test container.
While Jack's answer works, it is more complicated than it needs to be. Instead of creating the blockBlobClient and then deleting it, a simpler way would be to use:
containerClient.deleteBlob('blob-name')
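If the blob might have snapshots, deleteBlob also accepts an options object; a small sketch:

// 'include' deletes the blob together with its snapshots
await containerClient.deleteBlob('blob-name', { deleteSnapshots: 'include' });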