How do I read metadata for a blob in Azure via the JavaScript SDK?
When I iterate the blobs returned from the specified container I see a metadata property:
But it's undefined, even though there is definitely metadata associated with the blob:
Is there something else I need to do to get the metadata to populate?
import { BlobServiceClient, SharedKeyCredential } from "@azure/storage-blob";

const account = "<redacted>";
const accountKey = "<redacted>";
const sharedKeyCredential = new SharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net`, sharedKeyCredential);
const containerClient = blobServiceClient.getContainerClient(podcastName);
const blobs = await containerClient.listBlobsFlat({ include: ["metadata"] });
for await (const blob of blobs) {
  console.log(blob.name);
  // blob.metadata is undefined
}
// package.json relevant dependencies
"dependencies": {
  "@azure/storage-blob": "^12.0.0-preview.2"
}
// You can try this:
for await (const blob of containerClient.listBlobsFlat()) {
  const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
  const meta = (await blockBlobClient.getProperties()).metadata;
  console.log(meta);
  // process metadata
}
I am assuming you have already declared blockBlobClient and containerClient. If you haven't declared them yet, you can refer here.
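For completeness, a minimal sketch of that setup (the account, key, and container names are placeholders, and StorageSharedKeyCredential is the GA v12 name of the credential class):

const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// Placeholders: substitute your own account name, key, and container.
const credential = new StorageSharedKeyCredential("<account>", "<account key>");
const blobServiceClient = new BlobServiceClient("https://<account>.blob.core.windows.net", credential);
const containerClient = blobServiceClient.getContainerClient("<container>");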
In my test it was null too, so I used getProperties() to get the metadata instead, and it worked; you could give it a try.
const containerName = "test";
const blobName = "test.txt";
let response;
let marker;
do {
  response = await containerURL.listBlobFlatSegment(aborter, marker);
  marker = response.nextMarker;
  for (let blob of response.segment.blobItems) {
    const url = BlockBlobURL.fromContainerURL(containerURL, blob.name);
    const pro = await url.getProperties(aborter);
    console.log(pro.metadata);
  }
} while (marker);
You can fetch the metadata for a blob with the getBlobMetadata method.
var storage = require('azure-storage');
var blobService = storage.createBlobService();
var containerName = 'your-container-name';
var blobName = 'my-awesome-blob';
blobService.getBlobMetadata(containerName, blobName, function(err, result, response) {
  if (err) {
    console.error("Couldn't fetch metadata for blob %s", blobName);
    console.error(err);
  } else if (!response.isSuccessful) {
    console.error("Blob %s wasn't found in container %s", blobName, containerName);
  } else {
    console.log("Successfully fetched metadata for blob %s", blobName);
    console.log(result.metadata);
  }
});
For more details, you could refer to this article.
In v12 you can retrieve metadata when listing blobs by passing the option includeMetadata: true:
await containerClient.listBlobsFlat({ includeMetadata: true });
https://github.com/Azure/azure-sdk-for-js/blob/d2730549e078571df008e929f19c07aaf8f9efd9/sdk/storage/storage-blob/test/containerclient.spec.ts#L198
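For example, a minimal sketch (assuming containerClient is constructed as in the question):

// With includeMetadata: true, each listed item carries its metadata.
for await (const blob of containerClient.listBlobsFlat({ includeMetadata: true })) {
  console.log(blob.name, blob.metadata);
}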
Related
Trying to upload a file to Azure blob storage using the @azure/storage-blob SDK in Node.js:
module.exports.createBlob = (blobName, containerName, blobContent) => {
  return new Promise(async (resolve, reject) => {
    try {
      const sharedKeyCredential = await this.createSharedAccessToken(blobName, 'c')
      const blobServiceClient = new BlobServiceClient(
        `https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net`,
        sharedKeyCredential
      )
      const containerClient = blobServiceClient.getContainerClient(containerName)
      const blockBlobClient = containerClient.getBlockBlobClient(blobName)
      const blob = await blockBlobClient.upload(blobContent, blobContent.length) // implement later
      resolve(blob)
    } catch (err) {
      console.log(err)
      reject(err)
    }
  })
}

module.exports.createSharedAccessToken = (blobName, permission) => {
  return new Promise(async (resolve, reject) => {
    const sharedKeyCredential = new StorageSharedKeyCredential(process.env.AZURE_BLOB_ACCOUNT, process.env.AZURE_BLOB_KEY)
    const containerName = process.env.AZURE_CONTAINER_NAME
    const startsOn = new Date()
    const expiresOn = new Date(startsOn) // declaration assumed; the original snippet omitted it
    expiresOn.setMinutes(expiresOn.getMinutes() + parseInt(autoLogoutDuration.KeyValue))
    const blobSAS = generateBlobSASQueryParameters({
      containerName, // Required
      blobName, // Required
      permissions: BlobSASPermissions.parse(permission), // Required
      startsOn: startsOn, // Required
    },
    sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`
    ).toString()
    resolve(decodeURI(blobSAS))
  })
}
It keeps throwing a "NoAuthenticationInformation" error. The same credentials work for downloading an existing blob, but uploading does not work no matter what I try. Any help would be appreciated.
Following this MS DOC, I tried to reproduce your issue, but I was able to upload files into my Azure blob container using Node.js without any authentication error.
As you are using a shared key credential, we need the required access key permissions on the storage account in the portal.
Also, I am using the @azure/storage-blob SDK for Node.js in my package.json.
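A minimal dependency entry might look like this (the exact version is an assumption; use whichever v12 release you have installed):

"dependencies": {
  "@azure/storage-blob": "^12.1.0"
}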
I also added a require for the @azure/storage-blob SDK in my testupload.js file.
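That is, something like this require line (a sketch; the names match what the code below uses):

const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");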
And I added the below code into my testupload.js file; since I had already created the container, I commented out the container-creation code.
const account = "testa";
const sharedKeyCredential = new StorageSharedKeyCredential("yourstorageaccountname", "your storage account key");
const blobServiceClient1 = new BlobServiceClient(
  // When using AnonymousCredential, the following url should include a valid SAS or support public access
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
);
const blobnewname = "example2.txt";
const blobContent = "hi hello";
const containerClient1 = blobServiceClient1.getContainerClient("test");
const blockBlobClient1 = containerClient1.getBlockBlobClient(blobnewname);
const blob = await blockBlobClient1.upload(blobContent, blobContent.length);
Then I was able to test and upload my files to the Azure blob container.
For more information, please refer to this MS DOC: Azure Storage samples using v12 JavaScript client libraries.
UPDATE:
As suggested by @Setanjan Roy:
Alternate way:
To get rid of this we can use new BlobServiceClient(`https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net?${sasToken}`), i.e. append the SAS token (which the question's createSharedAccessToken resolves to, note the "?" separator) to the account URL instead of passing it as a credential object, as in the sketch below.
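A minimal sketch of that fix inside createBlob (assuming createSharedAccessToken resolves to the SAS string, as in the question's code):

// The SAS string goes into the URL's query string;
// no credential object is passed in that case.
const sasToken = await this.createSharedAccessToken(blobName, 'c')
const blobServiceClient = new BlobServiceClient(
  `https://${process.env.AZURE_BLOB_ACCOUNT}.blob.core.windows.net?${sasToken}`
)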
I have implemented file downloads using the getBlobProperties() and createReadStream(containerName, fullPath, options) methods of the blob service. Now I am trying to download a directory/subdirectory inside my containers using the same methods, but it is not working and throws the error "specified blob does not exist". Though I know the reason for this error, how can I overcome it? I don't want to loop over the blobs and download each file separately; I want the complete folder to be downloaded.
Here is the API:
exports.getBlobChunk = function (req, res) {
  var userrole = utils.sanitizeStr(req.body.userrole);
  var srcFilePath = utils.sanitizeStr(req.body.srcfilePath);
  var fileName = utils.sanitizeStr(req.body.srcfileName);
  var fullPath = srcFilePath + "/" + fileName;
  var startRange = req.headers['startrange'];
  var endRange = req.headers['endrange'];
  genericHandler.getUserSubscMapping().then(function (results) {
    if (results != undefined && results != null) {
      var item = results[0].mapping.find(item => item.name == userrole);
      var sasurl = item.sasurl;
      if (sasurl == null) {
        res.status(500).send("Subscription mapping not configured");
        return;
      }
      var host = sasurl.substring(0, sasurl.lastIndexOf("/"));
      var containerName = sasurl.substring(sasurl.lastIndexOf("/"), sasurl.indexOf("?")).split("/")[1];
      var saskey = sasurl.substring(sasurl.indexOf("?"), sasurl.length);
      var download = item.download; // download usage
      var blobService = storage.createBlobServiceWithSas(host, saskey);
      blobService.getBlobProperties(containerName, fullPath, function (err, properties, status) {
        if (err) {
          res.send(502, "Error fetching file: %s", err.message);
        } else if (!status.isSuccessful) {
          res.send(404, "The file %s does not exist", fullPath);
        } else {
          var contentLength = properties.contentLength / 1024; // bytes to KB
          res.header('Content-Type', "application/zip");
          res.attachment(fileName);
          var options = {
            rangeStart: startRange,
            rangeEnd: endRange
          };
          if (startRange == 0) { // update download size on first chunk
            exports.updateStorageDownload(userrole, contentLength, download);
          }
          blobService.createReadStream(containerName, fullPath, options).pipe(res);
        }
      });
    }
  });
};
Azure Blob storage does not have a concept of folders; everything inside the container is a blob, including the "folders". It is based on a flat storage scheme, not a hierarchical one. So you can't download a directory/subdirectory by its folder name.
For example:
Container structure
  hello.txt
  ...
  test
    test.txt
    test1
      data.json
You need to download the blob files from the directory one by one:
const {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

// Enter your storage account name and shared key
const account = "";
const accountKey = "";
const containerName = "";
const filePath = "D:/downloads/";

// Use StorageSharedKeyCredential with storage account and account key
// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential,
);
async function listBlobs() {
  const containerClient = blobServiceClient.getContainerClient(containerName);
  console.log("list blobs with method listBlobsFlat");
  let iter = containerClient.listBlobsFlat({ prefix: "test/" });
  for await (const item of iter) {
    console.log(`\tBlobItem: name - ${item.name}`);
    await downloadBlobToLocal(containerClient, item.name, filePath);
  }
  console.log("list blobs with method listBlobsByHierarchy");
  let iter1 = containerClient.listBlobsByHierarchy("/", { prefix: "test/" });
  for await (const item of iter1) {
    if (item.kind === "prefix") {
      console.log(`\tBlobPrefix: ${item.name}`);
      await listblob(containerClient, item.name);
    } else {
      console.log(`\tBlobItem: name - ${item.name}`);
      await downloadBlobToLocal(containerClient, item.name, filePath);
    }
  }
}

async function listblob(containerClient, prefix) {
  let iter1 = containerClient.listBlobsByHierarchy("/", { prefix: prefix });
  for await (const item of iter1) {
    if (item.kind === "prefix") {
      console.log(`\tBlobPrefix: ${item.name}`);
    } else {
      console.log(`\tBlobItem: name - ${item.name}`);
      await downloadBlobToLocal(containerClient, item.name, filePath);
    }
  }
}

async function downloadBlobToLocal(containerClient, blobName, filePath) {
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  // Note: the local directory structure under filePath must already exist.
  await blockBlobClient.downloadToFile(filePath + blobName);
}

listBlobs().catch((err) => {
  console.error("Error running sample:", err.message);
});
I've written my own implementation based on this great article:
public async Task<List<BlobDto>> ListWithPrefixAsync(string folder)
{
    // Get a reference to a container named in appsettings.json
    BlobContainerClient container = new BlobContainerClient(_storageConnectionString, _storageContainerName);
    // Create a new list object to hold the results
    List<BlobDto> files = new List<BlobDto>();
    await foreach (BlobItem file in container.GetBlobsAsync(prefix: folder))
    {
        // Add each file retrieved from the storage container to the files list by creating a BlobDto object
        string uri = container.Uri.ToString();
        var name = file.Name;
        var fullUri = $"{uri}/{name}";
        files.Add(new BlobDto
        {
            Uri = fullUri,
            Name = name,
            ContentType = file.Properties.ContentType
        });
    }
    // Return all files to the requesting method
    return files;
}
Getting the list of blob files in a folder is then as simple as:
// Get all files at the Azure Storage Location and return them
List<BlobDto>? files = await _storage.ListWithPrefixAsync(prefix);
Hope this helps.
Happy coding!!
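For readers following this thread in JavaScript, an equivalent prefix listing with the v12 SDK might look like this (a sketch; containerClient is assumed to be built as in the earlier answers, and the folder name is a placeholder):

// List every blob under a virtual folder and collect name, URI, and content type.
async function listWithPrefix(containerClient, folder) {
  const files = [];
  for await (const blob of containerClient.listBlobsFlat({ prefix: folder })) {
    files.push({
      name: blob.name,
      uri: `${containerClient.url}/${blob.name}`,
      contentType: blob.properties.contentType,
    });
  }
  return files;
}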
The functionality I am trying to implement is copy/paste of files/folders from one place to another within the same container. Can I use the same method to copy folders as I do for copying files?
startCopyBlob() throws an error when copy-pasting folders.
input:
newFileName:'new folder_copy1'
newFilePath:'603487d1e966a91fd86b6c11/spe9_rs_2021-03-17_17-14-38/output'
oldFilePath:'603487d1e966a91fd86b6c11/spe9_rs_2021-02-23_11-14-41/output/new folder'
error:
code:'CannotVerifyCopySource'
message:'The specified blob does not exist.'
name:'StorageError'
requestId:'4a8a76bf-701e-0078-17c8-1b4439000000'
stack:'StorageError: The specified blob does not exist.
statusCode:404
Uncaught Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
code:
var host = sasurl.substring(0, sasurl.lastIndexOf("/"));
var containerName = sasurl.substring(sasurl.lastIndexOf("/"), sasurl.indexOf("?")).split("/")[1];
var saskey = sasurl.substring(sasurl.indexOf("?"), sasurl.length);
var blobService = storage.createBlobServiceWithSas(host, saskey);
pasteFiles.forEach(elem => {
  var storageuri = host + "/" + containerName + "/" + elem["oldFilePath"] + saskey;
  var blobName = elem["newFilePath"] + "/" + elem["newFileName"];
  blobService.startCopyBlob(storageuri, containerName, blobName, err => {
    if (err) {
      console.log(err)
      return res.status(500).json({
        message: 'error',
        status: err
      })
      .....
      ..
Azure blob storage has a two-level hierarchy: blob container and blob. It is based on a flat storage scheme, not a hierarchical one, and has no real directory structure. We can only specify a character or string delimiter within blob names to create a virtual hierarchy. So if we want to copy a folder with the Azure Node.js blob SDK, we need to copy the blobs in the folder one by one.
For example
sdk
npm i @azure/storage-blob
code
const {
  BlobServiceClient,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
  ContainerSASPermissions,
} = require("@azure/storage-blob");

const accountName = "andyprivate";
const accountKey = "";
const creds = new StorageSharedKeyCredential(accountName, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  creds
);

async function test() {
  try {
    const sourceContainerClient = blobServiceClient.getContainerClient("input");
    const desContainerClient = blobServiceClient.getContainerClient("output");
    const blobSAS = generateBlobSASQueryParameters(
      {
        expiresOn: new Date(new Date().valueOf() + 86400000),
        containerName: sourceContainerClient.containerName,
        permissions: ContainerSASPermissions.parse("rl"),
      },
      creds
    ).toString();
    for await (const response of sourceContainerClient
      .listBlobsFlat({ prefix: "<your folder name>/" })
      .byPage()) {
      for (const blob of response.segment.blobItems) {
        console.log(`Blob name : ${blob.name}`);
        const sourceBlob = sourceContainerClient.getBlobClient(blob.name);
        const sourceUrl = sourceBlob.url + "?" + blobSAS;
        const res = await (
          await desContainerClient
            .getBlobClient(blob.name)
            .beginCopyFromURL(sourceUrl)
        ).pollUntilDone();
        console.log(res.copyStatus);
      }
    }
  } catch (error) {
    console.log(error);
  }
}

test();
Besides, if you want to directly copy a folder from one container to another container, you can use AzCopy to implement it. For more details, please refer to here and here.
For example
npm i @azure/storage-blob @azure-tools/azcopy-node @azure-tools/azcopy-<your system: win32 | linux | win64>
code
const {
  StorageSharedKeyCredential,
  generateAccountSASQueryParameters,
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
} = require("@azure/storage-blob");

const accountName = "andyprivate";
const accountKey = "";
const creds = new StorageSharedKeyCredential(accountName, accountKey);

// create an account SAS token
const accountSas = generateAccountSASQueryParameters(
  {
    startsOn: new Date(new Date().valueOf() - 8640),
    expiresOn: new Date(new Date().valueOf() + 86400000),
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
    permissions: AccountSASPermissions.parse("rwdlacup").toString(),
    services: AccountSASServices.parse("b").toString(),
  },
  creds
).toString();

const { AzCopyClient } = require("@azure-tools/azcopy-node");
let copyClient = new AzCopyClient();

async function copy() {
  try {
    let jobId = await copyClient.copy(
      {
        type: "RemoteSas",
        resourceUri: "https://<>.blob.core.windows.net/input",
        sasToken: accountSas,
        path: "/<folder name>",
      },
      {
        type: "RemoteSas",
        resourceUri: "https://<>.blob.core.windows.net/outcontainer",
        sasToken: accountSas,
        path: "",
      },
      { recursive: true }
    );
    let status;
    while (!status || status.StatusType !== "EndOfJob") {
      let jobInfo = await copyClient.getJobInfo(jobId);
      status = jobInfo.latestStatus;
      await new Promise((resolve, reject) => setTimeout(resolve, 1000));
    }
    console.log("OK");
  } catch (error) {
    console.log(error);
  }
}

copy();
I'm trying to delete an image in my auctionImages container, but when I execute the function from Postman, I get "SharedKeyCredential is not a constructor". I've been following the documentation and I think I have everything set up, but I don't see what's different in my code from the docs. I appreciate any help!
app.delete("/api/removeauctionimages", upload, async (req, res, next) => {
  const { ContainerURL, ServiceURL, StorageURL, SharedKeyCredential } = require("@azure/storage-blob");
  const credentials = new SharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
  const pipeline = StorageURL.newPipeline(credentials);
  const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
  const containerName = "auctionImages";
  const blobName = "myimage.png";
  const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
  const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
  await blockBlobURL.delete(aborter)
  console.log(`Block blob "${blobName}" is deleted`);
});
Based on the SDK Version 12.1.0 documentation here, it looks like Microsoft changed SharedKeyCredential to StorageSharedKeyCredential.
Can you try with that?
Also, please see the samples for this version of SDK here: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/storage/storage-blob/samples/javascript.
Here's the code I wrote using v12.1.0 of the Node SDK:
const { StorageSharedKeyCredential, BlobServiceClient } = require("@azure/storage-blob");

const sharedKeyCredential = new StorageSharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT, process.env.AZURE_STORAGE_ACCESS_KEY);
const blobServiceClient = new BlobServiceClient(
  `https://${process.env.AZURE_STORAGE_ACCOUNT}.blob.core.windows.net`,
  sharedKeyCredential
);
const containerName = `temp`;
const blobName = 'test.png';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
await blockBlobClient.delete();
I am unable to find a way to upload a file not as a stream (buffer, base64) but as a file (png, jpeg, jpg) to Azure Storage Blob.
My stream code is:
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential, defaultAzureCredentials
);

createBlob = (blobName, blob) => {
  try {
    async function main() {
      const containerClient = blobServiceClient.getContainerClient('blue');
      const content = base64_encode(blob.buffer);
      const blockBlobClient = containerClient.getBlockBlobClient(blobName);
      const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
      console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId);
      return uploadBlobResponse.requestId;
    }
    main();
  }
  catch (err) {
    res.send(err)
  }
}

function base64_encode(file) {
  // read binary data
  //var bitmap = fs.readFileSync(file);
  // convert binary data to base64 encoded string
  return file.toString('base64');
}
It seems that you are using @azure/storage-blob and your code was inspired by Create a blob by uploading data.
There is an uploadFile function on BlockBlobClient that directly uploads a local file to Azure Blob Storage.
Here is my sample code.
const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// Enter your storage account name and shared key
const account = "<your account name>";
const accountKey = "<your account key>";

// Use StorageSharedKeyCredential with storage account and account key
// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential
);

var containerName = '<your container name>';
var blobName = '<your blob name>';
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
var filePath = '<your local file path>';
await blockBlobClient.uploadFile(filePath);
You can specify the content type in the options:
await blockBlobClient.uploadStream(stream, bufferSize, maxConcurrency, {
  blobHTTPHeaders: {
    blobContentType: "image/jpeg"
  }
})
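If you upload with uploadFile instead, the same blobHTTPHeaders option applies (a sketch; the file path is a placeholder):

// uploadFile accepts the same options object as the other upload methods.
await blockBlobClient.uploadFile("<your local file path>", {
  blobHTTPHeaders: {
    blobContentType: "image/jpeg"
  }
})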