I am trying to use an Azure Function to zip all the files inside a blob container using System.IO.Compression.
I could list all the files inside the container using the CloudBlob code below:
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(storageConn);
CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer container = blobClient.GetContainerReference("<container>");

BlobContinuationToken blobToken = null;
var blobs = await container.ListBlobsSegmentedAsync(blobToken);
var fileList = new List<string>();
var blobpath1 = @"https://<pathtocontainer>/test.zip";

foreach (var blbitem in blobs.Results)
{
    if (blbitem is CloudBlockBlob)
    {
        var blobFileName = blbitem.Uri.Segments.Last().Replace("%20", " ");
        var blobFilePath = blbitem.Uri.AbsolutePath.Replace(blbitem.Container.Uri.AbsolutePath + "/", "").Replace("%20", " ");
        var blobPath = blobFilePath.Replace("/" + blobFileName, "");

        log.LogInformation("blob path : " + blbitem.Uri.ToString());
        fileList.Add(blbitem.Uri.ToString());

        string rootpath = @"D:\home\site\wwwroot\ZipandSendFile\temp\";
        string path = rootpath + blobPath;
        log.LogInformation("saving in " + path);

        //Add to zip
        /*
        CloudBlobContainer container = cloudBlobClient.GetContainerReference("<container>");
        CloudBlockBlob blob = container.GetBlockBlobReference(blobName);
        using (FileStream fs = new FileStream(rootpath, FileMode.Create))
        {
            blob.DownloadToStream(fs);
        }
        */
    }
}
After getting the details of each file inside the blob container, I am trying to add them to a zip archive using the System.IO.Compression package.
My attempt to add the files to the zip:
public static void AddFilesToZip(string zipPath, string[] files, ILogger log)
{
    if (files == null || files.Length == 0)
    {
        return;
    }

    log.LogInformation("Executing add files to zip");
    log.LogInformation(zipPath);

    using (var zipArchive = ZipFile.Open(zipPath, ZipArchiveMode.Update))
    {
        log.LogInformation("in Zip archive");
        foreach (var file in files)
        {
            var fileInfo = new FileInfo(file);
            log.LogInformation(fileInfo.FullName);
            zipArchive.CreateEntryFromFile(fileInfo.FullName, fileInfo.Name);
        }
    }
}
But I am getting an access denied error. Any pointers on this?
I resolved the issue by logging into the Kudu cmd shell, cd-ing into the directory, and changing the file attribute with attrib +A.
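As an alternative sketch (not the resolution above, just an illustration building on the listing code from the question), the zip can also be streamed straight into a block blob, which avoids writing to the local file system at all. The output blob name "output.zip" and the reuse of the container and blobs variables from the listing code are assumptions; the same pattern appears in the answers further down this page.

CloudBlockBlob zipBlob = container.GetBlockBlobReference("output.zip");
using (CloudBlobStream zipStream = await zipBlob.OpenWriteAsync())
using (var archive = new ZipArchive(zipStream, ZipArchiveMode.Create))
{
    foreach (var item in blobs.Results.OfType<CloudBlockBlob>())
    {
        // One entry per blob; the blob content is copied directly into the zip entry.
        ZipArchiveEntry entry = archive.CreateEntry(item.Name, CompressionLevel.Optimal);
        using (var entryStream = entry.Open())
        {
            await item.DownloadToStreamAsync(entryStream);
        }
    }
}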
I have an Azure Storage account with an Azure File share. I want to extract a zip archive file to another directory in the file share using Azure Functions. I wrote this code in C#:
CloudFileDirectory rootDirectory = cloudFileShare.GetRootDirectoryReference();
CloudFileDirectory output = rootDirectory.GetDirectoryReference("output");
CloudFile cloudFile = input.GetFileReference("archive1.zip");
using (var stream = await cloudFile.OpenReadAsync())
{
    var file1 = new ZipArchive(stream);
    foreach (var zipEntry in file1.Entries)
    {
        var file2 = output.GetFileReference(zipEntry.Name);
        var fileStream = zipEntry.Open();
        await file2.UploadFromStreamAsync(fileStream); //error is in this line
    }
}
But I got the error:
System.Private.CoreLib: Exception while executing function: HttpTriggerExtract. Microsoft.WindowsAzure.Storage:
Operation is not valid due to the current state of the object.
How to fix this?
Edit: I fixed the error by using a MemoryStream as an intermediate buffer (the stream returned by ZipArchiveEntry.Open() is not seekable and does not report a length, so the file client cannot upload from it directly). This code works:
foreach (var zipEntry in file1.Entries)
{
    var fsz = output.GetFileReference(zipEntry.Name);
    using (var ms = new MemoryStream())
    {
        using (var fileStream = zipEntry.Open())
        {
            await fileStream.CopyToAsync(ms);
            ms.Seek(0, SeekOrigin.Begin);
            await fsz.UploadFromStreamAsync(ms);
        }
    }
}
Regarding the issue, please refer to the following code (I use the WindowsAzure.Storage 9.3.1 package to do that):
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(connectionString);
CloudFileClient cloudFileClient = storageAccount.CreateCloudFileClient();
CloudFileShare cloudFileShare = cloudFileClient.GetShareReference("share2");
CloudFileDirectory rootDirectory = cloudFileShare.GetRootDirectoryReference();
CloudFileDirectory input = rootDirectory.GetDirectoryReference("input");
CloudFileDirectory output = rootDirectory.GetDirectoryReference("output");
CloudFile cloudFile = input.GetFileReference("sample.zip");
using (var stream = await cloudFile.OpenReadAsync())
using (var zipArchive = new ZipArchive(stream)) {
foreach (var entry in zipArchive.Entries)
{
if (entry.Length > 0) {
CloudFile extractedFile = output.GetFileReference(entry.Name);
using (var entryStream = entry.Open())
{
byte[] buffer = new byte[16 * 1024];
using (var ms = await extractedFile.OpenWriteAsync(entry.Length))
{
int read;
while ((read = await entryStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
ms.Write(buffer, 0, read);
}
}
}
}
}
}
The above answer helped me with my problem.
With the new Azure library (12.7.0), you have to code it this way:
string srcDir = @"sourcePath";
string destDir = @"sourcePath\testStorageUnzip";
string srcFileName = "AzureStorageZip.zip";
string azureConnectionString = ConfigurationManager.AppSettings["beecloudfileshare_AzureStorageConnectionString"];
StorageSharedKeyCredential credential = BeeFileManager.GetAzureStorageKeyCredential(azureConnectionString);

Uri srcUri = new Uri("https:" + Path.Combine(srcDir, srcFileName).Replace("\\", "/"), UriKind.Absolute);
Uri destDirUri = new Uri("https:" + Path.Combine(destDir).Replace("\\", "/"), UriKind.Absolute);

// Get a reference to the file we created previously
ShareFileClient sourceFile = new ShareFileClient(srcUri, credential);
ShareDirectoryClient shareDirectoryClient = new ShareDirectoryClient(destDirUri, credential);
shareDirectoryClient.CreateIfNotExistsAsync().GetAwaiter().GetResult();

using (var stream = sourceFile.OpenRead())
using (var zipArchive = new ZipArchive(stream))
{
    foreach (var entry in zipArchive.Entries)
    {
        if (entry.Length > 0)
        {
            //CloudFile extractedFile = output.GetFileReference(entry.Name);
            Uri destUri = new Uri("https:" + Path.Combine(destDir, entry.Name).Replace("\\", "/"), UriKind.Absolute);
            ShareFileClient extractedFile = new ShareFileClient(destUri, credential);
            using (var entryStream = entry.Open())
            {
                using (MemoryStream ms = new MemoryStream())
                {
                    entryStream.CopyTo(ms);
                    //
                    //Sorry I have this part in another method
                    //
                    Uri fileUri = new Uri("https:" + Path.GetDirectoryName(filePath).Replace("\\", "/"), UriKind.Absolute);
                    // Get a reference to the file we created previously
                    ShareDirectoryClient directory = new ShareDirectoryClient(fileUri, credential);
                    ShareFileClient file = directory.GetFileClient(Path.GetFileName(filePath));
                    ms.Seek(0, SeekOrigin.Begin);
                    file.Create(ms.Length);
                    file.Upload(ms);
                }
            }
        }
    }
}
I have multiple files in my Azure storage account, in a Master container. I have created a dynamic container which holds the required files copied from the Master container, and those copied files need to be zipped inside that created container. See the code below, where I have created the zip archive. The zip archive does get created, but when I download it manually and check the files, they seem corrupted (for example, the actual size of the individual files is more than 1 MB, but the files I get after downloading are around 22 KB). The file formats are .ipt and .iam (Autodesk Inventor files).
CloudBlobContainer destContainer = blobClient.GetContainerReference(AzureContainer);
bool isCreated = destContainer.CreateIfNotExists();
var zipblob = destContainer.GetBlockBlobReference("inputAssembly.zip");
using (var stream = await zipblob.OpenWriteAsync())
using (var zip = new ZipArchive(stream, ZipArchiveMode.Create))
{
foreach (var fileName in inputfile)
{
using (var fileStream = new MemoryStream())
{
if (destContainer.GetBlockBlobReference(fileName).Exists())
{
destContainer.GetBlockBlobReference(fileName).DownloadToStream(fileStream);
}
var newZip = new ZipArchive(fileStream, ZipArchiveMode.Create);
var entry = newZip.CreateEntry(fileName, CompressionLevel.NoCompression);
using (var innerFile = entry.Open())
{
fileStream.CopyTo(innerFile);
}
fileStream.Close();
}
}
}
CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer container = blobClient.GetContainerReference(fixedpartContainer);
CloudBlobContainer destContainer1 = blobClient.GetContainerReference(AzureContainer);
bool isCreated = destContainer1.CreateIfNotExists();
var zipblob = destContainer1.GetBlockBlobReference("inputAssembly.zip");
using (var stream = await zipblob.OpenWriteAsync())
{
using (var zipArchive = new ZipArchive(stream, ZipArchiveMode.Create))
{
foreach (var blobName in blobFileNames)
{
var blob = destContainer.GetBlockBlobClient(blobName);
var zipEntry = zipArchive.CreateEntry(blobName);
using var zipStream = zipEntry.Open();
using var fileStream = new MemoryStream();
await blob.DownloadToAsync(fileStream);
await zipStream.WriteAsync(fileStream.ToArray());
AssemblyCreated = true;
}
}
}
I am trying to download all the logs from the container "$logs", but it always throws an exception:
"Could not find a part of the path 'C:\logs\blob\2020\05\24\2300\000000.log'"
public static void GetAnalyticsLogs(CloudBlobClient blobClient, CloudTableClient tableClient)
{
try
{
DateTime time = DateTime.UtcNow;
CloudAnalyticsClient analyticsClient = new CloudAnalyticsClient(blobClient.StorageUri, tableClient.StorageUri, tableClient.Credentials);
IEnumerable<ICloudBlob> results = analyticsClient.ListLogs(StorageService.Blob, time.AddDays(-30), null, LoggingOperations.All, BlobListingDetails.Metadata, null, null);
List<ICloudBlob> logs = results.ToList();
foreach (var item in logs)
{
string name = ((CloudBlockBlob)item).Name;
CloudBlobContainer container = blobClient.GetContainerReference("$logs");
CloudBlockBlob blockBlob = container.GetBlockBlobReference(name);
string path = (#"C:/logs/" + name);
using (var fileStream = System.IO.File.Create(path))
{
blockBlob.DownloadToStream(fileStream);
}
}
}
catch (Exception e)
{
Console.WriteLine(e);
}
}
How can we solve this error?
The reason is that the path contains directories, and the File.Create() method cannot create a file inside a directory that does not exist. So you should create the directory first, then create the file with the File.Create() method.
The code below works fine on my side:
public static void GetAnalyticsLogs(CloudBlobClient blobClient, CloudTableClient tableClient)
{
try
{
DateTime time = DateTime.UtcNow;
CloudAnalyticsClient analyticsClient = new CloudAnalyticsClient(blobClient.StorageUri, tableClient.StorageUri, tableClient.Credentials);
IEnumerable<ICloudBlob> results = analyticsClient.ListLogs(StorageService.Blob, time.AddDays(-30), null, LoggingOperations.All, BlobListingDetails.Metadata, null, null);
List<ICloudBlob> logs = results.ToList();
foreach (var item in logs)
{
string name = ((CloudBlockBlob)item).Name;
CloudBlobContainer container = blobClient.GetContainerReference("$logs");
CloudBlockBlob blockBlob = container.GetBlockBlobReference(name);
//specify the directory without file name
string sub_folder = name.Remove(name.LastIndexOf("/") + 1);
string path = (#"C:/logs/" + sub_folder);
//create the directory if it does not exist.
Directory.CreateDirectory(path);
//specify the file full path
string file_path= (#"C:/logs/" + name);
using (var fileStream = File.Create(file_path))
{
blockBlob.DownloadToStream(fileStream);
}
}
}
catch (Exception e)
{
Console.WriteLine(e);
}
}
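For completeness, a hypothetical call site (the connection string placeholder is an assumption) would build both clients from the same classic storage account object:

// Hypothetical usage: both clients come from one CloudStorageAccount.
CloudStorageAccount account = CloudStorageAccount.Parse("<storage connection string>");
GetAnalyticsLogs(account.CreateCloudBlobClient(), account.CreateCloudTableClient());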
I am looping through the file names from my database, and the same files exist in my Azure storage. I am zipping those n files, downloading them from Azure storage, and saving the zipped file to my local storage. When I extract it and try to open a file, it says damaged/corrupt.
public ActionResult Download(string productid, string YearActiveid)
{
HomePageModel homepagemodel = new HomePageModel();
homepagemodel.ProdHeaderDetail = GetProductHeaderDetail(productid, YearActiveid);
homepagemodel.PriorYearsActive = GetPriorYearActive(productid, YearActiveid);
CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse("DefaultEndpointsProtocol=https;AccountName=<name>;AccountKey=<key>;EndpointSuffix=core.windows.net");
CloudBlobClient cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
CloudBlobContainer cloudBlobContainer = cloudBlobClient.GetContainerReference("product");
var blobFileNames = new string[] { "file1.png", "file2.png", "file3.png", "file4.png" };
var outputMemStream = new MemoryStream();
var zipOutputStream = new ZipOutputStream(outputMemStream);
foreach (var ProdHeaderDetail in homepagemodel.ProdHeaderDetail)
{
zipOutputStream.SetLevel(5);
var blob = cloudBlobContainer.GetBlockBlobReference(ProdHeaderDetail.FileName);
var entry = new ZipEntry(ProdHeaderDetail.FileName);
zipOutputStream.PutNextEntry(entry);
blob.DownloadToStreamAsync(zipOutputStream);
}
zipOutputStream.Finish();
//zipOutputStream.Close();
//zipOutputStream.CloseEntry();
zipOutputStream.IsStreamOwner = false;
outputMemStream.Position = 0;
return File(outputMemStream, "application/zip", "filename.zip");
}
I resolved the issue by adding async and await:
public async Task Download(string productid, string YearActiveid)
await blob.DownloadToStreamAsync(zipOutputStream);
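For reference, a condensed sketch of how the whole corrected action might look (the return type Task<ActionResult> is an assumption so the File(...) result can still be returned; the body otherwise follows the code from the question, with the connection string replaced by a placeholder):

public async Task<ActionResult> Download(string productid, string YearActiveid)
{
    HomePageModel homepagemodel = new HomePageModel();
    homepagemodel.ProdHeaderDetail = GetProductHeaderDetail(productid, YearActiveid);
    homepagemodel.PriorYearsActive = GetPriorYearActive(productid, YearActiveid);

    CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse("<connection string>");
    CloudBlobClient cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
    CloudBlobContainer cloudBlobContainer = cloudBlobClient.GetContainerReference("product");

    var outputMemStream = new MemoryStream();
    var zipOutputStream = new ZipOutputStream(outputMemStream);
    zipOutputStream.SetLevel(5); // compression level only needs to be set once

    foreach (var ProdHeaderDetail in homepagemodel.ProdHeaderDetail)
    {
        var blob = cloudBlobContainer.GetBlockBlobReference(ProdHeaderDetail.FileName);
        var entry = new ZipEntry(ProdHeaderDetail.FileName);
        zipOutputStream.PutNextEntry(entry);
        // Awaiting the download ensures each entry is fully written before the next PutNextEntry.
        await blob.DownloadToStreamAsync(zipOutputStream);
    }

    zipOutputStream.Finish();
    zipOutputStream.IsStreamOwner = false;
    outputMemStream.Position = 0;
    return File(outputMemStream, "application/zip", "filename.zip");
}

The key change is the await: without it, the response is returned while the downloads are still writing into the zip stream, which is why the extracted files appeared corrupt.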
I have the following list of blobs:
VirtualDirectory1/VirtualSubDirectory1/Blob1
VirtualDirectory2/VirtualSubDirectory2/Blob2
VirtualDirectory3/VirtualSubDirectory3/Blob3
I need to list Blob1, Blob2 and Blob3, so that when accessing the CloudBlockBlob.Name property it returns just Blob1, Blob2 or Blob3, WITHOUT the virtual directory prefix.
How can I achieve this?
Best Wishes, Oleg
If you are using the Azure Storage .NET client library (I am using version 3.0.3, in which these methods/overloads are available), you could do something like this:
var container = GetBlobContainer();
foreach (var blobItem in container.ListBlobs(useFlatBlobListing: true))
{
    Console.WriteLine(blobItem.Parent.Uri.MakeRelativeUri(blobItem.Uri));
}
I found you can do this:
var storageAccountConnectionString = "...";
var storageAccount = CloudStorageAccount.Parse(storageAccountConnectionString);
var cloudBlobClient = storageAccount.CreateCloudBlobClient();
var cloudBlobContainer = cloudBlobClient.GetContainerReference("containerName");
foreach (var blob in cloudBlobContainer.ListBlobs())
{
    Console.WriteLine(blob.Uri.Segments.Last());
}
class Program
{
const string _conStr = "storage connection string";
const string _container = "container name";
static void Main(string[] args)
{
var names = new Program().GetBlobNames();
Console.ReadKey();
}
private List<string> GetBlobNames()
{
CloudStorageAccount acc = CloudStorageAccount.Parse(_conStr);
CloudBlobClient blobClient = acc.CreateCloudBlobClient();
CloudBlobContainer cntnr = blobClient.GetContainerReference(_container);
List<IListBlobItem> blobList = cntnr.ListBlobs("").ToList();
List<string> flatList = new List<string>();
List<IListBlobItem> blobItems = new List<IListBlobItem>();
foreach (IListBlobItem blobItem in blobList)
{
//If it is cloudblob directory
if (blobItem.GetType() == typeof(CloudBlobDirectory))
{
CloudBlobDirectory dir = blobItem as CloudBlobDirectory;
GetFilesInDirectory(dir, blobItems);
}
}
return blobItems.Select(b => b.Parent.Uri.MakeRelativeUri(b.Uri).ToString()).ToList();
}
private void GetFilesInDirectory(CloudBlobDirectory directory, List<IListBlobItem> fileList)
{
foreach (var blobItem in directory.ListBlobs())
{
if (blobItem.GetType() == typeof(CloudBlockBlob))
{
CloudBlockBlob blob = (CloudBlockBlob)blobItem;
fileList.Add(blob);
}
else if (blobItem.GetType() == typeof(CloudPageBlob))
{
CloudPageBlob blob = (CloudPageBlob)blobItem;
fileList.Add(blob);
}
else if (blobItem.GetType() == typeof(CloudBlobDirectory))
{
//Fetch recursively all the blobs
CloudBlobDirectory blob = (CloudBlobDirectory)blobItem;
GetFilesInDirectory(blob, fileList);
}
}
}
}