How to upload a file to a storage location through a URL using an Azure Function app

I want to upload a file from Azure Blob Storage to a storage location through a URL using an Azure Function app. I'm able to pull the file from Azure Blob, but not able to upload the file through the URL.
Below is the code I have written. Could anyone help me with this?
#r "Newtonsoft.Json"
#r "Microsoft.WindowsAzure.Storage"
#r "System.IO"
using System;
using System.IO;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using Microsoft.WindowsAzure.Storage.Auth;
using System.Xml;
using System.Collections.Generic;
using Newtonsoft.Json;
using System.Net;
public static void Run(string input, TraceWriter log)
{
log.Info($"C# manual trigger function processed\n");
const string StorageAccountName = "";
const string StorageAccountKey = "";
var storageAccount = new CloudStorageAccount(new StorageCredentials(StorageAccountName, StorageAccountKey), true);
var blobClient = storageAccount.CreateCloudBlobClient();
var container = blobClient.GetContainerReference("hannahtest");
var Destcontainer = blobClient.GetContainerReference("hannahtestoutput");
var blobs = container.ListBlobs();
log.Info($"Creating Client and Connecting");
foreach (IListBlobItem item in container.ListBlobs(null, false))
{
if (item is CloudBlockBlob blockBlob)
{
using (StreamReader reader = new StreamReader(blockBlob.OpenRead())
{
//old content string will read the blockblob (xml)till end
string oldContent1 = reader.ReadToEnd();
log.Info(oldContent1);
var content = new FormUrlEncodedContent(oldContent1);
var response = await client.PostAsync("http://www.example.com/recepticle.aspx", content);
var responseString = await response.Content.ReadAsStringAsync();
log.Info($"Success");
}
}
}
}

Have a look at the Blob Output Binding - that's how blobs are intended to be uploaded from Azure Functions, without messing with the Azure Storage SDK.
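For illustration, here is a minimal run.csx sketch of such an output binding; the binding name outputBlob and the container path are assumptions, declared in the function's function.json rather than taken from the original question:
// function.json would contain a binding such as:
// { "name": "outputBlob", "type": "blob", "direction": "out",
//   "path": "hannahtestoutput/{rand-guid}", "connection": "AzureWebJobsStorage" }
public static void Run(string input, out string outputBlob, TraceWriter log)
{
    // Whatever is assigned here is written to the bound blob by the
    // Functions runtime - no explicit Storage SDK calls are needed.
    outputBlob = input;
    log.Info("Copied the input payload to the output blob.");
}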

Azure Function to upload multiple image files to blob storage:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Auth;
using Microsoft.WindowsAzure.Storage.Blob;

public static class ImageUploadFunction
{
    [FunctionName("ImageUploadFunction")]
    public static async Task<IActionResult> Run([HttpTrigger(AuthorizationLevel.Function, "post")]HttpRequestMessage req, ILogger log)
    {
        // Read the multipart form data into memory.
        var provider = new MultipartMemoryStreamProvider();
        await req.Content.ReadAsMultipartAsync(provider);
        var files = provider.Contents;
        List<string> uploadsurls = new List<string>();
        foreach (var file in files)
        {
            var fileInfo = file.Headers.ContentDisposition;
            Guid guid = Guid.NewGuid();
            string oldFileName = fileInfo.FileName;
            // Use a GUID as the new file name to avoid collisions, keeping the original extension.
            string newFileName = guid.ToString();
            var fileExtension = oldFileName.Split('.').Last().Replace("\"", "").Trim();
            var fileData = await file.ReadAsByteArrayAsync();
            try
            {
                // Upload file to Azure Blob Storage.
                var upload = await UploadFileToStorage(fileData, newFileName + "." + fileExtension);
                uploadsurls.Add(upload);
            }
            catch (Exception ex)
            {
                log.LogError(ex.Message);
                return new BadRequestObjectResult("Something went wrong.");
            }
        }
        return uploadsurls.Count > 0
            ? (ActionResult)new OkObjectResult(uploadsurls)
            : new BadRequestObjectResult("Something went wrong.");
    }

    private static async Task<string> UploadFileToStorage(byte[] fileData, string fileName)
    {
        // Create a StorageCredentials object; in real code read these values from configuration (appsettings.json).
        StorageCredentials storageCredentials = new StorageCredentials("<AccountName>", "<KeyValue>");
        // Create the CloudStorageAccount from the credentials.
        CloudStorageAccount storageAccount = new CloudStorageAccount(storageCredentials, true);
        // Create the blob client.
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        // Get a reference to the blob container.
        CloudBlobContainer container = blobClient.GetContainerReference("digital-material-library-images");
        // Get a reference to the block blob in the container.
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(fileName);
        // Upload the file, then set its content type.
        await blockBlob.UploadFromByteArrayAsync(fileData, 0, fileData.Length);
        blockBlob.Properties.ContentType = "image/jpeg";
        await blockBlob.SetPropertiesAsync();
        return blockBlob.Uri.ToString();
    }
}
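For reference, a minimal client sketch that exercises this function by posting multipart form data; the function URL, key, and file name are placeholders, not from the original answer:
using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

class UploadClient
{
    static async Task Main()
    {
        using var client = new HttpClient();
        using var form = new MultipartFormDataContent();
        // "photo.jpg" is a placeholder local file; the field and file names are arbitrary.
        form.Add(new ByteArrayContent(File.ReadAllBytes("photo.jpg")), "file", "photo.jpg");
        // Replace host and code with your function app's URL and function key.
        var response = await client.PostAsync(
            "https://<your-app>.azurewebsites.net/api/ImageUploadFunction?code=<function-key>", form);
        Console.WriteLine(await response.Content.ReadAsStringAsync()); // JSON array of blob URLs
    }
}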

Related

Can we use blob bindings to upload files to Azure Data Lake Gen2?

I know we can manage a file in ADLS Gen2 using the .NET SDK, as described in the article below:
https://learn.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-directory-file-acl-dotnet
I just want to know if we can also use bindings such as CloudBlockBlob with ADLS Gen2, as we can with a regular Azure storage account.
After testing, the CloudBlockBlob binding can be used with ADLS Gen2.
I use this code to upload files to the ADLS Gen2 account:
using System;
using System.IO;
using Azure;
using Azure.Storage;
using Azure.Storage.Files.DataLake;
using Azure.Storage.Files.DataLake.Models;

namespace Frankadls
{
    class Program
    {
        static async System.Threading.Tasks.Task Main(string[] args)
        {
            Console.WriteLine("Hello World!");
            string accountName = "";
            string accountKey = "";

            StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);
            string dfsUri = "https://" + accountName + ".dfs.core.windows.net";
            DataLakeServiceClient dataLakeServiceClient = new DataLakeServiceClient(new Uri(dfsUri), sharedKeyCredential);

            // Create a file system (container), a directory, and an empty file.
            DataLakeFileSystemClient dataLakeFileSystemClient = await dataLakeServiceClient.CreateFileSystemAsync("test1");
            DataLakeDirectoryClient directoryClient = await dataLakeFileSystemClient.CreateDirectoryAsync("my-directory");
            DataLakeFileClient fileClient = await directoryClient.CreateFileAsync("uploaded-file.txt");

            // Append the local file's bytes, then flush to commit them.
            using FileStream fileStream = File.OpenRead(""); // local file path
            long fileSize = fileStream.Length;
            await fileClient.AppendAsync(fileStream, offset: 0);
            await fileClient.FlushAsync(position: fileSize);
        }
    }
}
And this code, using the CloudBlockBlob input binding, can be triggered successfully:
using System;
using System.IO;
using Microsoft.Azure.WebJobs;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage.Blob;

namespace Frankblobtrigger
{
    public static class Function1
    {
        [FunctionName("Function1")]
        public static void Run([BlobTrigger("test1/{name}", Connection = "conn")]Stream myBlob, string name,
            [Blob("test1/{name}", FileAccess.Read, Connection = "conn")] CloudBlockBlob blob,
            ILogger log)
        {
            log.LogInformation($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");
            log.LogInformation(blob.Uri.AbsoluteUri);
        }
    }
}
Yes, for .NET, just use the blob output binding, and you should be able to use CloudBlockBlob to upload.
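For illustration, a minimal sketch of an HTTP-triggered function that uploads through a blob output binding against the same account; the conn connection setting and the test1/uploaded.txt path are assumptions:
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;

public static class UploadFunction
{
    [FunctionName("UploadFunction")]
    public static async Task<IActionResult> Run(
        [HttpTrigger(AuthorizationLevel.Function, "post")] HttpRequest req,
        // The output binding creates/overwrites test1/uploaded.txt via the "conn" connection string.
        [Blob("test1/uploaded.txt", FileAccess.Write, Connection = "conn")] Stream output)
    {
        // Copy the request body straight into the bound blob.
        await req.Body.CopyToAsync(output);
        return new OkResult();
    }
}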

Performance of Azure SDK v12 vs Storage Data Movement Library?

I know that the Storage Data Movement Library is supposed to be faster when uploading and downloading files to and from blob storage, but I am not seeing its performance benefits compared to the Azure SDK v12. I got an average of 37.463 seconds with the Azure SDK v12 and 41.863 seconds with the Storage Data Movement Library (SDML).
Here is the code using SDML:
namespace FunctionApp
{
    using Microsoft.AspNetCore.Mvc;
    using Microsoft.Azure.Storage;
    using Microsoft.Azure.Storage.Blob;
    using Microsoft.Azure.Storage.DataMovement;
    using Microsoft.Azure.WebJobs;
    using Microsoft.Azure.WebJobs.Extensions.Http;
    using Microsoft.Extensions.Logging;
    using System;
    using System.Diagnostics;
    using System.IO;
    using System.IO.Compression;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Web.Http;

    public static class Function1
    {
        [FunctionName("A")]
        public static async Task<IActionResult> HttpStart(
            [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testRoute")] HttpRequestMessage req,
            ILogger log)
        {
            Stopwatch timer = new Stopwatch();
            timer.Start();
            try
            {
                ServicePointManager.Expect100Continue = false;
                ServicePointManager.DefaultConnectionLimit = Environment.ProcessorCount * 8;
                TransferManager.Configurations.ParallelOperations = 64;

                string fileToDownload = "<URI to zip file in blob storage containing two 300MB files>";
                string connectionString = "<connection string to storage account>";
                string containerName = "<container to upload files to>";

                // Download the source zip into memory.
                using MemoryStream test = new MemoryStream();
                CloudBlockBlob sourceBlob = new CloudBlockBlob(new Uri(fileToDownload));
                await TransferManager.DownloadAsync(sourceBlob, test);

                CloudStorageAccount account = CloudStorageAccount.Parse(connectionString);
                CloudBlobClient blobClient = account.CreateCloudBlobClient();
                CloudBlobContainer container = blobClient.GetContainerReference(containerName);

                // Re-upload each zip entry as its own blob.
                using ZipArchive zipArchive = new ZipArchive(test);
                foreach (ZipArchiveEntry file in zipArchive.Entries)
                {
                    if (!string.IsNullOrEmpty(file.Name))
                    {
                        CloudBlockBlob destBlob = container.GetBlockBlobReference(file.FullName);
                        using Stream stream = file.Open();
                        await TransferManager.UploadAsync(stream, destBlob);
                    }
                }
            }
            catch (Exception exception)
            {
                log.LogError(exception, "Transfer failed");
                return new InternalServerErrorResult();
            }
            timer.Stop();
            return new OkObjectResult(timer.ElapsedMilliseconds);
        }
    }
}
Here is the code using Azure SDK v12:
namespace FunctionApp
{
    using Azure.Storage.Blobs;
    using Azure.Storage.Blobs.Specialized;
    using Microsoft.AspNetCore.Mvc;
    using Microsoft.Azure.WebJobs;
    using Microsoft.Azure.WebJobs.Extensions.Http;
    using Microsoft.Extensions.Logging;
    using System;
    using System.Diagnostics;
    using System.IO;
    using System.IO.Compression;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Web.Http;

    public static class Function1
    {
        [FunctionName("A")]
        public static async Task<IActionResult> HttpStart(
            [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testRoute")] HttpRequestMessage req,
            ILogger log)
        {
            Stopwatch timer = new Stopwatch();
            timer.Start();
            try
            {
                ServicePointManager.Expect100Continue = false;
                ServicePointManager.DefaultConnectionLimit = Environment.ProcessorCount * 8;

                string fileToDownload = "<URI to zip file in blob storage containing two 300MB files>";
                string connectionString = "<connection string to storage account>";
                string containerName = "<container to upload files to>";

                // Download the source zip into memory.
                using MemoryStream test = new MemoryStream();
                BlockBlobClient client = new BlockBlobClient(new Uri(fileToDownload));
                await client.DownloadToAsync(test);

                BlobContainerClient containerClient = new BlobContainerClient(connectionString, containerName);

                // Re-upload each zip entry as its own blob.
                using ZipArchive zipArchive = new ZipArchive(test);
                foreach (ZipArchiveEntry file in zipArchive.Entries)
                {
                    if (!string.IsNullOrEmpty(file.Name))
                    {
                        BlockBlobClient blockBlobClient = containerClient.GetBlockBlobClient(file.FullName);
                        using Stream stream = file.Open();
                        await blockBlobClient.UploadAsync(stream);
                    }
                }
            }
            catch (Exception exception)
            {
                log.LogError(exception, "Transfer failed");
                return new InternalServerErrorResult();
            }
            timer.Stop();
            return new OkObjectResult(timer.ElapsedMilliseconds);
        }
    }
}
For the Data Movement Library, you may set ParallelOperations and BlockSize, like below:
TransferManager.Configurations.ParallelOperations = 20;
TransferManager.Configurations.BlockSize = 20971520 * 2; // 40 MB
I did the test on my side; with these settings, SDML is faster.

Issue creating a file and writing content into it in an Azure Function

I am writing an Azure Function that creates a file and uploads text to Azure Storage, but I get a 500 Internal Server Error. Below is the code of my Azure Function.
#r "Newtonsoft.Json"
#r "Microsoft.WindowsAzure.Storage"
using System.Net;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Primitives;
using Newtonsoft.Json;
using Microsoft.WindowsAzure.Storage.Auth;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.File;
using System;
public static async Task<IActionResult> Run(HttpRequest req, ILogger log)
{
log.LogInformation("C# HTTP trigger function processed a request.");
string strFileName = "321rahila.csv";//req.Query["name"];
string Content ="Hello File";
//string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
// dynamic data = JsonConvert.DeserializeObject(requestBody);
// strFileName = strFileName ?? data?.strFileName;
string StorageAccountName = "xyz";
string StorageKey = "i0PNZ6Ykse7oSSfUzFeA36rQfAv9UZnJ5wybQWh5Jol0NRM4sal4s8B3ipkjvfzcsP8/gnI6A==";`enter code here`
string strShareName = "lables";
//string StorageScheme = "SharedKey";
// string FileEndPoint = string.Format("https://{0}.file.core.windows.net/", StorageAccountName);
CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentials(StorageAccountName, StorageKey), true);
var fileClient = storageAccount.CreateCloudFileClient();
var share = fileClient.GetShareReference(strShareName);
// if (share.Exists())
{
var rootDir = share.GetRootDirectoryReference();
CloudFile file = rootDir.GetFileReference(strFileName);
var fileToCreate = rootDir.GetFileReference(strFileName);
**fileToCreate.UploadText(Content);**
}
return strFileName != null
? (ActionResult)new OkObjectResult($"Hello, {strFileName}")
: new BadRequestObjectResult("Please pass a name on the query string or in the request body");
}
I get the error when I uncomment the line fileToCreate.UploadText(Content);, and without it I am unable to create a file and upload text into it. The same code works fine from Visual Studio.
The problem is that the Azure Function in the portal could not find the Microsoft.WindowsAzure.Storage package. The right way is to create a project.json file and reference the NuGet package explicitly. Below is my project.json file:
{
    "frameworks": {
        "net46": {
            "dependencies": {
                "Microsoft.WindowsAzure.Storage": "9.3.3"
            }
        }
    }
}
Then reference the assembly in run.csx:
#r "Microsoft.WindowsAzure.Storage"
And below is my working code:
#r "Newtonsoft.Json"
#r "Microsoft.WindowsAzure.Storage"
using System.Net;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Primitives;
using Newtonsoft.Json;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.File;
using Microsoft.WindowsAzure.Storage.Auth;
public static async Task<IActionResult> Run(HttpRequest req, ILogger log)
{
log.LogInformation("C# HTTP trigger function processed a request.");
string name = req.Query["name"];
string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
dynamic data = JsonConvert.DeserializeObject(requestBody);
name = name ?? data?.name;
string strFileName = "321rahila.csv";//req.Query["name"];
string Content ="Hello File";
string StorageAccountName = "my account";
string StorageKey = "my key";
string strShareName = "windows";
//string StorageScheme = "SharedKey";
// string FileEndPoint = string.Format("https://{0}.file.core.windows.net/", StorageAccountName);
CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentials(StorageAccountName, StorageKey), true);
var fileClient = storageAccount.CreateCloudFileClient();
var share = fileClient.GetShareReference(strShareName);
// if (share.Exists())
{
var rootDir = share.GetRootDirectoryReference();
CloudFile file = rootDir.GetFileReference(strFileName);
var fileToCreate = rootDir.GetFileReference(strFileName);
await fileToCreate.UploadTextAsync(Content);
}
return name != null
? (ActionResult)new OkObjectResult($"Hello, {name}")
: new BadRequestObjectResult("Please pass a name on the query string or in the request body");
}
Hope this helps.

Reading the content of a blob from an Azure Function

I'm trying to read the content of a blob inside an Azure Function. Here's the code:
string storageConnectionString = "myConnectionString";
CloudStorageAccount storageAccount;
CloudStorageAccount.TryParse(storageConnectionString, out storageAccount);
CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer drawingsContainer = cloudBlobClient.GetContainerReference("drawcontainer");
var blob = drawingsContainer.GetBlockBlobReference("notes.txt");
using (StreamReader reader = new StreamReader(blob.OpenRead()))
{
    content = reader.ReadToEnd();
}
return new OkObjectResult(content);
Note: if I comment out the using block and return the blob, i.e.
return new OkObjectResult(blob);
I get back the blob object. However, if I use the using block, I get a 500. Any idea why I can't get the content?
HTTP 500 indicates that the code has an error. The most probable cause here is the variable content: it is never declared, and a variable declared inside the using block would be scoped to that block only. Declare it outside the using block, something like below:
try
{
    string content = string.Empty;
    using (StreamReader reader = new StreamReader(blob.OpenRead()))
    {
        content = reader.ReadToEnd();
    }
}
catch (Exception ex)
{
    // Log the exception to get the details.
}
Always make use of try/catch to get more details about errors in the code.
The OpenRead method didn't exist, so I used the async one, and that solved it. I got to this solution after creating the Azure Function in Visual Studio and publishing it, and it works. Here's the code I used:
public static class Function1
{
    [FunctionName("Function1")]
    public static async Task<ActionResult> Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)]HttpRequest req, TraceWriter log)
    {
        log.Info("C# HTTP trigger function processed a request.");
        string storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=avitest19a1c;AccountKey=<AccessKey>";
        CloudStorageAccount storageAccount = null;
        CloudStorageAccount.TryParse(storageConnectionString, out storageAccount);
        CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer drawingsContainer = cloudBlobClient.GetContainerReference("drawcontainer");
        var blob = drawingsContainer.GetBlockBlobReference("notes.txt");
        string content = string.Empty;
        // OpenReadAsync returns a stream over the blob's content.
        var contentStream = await blob.OpenReadAsync();
        using (StreamReader reader = new StreamReader(contentStream))
        {
            content = reader.ReadToEnd();
        }
        return new OkObjectResult(content);
    }
}

Can I set the access tier when I upload a blob? If yes, how do I do that?

I did not find any way to set the access tier of a blob when uploading it. I know I can set a blob's access tier after it is uploaded, but I want to know if I can upload the blob and set its access tier in a single step, and whether there is a Golang API to do that.
I googled it but found nothing helpful so far.
Here is what I do now: upload the blob, then set its access tier.
// Here's how to upload a blob.
blobURL := containerURL.NewBlockBlobURL(fileName)
ctx := context.Background()
_, err = azblob.UploadBufferToBlockBlob(ctx, data, blobURL, azblob.UploadToBlockBlobOptions{})
handleErrors(err)

// Set the tier in a second request.
_, err = blobURL.SetTier(ctx, azblob.AccessTierCool, azblob.LeaseAccessConditions{})
handleErrors(err)
But I want to upload a blob and set its tier in one step, not two steps as I do now.
The short answer is No. According to the official REST API reference, the operation you want maps to two REST APIs: Put Blob and Set Blob Tier. All SDK APIs for the different languages are implemented as wrappers over these REST APIs.
Only for Page Blob can you set the x-ms-access-tier header in the upload request itself; for Block Blob, the two separate steps are necessary and cannot be merged.
It is now possible using the new x-ms-access-tier header. Below is a sample that calls the REST API directly with Shared Key auth:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Mime;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;

namespace WhateverYourNameSpaceIs
{
    class Program
    {
        private const string StorageKey = @"PutYourStorageKeyHere";
        private const string StorageAccount = "PutYourStorageAccountHere";
        private const string ContainerName = "PutYourContainerNameHere";
        private const string Method = "PUT";
        private const string ContentType = MediaTypeNames.Image.Jpeg;

        private static readonly string BlobStorageTier = StorageTier.Cool;

        private static readonly List<Tuple<string, string>> HttpContentHeaders = new List<Tuple<string, string>>()
        {
            new Tuple<string, string>("x-ms-access-tier", BlobStorageTier),
            new Tuple<string, string>("x-ms-blob-type", "BlockBlob"),
            new Tuple<string, string>("x-ms-date", DateTime.UtcNow.ToString("R")),
            new Tuple<string, string>("x-ms-version", "2018-11-09"),
            new Tuple<string, string>("Content-Type", ContentType),
        };

        static async Task Main()
        {
            await UploadBlobToAzure("DestinationFileNameWithoutPath", "LocalFileNameWithPath");
        }

        static async Task<int> UploadBlobToAzure(string blobName, string fileName)
        {
            int returnValue = (int)AzureCopyStatus.Unknown;
            try
            {
                using var client = new HttpClient();
                using var content = new ByteArrayContent(File.ReadAllBytes(fileName));
                HttpContentHeaders.ForEach(x => content.Headers.Add(x.Item1, x.Item2));

                // Build the Shared Key string-to-sign: verb, standard headers,
                // canonicalized x-ms-* headers, then the canonicalized resource.
                var stringToSign = $"{Method}\n\n\n{content.Headers.ContentLength.Value}\n\n{ContentType}\n\n\n\n\n\n\n";
                foreach (var httpContentHeader in HttpContentHeaders.Where(x => x.Item1 != "Content-Type").OrderBy(x => x.Item1))
                    stringToSign += $"{httpContentHeader.Item1.ToLower()}:{httpContentHeader.Item2}\n";
                stringToSign += $"/{StorageAccount}/{ContainerName}/{blobName}";

                HMACSHA256 hmac = new HMACSHA256(Convert.FromBase64String(StorageKey));
                string signature = Convert.ToBase64String(hmac.ComputeHash(Encoding.UTF8.GetBytes(stringToSign)));
                client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("SharedKey", $"{StorageAccount}:{signature}");

                var httpResponse = await client.PutAsync($"https://{StorageAccount}.blob.core.windows.net/{ContainerName}/{blobName}", content);
                returnValue = (int)httpResponse.StatusCode;
            }
            catch (IOException ioException)
            {
                Console.WriteLine(ioException.ToString());
                returnValue = (int)AzureCopyStatus.FileNotFound;
            }
            catch (Exception exception)
            {
                Console.WriteLine(exception.ToString());
                returnValue = (int)AzureCopyStatus.Error;
            }
            return returnValue;
        }

        internal enum AzureCopyStatus
        {
            Unknown = -1,
            Error = 0,
            FileNotFound = 2
        }

        internal static class StorageTier
        {
            internal static string Cool = "Cool";
            internal static string Hot = "Hot";
        }
    }
}
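For completeness: if you can use the Azure.Storage.Blobs v12 .NET SDK rather than raw REST, the tier can be set in the same upload call through BlobUploadOptions. A minimal sketch, with the connection string, container, and file name as placeholders:
using System.Threading.Tasks;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;

class OneStepUpload
{
    static async Task Main()
    {
        var blobClient = new BlobClient("<connection string>", "<container>", "file.txt");
        // AccessTier in BlobUploadOptions sends x-ms-access-tier with the
        // Put Blob request, so the blob lands in the Cool tier in one step.
        await blobClient.UploadAsync("file.txt", new BlobUploadOptions
        {
            AccessTier = AccessTier.Cool
        });
    }
}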
