How can I merge two audio files (.mp3, .aac) into a single audio file in Metro? Is it possible?
I don't know about audio files specifically, but try this to combine the files byte by byte:
public static async Task<StorageFile> CombineFiles(string[] files, string filepath)
{
    StorageFile sf;
    byte[] b;
    byte[] allfiles = new byte[0];
    int position = 0;
    for (int i = 0; i < files.Length; i++)
    {
        sf = await StorageFile.GetFileFromPathAsync(files[i]);
        b = await Conversions.Convert_StorageFileToByteArray_Async(sf);
        // Grow the combined buffer and append this file's bytes at the end.
        Array.Resize(ref allfiles, allfiles.Length + b.Length);
        Array.Copy(b, 0, allfiles, position, b.Length);
        position += b.Length;
    }
    // Keep only the file name portion of the requested path; the combined
    // file is written to the app's temporary folder under that name.
    string filename = filepath.Substring(filepath.LastIndexOf("\\") + 1);
    StorageFolder tempfolder = Windows.Storage.ApplicationData.Current.TemporaryFolder;
    sf = await Conversions.Convert_ByteArrayToStorageFile(allfiles, tempfolder, filename);
    return sf;
}

// Convert a byte array to a StorageFile.
public static async Task<StorageFile> Convert_ByteArrayToStorageFile(Byte[] data_byte, StorageFolder folder, string fileName)
{
    StorageFile file = await folder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
    using (IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.ReadWrite))
    {
        using (IOutputStream outputStream = fileStream.GetOutputStreamAt(0))
        {
            using (DataWriter dataWriter = new DataWriter(outputStream))
            {
                // Write the bytes into the newly created file.
                dataWriter.WriteBytes(data_byte);
                await dataWriter.StoreAsync();
                dataWriter.DetachStream();
            }
            // Flush the buffered data to the underlying stream.
            await outputStream.FlushAsync();
        }
        await fileStream.FlushAsync();
        return file;
    }
}
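For example, a hypothetical call would look like the snippet below (both source paths and the output name are placeholders, not from the question). Keep in mind this is raw byte concatenation: it can work for frame-based streams such as MP3 or ADTS .aac, but it will not produce a valid file for container formats such as .m4a/.mp4.
// Hypothetical usage; the paths and the output name are placeholders.
string[] parts =
{
    ApplicationData.Current.LocalFolder.Path + "\\part1.mp3",
    ApplicationData.Current.LocalFolder.Path + "\\part2.mp3"
};
// The combined file is created in the app's temporary folder as "merged.mp3".
StorageFile merged = await CombineFiles(parts, "merged.mp3");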
I am downloading videos from my server into the application's sandbox storage, at this path:
final String filePath = this.getExternalFilesDir("videos") + "/" + name + ".mp4";
Now I want to copy some specific files from the path above to another folder under DCIM, so users can discover the videos in the gallery.
I am able to create the destination folder, but I don't understand how to copy or move the files into it.
File dir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "MyFolder");
if (!dir.exists()) {
    boolean rv = dir.mkdir();
    Log.d(TAG, "Folder creation " + (rv ? "success" : "failed"));
}
Can anyone help?
Solved it using standard Java I/O streams:
String inputFile = "/" + name + ".mp4";
String inputPath = this.getExternalFilesDir("videos") + "";
String outputPath = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "MyFolder") + "";

private void copyFile(String inputFile, String inputPath, String outputPath) {
    try {
        File dir = new File(outputPath);
        if (!dir.exists()) {
            if (!dir.mkdirs()) {
                return;
            }
        }
        try (InputStream inputStream = new FileInputStream(inputPath + inputFile)) {
            try (OutputStream outputStream = new FileOutputStream(outputPath + inputFile)) {
                File source = new File(inputPath + inputFile);
                byte[] buffer = new byte[1024];
                int read;
                long length = source.length();
                long total = 0;
                while ((read = inputStream.read(buffer)) != -1) {
                    total += read;
                    int progress = (int) ((total * 100) / length);
                    if (progress == 100) {
                        Toast.makeText(VideoActivity.this, "Completed", Toast.LENGTH_SHORT).show();
                    }
                    outputStream.write(buffer, 0, read);
                }
            }
        }
    } catch (Exception e) {
        FirebaseCrashlytics.getInstance().recordException(e);
    }
}
We need to bundle multiple files into a ZIP archive and download it. Could you please suggest a way to do this in ASP.NET Core without using any third-party libraries?
In ASP.NET MVC we could achieve this with System.IO.Packaging (https://msdn.microsoft.com/en-us/library/system.io.packaging.aspx). Is this possible in ASP.NET Core 2.0?
I think this will help you a lot. It relies only on the built-in System.IO.Compression types (ZipFile, ZipArchive), so no third-party library is needed:
protected FileStreamResult DownloadFolder(string path, string[] names, int count)
{
    FileStreamResult fileStreamResult;
    var tempPath = Path.Combine(Path.GetTempPath(), "temp.zip");
    // Single selection: zip the whole directory at 'path'.
    if (names.Length == 1)
    {
        path = path.Remove(path.Length - 1);
        ZipFile.CreateFromDirectory(path, tempPath, CompressionLevel.Fastest, true);
        FileStream fileStreamInput = new FileStream(tempPath, FileMode.Open, FileAccess.Read, FileShare.Delete);
        fileStreamResult = new FileStreamResult(fileStreamInput, "APPLICATION/octet-stream");
        fileStreamResult.FileDownloadName = names[0] + ".zip";
    }
    // Multiple selections: add each selected file or folder to the archive.
    else
    {
        string extension;
        string currentDirectory;
        ZipArchiveEntry zipEntry;
        ZipArchive archive;
        if (count == 0)
        {
            string directory = Path.GetDirectoryName(path);
            string rootFolder = Path.GetDirectoryName(directory);
            using (archive = ZipFile.Open(tempPath, ZipArchiveMode.Update))
            {
                for (var i = 0; i < names.Length; i++)
                {
                    currentDirectory = Path.Combine(rootFolder, names[i]);
                    foreach (var filePath in Directory.GetFiles(currentDirectory, "*.*", SearchOption.AllDirectories))
                    {
                        zipEntry = archive.CreateEntryFromFile(this.contentRootPath + "\\" + filePath, names[i] + filePath.Substring(currentDirectory.Length), CompressionLevel.Fastest);
                    }
                }
            }
        }
        else
        {
            string lastSelected = names[names.Length - 1];
            string selectedExtension = Path.GetExtension(lastSelected);
            if (selectedExtension == "")
            {
                path = Path.GetDirectoryName(Path.GetDirectoryName(path));
                path = path.Replace("\\", "/") + "/";
            }
            using (archive = ZipFile.Open(tempPath, ZipArchiveMode.Update))
            {
                for (var i = 0; i < names.Length; i++)
                {
                    extension = Path.GetExtension(names[i]);
                    currentDirectory = Path.Combine(path, names[i]);
                    if (extension == "")
                    {
                        foreach (var filePath in Directory.GetFiles(currentDirectory, "*.*", SearchOption.AllDirectories))
                        {
                            zipEntry = archive.CreateEntryFromFile(this.contentRootPath + "\\" + filePath, filePath.Substring(path.Length), CompressionLevel.Fastest);
                        }
                    }
                    else
                    {
                        zipEntry = archive.CreateEntryFromFile(this.contentRootPath + "\\" + currentDirectory, names[i], CompressionLevel.Fastest);
                    }
                }
            }
        }
        FileStream fileStreamInput = new FileStream(tempPath, FileMode.Open, FileAccess.Read, FileShare.Delete);
        fileStreamResult = new FileStreamResult(fileStreamInput, "APPLICATION/octet-stream");
        fileStreamResult.FileDownloadName = "folders.zip";
    }
    // FileShare.Delete lets us delete the temp zip here; it is actually removed
    // once the returned stream has been written to the response and closed.
    if (File.Exists(tempPath))
    {
        File.Delete(tempPath);
    }
    return fileStreamResult;
}
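If you only need to zip a known set of files and return them from a controller action, a more minimal sketch is to build the archive in memory with ZipArchive. The action name and file paths below are hypothetical, not taken from the question:
// Minimal sketch: zip a few files from disk into memory and return the archive.
// The action name and paths are placeholders; CreateEntryFromFile comes from System.IO.Compression.ZipFileExtensions.
[HttpGet]
public IActionResult DownloadZip()
{
    var filesToZip = new[] { @"C:\data\report1.csv", @"C:\data\report2.csv" };
    using (var ms = new MemoryStream())
    {
        // leaveOpen: true keeps the MemoryStream usable after the archive is finalized.
        using (var archive = new ZipArchive(ms, ZipArchiveMode.Create, leaveOpen: true))
        {
            foreach (var filePath in filesToZip)
            {
                archive.CreateEntryFromFile(filePath, Path.GetFileName(filePath), CompressionLevel.Fastest);
            }
        }
        return File(ms.ToArray(), "application/zip", "files.zip");
    }
}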
I've got an ASP.NET Core application that uploads images to Azure. I am attempting to resize an image using Magick.NET before uploading it to an Azure Blob container. So far I've only managed to save the resized image to a folder on the local hard drive. Is this the correct way of writing this?
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Create(Product products)
{
    var files = products.UploudThumbnail;
    List<string> imageUrls = new List<string>();
    List<string> names = new List<string>();
    if (files != null)
    {
        foreach (var file in files)
        {
            if (ModelState.IsValid)
            {
                if (file.ContentType == "image/jpeg" || file.ContentType == "image/jpg")
                {
                    if (file.Length < 1 * 1000 * 1000)
                    {
                        var parsedContentDisposition = ContentDispositionHeaderValue.Parse(file.ContentDisposition);
                        var fileName = parsedContentDisposition.FileName.Trim('"');
                        names.Add(fileName);
                        fileName = Guid.NewGuid().ToString() + "-" + fileName;
                        CloudBlockBlob cloudBlockBlob = cloudBlobContainer.GetBlockBlobReference(fileName);
                        cloudBlockBlob.Properties.ContentType = file.ContentType;
                        await cloudBlockBlob.UploadFromStreamAsync(file.OpenReadStream());
                        imageUrls.Add(cloudBlockBlob.Uri.AbsoluteUri);
                        const int size = 20;
                        const int quality = 75;
                        using (var image = new MagickImage(file.OpenReadStream()))
                        {
                            image.Resize(size, size);
                            image.Strip();
                            image.Quality = quality;
                            // How do I save this resized image to Azure instead of to disk?
                            image.Write(fileName);
                        }
                    }
                    else
                    {
                        ModelState.AddModelError("UploudThumbnail", "Max size not accepted");
                    }
                }
                else
                {
                    ModelState.AddModelError("UploudThumbnail", "Only jpeg and jpg are accepted");
                }
            }
        }
    }
    _context.Add(products);
    await _context.SaveChangesAsync();
    return RedirectToAction(nameof(Index));
}
Just write the image to a memory stream and upload it using the UploadFromStreamAsync method.
Example (pseudo):
using (var memStream = new MemoryStream())
{
    image.Write(memStream);
    memStream.Seek(0, SeekOrigin.Begin);
    await cloudBlockBlob.UploadFromStreamAsync(memStream);
}
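For example, here is a hedged sketch that uploads the resized image as a separate blob. The "thumb-" name prefix is my own assumption; cloudBlobContainer, fileName, file and image are the variables from the Create action above:
// Hypothetical: store the resized thumbnail under an assumed "thumb-" prefix.
var thumbBlob = cloudBlobContainer.GetBlockBlobReference("thumb-" + fileName);
thumbBlob.Properties.ContentType = file.ContentType;
using (var memStream = new MemoryStream())
{
    image.Write(memStream);              // Magick.NET encodes the resized image into the stream
    memStream.Seek(0, SeekOrigin.Begin); // rewind before uploading
    await thumbBlob.UploadFromStreamAsync(memStream);
}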
I have a 2 GB file in blob storage and am building a console application that will download this file to the desktop. The requirement is to split it into 100 MB chunks and append a number to each file name. I do not need to re-combine those files again; all I need is the chunks.
I currently have this code from "Azure download blob part".
But I cannot figure out how to stop writing the current file once it reaches 100 MB and start a new one.
Any help will be appreciated.
Update: Here is my code
CloudStorageAccount account = CloudStorageAccount.Parse(connectionString);
var blobClient = account.CreateCloudBlobClient();
var container = blobClient.GetContainerReference(containerName);
var file = uri;
var blob = container.GetBlockBlobReference(file);
// First fetch the size of the blob. We use this to create an empty file with size = blob's size.
blob.FetchAttributes();
var blobSize = blob.Properties.Length;
long blockSize = (1 * 1024 * 1024); // 1 MB chunk
blockSize = Math.Min(blobSize, blockSize);
// Create an empty file of the blob's size.
using (FileStream fs = new FileStream(file, FileMode.Create))
{
    fs.SetLength(blobSize); // Set its size
}
var blobRequestOptions = new BlobRequestOptions
{
    RetryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(5), 3),
    MaximumExecutionTime = TimeSpan.FromMinutes(60),
    ServerTimeout = TimeSpan.FromMinutes(60)
};
long startPosition = 0;
long currentPointer = 0;
long bytesRemaining = blobSize;
do
{
    var bytesToFetch = Math.Min(blockSize, bytesRemaining);
    using (MemoryStream ms = new MemoryStream())
    {
        // Download one range (1 MB by default).
        blob.DownloadRangeToStream(ms, currentPointer, bytesToFetch, null, blobRequestOptions);
        ms.Position = 0;
        var contents = ms.ToArray();
        using (var fs = new FileStream(file, FileMode.Open)) // Open the target file
        {
            fs.Position = currentPointer; // Move to the position where this range belongs
            fs.Write(contents, 0, contents.Length); // Write the downloaded range there
        }
        startPosition += blockSize;
        currentPointer += contents.Length; // Update pointer
        bytesRemaining -= contents.Length; // Update bytes remaining
        Console.WriteLine(fileName + dateTimeStamp + ".csv " + (startPosition / 1024 / 1024) + "/" + (blob.Properties.Length / 1024 / 1024) + " MB downloaded...");
    }
}
while (bytesRemaining > 0);
As I understand it, you could break your blob into the pieces you expect (100 MB each) and then leverage CloudBlockBlob.DownloadRangeToStream to download each chunk into its own file. Here is my code snippet for reference:
ParallelDownloadBlob
private static void ParallelDownloadBlob(Stream outPutStream, CloudBlockBlob blob, long startRange, long endRange)
{
    blob.FetchAttributes();
    int bufferLength = 1 * 1024 * 1024; // 1 MB chunk for download
    long blobRemainingLength = endRange - startRange;
    Queue<KeyValuePair<long, long>> queues = new Queue<KeyValuePair<long, long>>();
    long offset = startRange;
    while (blobRemainingLength > 0)
    {
        long chunkLength = (long)Math.Min(bufferLength, blobRemainingLength);
        queues.Enqueue(new KeyValuePair<long, long>(offset, chunkLength));
        offset += chunkLength;
        blobRemainingLength -= chunkLength;
    }
    Parallel.ForEach(queues,
        new ParallelOptions()
        {
            MaxDegreeOfParallelism = 5
        }, (queue) =>
        {
            using (var ms = new MemoryStream())
            {
                blob.DownloadRangeToStream(ms, queue.Key, queue.Value);
                lock (outPutStream)
                {
                    outPutStream.Position = queue.Key - startRange;
                    var bytes = ms.ToArray();
                    outPutStream.Write(bytes, 0, bytes.Length);
                }
            }
        });
}
Program Main
var container = storageAccount.CreateCloudBlobClient().GetContainerReference(defaultContainerName);
var blob = container.GetBlockBlobReference("code.txt");
blob.FetchAttributes();
long blobTotalLength = blob.Properties.Length;
long chunkLength = 10 * 1024; // divide the blob into files of 10 KB each
for (long i = 0; i < blobTotalLength; i += chunkLength)
{
    long startRange = i;
    long endRange = (i + chunkLength) > blobTotalLength ? blobTotalLength : (i + chunkLength);
    using (var fs = new FileStream(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, $"resources\\code_[{startRange}]_[{endRange}].txt"), FileMode.Create))
    {
        Console.WriteLine($"\nParallelDownloadBlob from range [{startRange}] to [{endRange}] start...");
        Stopwatch sp = new Stopwatch();
        sp.Start();
        ParallelDownloadBlob(fs, blob, startRange, endRange);
        sp.Stop();
        Console.WriteLine($"download done, time cost: {sp.ElapsedMilliseconds / 1000.0}s");
    }
}
UPDATE:
Based on your requirement, I recommend downloading the blob into a single file and then using LumenWorks.Framework.IO to read that large file record by record, keeping track of how many bytes you have read and starting a new CSV file whenever the current one reaches 100 MB. Here is a code snippet for reference:
using (CsvReader csv = new CsvReader(new StreamReader("data.csv"), true))
{
    int fieldCount = csv.FieldCount;
    string[] headers = csv.GetFieldHeaders();
    while (csv.ReadNextRecord())
    {
        for (int i = 0; i < fieldCount; i++)
            Console.Write(string.Format("{0} = {1};",
                headers[i],
                csv[i] == null ? "MISSING" : csv[i]));
        // TODO:
        // 1. Read the current record and track the total bytes read so far.
        // 2. Start a new CSV file once the total reaches 100 MB, then write the current record to it.
    }
}
Additionally, you could refer to A Fast CSV Reader and CsvHelper for more details.
UPDATE 2:
Here is a code sample for breaking a large CSV file into smaller CSV files of a fixed byte size. I used CsvHelper 2.16.3 for the following snippet:
string[] headers = new string[0];
using (var sr = new StreamReader(@"C:\Users\v-brucch\Desktop\BlobHourMetrics.csv")) // 83.9 KB
{
    using (CsvHelper.CsvReader csvReader = new CsvHelper.CsvReader(sr,
        new CsvHelper.Configuration.CsvConfiguration()
        {
            Delimiter = ",",
            Encoding = Encoding.UTF8
        }))
    {
        // check header
        if (csvReader.ReadHeader())
        {
            headers = csvReader.FieldHeaders;
        }
        TextWriter writer = null;
        CsvWriter csvWriter = null;
        long readBytesCount = 0;
        long chunkSize = 30 * 1024; // divide the CSV file into files with a byte size of up to 30 KB
        while (csvReader.Read())
        {
            var curRecord = csvReader.CurrentRecord;
            var curRecordByteCount = curRecord.Sum(r => Encoding.UTF8.GetByteCount(r)) + headers.Count() + 1;
            readBytesCount += curRecordByteCount;
            // check the bytes you have read
            if (writer == null || readBytesCount > chunkSize)
            {
                readBytesCount = curRecordByteCount + headers.Sum(h => Encoding.UTF8.GetByteCount(h)) + headers.Count() + 1;
                if (writer != null)
                {
                    writer.Flush();
                    writer.Close();
                }
                string fileName = $"BlobHourMetrics_{Guid.NewGuid()}.csv";
                writer = new StreamWriter(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, fileName), true);
                csvWriter = new CsvWriter(writer);
                csvWriter.Configuration.Encoding = Encoding.UTF8;
                // output the header fields
                foreach (var header in headers)
                {
                    csvWriter.WriteField(header);
                }
                csvWriter.NextRecord();
            }
            // output the record fields
            foreach (var field in curRecord)
            {
                csvWriter.WriteField(field);
            }
            csvWriter.NextRecord();
        }
        if (writer != null)
        {
            writer.Flush();
            writer.Close();
        }
    }
}
I am uploading a large file to Azure Storage in 4 MB chunks. I have used the following code for about a year, but for the last month the uploaded file sometimes ends up corrupted and sometimes uploads fine.
Can anyone suggest what I need to change in the code?
//Uploads a file from the file system to a blob. Parallel implementation.
public void ParallelUploadFile(CloudBlockBlob blob1, string fileName1, BlobRequestOptions options1, int rowId, int maxBlockSize = 4 * 1024 * 1024)
{
    blob = blob1;
    fileName = fileName1;
    options = options1;
    file = new FileInfo(fileName);
    var fileStream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    long fileSize = file.Length;
    // Get the file size (also in MB for progress reporting)
    long fileSizeInMb = file.Length / 1024 / 1024;
    // let's figure out how big the file is here
    long leftToRead = fileSize;
    long startPosition = 0;
    // have 1 block for every maxBlockSize bytes plus 1 for the remainder
    var blockCount =
        ((int)Math.Floor((double)(fileSize / maxBlockSize))) + 1;
    blockIds = new List<string>();
    // populate the control array...
    for (int j = 0; j < blockCount; j++)
    {
        var toRead = (int)(maxBlockSize < leftToRead
            ? maxBlockSize
            : leftToRead);
        var blockId = Convert.ToBase64String(
            Encoding.ASCII.GetBytes(
                string.Format("BlockId{0}", j.ToString("0000000"))));
        transferDetails.Add(new BlockTransferDetail()
        {
            StartPosition = startPosition,
            BytesToRead = toRead,
            BlockId = blockId
        });
        if (toRead > 0)
        {
            blockIds.Add(blockId);
        }
        // increment the starting position
        startPosition += toRead;
        leftToRead -= toRead;
    }
    //*******
    // PUT THE NUMBER-OF-THREADS LOGIC HERE
    //*******
    int runFrom = 0;
    int runTo = 0;
    int uploadParametersCount = 0;
    TotalUpload = Convert.ToInt64(fileSizeInMb);
    for (int count = 0; count < transferDetails.Count; )
    {
        // Create the parameters for this upload batch
        uploadParametersesList.Add(new UploadParameters()
        {
            FileName = file.FullName,
            BlockSize = 3900000,
            //BlockSize = 4194304,
            LoopFrom = runFrom + runTo,
            IsPutBlockList = false,
            UploadedBytes = 0,
            Fs = fileStream,
            RowIndex = rowId,
            FileSize = Convert.ToInt64(fileSizeInMb)
        });
        // Logic to create the correct number of threads
        if (transferDetails.Count < 50)
        {
            runTo = transferDetails.Count;
            uploadParametersesList[uploadParametersCount].LoopTo += runTo;
            count += transferDetails.Count;
        }
        else
        {
            var tmp = transferDetails.Count - runTo;
            if (tmp > 50 && tmp < 100)
            {
                runTo += tmp;
                count += tmp;
                uploadParametersesList[uploadParametersCount].LoopTo += runTo;
            }
            else
            {
                runTo += 50;
                count += 50;
                uploadParametersesList[uploadParametersCount].LoopTo += runTo;
            }
        }
        // Add to the global list
        GlobalConst.UploadedParameters.Add(uploadParametersesList[uploadParametersCount]);
        // Start the thread
        int parametersCount = uploadParametersCount;
        var thread = new Thread(() => ThRunThis(uploadParametersesList[parametersCount]))
            { Priority = ThreadPriority.Highest };
        thread.Start();
        uploadParametersCount++;
        // Start a timer here to put all blocks on azure blob
        aTimer.Elapsed += OnTimedEvent;
        aTimer.Interval = 5000;
        aTimer.Start();
    }
}
//Timer callback
private void OnTimedEvent(object source, ElapsedEventArgs e)
{
    if (uploadParametersesList.Count(o => o.IsPutBlockList) == uploadParametersesList.Count)
    {
        aTimer.Elapsed -= OnTimedEvent;
        aTimer.Stop();
        // Finally commit it
        try
        {
            uploadParametersesList.ForEach(x => x.Status = "Uploaded");
            blob.PutBlockList(blockIds);
            IsCompleted = true;
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception.Message);
        }
    }
}
// Worker thread: uploads the blocks assigned to this batch
private void ThRunThis(UploadParameters uploadParameters)
{
    try
    {
        for (int j = uploadParameters.LoopFrom; j < uploadParameters.LoopTo; j++)
        {
            br = new BinaryReader(uploadParameters.Fs);
            var bytes = new byte[transferDetails[j].BytesToRead];
            // move the shared file stream to this block's position
            uploadParameters.Fs.Seek(transferDetails[j].StartPosition, SeekOrigin.Begin);
            br.Read(bytes, 0, transferDetails[j].BytesToRead);
            if (bytes.Length > 0)
            {
                // calculate the block-level hash
                MD5 md5 = new MD5CryptoServiceProvider();
                byte[] blockHash = md5.ComputeHash(bytes);
                string convertedHash = Convert.ToBase64String(blockHash, 0, 16);
                blob.PutBlock(transferDetails[j].BlockId, new MemoryStream(bytes), convertedHash, options);
                // Update uploaded bytes
                uploadParameters.UploadedBytes += transferDetails[j].BytesToRead;
                TotalUploadedBytes += transferDetails[j].BytesToRead;
                Console.WriteLine(Thread.CurrentThread.Name);
                // Try to free the memory
                try
                {
                    GC.Collect();
                }
                catch (Exception exception)
                {
                    Console.WriteLine(exception.Message);
                }
            }
        }
        // Mark this batch as completed
        uploadParameters.IsPutBlockList = true;
    }
    catch (Exception exception)
    {
        Console.WriteLine(Thread.CurrentThread.Name);
        uploadParameters.Exception = exception.Message;
        Console.WriteLine(exception.Message);
    }
}
It's been a long time since I last touched multi-threaded block blob uploads, but it looks like your block list is getting out of sequence because of the threads.
Why don't you fetch the block list from the cloud once all blocks have been uploaded, and then use that list for PutBlockList? That would make sure the blocks are committed in the correct sequence.
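For example, here is a minimal sketch of that idea, assuming the same classic Microsoft.WindowsAzure.Storage SDK used in your question (DownloadBlockList with BlockListingFilter.Uncommitted returns the blocks currently staged on the service):
// Sketch only: commit the block list based on what the service reports rather than on local thread state.
// Assumes the blob and options fields from the question; needs System.Linq and System.Text.
var uncommittedBlocks = blob.DownloadBlockList(BlockListingFilter.Uncommitted, null, options, null);
// The block IDs were generated as Base64("BlockId0000000"), so decoding them
// restores the intended order even if the blocks were uploaded out of sequence.
var orderedBlockIds = uncommittedBlocks
    .Select(b => b.Name)
    .OrderBy(name => Encoding.ASCII.GetString(Convert.FromBase64String(name)))
    .ToList();
blob.PutBlockList(orderedBlockIds);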