Stream.CopyTo(newStream) returns Length 0 - audio

I'm trying to concatenate buffers that are saved in memory streams. When I try to play the combined buffer, I get an exception:
An exception of type 'System.ArgumentException' occurred in
Microsoft.Xna.Framework.ni.dll but was not handled in user code
Additional information: Ensure that the buffer length is non-zero and
meets the block alignment requirements for the audio format.
When I debug, mStrm still has a length of 0, and I can't find out why.
private void mySendClick(object sender, RoutedEventArgs e)
{
var mStrmStartDelimiter = new MemoryStream();
var mStrmEndDelimiter = new MemoryStream();
BinaryWriter writer1 = new BinaryWriter(mStrmStartDelimiter);
Sinus(6500, 200, writer1, 32767);
BinaryWriter writer2 = new BinaryWriter(mStrmEndDelimiter);
Sinus(6800, 200, writer2, 32767);
var mStrm = new MemoryStream();
mStrmStartDelimiter.CopyTo(mStrm);
//ToDO
mStrmEndDelimiter.CopyTo(mStrm);
mStrm.Seek(0, SeekOrigin.Begin);
SoundEffect mySoundPlay = new SoundEffect(mStrm.ToArray(), 16000, AudioChannels.Mono);
mySoundPlay.Play();
}
public static void Sinus(double frequency, int msDuration, BinaryWriter writer, int volume)
{
double TAU = 2 * Math.PI;
double samplesPerSecond = 16000;
double theta = frequency * TAU / (double)samplesPerSecond;
int samples = (int)((decimal)samplesPerSecond * msDuration / 1000);
// 'volume' is a UInt16 with range 0 through UInt16.MaxValue ( = 65 535)
// we need 'amp' to stay within 0 through Int16.MaxValue ( = 32 767)
double amp = volume >> 2; // shifting right by 2 divides by 4, which keeps amp safely in range
for (int step = 0; step < samples; step++)
{
short s = (short)(amp * Math.Sin(theta * (double)step));
writer.Write(s);
}
}
I'm targeting the Windows Phone 8.1 Silverlight platform.

I found the solution to the problem: do the following before calling CopyTo():
mStrmStartDelimiter.Position = 0;
mStrmEndDelimiter.Position = 0;
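For reference, here is a minimal sketch of the corrected handler (same Sinus helper as above); the only change is rewinding both source streams before CopyTo:
private void mySendClick(object sender, RoutedEventArgs e)
{
    var mStrmStartDelimiter = new MemoryStream();
    var mStrmEndDelimiter = new MemoryStream();
    Sinus(6500, 200, new BinaryWriter(mStrmStartDelimiter), 32767);
    Sinus(6800, 200, new BinaryWriter(mStrmEndDelimiter), 32767);

    // BinaryWriter leaves Position at the end of the written data,
    // so without these two lines CopyTo copies nothing.
    mStrmStartDelimiter.Position = 0;
    mStrmEndDelimiter.Position = 0;

    var mStrm = new MemoryStream();
    mStrmStartDelimiter.CopyTo(mStrm);
    mStrmEndDelimiter.CopyTo(mStrm);

    // 16-bit mono PCM at 16 kHz; the byte count is even, which satisfies
    // the block-alignment requirement mentioned in the exception.
    SoundEffect mySoundPlay = new SoundEffect(mStrm.ToArray(), 16000, AudioChannels.Mono);
    mySoundPlay.Play();
}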

Related

Combining WasapiLoopbackCapture with Google Stream Recognition

I'm trying to write an app that will listen to my computer audio and transcribe it using Google Speech Recognition.
I've been able to record the system sound using WasapiLoopbackCapture and to use the Google streaming recognition API with test files, but I was not able to merge the two together.
When I stream the audio from the WasapiLoopbackCapture to Google, it doesn't return any results.
I've based my code on the google code sample at:
https://github.com/GoogleCloudPlatform/dotnet-docs-samples/blob/9588cee6d96bfe484c8e189e9ac2f6eaa3c3b002/speech/api/Recognize/InfiniteStreaming.cs#L225
private WaveInEvent StartListening()
{
var waveIn = new WaveInEvent
{
DeviceNumber = 0,
WaveFormat = new WaveFormat(SampleRate, ChannelCount)
};
waveIn.DataAvailable += (sender, args) =>
_microphoneBuffer.Add(ByteString.CopyFrom(args.Buffer, 0, args.BytesRecorded));
waveIn.StartRecording();
return waveIn;
}
And adjusted it to use the WasapiLoopbackCapture:
private IDisposable StartListening()
{
var waveIn = new WasapiLoopbackCapture();
//var waveIn = new WaveInEvent
//{
// DeviceNumber = 0,
// WaveFormat = new WaveFormat(SampleRate, ChannelCount)
//};
SampleRate = waveIn.WaveFormat.SampleRate;
ChannelCount = waveIn.WaveFormat.Channels;
BytesPerSecond = SampleRate * ChannelCount * BytesPerSample;
Console.WriteLine(SampleRate);
Console.WriteLine(BytesPerSecond);
waveIn.DataAvailable += (sender, args) =>
_microphoneBuffer.Add(ByteString.CopyFrom(args.Buffer, 0, args.BytesRecorded));
waveIn.StartRecording();
return waveIn;
}
But it doesn't return any transcribed text.
I've saved the input stream to a file and it played OK, so the sound is getting there. My guess is that the WaveFormat received from the WasapiLoopbackCapture is not compatible with what Google expects; I tried some conversions but couldn't get them to work.
I've reviewed the following topics on stack overflow, but still couldn't get it to work:
Resampling WasapiLoopbackCapture
Naudio - Convert 32 bit wav to 16 bit wav
And tried combining them both:
private IDisposable StartListening()
{
var waveIn = new WasapiLoopbackCapture();
//var waveIn = new WaveInEvent
//{
//DeviceNumber = 0,
//WaveFormat = new WaveFormat(SampleRate, ChannelCount)
//};
// SampleRate = waveIn.WaveFormat.SampleRate;
// ChannelCount = waveIn.WaveFormat.Channels;
// BytesPerSecond = waveIn.WaveFormat.AverageBytesPerSecond;// SampleRate * ChannelCount * BytesPerSample;
var target = new WaveFormat(SampleRate, 16, 1);
var writer = new WaveFileWriter(@"c:\temp\xx.wav", waveIn.WaveFormat);
Console.WriteLine(SampleRate);
Console.WriteLine(BytesPerSecond);
var stop = false;
waveIn.DataAvailable += (sender, args) =>
{
var a = args;
byte[] newArray16Bit = new byte[args.BytesRecorded / 2];
short two;
float value;
for (int i = 0, j = 0; i < args.BytesRecorded; i += 4, j += 2)
{
value = (BitConverter.ToSingle(args.Buffer, i));
two = (short)(value * short.MaxValue);
newArray16Bit[j] = (byte)(two & 0xFF);
newArray16Bit[j + 1] = (byte)((two >> 8) & 0xFF);
}
var resampleStream = new NAudio.Wave.Compression.AcmStream(new WaveFormat(waveIn.WaveFormat.SampleRate, 16, waveIn.WaveFormat.Channels), target);
Buffer.BlockCopy(newArray16Bit, 0, resampleStream.SourceBuffer, 0, a.BytesRecorded/2);
int sourceBytesConverted = 0;
var bytes = resampleStream.Convert(a.BytesRecorded/2, out sourceBytesConverted);
var converted = new byte[bytes];
Buffer.BlockCopy(resampleStream.DestBuffer, 0, converted, 0, bytes); // source offset should be 0
a = new WaveInEventArgs(converted,bytes);
_microphoneBuffer.Add(ByteString.CopyFrom(a.Buffer, 0, a.BytesRecorded));
if (writer != null)
{
writer.Write(a.Buffer, 0, a.BytesRecorded);
if (writer.Position > waveIn.WaveFormat.AverageBytesPerSecond * 5)
{
stop = true;
writer.Dispose();
writer = null;
Console.WriteLine("Saved file");
}
}
};
waveIn.StartRecording();
return waveIn;
}
But it doesn't work.
I'm not sure if this is the right path.
A code sample of a fix would be highly appreciated.
I tried converting the bit rate and so on, but couldn't get it to work.
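For what it's worth, here is a minimal, untested sketch of one way to do that conversion with NAudio's sample-provider pipeline instead of AcmStream. It assumes using NAudio.Wave, NAudio.Wave.SampleProviders and Google.Protobuf, reuses the _microphoneBuffer field from above, and picks 16 kHz mono 16-bit PCM as the target format, which the streaming RecognitionConfig would also have to declare:
private IDisposable StartListening()
{
    var waveIn = new WasapiLoopbackCapture(); // delivers IEEE-float audio in the device mix format

    // Buffer the raw loopback bytes, then convert: float -> mono -> 16 kHz -> 16-bit PCM.
    var buffered = new BufferedWaveProvider(waveIn.WaveFormat)
    {
        ReadFully = false,               // return only what is available instead of padding with silence
        DiscardOnBufferOverflow = true
    };
    ISampleProvider samples = buffered.ToSampleProvider();
    if (waveIn.WaveFormat.Channels == 2)
        samples = new StereoToMonoSampleProvider(samples);
    var resampled = new WdlResamplingSampleProvider(samples, 16000);
    IWaveProvider pcm16 = resampled.ToWaveProvider16();

    var conversionBuffer = new byte[16000 * 2]; // up to one second of 16-bit mono

    waveIn.DataAvailable += (sender, args) =>
    {
        buffered.AddSamples(args.Buffer, 0, args.BytesRecorded);

        // Drain whatever has been converted so far and hand it to the recognizer.
        int read;
        while ((read = pcm16.Read(conversionBuffer, 0, conversionBuffer.Length)) > 0)
            _microphoneBuffer.Add(ByteString.CopyFrom(conversionBuffer, 0, read));
    };

    waveIn.StartRecording();
    return waveIn;
}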

Create an InMemory Wav stream and Play it in a UWP app

I have an in memory WAV stream that I am able to play using
mStrm.Seek(0, SeekOrigin.Begin);
new System.Media.SoundPlayer(mStrm).Play();
I am trying to see how I can use the same stream and play it in a UWP application. I tried the following code:
MediaElement soundPlayer = new MediaElement();
var soundSource = mStrm.AsRandomAccessStream();
soundSource.Seek(0);
soundPlayer.SetSource(soundSource, "audio/wav");
soundPlayer.Play();
But I am receiving the following error:
MF_MEDIA_ENGINE_ERR_SRC_NOT_SUPPORTED : HRESULT - 0x80000013
Here is the code for the audio synthesis, which I have taken from the web:
public static void PlayBeep(UInt16 frequency, int msDuration, UInt16 volume = 16383)
{
var mStrm = new MemoryStream();
BinaryWriter writer = new BinaryWriter(mStrm);
const double TAU = 2 * Math.PI;
int formatChunkSize = 16;
int headerSize = 8;
short formatType = 1;
short tracks = 1;
int samplesPerSecond = 44100;
short bitsPerSample = 16;
short frameSize = (short)(tracks * ((bitsPerSample + 7) / 8));
int bytesPerSecond = samplesPerSecond * frameSize;
int waveSize = 4;
int samples = (int)((decimal)samplesPerSecond * msDuration / 1000);
int dataChunkSize = samples * frameSize;
int fileSize = waveSize + headerSize + formatChunkSize + headerSize + dataChunkSize;
writer.Write(0x46464952); // = encoding.GetBytes("RIFF")
writer.Write(fileSize);
writer.Write(0x45564157); // = encoding.GetBytes("WAVE")
writer.Write(0x20746D66); // = encoding.GetBytes("fmt ")
writer.Write(formatChunkSize);
writer.Write(formatType);
writer.Write(tracks);
writer.Write(samplesPerSecond);
writer.Write(bytesPerSecond);
writer.Write(frameSize);
writer.Write(bitsPerSample);
writer.Write(0x61746164); // = encoding.GetBytes("data")
writer.Write(dataChunkSize);
{
double theta = frequency * TAU / (double)samplesPerSecond;
double amp = volume >> 2;
for (int step = 0; step < samples; step++)
{
short s = (short)(amp * Math.Sin(theta * (double)step));
writer.Write(s);
}
}
mStrm.Seek(0, SeekOrigin.Begin);
MediaElement soundPlayer = new MediaElement();
var soundSource = mStrm.AsRandomAccessStream();
soundSource.Seek(0);
soundPlayer.SetSource(soundSource, "audio/wav");
soundPlayer.Play();
writer.Dispose();
mStrm.Dispose();
}
I am passing the different note frequencies for different octaves in a for loop, and the duration is 1000 milliseconds.
Any help on this is appreciated.
Thanks,
Teja
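Not a verified fix, but one alternative worth trying is to hand the stream to a Windows.Media.Playback.MediaPlayer via MediaSource.CreateFromStream instead of a code-behind MediaElement (a MediaElement that is never added to the visual tree is often reported not to play). A rough sketch, assuming mStrm still holds the complete WAV image produced above and _player is kept in a field so it is not collected while playing:
// using Windows.Media.Core;
// using Windows.Media.Playback;

private MediaPlayer _player;

private void PlayWavStream(MemoryStream mStrm)
{
    mStrm.Seek(0, SeekOrigin.Begin);
    _player = new MediaPlayer
    {
        Source = MediaSource.CreateFromStream(mStrm.AsRandomAccessStream(), "audio/wav")
    };
    _player.Play();
}
Note that the code above disposes mStrm immediately after calling Play(); whichever approach is used, the stream has to stay alive until playback finishes.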

Issue with Photometric Interpretation tag even after inverting the image data

Note: Giving the background of my previous question once again so as to find all the related stuff at one source.
I'm capturing an image from an Android mobile device and it's in JPEG format. The image is 72x72 DPI and 24-bit. When I try to convert this JPEG image to TIFF using LibTiff.Net and to set the tag Photometric Interpretation = 0 for MinIsWhite, the image turns negative (the white becomes black and the black becomes white). The environment is Windows 8.1 64-bit, Visual Studio 2012. The tag must have the value 0, where 0 = white is zero.
I absolutely must use Photometric.MINISWHITE in these images, so I tried inverting the image data before writing it to TIFF as per the code below. But then the compression changes to LZW instead of CCITT4, Photometric changes from MINISWHITE to MINISBLACK, the FillOrder tag is removed, the PlanarConfig tag is removed, a new Predictor tag is added with value 1, and the image turns negative again.
public partial class Form1 : Form
{
private const TiffTag TIFFTAG_ASCIITAG = (TiffTag)666;
private const TiffTag TIFFTAG_LONGTAG = (TiffTag)667;
private const TiffTag TIFFTAG_SHORTTAG = (TiffTag)668;
private const TiffTag TIFFTAG_RATIONALTAG = (TiffTag)669;
private const TiffTag TIFFTAG_FLOATTAG = (TiffTag)670;
private const TiffTag TIFFTAG_DOUBLETAG = (TiffTag)671;
private const TiffTag TIFFTAG_BYTETAG = (TiffTag)672;
public Form1()
{
InitializeComponent();
}
private void button1_Click(object sender, EventArgs e)
{
using (Bitmap bmp = new Bitmap(@"D:\Projects\ITests\images\IMG_2.jpg"))
{
// convert jpg image to tiff
byte[] tiffBytes = GetTiffImageBytes(bmp, false);
File.WriteAllBytes(@"D:\Projects\ITests\images\output.tif", tiffBytes);
//Invert the tiff image
Bitmap bmpTiff = new Bitmap(@"D:\Projects\ITests\images\output.tif");
Bitmap FBitmap = Transform(bmpTiff);
FBitmap.Save(@"D:\Projects\ITests\images\invOutput1.tif");
}
}
public static byte[] GetTiffImageBytes(Bitmap img, bool byScanlines)
{
try
{
byte[] raster = GetImageRasterBytes(img);
using (MemoryStream ms = new MemoryStream())
{
using (Tiff tif = Tiff.ClientOpen("InMemory", "w", ms, new TiffStream()))
{
if (tif == null)
return null;
tif.SetField(TiffTag.IMAGEWIDTH, img.Width);
tif.SetField(TiffTag.IMAGELENGTH, img.Height);
tif.SetField(TiffTag.COMPRESSION, Compression.CCITTFAX4);
tif.SetField(TiffTag.PHOTOMETRIC, Photometric.MINISWHITE);
tif.SetField(TiffTag.ROWSPERSTRIP, img.Height);
tif.SetField(TiffTag.XRESOLUTION, 200);
tif.SetField(TiffTag.YRESOLUTION, 200);
tif.SetField(TiffTag.SUBFILETYPE, 0);
tif.SetField(TiffTag.BITSPERSAMPLE, 1);
tif.SetField(TiffTag.FILLORDER, FillOrder.LSB2MSB);
tif.SetField(TiffTag.ORIENTATION, BitMiracle.LibTiff.Classic.Orientation.TOPLEFT);
tif.SetField(TiffTag.SAMPLESPERPIXEL, 1);
tif.SetField(TiffTag.RESOLUTIONUNIT, ResUnit.INCH);
tif.SetField(TiffTag.PLANARCONFIG, PlanarConfig.CONTIG);
int tiffStride = tif.ScanlineSize();
int stride = raster.Length / img.Height;
if (byScanlines)
{
// raster stride MAY be bigger than TIFF stride (due to padding in raster bits)
for (int i = 0, offset = 0; i < img.Height; i++)
{
bool res = tif.WriteScanline(raster, offset, i, 0);
if (!res)
return null;
offset += stride;
}
}
else
{
if (tiffStride < stride)
{
// raster stride is bigger than TIFF stride
// this is due to padding in raster bits
// we need to create correct TIFF strip and write it into TIFF
byte[] stripBits = new byte[tiffStride * img.Height];
for (int i = 0, rasterPos = 0, stripPos = 0; i < img.Height; i++)
{
System.Buffer.BlockCopy(raster, rasterPos, stripBits, stripPos, tiffStride);
rasterPos += stride;
stripPos += tiffStride;
}
// Write the information to the file
int n = tif.WriteEncodedStrip(0, stripBits, stripBits.Length);
if (n <= 0)
return null;
}
else
{
// Write the information to the file
int n = tif.WriteEncodedStrip(0, raster, raster.Length);
if (n <= 0)
return null;
}
}
}
return ms.GetBuffer();
}
}
catch (Exception)
{
return null;
}
}
public static byte[] GetImageRasterBytes(Bitmap img)
{
// Specify full image
Rectangle rect = new Rectangle(0, 0, img.Width, img.Height);
Bitmap bmp = img;
byte[] bits = null;
try
{
// Lock the managed memory
if (img.PixelFormat != PixelFormat.Format1bppIndexed)
bmp = convertToBitonal(img);
BitmapData bmpdata = bmp.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format1bppIndexed);
// Declare an array to hold the bytes of the bitmap.
bits = new byte[bmpdata.Stride * bmpdata.Height];
// Copy the sample values into the array.
Marshal.Copy(bmpdata.Scan0, bits, 0, bits.Length);
// Release managed memory
bmp.UnlockBits(bmpdata);
}
finally
{
if (bmp != img)
bmp.Dispose();
}
return bits;
}
private static Bitmap convertToBitonal(Bitmap original)
{
int sourceStride;
byte[] sourceBuffer = extractBytes(original, out sourceStride);
// Create destination bitmap
Bitmap destination = new Bitmap(original.Width, original.Height,
PixelFormat.Format1bppIndexed);
destination.SetResolution(original.HorizontalResolution, original.VerticalResolution);
// Lock destination bitmap in memory
BitmapData destinationData = destination.LockBits(
new Rectangle(0, 0, destination.Width, destination.Height),
ImageLockMode.WriteOnly, PixelFormat.Format1bppIndexed);
// Create buffer for destination bitmap bits
int imageSize = destinationData.Stride * destinationData.Height;
byte[] destinationBuffer = new byte[imageSize];
int sourceIndex = 0;
int destinationIndex = 0;
int pixelTotal = 0;
byte destinationValue = 0;
int pixelValue = 128;
int height = destination.Height;
int width = destination.Width;
int threshold = 500;
for (int y = 0; y < height; y++)
{
sourceIndex = y * sourceStride;
destinationIndex = y * destinationData.Stride;
destinationValue = 0;
pixelValue = 128;
for (int x = 0; x < width; x++)
{
// Compute pixel brightness (i.e. total of Red, Green, and Blue values)
pixelTotal = sourceBuffer[sourceIndex + 1] + sourceBuffer[sourceIndex + 2] +
sourceBuffer[sourceIndex + 3];
if (pixelTotal > threshold)
destinationValue += (byte)pixelValue;
if (pixelValue == 1)
{
destinationBuffer[destinationIndex] = destinationValue;
destinationIndex++;
destinationValue = 0;
pixelValue = 128;
}
else
{
pixelValue >>= 1;
}
sourceIndex += 4;
}
if (pixelValue != 128)
destinationBuffer[destinationIndex] = destinationValue;
}
Marshal.Copy(destinationBuffer, 0, destinationData.Scan0, imageSize);
destination.UnlockBits(destinationData);
return destination;
}
private static byte[] extractBytes(Bitmap original, out int stride)
{
Bitmap source = null;
try
{
// If original bitmap is not already in 32 BPP, ARGB format, then convert
if (original.PixelFormat != PixelFormat.Format32bppArgb)
{
source = new Bitmap(original.Width, original.Height, PixelFormat.Format32bppArgb);
source.SetResolution(original.HorizontalResolution, original.VerticalResolution);
using (Graphics g = Graphics.FromImage(source))
{
g.DrawImageUnscaled(original, 0, 0);
}
}
else
{
source = original;
}
// Lock source bitmap in memory
BitmapData sourceData = source.LockBits(
new Rectangle(0, 0, source.Width, source.Height),
ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
// Copy image data to binary array
int imageSize = sourceData.Stride * sourceData.Height;
byte[] sourceBuffer = new byte[imageSize];
Marshal.Copy(sourceData.Scan0, sourceBuffer, 0, imageSize);
// Unlock source bitmap
source.UnlockBits(sourceData);
stride = sourceData.Stride;
return sourceBuffer;
}
finally
{
if (source != original)
source.Dispose();
}
}
public Bitmap Transform(Bitmap bitmapImage)
{
var bitmapRead = bitmapImage.LockBits(new Rectangle(0, 0, bitmapImage.Width, bitmapImage.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppPArgb);
var bitmapLength = bitmapRead.Stride * bitmapRead.Height;
var bitmapBGRA = new byte[bitmapLength];
Marshal.Copy(bitmapRead.Scan0, bitmapBGRA, 0, bitmapLength);
bitmapImage.UnlockBits(bitmapRead);
for (int i = 0; i < bitmapLength; i += 4)
{
bitmapBGRA[i] = (byte)(255 - bitmapBGRA[i]);
bitmapBGRA[i + 1] = (byte)(255 - bitmapBGRA[i + 1]);
bitmapBGRA[i + 2] = (byte)(255 - bitmapBGRA[i + 2]);
// [i + 3] = ALPHA.
}
var bitmapWrite = bitmapImage.LockBits(new Rectangle(0, 0, bitmapImage.Width, bitmapImage.Height), ImageLockMode.WriteOnly, PixelFormat.Format32bppPArgb);
Marshal.Copy(bitmapBGRA, 0, bitmapWrite.Scan0, bitmapLength);
bitmapImage.UnlockBits(bitmapWrite);
return bitmapImage;
}
}
You should invert the image bytes in the GetTiffImageBytes method, before writing them to TIFF. Also, the Transform method converts the bi-level image to a 32bpp one, and that is why you get an LZW-compressed image in the end.
So, add the following code
for (int k = 0; k < raster.Length; k++)
raster[k] = (byte)(~raster[k]);
after byte[] raster = GetImageRasterBytes(img); in the GetTiffImageBytes method. This will invert the image bytes. And don't use the following code:
//Invert the tiff image
Bitmap bmpTiff = new Bitmap(@"D:\Projects\ITests\images\output.tif");
Bitmap FBitmap = Transform(bmpTiff);
FBitmap.Save(@"D:\Projects\ITests\images\invOutput1.tif");
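Put together, the top of GetTiffImageBytes would then look like this (the rest of the method is unchanged):
byte[] raster = GetImageRasterBytes(img);

// Invert the 1bpp data so MINISWHITE keeps the intended appearance:
// with Photometric.MINISWHITE a 0 bit means white, so the bits produced
// for a min-is-black raster have to be flipped before writing the strip.
for (int k = 0; k < raster.Length; k++)
    raster[k] = (byte)(~raster[k]);

using (MemoryStream ms = new MemoryStream())
{
    // ... the remainder of the method stays exactly as shown above ...
}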

Continuously Updating Silverlight Image control from Byte[] in Backgroundworker (about every 100 ms)

I am new to posting in forums, as I can usually help myself through searches, but I am really stuck here...
I have written an ASP.NET C# web app, which also needs to incorporate a client-side serial COM interface to a device that does live scanning and streams the scanned images through the serial (USB) interface. (These images need to be refreshed continuously, approximately every 100 ms, essentially creating an "animation" effect.)
To get this to run client side, I have written a small Silverlight app running in-browser with the following code, using a BackgroundWorker to try to separate the serial comms from the UI and refreshing the Silverlight Image control from the received byte arrays.
I have VERY SIMILAR code working 100% in WinForms; my only issue in Silverlight is that no image ever gets displayed in my Image control.
Below is the relevant WinForms code followed by the corresponding Silverlight code.
My current suspicion is that the image is never rendered because Silverlight only allows a PixelFormat of 32bppArgb, whereas I need to use PixelFormat.Format8bppIndexed, as per my WinForms CreateBitmap() method below.
If this is indeed the issue, I cannot find any way to create this format of Bitmap in Silverlight.
/////////////// WINFORMS CODE (Timer1 Interval = 100ms) //////////////////
private void timer1_Tick(object sender, EventArgs e)
{
BackgroundWorker bw = new BackgroundWorker();
bw.WorkerReportsProgress = true;
bw.DoWork += new DoWorkEventHandler(
delegate(object o, DoWorkEventArgs args)
{
BackgroundWorker b = o as BackgroundWorker;
int BytesToRead = COMport.Read(ReceiveBuffer);
for (int i = 0; i < BytesToRead; i++)
{
//Code that copies ReceiveBuffer to byte[] LiveImgArr
Bitmap liveBMP = CreateBitmap(LiveImgArr, imgWidth, imgHeight);
bw.ReportProgress(i, liveBMP);
}
});
bw.ProgressChanged += new ProgressChangedEventHandler(
delegate(object o, ProgressChangedEventArgs args)
{
pictureBox1.Image = (Bitmap)args.UserState;
});
bw.RunWorkerAsync();
}
private Bitmap CreateBitmap(byte[] buffer, int width, int height)
{
Bitmap bmp = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
BitmapData bmpData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadWrite, PixelFormat.Format8bppIndexed);
System.Runtime.InteropServices.Marshal.Copy(buffer, 0, bmpData.Scan0, width * height);
bmp.UnlockBits(bmpData);
ColorPalette pal = bmp.Palette;
for (int i = 0; i < 256; i++)
{
pal.Entries[i] = Color.FromArgb(i, i, i);
}
bmp.Palette = pal;
return bmp;
}
//////////////////////////////////////// END /////////////////////////////////////////
//////////////////////////////// SilverLight Code ///////////////////////////////////
public void StartTimer()//object o, RoutedEventArgs sender)
{
System.Windows.Threading.DispatcherTimer CommsTimer = new System.Windows.Threading.DispatcherTimer();
CommsTimer.Interval = new TimeSpan(0, 0, 0, 0, 100); // 100 Milliseconds
CommsTimer.Tick += new EventHandler(CommsTimer_Tick);
CommsTimer.Start();
}
public void CommsTimer_Tick(object o, EventArgs sender)
{
BackgroundWorker bw = new BackgroundWorker();
bw.WorkerReportsProgress = true;
bw.DoWork += new DoWorkEventHandler(
delegate(object b, DoWorkEventArgs args)
{
BackgroundWorker obw = b as BackgroundWorker;
int BytesToRead = COMport.Read(ReceiveBuffer);
for (int i = 0; i < BytesToRead; i++)
{
//Code that copies ReceiveBuffer to byte[] LiveImgArr
bw.ReportProgress(i, LiveImgArr);
}
});
bw.ProgressChanged += new ProgressChangedEventHandler(
delegate(object j, ProgressChangedEventArgs args)
{
byte[] imgByte = (byte[])args.UserState;
using (MemoryStream ms = new MemoryStream(imgByte, 0, imgByte.Length))
{
BitmapImage bmp = new BitmapImage();
bmp.SetSource(ms);
this.image1.Source = bmp;
}
});
//////////////////////////////////////// END //////////////////////////////////////////
///////////////////////////////// NEW CODE AS PER CLEMENTS/////////////////////////////
bw.ProgressChanged += new ProgressChangedEventHandler(
delegate(object j, ProgressChangedEventArgs args)
{
byte[] imgByte = (byte[])args.UserState;
WriteableBitmap wbmp = new WriteableBitmap(208, 208);
int[] wbmpArray = wbmp.Pixels;
for (int pixelIndex = 0; pixelIndex < imgByte.Length; pixelIndex++)
{
byte alpha = 128;
byte red = 255;
byte green = 255;
byte blue = 255;
double scaleAlpha = alpha / 255.0;
// we are not using scaleAlpha here
//wbmp.Pixels[pixelIndex] =
// (alpha << 24)
// | (red << 16)
// | (green << 8)
// | blue;
// notice the alpha value is NOT scaled
// it’s also very important to scale BEFORE
// shifting the values
wbmp.Pixels[pixelIndex] =
(alpha << 24)
| ((byte)(red * scaleAlpha) << 16)
| ((byte)(green * scaleAlpha) << 8)
| (byte)(blue * scaleAlpha);
}
wbmp.Invalidate();
this.image1.Source = wbmp;
//ImageBrush imgBrush = new ImageBrush();
//imgBrush.ImageSource = wbmp;
//imgRect.Fill = imgBrush;
});
BitmapImage.SetSource only accepts streams that contain an encoded image buffer (PNG or JPEG).
In order to set pixel data directly, you would have to use WriteableBitmap instead of BitmapImage. From the Remarks section in the class documentation:
• Construct an initially empty but dimensioned WriteableBitmap using
WriteableBitmap(Int32, Int32).
• Get the pixel array from Pixels.
• Loop through the array, setting the individual pixel values as
integer values that are evaluated as premultiplied ARGB32.
• Call Invalidate.
• To display the image in UI, use the WriteableBitmap as the source
for an imaging control such as Image, or as the source image for an
ImageBrush.
The following code fills a WriteableBitmap with a blue color and an opacity gradient from 0 to 100% from left to right, using only integer arithmetic:
var bitmap = new WriteableBitmap(500, 500);
int red = 0;
int green = 0;
int blue = 255;
for (int i = 0; i < bitmap.PixelWidth * bitmap.PixelHeight; i++)
{
int x = i % bitmap.PixelWidth;
int alpha = x * 256 / bitmap.PixelWidth;
bitmap.Pixels[i] =
(alpha << 24) |
((red * alpha / 256) << 16) |
((green * alpha / 256) << 8) |
(blue * alpha / 256);
}
bitmap.Invalidate();
image.Source = bitmap;
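Applied to the scanner data above (assuming LiveImgArr is 8-bit grayscale, one byte per pixel, for a 208 x 208 frame), the ProgressChanged handler could map each gray byte to an opaque premultiplied ARGB32 value along these lines:
bw.ProgressChanged += new ProgressChangedEventHandler(
    delegate(object j, ProgressChangedEventArgs args)
    {
        byte[] imgByte = (byte[])args.UserState;
        WriteableBitmap wbmp = new WriteableBitmap(208, 208);
        int count = Math.Min(imgByte.Length, wbmp.Pixels.Length);
        for (int i = 0; i < count; i++)
        {
            int g = imgByte[i]; // gray level 0..255
            // fully opaque, so premultiplying by alpha leaves the channels unchanged
            wbmp.Pixels[i] = (255 << 24) | (g << 16) | (g << 8) | g;
        }
        wbmp.Invalidate();
        this.image1.Source = wbmp;
    });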

How to create a waveform display from an audio file using J2ME

I'm a beginner in J2ME and want to develop a mobile music editor with an embedded waveform display as my project.
Is there anyone who can help me, or point me to a tutorial on how to render an audio file as a waveform display?
This will generate a waveform from an audio file using NAudio...
using NAudio.Wave;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
public partial class test : System.Web.UI.Page
{
protected void Page_Load(object sender, EventArgs e)
{
string strPath = Server.MapPath("audio/060.mp3");
string SongID = "2";
byte[] bytes = File.ReadAllBytes(strPath);
WriteToFile(SongID,strPath, bytes);
Response.Redirect("Main.aspx");
}
private void WriteToFile(string SongID, string strPath, byte[] Buffer)
{
try
{
int samplesPerPixel = 128;
long startPosition = 0;
//FileStream newFile = new FileStream(GeneralUtils.Get_SongFilePath() + "/" + strPath, FileMode.Create);
float[] data = FloatArrayFromByteArray(Buffer);
Bitmap bmp = new Bitmap(1170, 200);
int BORDER_WIDTH = 5;
int width = bmp.Width - (2 * BORDER_WIDTH);
int height = bmp.Height - (2 * BORDER_WIDTH);
NAudio.Wave.Mp3FileReader reader = new NAudio.Wave.Mp3FileReader(strPath, wf => new NAudio.FileFormats.Mp3.DmoMp3FrameDecompressor(wf));
NAudio.Wave.WaveChannel32 channelStream = new NAudio.Wave.WaveChannel32(reader);
int bytesPerSample = (reader.WaveFormat.BitsPerSample / 8) * channelStream.WaveFormat.Channels;
using (Graphics g = Graphics.FromImage(bmp))
{
g.Clear(Color.White);
Pen pen1 = new Pen(Color.Gray);
int size = data.Length;
string hexValue1 = "#009adf";
Color colour1 = System.Drawing.ColorTranslator.FromHtml(hexValue1);
pen1.Color = colour1;
Stream wavestream = new NAudio.Wave.Mp3FileReader(strPath, wf => new NAudio.FileFormats.Mp3.DmoMp3FrameDecompressor(wf));
wavestream.Position = 0;
int bytesRead1;
byte[] waveData1 = new byte[samplesPerPixel * bytesPerSample];
wavestream.Position = startPosition + (width * bytesPerSample * samplesPerPixel);
for (float x = 0; x < width; x++)
{
short low = 0;
short high = 0;
bytesRead1 = wavestream.Read(waveData1, 0, samplesPerPixel * bytesPerSample);
if (bytesRead1 == 0)
break;
for (int n = 0; n < bytesRead1; n += 2)
{
short sample = BitConverter.ToInt16(waveData1, n);
if (sample < low) low = sample;
if (sample > high) high = sample;
}
float lowPercent = ((((float)low) - short.MinValue) / ushort.MaxValue);
float highPercent = ((((float)high) - short.MinValue) / ushort.MaxValue);
float lowValue = height * lowPercent;
float highValue = height * highPercent;
g.DrawLine(pen1, x, lowValue, x, highValue);
}
}
string filename = Server.MapPath("image/060.png");
bmp.Save(filename);
bmp.Dispose();
}
catch (Exception e)
{
}
}
public float[] FloatArrayFromStream(System.IO.MemoryStream stream)
{
return FloatArrayFromByteArray(stream.GetBuffer());
}
public float[] FloatArrayFromByteArray(byte[] input)
{
float[] output = new float[input.Length / 4];
for (int i = 0; i < output.Length; i++)
{
output[i] = BitConverter.ToSingle(input, i * 4);
}
return output;
}
}
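One detail to watch in the code above: samplesPerPixel is hard-coded to 128 and the stream position is advanced by width * bytesPerSample * samplesPerPixel before the drawing loop, so only a slice of the file ends up in the picture. A small, untested variant that scales the whole file onto the bitmap width could look like this:
using (var reader = new NAudio.Wave.Mp3FileReader(strPath))
{
    int bytesPerSample = (reader.WaveFormat.BitsPerSample / 8) * reader.WaveFormat.Channels;
    long totalSamples = reader.Length / bytesPerSample;      // Length is the decoded PCM length in bytes
    int samplesPerPixel = (int)Math.Max(1, totalSamples / width);

    reader.Position = 0; // start at the beginning instead of skipping ahead
    // ...then run the same min/max loop as above with this samplesPerPixel value...
}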
