I have a C# Windows Forms project with the following code, which uses the BiQuadFilter class from the NAudio library to implement a low-pass filter. The problem I am facing is that the intensity of the sound drops along with the frequency, making the volume barely audible. How can I fix this?
My Code:
private ISampleProvider sourceProvider;
private BiQuadFilter[] filters;
private int channels,cutOffFreq;
//Constructor
public MyFilter(ISampleProvider sourceProvider,int cutOffFreq)
{
this.sourceProvider = sourceProvider;
this.cutOffFreq = cutOffFreq;
channels = sourceProvider.WaveFormat.Channels;
filters = new BiQuadFilter[channels];
CreateFilters();
}
private void CreateFilters()
{
for (int n = 0; n < channels; n++)
if (filters[n] == null)
filters[n] = BiQuadFilter.LowPassFilter(44100, cutOffFreq, 1);
else
filters[n].SetLowPassFilter(44100, cutOffFreq, 1);
}
public WaveFormat WaveFormat { get { return sourceProvider.WaveFormat; } }
public int Read(float[] buffer, int offset, int count)
{
int samplesRead = sourceProvider.Read(buffer, offset, count);
for (int i = 0; i < samplesRead; i++)
buffer[offset + i] = filters[(i % channels)].Transform(buffer[offset + i]);
return samplesRead;
}
This is how I am using it:
waveOut.Init(new MyFilter(new AudioFileReader(path + "\\Audio_Tracks\\" + topic + "\\" + currentTrack + ".wav"), 750));
waveOut.Play();
Well, I don't know what your source material is, but a low-pass filter with a cutoff frequency of just 750 Hz is potentially going to be cutting a lot out of your original signal. Generally speaking, a filter will make things quieter. Amplify the sound afterwards (e.g. multiply each sample by a gain factor after transforming it).
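For example, a make-up gain could be applied right inside the Read method. This is only a minimal sketch of that suggestion; the gain value of 2.0 is an arbitrary placeholder to tune by ear, and the clamping is just a crude guard against clipping:
public int Read(float[] buffer, int offset, int count)
{
    int samplesRead = sourceProvider.Read(buffer, offset, count);
    const float gain = 2.0f; // example make-up gain, not a prescribed value
    for (int i = 0; i < samplesRead; i++)
    {
        float filtered = filters[i % channels].Transform(buffer[offset + i]);
        // boost the filtered sample and clamp to [-1, 1] to avoid hard clipping
        buffer[offset + i] = Math.Max(-1f, Math.Min(1f, filtered * gain));
    }
    return samplesRead;
}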
I'm currently learning how to code games and have designed a biome generation algorithm.
As long as I run the algorithm below synchronously, it generates the same output every time and works perfectly fine.
Now I have tried to speed it up and make it multithreaded, but every time I call the method, it produces a different result.
As far as I know, I used thread-safe collections wherever necessary, but it still doesn't work.
I also tried locking the collection, but that didn't work either.
So I'm completely clueless as to why this doesn't work.
If you see anything that I could do better, or how I could fix the problem, please let me know.
This code is working:
private Biome[,] Generate(string worldSeed, Vector2Int targetChunk, List<(Biome, float)> multiplier, float centroidsPerChunk)
{
//Calculate the neighbours to generate depending on the centroids-per-chunk value
int chunkNeighboursToGenerate = (int)Math.Ceiling(Math.Sqrt(1f / centroidsPerChunk * 12.5f));
int chunkSize = 8;
//Create List that contains all centroids of the chunk
List<(Vector2Int, Biome)> centroids = new();
//Create centroids for every chunk of the generated region around the target chunk
for (int chunkX = targetChunk.x - chunkNeighboursToGenerate; chunkX < targetChunk.x + chunkNeighboursToGenerate + 1; chunkX++)
{
for (int chunkZ = targetChunk.y - chunkNeighboursToGenerate; chunkZ < targetChunk.y + chunkNeighboursToGenerate + 1; chunkZ++)
{
List<(Vector2Int, Biome)> generatedCentdroids = GetCentdroidsByChunk(worldSeed, new(chunkX, chunkZ), centroidsPerChunk, chunkSize, multiplier, targetChunk, chunkNeighboursToGenerate);
foreach ((Vector2Int, Biome) generatedCentdroid in generatedCentdroids)
{
centroids.Add(generatedCentdroid);
}
}
}
Biome[,] biomeMap = new Biome[chunkSize, chunkSize];
//---Generate biomeMap of the target Chunk---
for (int tx = 0; tx < chunkSize; tx++)
{
for (int tz = 0; tz < chunkSize; tz++)
{
int x = chunkSize * chunkNeighboursToGenerate + tx;
int z = chunkSize * chunkNeighboursToGenerate + tz;
biomeMap[tz, tx] = GetClosestCentroidBiome(new(x, z), centroids.ToArray());
};
};
//Return the biome map of the target chunk
return biomeMap;
}
private static List<(Vector2Int, Biome)> GetCentdroidsByChunk(string worldSeed, Vector2Int chunkToGenerate, float centroidsPerChunk, int chunkSize, List<(Biome, float)> multiplier, Vector2Int targetChunk, int chunkNeighboursToGenerate)
{
List<(Vector2Int, Biome)> centroids = new();
//---Generate centroids of a single chunk---
float centroidsInThisChunk = centroidsPerChunk;
//Init randomizer
System.Random randomInstance = new(Randomizer.GetSeed(worldSeed, chunkToGenerate.x, chunkToGenerate.y));
while (centroidsInThisChunk > 0.0f)
{
//if at least one more centroid is to be generated, do it
//otherwise, decide randomly (using the remaining fraction as the probability) whether another one should be generated
if (centroidsInThisChunk >= 1 || (float)randomInstance.NextDouble() * (1 - 0) + 0 <= centroidsInThisChunk)
{
//Generate random point for a new centroid
Vector2Int pos = new(randomInstance.Next(0, chunkSize + 1), randomInstance.Next(0, chunkSize + 1));
//map the point to a zero-based coordinate system
int mappedX = (((chunkToGenerate.x - targetChunk.x) + chunkNeighboursToGenerate) * chunkSize) + pos.x;
int mappedZ = (((chunkToGenerate.y - targetChunk.y) + chunkNeighboursToGenerate) * chunkSize) + pos.y;
Vector2Int mappedPos = new Vector2Int(mappedX, mappedZ);
//Select the biome at random
Biome biome = Randomizer.GetRandomBiom(randomInstance, multiplier);
centroids.Add(new(mappedPos, biome));
centroidsInThisChunk -= 1.0f;
}
//if no centroid is left to generate, end the loop
else
{
break;
}
}
return centroids;
}
//Calculates the closest centroid to the given position
Biome GetClosestCentroidBiome(Vector2Int pixelPos, IEnumerable<(Vector2Int, Biome)> centroids)
{
//Warp the position so the biome borders won't be straight
//Vector2 warpedPos = pixelPos + Get2DTurbulence(pixelPos);
Vector2 warpedPos = pixelPos;
float smallestDst = float.MaxValue;
Biome closestBiome = Biome.Empty;
foreach ((Vector2Int, Biome) centroid in centroids)
{
float distance = Vector2.Distance(warpedPos, centroid.Item1);
if (distance < smallestDst)
{
smallestDst = distance;
closestBiome = centroid.Item2;
}
}
return closestBiome;
}
public static class Randomizer
{
//Generates a random integer seed by combining and hashing the input values
public static int GetSeed(string worldSeed, int chunkx, int chunkz)
{
var stringSeed = worldSeed + ":" + chunkx + ";" + chunkz;
MD5 md5Hasher = MD5.Create();
byte[] hashed = md5Hasher.ComputeHash(Encoding.UTF8.GetBytes(stringSeed));
return BitConverter.ToInt32(hashed, 0);
}
//Returns a random biome based on the given probabilities/multipliers
//multiplier = 2, for example, means the biome is generated twice as often as usual
public static Biome GetRandomBiom(System.Random rndm, List<(Biome, float)> multiplier)
{
float multmax = 0.0f;
multiplier.ForEach(x => multmax += x.Item2);
//Generate a random value that is in the range of all multipliers added together
float biome = (float)rndm.NextDouble() * (multmax + 0.01f);
//Map the biome to the multipliers and return the biome
float multcalc = 0.0f;
for (int r = 0; r < multiplier.Count; r++)
{
multcalc += multiplier[r].Item2;
if (multcalc >= biome)
{
return multiplier[r].Item1;
}
}
//Return Biome.Empty if something didn't work correctly
return Biome.Empty;
}
}
This doesn't work:
private Biome[,] Generate(string worldSeed, Vector2Int targetChunk, List<(Biome, float)> multiplier, float centroidsPerChunk)
{
//Calculate the neighbours to generate depending on the centroids-per-chunk value
int chunkNeighboursToGenerate = (int)Math.Ceiling(Math.Sqrt(1f / centroidsPerChunk * 12.5f));
int chunkSize = 8;
//Create List that contains all centroids of the chunk
ConcurrentBag<(Vector2Int, Biome)> centroids = new();
ConcurrentQueue<Task> tasks = new();
//Create centroids for every chunk of the generated region around the target chunk
for (int chunkX = targetChunk.x - chunkNeighboursToGenerate; chunkX < targetChunk.x + chunkNeighboursToGenerate + 1; chunkX++)
{
for (int chunkZ = targetChunk.y - chunkNeighboursToGenerate; chunkZ < targetChunk.y + chunkNeighboursToGenerate + 1; chunkZ++)
{
tasks.Enqueue(Task.Run(() =>
{
List<(Vector2Int, Biome)> generatedCentdroids = GetCentdroidsByChunk(worldSeed, new(chunkX, chunkZ), centroidsPerChunk, chunkSize, multiplier, targetChunk, chunkNeighboursToGenerate);
foreach ((Vector2Int, Biome) generatedCentdroid in generatedCentdroids)
{
centroids.Add(generatedCentdroid);
}
}));
}
}
Biome[,] biomeMap = new Biome[chunkSize, chunkSize];
Task.WaitAll(tasks.ToArray());
//---Generate biomeMap of the target Chunk---
for (int tx = 0; tx < chunkSize; tx++)
{
for (int tz = 0; tz < chunkSize; tz++)
{
int x = chunkSize * chunkNeighboursToGenerate + tx;
int z = chunkSize * chunkNeighboursToGenerate + tz;
biomeMap[tz, tx] = GetClosestCentroidBiome(new(x, z), centroids.ToArray());
};
};
//Return the biome map of the target chunk
return biomeMap;
}
If you're starting to get into programming and you want to learn multi-threading, converting a large piece of complex code like this is not where you want to start. I highly recommend you pick up a book or tutorial on threading/async in C#/.NET before starting something like this. Unity also has its own multi-threading library with its Job System, which is built for the Unity workflow: https://docs.unity3d.com/Manual/JobSystemMultithreading.html
I don't think most people could find what's causing the problem from these two code snippets alone. But I have a couple of suggestions.
Change your tasks collection to a List<T>; tasks is only ever accessed on one thread, so there's no need for a ConcurrentQueue<T>.
Is Biome a class? Because if so, it's technically fine, but modifying data structures from multiple threads gets hairy fast. And while I can't see you modifying shared data in these snippets, without the full code I can't say for sure. Turn Biome into a struct, or make a struct equivalent for threading purposes.
Also avoid calling centroids.ToArray() inside your loop, as doing so copies the collection into a new array over and over again. Call it once outside the loop; that alone should be a pretty huge performance bump.
Just find a full-blown tutorial for threading/async/Unity's Job System (depending on which you'd rather learn for your use case) and start from there; I can tell from your use of the concurrent collections and of List<T> inside your tasks that you're new to threading. Understanding what code runs on another thread, and the repercussions of that (race conditions and so on), is huge.
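Putting the List<Task> and hoisted ToArray() suggestions together might look roughly like this. This is only a sketch against the posted Generate method, not a verified fix for the non-determinism; it also copies the for-loop variables into locals before the lambda, since variables of a for loop are shared across iterations when captured by a closure in C#:
//Plain List<Task> is enough here, since it is only touched from this thread
List<Task> tasks = new();
ConcurrentBag<(Vector2Int, Biome)> centroids = new();
for (int chunkX = targetChunk.x - chunkNeighboursToGenerate; chunkX < targetChunk.x + chunkNeighboursToGenerate + 1; chunkX++)
{
    for (int chunkZ = targetChunk.y - chunkNeighboursToGenerate; chunkZ < targetChunk.y + chunkNeighboursToGenerate + 1; chunkZ++)
    {
        int localX = chunkX, localZ = chunkZ; //copy the loop variables for the closure
        tasks.Add(Task.Run(() =>
        {
            foreach (var centroid in GetCentdroidsByChunk(worldSeed, new(localX, localZ), centroidsPerChunk, chunkSize, multiplier, targetChunk, chunkNeighboursToGenerate))
            {
                centroids.Add(centroid);
            }
        }));
    }
}
Task.WaitAll(tasks.ToArray());
//Copy the bag to an array once, instead of once per pixel
(Vector2Int, Biome)[] centroidArray = centroids.ToArray();
Biome[,] biomeMap = new Biome[chunkSize, chunkSize];
for (int tx = 0; tx < chunkSize; tx++)
{
    for (int tz = 0; tz < chunkSize; tz++)
    {
        int x = chunkSize * chunkNeighboursToGenerate + tx;
        int z = chunkSize * chunkNeighboursToGenerate + tz;
        biomeMap[tz, tx] = GetClosestCentroidBiome(new(x, z), centroidArray);
    }
}
return biomeMap;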
I'm trying to read a binary file. I don't know the structure, but I do have some code written in R that can read it. I'm not familiar with R, but I have made some progress converting it to C# and am struggling with the last bit.
I'm at the point where I need to list out the results, which I would expect to be a series of float or double values.
The R code looks like this (I've removed some of the logic to keep it short):
Rcpp::NumericVector GetSwmmResult(int iType, int iIndex, int vIndex)
{
int offset;
std::vector<float> resultvec(SWMM_Nperiods);
size_t size;
// --- compute offset into output file
for ( int i=1; i<=SWMM_Nperiods; ++i)
{
offset = StartPos + (i-1)*BytesPerPeriod + 2*RECORDSIZE;
if ( iType == SUBCATCH )
{
offset += RECORDSIZE*(iIndex*SubcatchVars + vIndex);
}
else return wrap(resultvec);
// --- re-position the file and read the result
fseek(Fout, offset, SEEK_SET);
size = fread(&resultvec[i-1], RECORDSIZE, 1, Fout);
}
return wrap(resultvec);
}
In C# I expected to do something as follows, where br is my BinaryReader object:
public List<double> GetSwmmResult(int iType, int iIndex, int vIndex)
{
int offset;
List<double> resultvec = new();
int size;
// --- compute offset into output file
Debug.WriteLine("SWMM_Nperiods count = " + SWMM_Nperiods);
for (int i = 1; i <= SWMM_Nperiods; i++)
{
Debug.WriteLine("SWMM_Nperiods " + i);
offset = StartPos + (i - 1) * BytesPerPeriod + 2 * RECORDSIZE;
if (iType == SUBCATCH)
{
offset += RECORDSIZE * (iIndex * SubcatchVars + vIndex);
}
else
{
return resultvec;
}
// --- re-position the file and read the result
br.BaseStream.Position = offset;
resultvec.Add(br.ReadDouble());
Debug.WriteLine(resultvec[i - 1]);
}
return resultvec;
}
But my C# just returns a load of very small numbers like:
5.058993159887922E-15
3.10628841909217E-16
5.477524451492502E-17
I'm expecting a series of numbers in the 100s or 1000s, not tiny values like these.
Can anybody see how I should be returning values in my C# code, using the R function above as a guide? There's a variable in the R code called SEEK_SET; it's not declared anywhere, so I don't understand how it's being used, but I suspect it may be what I'm missing.
I am currently writing a C# application to demonstrate the speedup of parallel computing over single-threaded execution. My case is a median blur of an image. But putting more threads to work slows the application down significantly (60 seconds single-threaded vs 75 seconds multithreaded). Given my current approach, I don't know how I could improve the process for multithreading. Sorry in advance for the long code in this post.
My current approach:
First, I calculate how many pixels each thread needs to process to even out the work. The DateTime calculation is there to measure how much time is spent single-threaded versus multithreaded:
public void blurImage(int cores)
{
_startTotal = DateTime.Now;
int numberOfPixels = _originalImage.Width * _originalImage.Height;
if (cores>=numberOfPixels)
{
for (int i = 0; i < numberOfPixels; i++)
{
startThread(0, numberOfPixels);
}
}
else
{
int pixelsPerThread = numberOfPixels / cores;
int threshold = numberOfPixels - (pixelsPerThread * cores);
startThread(0, pixelsPerThread + threshold);
for (int i = 1; i < cores; i++)
{
int startPixel = i * pixelsPerThread + threshold;
startThread(startPixel, startPixel + pixelsPerThread);
}
}
_SeqTime = DateTime.Now.Subtract(_startTotal);
}
The startThread method starts a thread and saves the result into a special class object so everything can be merged into one image later; I pass a copy of the input image to each thread.
private void startThread(int startPixel, int numberOfPixels)
{
BlurOperation operation = new BlurOperation(blurPixels);
_operations.Add(operation);
BlurResult result = new BlurResult();
operation.BeginInvoke((Bitmap)_processedImage.Clone(), startPixel, numberOfPixels, _windowSize, result, operation, new AsyncCallback(finish), result);
}
Every thread blurs its set of pixels and saves the output into a new list of colors. The list is stored in the result object along with the start pixel and the current operation, so the program knows when all threads are finished:
private void blurPixels(Bitmap bitmap, int startPixel, int endPixel, int window, BlurResult result, BlurOperation operation)
{
List<Color> colors = new List<Color>();
for (int i = startPixel; i < endPixel; i++)
{
int x = i % bitmap.Width;
int y = i / bitmap.Width;
colors.Add(PixelBlurrer.ShadePixel(x, y, bitmap, window));
}
result._pixels = colors;
result._startPixel = startPixel;
result._operation = operation;
}
The PixelBlurrer calculates the median of each color channel and returns it:
public static Color ShadePixel(int x, int y, Bitmap image, int window)
{
List<byte> red = new List<byte>();
List<byte> green = new List<byte>();
List<byte> blue = new List<byte>();
int xBegin = Math.Max(x - window, 0);
int yBegin = Math.Max(y - window, 0);
int xEnd = Math.Min(x + window, image.Width - 1);
int yEnd = Math.Min(y + window, image.Height - 1);
for (int tx = xBegin; tx < xEnd; tx++)
{
for (int ty = yBegin; ty < yEnd; ty++)
{
Color c = image.GetPixel(tx, ty);
red.Add(c.R);
green.Add(c.G);
blue.Add(c.B);
}
}
red.Sort();
green.Sort();
blue.Sort();
Color output = Color.FromArgb(red[red.Count / 2], green[green.Count / 2], blue[blue.Count / 2]);
return output;
}
In the callback, we return to the GUI thread and merge all pixels into the resulting image. Lastly, an event is raised telling my form the process is done:
private void finish(IAsyncResult iar)
{
Application.Current.Dispatcher.BeginInvoke(new AsyncCallback(update), iar);
}
private void update(IAsyncResult iar)
{
BlurResult result = (BlurResult)iar.AsyncState;
updateImage(result._pixels, result._startPixel, result._operation);
}
private void updateImage(List<Color> colors, int startPixel, BlurOperation operation)
{
DateTime updateTime = DateTime.Now;
_operations.Remove(operation);
int end = startPixel + colors.Count;
for (int i = startPixel; i < end; i++)
{
int x = i % _processedImage.Width;
int y = i / _processedImage.Width;
_processedImage.SetPixel(x, y, colors[i - startPixel]);
}
if (_operations.Count==0)
{
done(this, null);
}
_SeqTime += DateTime.Now.Subtract(updateTime);
}
Any thoughts? I tried using Parallel.For instead of delegates, but that made it worse. Is there a way to speed up median blur with multithreading, or is this a lost cause?
After some thinking, I figured out that my logic is solid, but I wasn't sending a deep copy to each thread. After changing this line in startThread:
operation.BeginInvoke((Bitmap)_processedImage.Clone(), startPixel, numberOfPixels, _windowSize, result, operation, new AsyncCallback(finish), result);
to this:
operation.BeginInvoke(new Bitmap(_processedImage), startPixel, numberOfPixels, _windowSize, result, operation, new AsyncCallback(finish), result);
I can see a speedup when running multithreaded.
What I am making is a small audio editor that loads a .wav file and displays it in the time domain. The user can select a part of it and zoom in, or DFT the chunk, which displays a small window in the frequency domain (extra functionality to be added later).
I think I am making a mistake when splitting my byte array into two float arrays.
I use NAudio to get my samples into a byte array.
Then I use a loop I found on Stack Overflow to split the array into left and right channels.
private void readToArrays(WaveFileReader pcm) {
wf = pcm.WaveFormat;
int samplesDesired = (int)pcm.Length;
buffer = new byte[samplesDesired * 4];
left = new float[samplesDesired];
right = new float[samplesDesired];
int bytesRead = pcm.Read(buffer, 0, samplesDesired);
if (wf.BitsPerSample == 16) {
if (wf.Channels == 1) {
for (int i = 0; i < buffer.Length / 4; i++) {
//handle
}
}
else if (wf.Channels == 2) {
int index = 0;
for (int sample = 0; sample < bytesRead / 4; sample++) {
left[sample] = BitConverter.ToInt16(buffer, index);
index += 2;
right[sample] = BitConverter.ToInt16(buffer, index);
index += 2;
}
}
}else if (wf.BitsPerSample == 8) {
if (wf.Channels == 1) {
//handle
}
else if (wf.Channels == 2) {
//handle
}
}
}
TL;DR: I get a lot of noise on playback with individual files. What I need is some advice on how I can still modify my samples / DFT them AND output them without noise. Do note that I am just a 2nd-year computing student and do not have years of experience.
Extra info: bit depth = 16; sample rate = 22050 Hz.
I'm tasked with building a .NET client app to detect silence in WAV files.
Is this possible with the built-in Windows APIs? Or alternatively, are there any good libraries out there to help with this?
Audio analysis is a difficult thing requiring a lot of complex math (think Fourier transforms). The question you have to ask is "what is silence?". If the audio that you are trying to edit is captured from an analog source, the chances are that there isn't any true silence... there will only be areas of soft noise (line hum, ambient background noise, etc.).
All that said, an algorithm that should work would be to determine a minimum volume (amplitude) threshold and duration (say, < 10 dBA for more than 2 seconds) and then simply do a volume analysis of the waveform, looking for areas that meet these criteria (perhaps with some filtering to ignore millisecond spikes). I've never written this in C#, but this CodeProject article looks interesting; it describes C# code to draw a waveform... that is the same kind of code that could be used for other amplitude analysis.
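As a rough illustration of that idea, here is a minimal sketch. It assumes the samples have already been decoded into a normalized float array (for example via NAudio's AudioFileReader); the threshold and minimum duration are placeholder values, and the threshold is a linear amplitude rather than dB:
// Finds contiguous runs of samples whose absolute amplitude stays below a
// threshold for at least minSilenceSeconds.
// To derive the linear threshold from a dB value: threshold = Math.Pow(10, dB / 20.0).
static List<(int startSample, int length)> FindSilentRegions(
    float[] samples, int sampleRate,
    float threshold = 0.02f, double minSilenceSeconds = 2.0)
{
    var regions = new List<(int, int)>();
    int minLength = (int)(minSilenceSeconds * sampleRate);
    int runStart = -1;
    for (int i = 0; i < samples.Length; i++)
    {
        bool quiet = Math.Abs(samples[i]) < threshold;
        if (quiet && runStart < 0)
        {
            runStart = i; // a quiet run begins here
        }
        else if (!quiet && runStart >= 0)
        {
            if (i - runStart >= minLength) // only count runs that were long enough
                regions.Add((runStart, i - runStart));
            runStart = -1;
        }
    }
    if (runStart >= 0 && samples.Length - runStart >= minLength)
        regions.Add((runStart, samples.Length - runStart));
    return regions;
}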
If you want to efficiently calculate the average power over a sliding window: square each sample, then add it to a running total, and subtract the squared value of the sample N positions earlier. Then move to the next sample. This is the simplest form of a CIC filter. Parseval's theorem tells us that this power calculation is applicable in both the time and frequency domains.
Also, you may want to add hysteresis to the system to avoid switching on and off rapidly when the power level is hovering around the threshold.
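A minimal sketch of that running-power idea with hysteresis (the window length and the two thresholds are placeholder values; using separate enter/exit thresholds is what keeps the state from flickering):
// Sliding-window average power with simple hysteresis.
// window: number of samples in the moving sum (e.g. roughly 20 ms worth).
static IEnumerable<bool> SilenceFlags(float[] samples, int window,
                                      double enterSilence = 1e-4, // power below this switches to "silent"
                                      double exitSilence = 4e-4)  // power above this switches back to "loud"
{
    double runningPower = 0;
    bool silent = false;
    for (int i = 0; i < samples.Length; i++)
    {
        runningPower += samples[i] * samples[i]; // add the newest squared sample
        if (i >= window)
            runningPower -= samples[i - window] * samples[i - window]; // drop the oldest
        double avgPower = runningPower / Math.Min(i + 1, window);
        // Hysteresis: a different threshold must be crossed to change state
        if (silent && avgPower > exitSilence)
            silent = false;
        else if (!silent && avgPower < enterSilence)
            silent = true;
        yield return silent;
    }
}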
http://www.codeproject.com/Articles/19590/WAVE-File-Processor-in-C
This has all the code necessary to strip silence, and mix wave files.
Enjoy.
I'm using NAudio, and I wanted to detect the silence in audio files so I can either report it or truncate it.
After a lot of research, I came up with this basic implementation. So, I wrote an extension method for the AudioFileReader class, which returns the silence duration at the start/end of the file, or starting from a specific position.
Here:
static class AudioFileReaderExt
{
public enum SilenceLocation { Start, End }
private static bool IsSilence(float amplitude, sbyte threshold)
{
double dB = 20 * Math.Log10(Math.Abs(amplitude));
return dB < threshold;
}
public static TimeSpan GetSilenceDuration(this AudioFileReader reader,
SilenceLocation location,
sbyte silenceThreshold = -40)
{
int counter = 0;
bool volumeFound = false;
bool eof = false;
long oldPosition = reader.Position;
var buffer = new float[reader.WaveFormat.SampleRate * 4];
while (!volumeFound && !eof)
{
int samplesRead = reader.Read(buffer, 0, buffer.Length);
if (samplesRead == 0)
eof = true;
for (int n = 0; n < samplesRead; n++)
{
if (IsSilence(buffer[n], silenceThreshold))
{
counter++;
}
else
{
if (location == SilenceLocation.Start)
{
volumeFound = true;
break;
}
else if (location == SilenceLocation.End)
{
counter = 0;
}
}
}
}
// reset position
reader.Position = oldPosition;
double silenceSamples = (double)counter / reader.WaveFormat.Channels;
double silenceDuration = (silenceSamples / reader.WaveFormat.SampleRate) * 1000;
return TimeSpan.FromMilliseconds(silenceDuration);
}
}
This will accept almost any audio file format not just WAV.
Usage:
using (AudioFileReader reader = new AudioFileReader(filePath))
{
TimeSpan duration = reader.GetSilenceDuration(AudioFileReaderExt.SilenceLocation.Start);
Console.WriteLine(duration.TotalMilliseconds);
}
References:
How audio dB levels are calculated.
Floating-point samples range.
More about amplitude.
Here is a nice variant to detect threshold alternations (beep/silence transitions):
static class AudioFileReaderExt
{
private static bool IsSilence(float amplitude, sbyte threshold)
{
double dB = 20 * Math.Log10(Math.Abs(amplitude));
return dB < threshold;
}
private static bool IsBeep(float amplitude, sbyte threshold)
{
double dB = 20 * Math.Log10(Math.Abs(amplitude));
return dB > threshold;
}
public static double GetBeepDuration(this AudioFileReader reader,
double StartPosition, sbyte silenceThreshold = -40)
{
int counter = 0;
bool eof = false;
int initial = (int)(StartPosition * reader.WaveFormat.Channels * reader.WaveFormat.SampleRate / 1000);
if (initial > reader.Length) return -1;
reader.Position = initial;
var buffer = new float[reader.WaveFormat.SampleRate * 4];
while (!eof)
{
int samplesRead = reader.Read(buffer, 0, buffer.Length);
if (samplesRead == 0)
eof = true;
for (int n = initial; n < samplesRead; n++)
{
if (IsBeep(buffer[n], silenceThreshold))
{
counter++;
}
else
{
eof=true; break;
}
}
}
double silenceSamples = (double)counter / reader.WaveFormat.Channels;
double silenceDuration = (silenceSamples / reader.WaveFormat.SampleRate) * 1000;
return TimeSpan.FromMilliseconds(silenceDuration).TotalMilliseconds;
}
public static double GetSilenceDuration(this AudioFileReader reader,
double StartPosition, sbyte silenceThreshold = -40)
{
int counter = 0;
bool eof = false;
int initial = (int)(StartPosition * reader.WaveFormat.Channels * reader.WaveFormat.SampleRate / 1000);
if (initial > reader.Length) return -1;
reader.Position = initial;
var buffer = new float[reader.WaveFormat.SampleRate * 4];
while (!eof)
{
int samplesRead = reader.Read(buffer, 0, buffer.Length);
if (samplesRead == 0)
eof=true;
for (int n = initial; n < samplesRead; n++)
{
if (IsSilence(buffer[n], silenceThreshold))
{
counter++;
}
else
{
eof=true; break;
}
}
}
double silenceSamples = (double)counter / reader.WaveFormat.Channels;
double silenceDuration = (silenceSamples / reader.WaveFormat.SampleRate) * 1000;
return TimeSpan.FromMilliseconds(silenceDuration).TotalMilliseconds;
}
}
Main usage:
using (AudioFileReader reader = new AudioFileReader("test.wav"))
{
double duratioff = 1;
double duration = 1;
double position = 1;
while (duratioff >-1 && duration >-1)
{
duration = reader.GetBeepDuration(position);
Console.WriteLine(duration);
position = position + duration;
duratioff = reader.GetSilenceDuration(position);
Console.WriteLine(-duratioff);
position = position + duratioff;
}
}
I don't think you'll find any built-in APIs for detecting silence. But you can always use good ol' math/discrete signal processing to work out the loudness.
Here's a small example: http://msdn.microsoft.com/en-us/magazine/cc163341.aspx
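For instance, a block-by-block RMS measurement is often enough to decide whether a section counts as "silent". This is just a sketch; it assumes normalized float samples in the -1..1 range, and the block size is up to you:
// RMS loudness of a block of samples, expressed in dB.
static double RmsDb(float[] block)
{
    double sumSquares = 0;
    foreach (float s in block)
        sumSquares += s * s;
    double rms = Math.Sqrt(sumSquares / block.Length);
    return 20 * Math.Log10(rms + 1e-12); // small epsilon avoids log(0)
}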
Use SoX. It can remove leading and trailing silence, but you'll have to call it as an exe from your app.
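For example, something along these lines (a sketch only; it assumes sox is on the PATH, and the silence-effect arguments shown are a commonly cited recipe that you should verify against the SoX manual for your own needs):
// Run sox to trim leading and trailing silence from input.wav into output.wav.
var psi = new System.Diagnostics.ProcessStartInfo
{
    FileName = "sox", // assumes sox is available on the PATH
    Arguments = "input.wav output.wav silence 1 0.1 1% reverse silence 1 0.1 1% reverse",
    UseShellExecute = false,
    CreateNoWindow = true
};
using (var proc = System.Diagnostics.Process.Start(psi))
{
    proc.WaitForExit();
}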
See code below from Detecting audio silence in WAV files using C#
private static void SkipSilent(string fileName, short silentLevel)
{
WaveReader wr = new WaveReader(File.OpenRead(fileName));
IntPtr format = wr.ReadFormat();
WaveWriter ww = new WaveWriter(File.Create(fileName + ".wav"),
AudioCompressionManager.FormatBytes(format));
int i = 0;
while (true)
{
byte[] data = wr.ReadData(i, 1);
if (data.Length == 0)
{
break;
}
if (!AudioCompressionManager.CheckSilent(format, data, silentLevel))
{
ww.WriteData(data);
}
i++; // advance to the next block; without this the loop never moves forward
}
ww.Close();
wr.Close();
}