How can I re-encode video frames to another codec using ffmpeg? (C#)

I am trying to learn ffmpeg, so I started a small project: I send an MP4 video stream to my C# application, where I want to re-encode it to WebM and push it to an Icecast server.
My Icecast server is receiving the video, but I am not able to play it back (the playback time updates each time I press play, but the video doesn't play and I only see a black frame).
Can anyone help me? I have no idea what is wrong in my code.
My code's execution flow is openInput -> openOutput -> streamingTest.
private void openInput()
{
_pInputFormatContext = ffmpeg.avformat_alloc_context();
var pFormatContext = _pInputFormatContext;
ffmpeg.avformat_open_input(&pFormatContext, configuration.Source, null, null).ThrowExceptionIfError();
ffmpeg.avformat_find_stream_info(_pInputFormatContext, null).ThrowExceptionIfError();
// find the first video stream
for (var i = 0; i < _pInputFormatContext->nb_streams; i++)
if (_pInputFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
{
pInputStream = _pInputFormatContext->streams[i];
break;
}
if (pInputStream == null) throw new InvalidOperationException("Could not find video stream.");
_inputStreamIndex = pInputStream->index;
_pInputCodecContext = pInputStream->codec;
var codecId = _pInputCodecContext->codec_id;
var pCodec = ffmpeg.avcodec_find_decoder(codecId);
if (pCodec == null) throw new InvalidOperationException("Unsupported codec.");
ffmpeg.avcodec_open2(_pInputCodecContext, pCodec, null).ThrowExceptionIfError();
configuration.CodecName = ffmpeg.avcodec_get_name(codecId);
configuration.FrameSize = new Size(_pInputCodecContext->width, _pInputCodecContext->height);
configuration.PixelFormat = _pInputCodecContext->pix_fmt;
_pPacket = ffmpeg.av_packet_alloc();
_pFrame = ffmpeg.av_frame_alloc();
}
private bool openOutput()
{
int ret;
_pOutputFormatContext = ffmpeg.avformat_alloc_context();
fixed (AVFormatContext** ppOutputFormatContext = &_pOutputFormatContext)
{
ret = ffmpeg.avformat_alloc_output_context2(ppOutputFormatContext, null, "webm", configuration.Destination);
if (ret < 0)
{
return false;
}
}
AVOutputFormat* out_format = ffmpeg.av_guess_format(null, configuration.Destination, null);
// Configure output video stream
_pOutputStream = ffmpeg.avformat_new_stream(_pOutputFormatContext, null);
AVStream* pInputVideoStream = null;
for (var i = 0; i < _pInputFormatContext->nb_streams; i++)
{
if (_pInputFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
{
pInputVideoStream = _pInputFormatContext->streams[i];
}
}
ffmpeg.avcodec_parameters_copy(_pOutputStream->codecpar, pInputVideoStream->codecpar);
_pOutputStream->codecpar->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
_pOutputStream->codecpar->codec_id = AVCodecID.AV_CODEC_ID_VP8;
AVDictionary* opt_dict;
ffmpeg.av_dict_set(&opt_dict, "content_type", "video/webm", 0);
ffmpeg.av_dict_set(&opt_dict, "user_agent", "GCS", 0);
fixed (AVFormatContext** ppOutputFormatContext = &_pOutputFormatContext)
{
ret = ffmpeg.avio_open2(&_pOutputFormatContext->pb, configuration.Destination, ffmpeg.AVIO_FLAG_WRITE, null, &opt_dict);
if (ret < 0)
{
return false;
}
}
ret = ffmpeg.avformat_write_header(_pOutputFormatContext, null);
if (ret < 0)
{
return false;
}
ffmpeg.av_dump_format(_pOutputFormatContext, 0, configuration.Destination, 1);
return true;
}
private unsafe void streamingTest(object gggg)
{
isStreamUp = true;
AVPacket frame = new AVPacket();
AVPacket* pFrame = &frame;
ffmpeg.av_init_packet(pFrame);
updateState(VideoStreamStates.Streaming);
try
{
long start_time = ffmpeg.av_gettime();
DateTime lastFrame = DateTime.MinValue;
while (isStreamUp)
{
if (cancelationToken.IsCancellationRequested)
{
throw new TaskCanceledException();
}
try
{
int error;
isReadingFrame = true;
do
{
error = ffmpeg.av_read_frame(_pInputFormatContext, pFrame);
if (error == ffmpeg.AVERROR_EOF)
{
frame = *pFrame;
continue;
}
error.ThrowExceptionIfError();
} while (frame.stream_index != _inputStreamIndex);
isWritingFrame = true;
//frame.stream_index = _outputStreamIndex;
_pOutputCodecContext = ffmpeg.avcodec_alloc_context3(_pOutputFormatContext->video_codec);
int ret = 0;
while (ret >= 0)
{
ret = ffmpeg.avcodec_receive_packet(_pOutputCodecContext, pFrame);
}
//ffmpeg.avcodec_send_frame(_pOutputCodecContext, pFrame);
//ffmpeg.avcodec_send_packet(_pOutputCodecContext, pFrame);
ret = ffmpeg.av_write_frame(_pOutputFormatContext, pFrame);
isWritingFrame = false;
if (frame.stream_index == _inputStreamIndex)
{
if (ret < 0)
{
Console.WriteLine("Missed frame");
missedFrames++;
}
else
{
Console.WriteLine("Sent frame");
sentFrames++;
}
AVRational time_base = _pInputFormatContext->streams[_inputStreamIndex]->time_base;
AVRational time_base_q = new AVRational();
time_base_q.num = 1;
time_base_q.den = ffmpeg.AV_TIME_BASE;
long pts_time = ffmpeg.av_rescale_q(frame.dts, time_base, time_base_q);
//long pts_time = ffmpeg.av_rescale_q(frame.dts, time_base_q, time_base);
long now_time = ffmpeg.av_gettime() - start_time;
if (pts_time > now_time)
ffmpeg.av_usleep((uint)(pts_time - now_time));
}
else
Console.WriteLine("????");
}
catch (Exception ex)
{
Console.WriteLine("Erro ao enviar: " + ex.Message);
}
finally
{
ffmpeg.av_packet_unref(pFrame);
}
}
}
catch (TaskCanceledException)
{
updateState(VideoStreamStates.Stopped);
}
catch (Exception e)
{
Console.WriteLine(e.Message.ToString());
}
}
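For reference, here is a minimal sketch of the decode -> encode -> mux loop that a re-encode generally needs with FFmpeg's send/receive API. This is only a sketch, not a drop-in fix: it assumes the VP8 encoder context _pOutputCodecContext has been configured and opened once with avcodec_open2 before the loop (not allocated per packet), it reuses the field names from the code above, and timestamp handling is simplified.
AVPacket* pkt = ffmpeg.av_packet_alloc();
AVPacket* outPkt = ffmpeg.av_packet_alloc();
AVFrame* decodedFrame = ffmpeg.av_frame_alloc();
while (ffmpeg.av_read_frame(_pInputFormatContext, pkt) >= 0)
{
    if (pkt->stream_index == _inputStreamIndex)
    {
        // decode the compressed input packet into a raw frame
        ffmpeg.avcodec_send_packet(_pInputCodecContext, pkt).ThrowExceptionIfError();
        while (ffmpeg.avcodec_receive_frame(_pInputCodecContext, decodedFrame) == 0)
        {
            // feed the raw frame to the (already opened) VP8 encoder
            ffmpeg.avcodec_send_frame(_pOutputCodecContext, decodedFrame).ThrowExceptionIfError();
            while (ffmpeg.avcodec_receive_packet(_pOutputCodecContext, outPkt) == 0)
            {
                outPkt->stream_index = _pOutputStream->index;
                // rescale timestamps from the input stream's time base to the output stream's
                ffmpeg.av_packet_rescale_ts(outPkt, _pInputFormatContext->streams[_inputStreamIndex]->time_base, _pOutputStream->time_base);
                ffmpeg.av_interleaved_write_frame(_pOutputFormatContext, outPkt).ThrowExceptionIfError();
                ffmpeg.av_packet_unref(outPkt);
            }
            ffmpeg.av_frame_unref(decodedFrame);
        }
    }
    ffmpeg.av_packet_unref(pkt);
}
ffmpeg.av_write_trailer(_pOutputFormatContext);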

Related

I tried to modify the IL of the System.String.Concat method through the ICorProfilerCallback interface, but something went wrong

I am trying to modify the IL of the System.String.Concat method through the ICorProfilerCallback interface, adding my own tracking code before the method executes.
I have modified the IL of other methods and added the tracking code without any problems; after compiling, it runs fine.
....
if (!isStatic) {
reWriterWrapper.LoadNull();// Ldnull
reWriterWrapper.StLocal(indexMethodTrace); //stloc
reWriterWrapper.LoadNull(); //Ldnull
reWriterWrapper.StLocal(indexEx);
reWriterWrapper.LoadNull();
reWriterWrapper.StLocal(indexRet);
ILInstr* pTryStartInstr = reWriterWrapper.CallMember0(getInstanceMemberRef, false);
reWriterWrapper.Cast(traceAgentTypeRef); //castclass
reWriterWrapper.LoadToken(functionInfo.type.id);//ldtoken
reWriterWrapper.CallMember(moduleMetaInfo->getTypeFromHandleToken, false);
reWriterWrapper.LoadArgument(0); //ldarg 0
auto argNum = functionInfo.signature.NumberOfArguments();
reWriterWrapper.CreateArray(objectTypeRef, argNum);
auto arguments = functionInfo.signature.GetMethodArguments();
for (unsigned i = 0; i < argNum; i++) {
reWriterWrapper.BeginLoadValueIntoArray(i);
reWriterWrapper.LoadArgument(i + 1);//ldarg
auto argTypeFlags = arguments[i].GetTypeFlags(elementType);
if (argTypeFlags & TypeFlagByRef) {
reWriterWrapper.LoadIND(elementType); //ldind
}
if (argTypeFlags & TypeFlagBoxedType) {
auto tok = arguments[i].GetTypeTok(pEmit, corLibAssemblyRef);
if (tok == mdTokenNil) {
return S_OK;
}
reWriterWrapper.Box(tok);
}
reWriterWrapper.EndLoadValueIntoArray(); //stelem_ref
}
reWriterWrapper.LoadInt32((INT32)function_token); //ldc_i4
reWriterWrapper.CallMember(beforeMemberRef, true); // call
reWriterWrapper.Cast(methodTraceTypeRef); //Castclass
reWriterWrapper.StLocal(rewriter.cNewLocals - 1); //STLOC
ILInstr* pRetInstr = pReWriter->NewILInstr();
pRetInstr->m_opcode = CEE_RET;
pReWriter->InsertAfter(pReWriter->GetILList()->m_pPrev, pRetInstr);
bool isVoidMethod = (retTypeFlags & TypeFlagVoid) > 0;
auto ret = functionInfo.signature.GetRet();
bool retIsBoxedType = false;
mdToken retTypeTok;
if (!isVoidMethod) {
retTypeTok = ret.GetTypeTok(pEmit, corLibAssemblyRef);
if (ret.GetTypeFlags(elementType) & TypeFlagBoxedType) {
retIsBoxedType = true;
}
}
reWriterWrapper.SetILPosition(pRetInstr);
reWriterWrapper.StLocal(indexEx); //stloc
ILInstr* pRethrowInstr = reWriterWrapper.Rethrow(); //Rethrow
reWriterWrapper.LoadLocal(indexMethodTrace); //ldloc
ILInstr* pNewInstr = pReWriter->NewILInstr(); //Brfalse
pNewInstr->m_opcode = CEE_BRFALSE_S;
pReWriter->InsertBefore(pRetInstr, pNewInstr);
reWriterWrapper.LoadLocal(indexMethodTrace); //ldloc
reWriterWrapper.LoadLocal(indexRet);//ldloc
reWriterWrapper.LoadLocal(indexEx); //ldloc
reWriterWrapper.CallMember(endMemberRef, true); // call
ILInstr* pEndFinallyInstr = reWriterWrapper.EndFinally();
pNewInstr->m_pTarget = pEndFinallyInstr;
if (!isVoidMethod) {
reWriterWrapper.LoadLocal(indexRet);//ldloc
if (retIsBoxedType) {
reWriterWrapper.UnboxAny(retTypeTok);
}
else {
reWriterWrapper.Cast(retTypeTok);
}
}
for (ILInstr* pInstr = pReWriter->GetILList()->m_pNext; pInstr != pReWriter->GetILList(); pInstr = pInstr->m_pNext) {
switch (pInstr->m_opcode)
{
case CEE_RET:
{
if (pInstr != pRetInstr) {
if (!isVoidMethod) {
reWriterWrapper.SetILPosition(pInstr);
if (retIsBoxedType) {
reWriterWrapper.Box(retTypeTok);
}
reWriterWrapper.StLocal(indexRet); //stloc
}
pInstr->m_opcode = CEE_LEAVE_S; //Leave_S
pInstr->m_pTarget = pEndFinallyInstr->m_pNext;
}
break;
}
default:
break;
}
}
EHClause exClause{};
exClause.m_Flags = COR_ILEXCEPTION_CLAUSE_NONE;
exClause.m_pTryBegin = pTryStartInstr;
exClause.m_pTryEnd = pRethrowInstr->m_pPrev;
exClause.m_pHandlerBegin = pRethrowInstr->m_pPrev;
exClause.m_pHandlerEnd = pRethrowInstr;
exClause.m_ClassToken = exTypeRef;
EHClause finallyClause{};
finallyClause.m_Flags = COR_ILEXCEPTION_CLAUSE_FINALLY;
finallyClause.m_pTryBegin = pTryStartInstr;
finallyClause.m_pTryEnd = pRethrowInstr->m_pNext;
finallyClause.m_pHandlerBegin = pRethrowInstr->m_pNext;
finallyClause.m_pHandlerEnd = pEndFinallyInstr;
auto m_pEHNew = new EHClause[rewriter.m_nEH + 2];
for (unsigned i = 0; i < rewriter.m_nEH; i++) {
m_pEHNew[i] = rewriter.m_pEH[i];
}
rewriter.m_nEH += 2;
m_pEHNew[rewriter.m_nEH - 2] = exClause;
m_pEHNew[rewriter.m_nEH - 1] = finallyClause;
rewriter.m_pEH = m_pEHNew;
}
else
{
//static method
reWriterWrapper.LoadNull();// Ldnull
reWriterWrapper.StLocal(indexMethodTrace); //stloc
reWriterWrapper.LoadNull(); //Ldnull
reWriterWrapper.StLocal(indexEx); //stloc
reWriterWrapper.LoadNull();// Ldnull
reWriterWrapper.StLocal(indexRet);
ILInstr* pTryStartInstr = reWriterWrapper.CallMember0(getInstanceMemberRef, false);
reWriterWrapper.Cast(traceAgentTypeRef); //castclass
reWriterWrapper.LoadNull(); //ldstr
reWriterWrapper.LoadNull();
auto argNum = functionInfo.signature.NumberOfArguments();
reWriterWrapper.CreateArray(objectTypeRef, argNum); //newarr
auto arguments = functionInfo.signature.GetMethodArguments();
for (unsigned i = 0; i < argNum; i++) {
reWriterWrapper.BeginLoadValueIntoArray(i);
reWriterWrapper.LoadArgument(i);//ldarg Static index 0
auto argTypeFlags = arguments[i].GetTypeFlags(elementType);
if (argTypeFlags & TypeFlagByRef) {
reWriterWrapper.LoadIND(elementType); //ldind
}
if (argTypeFlags & TypeFlagBoxedType) {
auto tok = arguments[i].GetTypeTok(pEmit, corLibAssemblyRef);
if (tok == mdTokenNil) {
return S_OK;
}
reWriterWrapper.Box(tok);
}
reWriterWrapper.EndLoadValueIntoArray(); //stelem_ref
}
reWriterWrapper.LoadInt32((INT32)function_token); //ldc_i4
reWriterWrapper.CallMember(beforeMemberRef, true); // call
reWriterWrapper.Cast(methodTraceTypeRef); //Castclass
reWriterWrapper.StLocal(rewriter.cNewLocals - 1); //STLOC
ILInstr* pRetInstr = pReWriter->NewILInstr();
pRetInstr->m_opcode = CEE_RET;
pReWriter->InsertAfter(pReWriter->GetILList()->m_pPrev, pRetInstr);
bool isVoidMethod = (retTypeFlags & TypeFlagVoid) > 0;
auto ret = functionInfo.signature.GetRet();
bool retIsBoxedType = false;
mdToken retTypeTok;
if (!isVoidMethod) {
retTypeTok = ret.GetTypeTok(pEmit, corLibAssemblyRef);
if (ret.GetTypeFlags(elementType) & TypeFlagBoxedType) {
retIsBoxedType = true;
}
}
reWriterWrapper.SetILPosition(pRetInstr);
reWriterWrapper.StLocal(indexEx); //stloc
ILInstr* pRethrowInstr = reWriterWrapper.Rethrow(); //Rethrow
reWriterWrapper.LoadLocal(indexMethodTrace); //ldloc
ILInstr* pNewInstr = pReWriter->NewILInstr(); //Brfalse
pNewInstr->m_opcode = CEE_BRFALSE_S;
pReWriter->InsertBefore(pRetInstr, pNewInstr);
reWriterWrapper.LoadLocal(indexMethodTrace); //ldloc
reWriterWrapper.LoadLocal(indexRet);//ldloc
reWriterWrapper.LoadLocal(indexEx); //ldloc
reWriterWrapper.CallMember(endMemberRef, true); // call
ILInstr* pEndFinallyInstr = reWriterWrapper.EndFinally(); //Endfinally
pNewInstr->m_pTarget = pEndFinallyInstr;
if (!isVoidMethod) {
reWriterWrapper.LoadLocal(indexRet);//ldloc
if (retIsBoxedType) {
reWriterWrapper.UnboxAny(retTypeTok); // Unbox_Any
}
else {
reWriterWrapper.Cast(retTypeTok);
}
}
for (ILInstr* pInstr = pReWriter->GetILList()->m_pNext; pInstr != pReWriter->GetILList(); pInstr = pInstr->m_pNext) {
switch (pInstr->m_opcode)
{
case CEE_RET:
{
if (pInstr != pRetInstr) {
if (!isVoidMethod) {
reWriterWrapper.SetILPosition(pInstr);
if (retIsBoxedType) {
reWriterWrapper.Box(retTypeTok);
}
reWriterWrapper.StLocal(indexRet); //stloc
}
pInstr->m_opcode = CEE_LEAVE_S; //Leave_S
pInstr->m_pTarget = pEndFinallyInstr->m_pNext;
}
break;
}
default:
break;
}
}
EHClause exClause{};
exClause.m_Flags = COR_ILEXCEPTION_CLAUSE_NONE;
exClause.m_pTryBegin = pTryStartInstr;
exClause.m_pTryEnd = pRethrowInstr->m_pPrev;
exClause.m_pHandlerBegin = pRethrowInstr->m_pPrev;
exClause.m_pHandlerEnd = pRethrowInstr;
exClause.m_ClassToken = exTypeRef;
EHClause finallyClause{};
finallyClause.m_Flags = COR_ILEXCEPTION_CLAUSE_FINALLY;
finallyClause.m_pTryBegin = pTryStartInstr;
finallyClause.m_pTryEnd = pRethrowInstr->m_pNext;
finallyClause.m_pHandlerBegin = pRethrowInstr->m_pNext;
finallyClause.m_pHandlerEnd = pEndFinallyInstr;
auto m_pEHNew = new EHClause[rewriter.m_nEH + 2];
for (unsigned i = 0; i < rewriter.m_nEH; i++) {
m_pEHNew[i] = rewriter.m_pEH[i];
}
rewriter.m_nEH += 2;
m_pEHNew[rewriter.m_nEH - 2] = exClause;
m_pEHNew[rewriter.m_nEH - 1] = finallyClause;
rewriter.m_pEH = m_pEHNew;
}
hr = rewriter.Export();
....
The code that modifies the local variable signature (ModifyLocalSig):
HRESULT ModifyLocalSig(CComPtr<IMetaDataImport2>& pImport, CComPtr<IMetaDataEmit2>& pEmit, ILRewriter& reWriter, mdTypeRef exTypeRef,mdTypeRef methodTraceTypeRef)
{
HRESULT hr;
PCCOR_SIGNATURE rgbOrigSig = NULL;
ULONG cbOrigSig = 0;
UNALIGNED INT32 temp = 0;
if (reWriter.m_tkLocalVarSig != mdTokenNil)
{
IfFailRet(pImport->GetSigFromToken(reWriter.m_tkLocalVarSig, &rgbOrigSig, &cbOrigSig));
const auto len = CorSigCompressToken(methodTraceTypeRef, &temp);
if(cbOrigSig - len > 0){
if(rgbOrigSig[cbOrigSig - len -1]== ELEMENT_TYPE_CLASS){
if (memcmp(&rgbOrigSig[cbOrigSig - len], &temp, len) == 0) {
return E_FAIL;
}
}
}
}
auto exTypeRefSize = CorSigCompressToken(exTypeRef, &temp);
auto methodTraceTypeRefSize = CorSigCompressToken(methodTraceTypeRef, &temp);
ULONG cbNewSize = cbOrigSig + 1 + 1 + methodTraceTypeRefSize + 1 + exTypeRefSize;
ULONG cOrigLocals;
ULONG cNewLocalsLen;
ULONG cbOrigLocals = 0;
if (cbOrigSig == 0) {
cbNewSize += 2;
reWriter.cNewLocals = 3;
cNewLocalsLen = CorSigCompressData(reWriter.cNewLocals, &temp);
}
else {
cbOrigLocals = CorSigUncompressData(rgbOrigSig + 1, &cOrigLocals);
reWriter.cNewLocals = cOrigLocals + 3;
cNewLocalsLen = CorSigCompressData(reWriter.cNewLocals, &temp);
cbNewSize += cNewLocalsLen - cbOrigLocals;
}
auto rgbNewSig = new COR_SIGNATURE[cbNewSize];
*rgbNewSig = IMAGE_CEE_CS_CALLCONV_LOCAL_SIG;
ULONG rgbNewSigOffset = 1;
memcpy(rgbNewSig + rgbNewSigOffset, &temp, cNewLocalsLen);
rgbNewSigOffset += cNewLocalsLen;
if (cbOrigSig > 0) {
const auto cbOrigCopyLen = cbOrigSig - 1 - cbOrigLocals;
memcpy(rgbNewSig + rgbNewSigOffset, rgbOrigSig + 1 + cbOrigLocals, cbOrigCopyLen);
rgbNewSigOffset += cbOrigCopyLen;
}
rgbNewSig[rgbNewSigOffset++] = ELEMENT_TYPE_OBJECT;
rgbNewSig[rgbNewSigOffset++] = ELEMENT_TYPE_CLASS;
exTypeRefSize = CorSigCompressToken(exTypeRef, &temp);
memcpy(rgbNewSig + rgbNewSigOffset, &temp, exTypeRefSize);
rgbNewSigOffset += exTypeRefSize;
rgbNewSig[rgbNewSigOffset++] = ELEMENT_TYPE_CLASS;
methodTraceTypeRefSize = CorSigCompressToken(methodTraceTypeRef, &temp);
memcpy(rgbNewSig + rgbNewSigOffset, &temp, methodTraceTypeRefSize);
rgbNewSigOffset += methodTraceTypeRefSize;
IfFailRet(pEmit->GetTokenFromSig(&rgbNewSig[0], cbNewSize, &reWriter.m_tkLocalVarSig));
return S_OK;
}
Method modification template:
// Before modification
private Task DataRead(string a, int b)
{
return Task.Delay(10);
}
//After modification
private Task DataReadWrapper(string a, int b)
{
object ret = null;
Exception ex = null;
MethodTrace methodTrace = null;
try
{
methodTrace = (MethodTrace)((TraceAgent)TraceAgent.GetInstance())
.BeforeMethod(this.GetType(), this, new object[]{ a, b }, functiontoken);
ret = Task.Delay(10);
goto T;
}
catch (Exception e)
{
ex = e;
throw;
}
finally
{
if (methodTrace != null)
{
methodTrace.EndMethod(ret, ex);
}
}
T:
return (Task)ret;
}
Error:
Unhandled exception.
Cannot print exception string because Exception.ToString() failed.
Application '/LM/W3SVC/1/ROOT' with physical root 'C:\inetpub\wwwroot\DotNetRangeCore\' failed to load coreclr. Exception message: CLR worker thread exited prematurely
There is no problem if I modify the IL of other methods with this code, such as set_CommandText, System.Random.Next, System.Web.HttpContext.FinishPipelineRequest, and so on.
I don't know why modifying Concat goes wrong.
I solved it by working around it: if a method reports an error after its IL is modified (as Concat does), I replace it with a wrapper method instead.
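For illustration only (the wrapper below is hypothetical, not the actual code used): instead of rewriting System.String.Concat itself, calls are redirected to a wrapper method whose IL can be instrumented safely.
// Hypothetical wrapper: the profiler rewrites this method's IL
// instead of String.Concat's, and call sites are redirected to it.
public static class ConcatWrapper
{
    public static string Concat(string a, string b)
    {
        // tracking code is injected here by the IL rewriter
        return string.Concat(a, b);
    }
}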

ZKEmkeeper: Events not triggering, DLL looks like it's not working

I've been stuck for a while trying to use the zkemkeeper SDK in an application that uses an InBios controller for fingerprints. When I try to trigger an event, nothing happens. I created a console application to test the SDK before implementing it in ASP.NET MVC. I first connect to the device and then add the OnAttTransactionEx event handler. Can someone point out what I'm doing wrong?
Here is my code:
private static void Main(string[] args)
{
CZKEMClass zkem = new CZKEMClass();
int idwErrorCode = 0;
const string ipAddr = "10.0.1.240";
bool isConnected;
try
{
isConnected = zkem.Connect_Net(ipAddr, Convert.ToInt32(4370));
}
catch (Exception ext)
{
Console.WriteLine("Erro: " + ext);
zkem.GetLastError(ref idwErrorCode);
if (idwErrorCode != 0)
{
zkem.GetLastError(idwErrorCode);
}
else
{
Console.WriteLine("No data from terminal returns!");
}
throw new Exception();
}
if (isConnected)
{
//Here you can register the realtime events that you want to be triggered(the parameters 65535 means registering all)
zkem.EnableDevice(1, true);
zkem.RegEvent(1, 65535);
zkem.OnAttTransactionEx += axCZKEM1_OnAttTransactionEx;
string sdwEnrollNumber = "";
int idwVerifyMode = 0;
int idwInOutMode = 0;
int idwYear = 0;
int idwMonth = 0;
int idwDay = 0;
int idwHour = 0;
int idwMinute = 0;
int idwSecond = 0;
int idwWorkcode = 0;
zkem.EnableDevice(1, false); //disable the device
if (zkem.ReadGeneralLogData(1))
{
//read all the attendance records to the memory
while (zkem.SSR_GetGeneralLogData(1, out sdwEnrollNumber, out idwVerifyMode,
out idwInOutMode, out idwYear, out idwMonth, out idwDay, out idwHour,
out idwMinute, out idwSecond, ref idwWorkcode))
{
//get records from the memory
DateTime datetime = new DateTime(idwYear, idwMonth, idwDay, idwHour, idwMinute, idwSecond);
int unixDate = (int) datetime.Subtract(new DateTime(1970, 1, 1)).TotalSeconds;
try
{
Console.WriteLine(idwInOutMode);
Console.WriteLine(sdwEnrollNumber);
Console.WriteLine(unixDate);
}
catch (Exception ex)
{
//ignored
}
try
{
Console.WriteLine("inserted: " +
$"{idwYear}/{idwMonth}/{idwDay} {idwHour}:{idwMinute}:{idwSecond}.000");
}
catch (Exception ex)
{
}
}
}
Console.WriteLine("Fim");
}
else
{
zkem.GetLastError(ref idwErrorCode);
if (idwErrorCode != 0)
{
zkem.GetLastError(idwErrorCode);
}
else
{
Console.WriteLine("No data from terminal returns!");
}
}
zkem.EnableDevice(1, true);
Console.WriteLine("Teste");
do
{
while (!Console.KeyAvailable)
{
}
} while (Console.ReadKey(true).Key != ConsoleKey.Escape);
}
public static void axCZKEM1_OnAttTransactionEx(string sEnrollNumber, int iIsInValid, int iAttState,
int iVerifyMethod, int iYear, int iMonth, int iDay, int iHour, int iMinute, int iSecond, int iWorkCode)
{
Console.WriteLine("Finger Recognized");
}
You must use an STA thread:
Thread TT = new Thread(() =>
{
CZKEMClass zkem = new CZKEMClass();
Application.Run();
});
TT.IsBackground = true;
TT.SetApartmentState(ApartmentState.STA);
TT.Start();
Then register the event from that thread; ZKEM events fire on STA threads.
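For example (a sketch that reuses the IP, port, and handler from the question), the connect, event registration, and subscription can all happen on the same STA thread that runs the message loop:
Thread staThread = new Thread(() =>
{
    CZKEMClass zkem = new CZKEMClass();
    if (zkem.Connect_Net("10.0.1.240", 4370))   // device address from the question
    {
        zkem.RegEvent(1, 65535);                // register all realtime events
        zkem.OnAttTransactionEx += axCZKEM1_OnAttTransactionEx;
    }
    Application.Run();                          // message loop so the COM events are delivered
});
staThread.SetApartmentState(ApartmentState.STA);
staThread.IsBackground = true;
staThread.Start();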

Increase/Decrease audio volume using FFmpeg

I am currently using C# interop calls to the FFmpeg APIs to handle video and audio. I have the following code in place to extract the audio from a video and write it to a file:
while (ffmpeg.av_read_frame(formatContext, &packet) >= 0)
{
if (packet.stream_index == streamIndex)
{
while (packet.size > 0)
{
int frameDecoded;
int frameDecodedResult = ffmpeg.avcodec_decode_audio4(codecContext, frame, &frameDecoded, packet);
if (frameDecoded > 0 && frameDecodedResult >= 0)
{
//writeAudio.WriteFrame(frame);
packet.data += totalBytesDecoded;
packet.size -= totalBytesDecoded;
}
}
frameIndex++;
}
Avcodec.av_free_packet(&packet);
}
This is all working correctly. I'm currently using the FFmpeg.AutoGen project for the API access.
I want to be able to increase/decrease the volume of the audio before it's written to the file, but I cannot seem to find a command or any help with this. Does it have to be done manually?
Update 1:
After receiving some help, this is the class layout I have:
public unsafe class FilterVolume
{
#region Private Member Variables
private AVFilterGraph* m_filterGraph = null;
private AVFilterContext* m_aBufferSourceFilterContext = null;
private AVFilterContext* m_aBufferSinkFilterContext = null;
#endregion
#region Private Constant Member Variables
private const int EAGAIN = 11;
#endregion
public FilterVolume(AVCodecContext* codecContext, AVStream* stream, float volume)
{
CodecContext = codecContext;
Stream = stream;
Volume = volume;
Initialise();
}
public AVFrame* Adjust(AVFrame* frame)
{
AVFrame* returnFilteredFrame = ffmpeg.av_frame_alloc();
if (m_aBufferSourceFilterContext != null && m_aBufferSinkFilterContext != null)
{
int bufferSourceAddFrameResult = ffmpeg.av_buffersrc_add_frame(m_aBufferSourceFilterContext, frame);
if (bufferSourceAddFrameResult < 0)
{
}
int bufferSinkGetFrameResult = ffmpeg.av_buffersink_get_frame(m_aBufferSinkFilterContext, returnFilteredFrame);
if (bufferSinkGetFrameResult < 0 && bufferSinkGetFrameResult != -EAGAIN)
{
}
}
return returnFilteredFrame;
}
public void Dispose()
{
Cleanup(m_filterGraph);
}
#region Private Properties
private AVCodecContext* CodecContext { get; set; }
private AVStream* Stream { get; set; }
private float Volume { get; set; }
#endregion
#region Private Setup Helper Functions
private void Initialise()
{
m_filterGraph = GetAllocatedFilterGraph();
string aBufferFilterArguments = string.Format("sample_fmt={0}:channel_layout={1}:sample_rate={2}:time_base={3}/{4}",
(int)CodecContext->sample_fmt,
CodecContext->channel_layout,
CodecContext->sample_rate,
Stream->time_base.num,
Stream->time_base.den);
AVFilterContext* aBufferSourceFilterContext = CreateFilter("abuffer", m_filterGraph, aBufferFilterArguments);
AVFilterContext* volumeFilterContext = CreateFilter("volume", m_filterGraph, string.Format("volume={0}", Volume));
AVFilterContext* aBufferSinkFilterContext = CreateFilter("abuffersink", m_filterGraph);
LinkFilter(aBufferSourceFilterContext, volumeFilterContext);
LinkFilter(volumeFilterContext, aBufferSinkFilterContext);
SetFilterGraphConfiguration(m_filterGraph, null);
m_aBufferSourceFilterContext = aBufferSourceFilterContext;
m_aBufferSinkFilterContext = aBufferSinkFilterContext;
}
#endregion
#region Private Cleanup Helper Functions
private static void Cleanup(AVFilterGraph* filterGraph)
{
if (filterGraph != null)
{
ffmpeg.avfilter_graph_free(&filterGraph);
}
}
#endregion
#region Provate Helpers
private AVFilterGraph* GetAllocatedFilterGraph()
{
AVFilterGraph* filterGraph = ffmpeg.avfilter_graph_alloc();
if (filterGraph == null)
{
}
return filterGraph;
}
private AVFilter* GetFilterByName(string name)
{
AVFilter* filter = ffmpeg.avfilter_get_by_name(name);
if (filter == null)
{
}
return filter;
}
private void SetFilterGraphConfiguration(AVFilterGraph* filterGraph, void* logContext)
{
int filterGraphConfigResult = ffmpeg.avfilter_graph_config(filterGraph, logContext);
if (filterGraphConfigResult < 0)
{
}
}
private AVFilterContext* CreateFilter(string filterName, AVFilterGraph* filterGraph, string filterArguments = null)
{
AVFilter* filter = GetFilterByName(filterName);
AVFilterContext* filterContext;
int aBufferFilterCreateResult = ffmpeg.avfilter_graph_create_filter(&filterContext, filter, filterName, filterArguments, null, filterGraph);
if (aBufferFilterCreateResult < 0)
{
}
return filterContext;
}
private void LinkFilter(AVFilterContext* source, AVFilterContext* destination)
{
int filterLinkResult = ffmpeg.avfilter_link(source, 0, destination, 0);
if (filterLinkResult < 0)
{
}
}
#endregion
}
The Adjust() function is called after a frame is decoded. I'm currently getting a -22 error when av_buffersrc_add_frame() is called. This indicates that a parameter is invalid, but after debugging, I cannot see anything that would be causing this.
This is how the code is called:
while (ffmpeg.av_read_frame(formatContext, &packet) >= 0)
{
if (packet.stream_index == streamIndex)
{
while (packet.size > 0)
{
int frameDecoded;
int frameDecodedResult = ffmpeg.avcodec_decode_audio4(codecContext, frame, &frameDecoded, packet);
if (frameDecoded > 0 && frameDecodedResult >= 0)
{
AVFrame* filteredFrame = m_filterVolume.Adjust(frame);
//writeAudio.WriteFrame(filteredFrame);
packet.data += totalBytesDecoded;
packet.size -= totalBytesDecoded;
}
}
frameIndex++;
}
Avcodec.av_free_packet(&packet);
}
Update 2:
Cracked it. The "channel_layout" option in the filter argument string is supposed to be hexadecimal. This is what the string formatting should look like:
string aBufferFilterArguments = string.Format("sample_fmt={0}:channel_layout=0x{1:X}:sample_rate={2}:time_base={3}/{4}",
(int)CodecContext->sample_fmt,
CodecContext->channel_layout,
CodecContext->sample_rate,
Stream->time_base.num,
Stream->time_base.den);
I do not know which API you are using, but the ffmpeg command line has a volume filter that lets you increase or decrease the audio:
Decrease to half:
ffmpeg -i input.wav -af "volume=0.5" output.wav
Increase 50%:
ffmpeg -i input.wav -af "volume=1.5" output.wav
or in dB:
ffmpeg -i input.wav -af "volume=10dB" output.wav
Hope it helps.
What you need to do is build a filter graph and process the audio stream through that graph. In your case, the graph is just INPUT ("abuffer") -> VOLUME -> OUTPUT ("abuffersink").
Here is a sample console app that demonstrates that. It's loosely based on ffmpeg samples filtering_audio, filter_audio and remuxing.
You can use it like this:
ChangeVolume.exe http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4 bunny_half.mp4 0.5
And here is the code:
class Program
{
static unsafe void Main(string[] args)
{
Console.WriteLine(#"Current directory: " + Environment.CurrentDirectory);
Console.WriteLine(#"Running in {0}-bit mode.", Environment.Is64BitProcess ? #"64" : #"32");
// adapt this to your context
var ffmpegPath = string.Format(#"../../../FFmpeg/bin/{0}", Environment.Is64BitProcess ? #"x64" : #"x86");
InteropHelper.SetDllDirectory(ffmpegPath);
int ret, i;
if (args.Length < 3)
{
Console.WriteLine("usage: ChangeVolume input output <volume ratio>");
return;
}
string in_filename = args[0];
string out_filename = args[1];
double ratio = double.Parse(args[2]);
ffmpeg.av_register_all();
ffmpeg.avfilter_register_all();
// open input file
AVFormatContext* ifmt_ctx = null;
InteropHelper.Check(ffmpeg.avformat_open_input(&ifmt_ctx, in_filename, null, null));
// dump input
ffmpeg.av_dump_format(ifmt_ctx, 0, in_filename, 0);
// get streams info to determine audio stream index
InteropHelper.Check(ffmpeg.avformat_find_stream_info(ifmt_ctx, null));
// determine input decoder
AVCodec* dec;
int audio_stream_index = ffmpeg.av_find_best_stream(ifmt_ctx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &dec, 0);
AVCodecContext* dec_ctx = ifmt_ctx->streams[audio_stream_index]->codec;
// open input decoder
InteropHelper.Check(ffmpeg.avcodec_open2(dec_ctx, dec, null));
// build a filter graph
AVFilterContext* buffersrc_ctx;
AVFilterContext* buffersink_ctx;
AVFilterGraph* filter_graph = init_filter_graph(ifmt_ctx, dec_ctx, audio_stream_index, &buffersrc_ctx, &buffersink_ctx, ratio);
// prepare output
AVFormatContext* ofmt_ctx = null;
InteropHelper.Check(ffmpeg.avformat_alloc_output_context2(&ofmt_ctx, null, null, out_filename));
InteropHelper.Check(ofmt_ctx);
// create output streams
AVCodecContext* enc_ctx = null;
ofmt_ctx->oformat->flags |= InteropHelper.AVFMT_NOTIMESTAMPS;
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
AVStream* in_stream = ifmt_ctx->streams[i];
if (in_stream->codec->codec_type == AVMediaType.AVMEDIA_TYPE_DATA) // skip these
continue;
AVStream* out_stream = ffmpeg.avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
InteropHelper.Check(out_stream);
InteropHelper.Check(ffmpeg.avcodec_copy_context(out_stream->codec, in_stream->codec));
out_stream->codec->codec_tag = 0;
if ((ofmt_ctx->oformat->flags & InteropHelper.AVFMT_GLOBALHEADER) != 0)
{
out_stream->codec->flags |= InteropHelper.AV_CODEC_FLAG_GLOBAL_HEADER;
}
if (i == audio_stream_index)
{
// create audio encoder from audio decoder
AVCodec* enc = ffmpeg.avcodec_find_encoder(dec_ctx->codec_id);
InteropHelper.Check(enc);
enc_ctx = ffmpeg.avcodec_alloc_context3(enc);
InteropHelper.Check(enc_ctx);
enc_ctx->sample_rate = dec_ctx->sample_rate;
enc_ctx->channel_layout = dec_ctx->channel_layout;
enc_ctx->channels = ffmpeg.av_get_channel_layout_nb_channels(enc_ctx->channel_layout);
enc_ctx->sample_fmt = enc->sample_fmts[0];
enc_ctx->time_base.num = 1;
enc_ctx->time_base.den = enc_ctx->sample_rate;
InteropHelper.Check(ffmpeg.avcodec_open2(enc_ctx, enc, null));
}
}
// dump output
ffmpeg.av_dump_format(ofmt_ctx, 0, out_filename, 1);
if ((ofmt_ctx->oformat->flags & InteropHelper.AVFMT_NOFILE) == 0)
{
// open output file
InteropHelper.Check(ffmpeg.avio_open(&ofmt_ctx->pb, out_filename, InteropHelper.AVIO_FLAG_WRITE));
}
// write output file header
InteropHelper.Check(ffmpeg.avformat_write_header(ofmt_ctx, null));
// read all packets and process
AVFrame* frame = ffmpeg.av_frame_alloc();
AVFrame* filt_frame = ffmpeg.av_frame_alloc();
while (true)
{
AVStream* in_stream;
AVStream* out_stream;
AVPacket pkt;
ret = ffmpeg.av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
in_stream = ifmt_ctx->streams[pkt.stream_index];
if (in_stream->codec->codec_type == AVMediaType.AVMEDIA_TYPE_DATA)
continue;
// audio stream? we need to pass it through our filter graph
if (pkt.stream_index == audio_stream_index)
{
// decode audio (packet -> frame)
int got_frame = 0;
InteropHelper.Check(ffmpeg.avcodec_decode_audio4(dec_ctx, frame, &got_frame, &pkt));
if (got_frame > 0)
{
// add the frame into the filter graph
InteropHelper.Check(ffmpeg.av_buffersrc_add_frame(buffersrc_ctx, frame));
while (true)
{
// get the frame out from the filter graph
ret = ffmpeg.av_buffersink_get_frame(buffersink_ctx, filt_frame);
const int EAGAIN = 11;
if (ret == -EAGAIN)
break;
InteropHelper.Check(ret);
// encode audio (frame -> packet)
AVPacket enc_pkt = new AVPacket();
int got_packet = 0;
InteropHelper.Check(ffmpeg.avcodec_encode_audio2(enc_ctx, &enc_pkt, filt_frame, &got_packet));
enc_pkt.stream_index = pkt.stream_index;
InteropHelper.Check(ffmpeg.av_interleaved_write_frame(ofmt_ctx, &enc_pkt));
ffmpeg.av_frame_unref(filt_frame);
}
}
}
else
{
// write other (video) streams
out_stream = ofmt_ctx->streams[pkt.stream_index];
pkt.pts = ffmpeg.av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
pkt.dts = ffmpeg.av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
pkt.duration = ffmpeg.av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
InteropHelper.Check(ffmpeg.av_interleaved_write_frame(ofmt_ctx, &pkt));
}
ffmpeg.av_packet_unref(&pkt);
}
// write trailer, close file
ffmpeg.av_write_trailer(ofmt_ctx);
ffmpeg.avformat_close_input(&ifmt_ctx);
if ((ofmt_ctx->oformat->flags & InteropHelper.AVFMT_NOFILE) == 0)
{
ffmpeg.avio_closep(&ofmt_ctx->pb);
}
ffmpeg.avformat_free_context(ofmt_ctx);
ffmpeg.av_frame_free(&filt_frame);
ffmpeg.av_frame_free(&frame);
ffmpeg.avfilter_graph_free(&filter_graph);
return;
}
static unsafe AVFilterGraph* init_filter_graph(AVFormatContext* format, AVCodecContext* codec, int audio_stream_index, AVFilterContext** buffersrc_ctx, AVFilterContext** buffersink_ctx, double volumeRatio)
{
// create graph
var filter_graph = ffmpeg.avfilter_graph_alloc();
InteropHelper.Check(filter_graph);
// add input filter
var abuffersrc = ffmpeg.avfilter_get_by_name("abuffer");
if (abuffersrc == null) InteropHelper.CheckTag("\x00F8FIL");
string args = string.Format("sample_fmt={0}:channel_layout={1}:sample_rate={2}:time_base={3}/{4}",
(int)codec->sample_fmt,
codec->channel_layout,
codec->sample_rate,
format->streams[audio_stream_index]->time_base.num,
format->streams[audio_stream_index]->time_base.den);
InteropHelper.Check(ffmpeg.avfilter_graph_create_filter(buffersrc_ctx, abuffersrc, "IN", args, null, filter_graph));
// add volume filter
var volume = ffmpeg.avfilter_get_by_name("volume");
if (volume == null) InteropHelper.CheckTag("\x00F8FIL");
AVFilterContext* volume_ctx;
InteropHelper.Check(ffmpeg.avfilter_graph_create_filter(&volume_ctx, volume, "VOL", "volume=" + volumeRatio.ToString(CultureInfo.InvariantCulture), null, filter_graph));
// add output filter
var abuffersink = ffmpeg.avfilter_get_by_name("abuffersink");
if (abuffersink == null) InteropHelper.CheckTag("\x00F8FIL");
InteropHelper.Check(ffmpeg.avfilter_graph_create_filter(buffersink_ctx, abuffersink, "OUT", "", null, filter_graph));
// connect input -> volume -> output
InteropHelper.Check(ffmpeg.avfilter_link(*buffersrc_ctx, 0, volume_ctx, 0));
InteropHelper.Check(ffmpeg.avfilter_link(volume_ctx, 0, *buffersink_ctx, 0));
InteropHelper.Check(ffmpeg.avfilter_graph_config(filter_graph, null));
return filter_graph;
}
}
It uses a utility InteropHelper class derived from AutoGen's:
public class InteropHelper
{
[DllImport("kernel32", SetLastError = true)]
public static extern bool SetDllDirectory(string lpPathName);
public static readonly int AVERROR_EOF = -GetTag("EOF ");
public static readonly int AVERROR_UNKNOWN = -GetTag("UNKN");
public static readonly int AVFMT_GLOBALHEADER = 0x0040;
public static readonly int AVFMT_NOFILE = 0x0001;
public static readonly int AVIO_FLAG_WRITE = 2;
public static readonly int AV_CODEC_FLAG_GLOBAL_HEADER = (1 << 22);
public static readonly int AV_ROUND_ZERO = 0;
public static readonly int AV_ROUND_INF = 1;
public static readonly int AV_ROUND_DOWN = 2;
public static readonly int AV_ROUND_UP = 3;
public static readonly int AV_ROUND_PASS_MINMAX = 8192;
public static readonly int AV_ROUND_NEAR_INF = 5;
public static readonly int AVFMT_NOTIMESTAMPS = 0x0080;
public static unsafe void Check(void* ptr)
{
if (ptr != null)
return;
const int ENOMEM = 12;
Check(-ENOMEM);
}
public static unsafe void Check(IntPtr ptr)
{
if (ptr != IntPtr.Zero)
return;
Check((void*)null);
}
// example: "\x00F8FIL" is "Filter not found" (check libavutil/error.h)
public static void CheckTag(string tag)
{
Check(-GetTag(tag));
}
public static int GetTag(string tag)
{
var bytes = new byte[4];
for (int i = 0; i < 4; i++)
{
bytes[i] = (byte)tag[i];
}
return BitConverter.ToInt32(bytes, 0);
}
public static void Check(int res)
{
if (res >= 0)
return;
string err = "ffmpeg error " + res;
string text = GetErrorText(res);
if (!string.IsNullOrWhiteSpace(text))
{
err += ": " + text;
}
throw new Exception(err);
}
public static string GetErrorText(int res)
{
IntPtr err = Marshal.AllocHGlobal(256);
try
{
ffmpeg.av_strerror(res, err, 256);
return Marshal.PtrToStringAnsi(err);
}
finally
{
Marshal.FreeHGlobal(err);
}
}
}

How to get the active process file name using a Windows Forms application?

public static string GetActiveProcessFileName()
{
try
{
IntPtr hwnd = GetForegroundWindow();
uint pid;
GetWindowThreadProcessId(hwnd, out pid);
Process p = Process.GetProcessById((int)pid);
// return p.MainModule.FileName;
CommandLine = GetMainModuleFilepath((int)pid);
if (CommandLine != null)
{
var array = CommandLine.Split('"');
if (array.Length == 3)
{
if (array[array.Length - 1].Equals(" "))
{
return "Application";
}
if (!array[array.Length - 1].Equals(" "))
{
return array[array.Length - 1];
}
return null;
}
if (array.Length == 5)
{
return array[array.Length - 2];
}
return "Explorer";
}
return "Explorer";
}
catch (Exception ex)
{
ErrorLog.ErrorLog.Log(ex);
return "Explorer";
}
}
Here "[CommandLine]" get current open file names correnctly..
if i run my application.executed successfully..Now
i open 3 notepad files like abc.txt,aaa.txt,dde.txt one by one then,Which will opened file will be display as normal...
If i opened word documents 3 files one by one or excel files..I get only first opened file names saved only...
How can i get correct result of open document Why i got this problem when open word or excel or pdf file open situvation...
Use the code below to get the Word file instances:
private void timer1_Tick(object sender, EventArgs e)
{
y = GetActiveWindowTitle();
try
{
Microsoft.Office.Interop.Word.Application WordObj;
WordObj = (Microsoft.Office.Interop.Word.Application)System.Runtime.InteropServices.Marshal.GetActiveObject("Word.Application");
var vvv = WordObj.StartupPath;
x = "";
for (int i = 0; i < WordObj.Windows.Count; i++)
{
object idx = i + 1;
Microsoft.Office.Interop.Word.Window WinObj = WordObj.Windows.get_Item(ref idx);
// doc_list.Add(WinObj.Document.FullName);
x = x + "," + WinObj.Document.FullName;
//x = WinObj.Document.FullName;
}
}
catch (Exception ex)
{
// No documents opened
}
string[] ax=x.Split(',');
// string[] ax1 = x1.Split(',');
ForAllWordFiles.Text = x;
ForWordTitle.Text = y;
if (y != null)
{
ActiveWord.Text = " ";
if (y.Contains("- Microsoft Word"))
{
ForWordTitle.Text = y.Substring(0, y.Length - 17);
foreach (var item in ax)
{
if (item.Contains(ForWordTitle.Text))
{
ActiveWord.Text = item;
break;
}
ActiveWord.Text = " ";
}
}
}
}
This code works with Office 2010.

System.InvalidOperationException in Output Window

I constantly get the following message in my Output/Debug window. The app doesn't crash, but I was wondering what the deal with it is:
A first chance exception of type 'System.InvalidOperationException' occurred in System.dll
My code (Sol.cs):
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ConsoleApplication1
{
class Sol
{
public LinkedList<int> tower1 = new LinkedList<int>();
public LinkedList<int> tower2 = new LinkedList<int>();
public LinkedList<int> tower3 = new LinkedList<int>();
public static LinkedList<string> BFS = new LinkedList<string>();
public static LinkedList<string> DFS = new LinkedList<string>();
public static LinkedList<string> IDS = new LinkedList<string>();
public int depth;
public LinkedList<Sol> neighbors;
public Sol()
{
}
public Sol(LinkedList<int> tower1, LinkedList<int> tower2, LinkedList<int> tower3)
{
this.tower1 = tower1;
this.tower2 = tower2;
this.tower3 = tower3;
neighbors = new LinkedList<Sol>();
}
public virtual void getneighbors()
{
Sol temp = this.copy();
Sol neighbor1 = this.copy();
Sol neighbor2 = this.copy();
Sol neighbor3 = this.copy();
Sol neighbor4 = this.copy();
Sol neighbor5 = this.copy();
Sol neighbor6 = this.copy();
if (temp.tower1.Count != 0)
{
if (neighbor1.tower2.Count != 0)
{
if (neighbor1.tower1.First.Value < neighbor1.tower2.First.Value)
{
neighbor1.tower2.AddFirst(neighbor1.tower1.First);
neighbor1.tower1.RemoveFirst();
neighbors.AddLast(neighbor1);
}
}
else
{
neighbor1.tower2.AddFirst(neighbor1.tower1.First);
neighbor1.tower1.RemoveFirst();
neighbors.AddLast(neighbor1);
}
if (neighbor2.tower3.Count != 0)
{
if (neighbor2.tower1.First.Value < neighbor2.tower3.First.Value)
{
neighbor2.tower3.AddFirst(neighbor2.tower1.First);
neighbor2.tower1.RemoveFirst();
neighbors.AddLast(neighbor2);
}
}
else
{
neighbor2.tower3.AddFirst(neighbor2.tower1.First);
neighbor2.tower1.RemoveFirst();
neighbors.AddLast(neighbor2);
}
}
//-------------
if (temp.tower2.Count != 0)
{
if (neighbor3.tower1.Count != 0)
{
if (neighbor3.tower2.First.Value < neighbor3.tower1.First.Value)
{
neighbor3.tower1.AddFirst(neighbor3.tower2.First);
neighbor3.tower2.RemoveFirst();
neighbors.AddLast(neighbor3);
}
}
else
{
neighbor3.tower1.AddFirst(neighbor3.tower2.First);
neighbor3.tower2.RemoveFirst();
neighbors.AddLast(neighbor3);
}
if (neighbor4.tower3.Count != 0)
{
if (neighbor4.tower2.First.Value < neighbor4.tower3.First.Value)
{
neighbor4.tower3.AddFirst(neighbor4.tower2.First);
neighbor4.tower2.RemoveFirst();
neighbors.AddLast(neighbor4);
}
}
else
{
neighbor4.tower3.AddFirst(neighbor4.tower2.First);
neighbor4.tower2.RemoveFirst();
neighbors.AddLast(neighbor4);
}
}
//------------------------
if (temp.tower3.Count() != 0)
{
if (neighbor5.tower1.Count() != 0)
{
if (neighbor5.tower3.ElementAtOrDefault(0) < neighbor5.tower1.ElementAtOrDefault(0))
{
neighbor5.tower1.AddFirst(neighbor5.tower3.First);
neighbor5.tower3.RemoveFirst();
neighbors.AddLast(neighbor5);
}
}
else
{
neighbor5.tower1.AddFirst(neighbor5.tower3.First);
neighbor5.tower3.RemoveFirst();
neighbors.AddLast(neighbor5);
}
if (neighbor6.tower2.Count() != 0)
{
if (neighbor6.tower3.ElementAtOrDefault(0) < neighbor6.tower2.ElementAtOrDefault(0))
{
neighbor6.tower2.AddFirst(neighbor6.tower3.First);
neighbor6.tower3.RemoveFirst();
neighbors.AddLast(neighbor6);
}
}
else
{
neighbor6.tower2.AddFirst(neighbor6.tower3.First);
neighbor6.tower3.RemoveFirst();
neighbors.AddLast(neighbor6);
}
}
}
public override string ToString()
{
string str;
str = "tower1" + tower1.ToString() + " tower2" + tower2.ToString() + " tower3" + tower3.ToString();
return str;
}
public Sol copy()
{
Sol So;
LinkedList<int> l1 = new LinkedList<int>();
LinkedList<int> l2 = new LinkedList<int>();
LinkedList<int> l3 = new LinkedList<int>();
for (int i = 0; i <= this.tower1.Count() - 1; i++)
{
l1.AddLast(tower1.ElementAt(i));
}
for (int i = 0; i <= this.tower2.Count - 1; i++)
{
l2.AddLast(tower2.ElementAt(i));
}
for (int i = 0; i <= this.tower3.Count - 1; i++)
{
l3.AddLast(tower3.ElementAt(i));
}
So = new Sol(l1, l2, l3);
return So;
}
public bool Equals(Sol sol)
{
if (this.tower1.Equals(sol.tower1) & this.tower2.Equals(sol.tower2) & this.tower3.Equals(sol.tower3))
return true;
return false;
}
public virtual bool containedin(Stack<Sol> vec)
{
bool found = false;
for (int i = 0; i <= vec.Count - 1; i++)
{
if (vec.ElementAt(i).tower1.Equals(this.tower1) && vec.ElementAt(i).tower2.Equals(this.tower2) && vec.ElementAt(i).tower3.Equals(this.tower3))
{
found = true;
break;
}
}
return found;
}
public virtual bool breadthFirst(Sol start, Sol goal)
{
Stack<Sol> nextStack = new Stack<Sol>();
Stack<Sol> traversed = new Stack<Sol>();
bool found = false;
start.depth = 0;
nextStack.Push(start);
while (nextStack.Count != 0)
{
Sol sol = nextStack.Pop();
BFS.AddFirst("poped State:" + sol.ToString() + "level " + sol.depth);
traversed.Push(sol);
if (sol.Equals(goal))
{
found = true;
BFS.AddFirst("Goal:" + sol.ToString());
break;
}
else
{
sol.getneighbors();
foreach (Sol neighbor in sol.neighbors)
{
if (!neighbor.containedin(traversed) && !neighbor.containedin(nextStack))
{
neighbor.depth = (sol.depth + 1);
nextStack.Push(neighbor);
}
}
}
}
return found;
}
public virtual bool depthFirst(Sol start, Sol goal)
{
Stack<Sol> nextStack = new Stack<Sol>();
Stack<Sol> traversed = new Stack<Sol>();
bool found = false;
start.depth = 0;
nextStack.Push(start);
while (nextStack.Count != 0)
{
//Dequeue next State for comparison
//And add it 2 list of traversed States
Sol sol = nextStack.Pop();
DFS.AddFirst("poped State:" + sol.ToString() + "level " + sol.depth);
traversed.Push(sol);
if (sol.Equals(goal))
{
found = true;
DFS.AddFirst("Goal:" + sol.ToString());
break;
}
else
{
sol.getneighbors();
foreach (Sol neighbor in sol.neighbors)
{
if (!neighbor.containedin(traversed) && !neighbor.containedin(nextStack))
{
neighbor.depth = sol.depth + 1;
nextStack.Push(neighbor);
}
}
}
}
return found;
}
public virtual bool iterativedeepening(Sol start, Sol goal)
{
bool found = false;
for (int level = 0; ; level++)
{
Stack<Sol> nextStack = new Stack<Sol>();
Stack<Sol> traversed = new Stack<Sol>();
start.depth = 0;
nextStack.Push(start);
while (nextStack.Count != 0)
{
Sol sol = nextStack.Pop();
IDS.AddFirst("poped State:" + sol.ToString() + "Level" + sol.depth);
traversed.Push(sol);
if (sol.Equals(goal))
{
found = true;
IDS.AddFirst("Goal:" + sol.ToString());
break;
}
else if (sol.depth < level)
{
sol.getneighbors();
foreach (Sol neighbor in sol.neighbors)
{
if (!neighbor.containedin(traversed) && !neighbor.containedin(nextStack))
{
neighbor.depth = sol.depth + 1;
nextStack.Push(neighbor);
} //end if
} //end for each
} //end else if
} // end while
if (found == true)
break;
} // end for
return found;
}
}
}
Just wondering if I may be doing something wrong somewhere.
First-chance Exceptions
A first-chance exception indicates that something threw an exception - the debugger is reporting it because it gets to see all exceptions first. It then passes control back to the code that threw it - if that code doesn't handle the exception, the debugger will see it again as a second-chance exception and report it to you.
For more information on first- and second-chance exception handling, see this Microsoft support article, KB105675.
Where is it coming from?
Discovering which exact bit of code is doing this will require a little effort. Delete code bit by bit until the first-chance exception goes away - this will identify the code that is resulting in the exception.
As to whether you are doing something wrong, it is unlikely. The fact that your application didn't crash suggests that the code throwing the exception is ultimately handling it as it is some sort of expected situation (for example, a rudimentary way of detecting if some key exists in a dictionary would be to access that key and see if it threw an exception). However, you'll only know for sure once you've identified the source of the exception and investigated as to whether you're using the relevant calls correctly.
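As a hypothetical illustration of that last point, a handled exception still produces a first-chance message in the Output window even though the application carries on normally:
var settings = new Dictionary<string, int>();
try
{
    int value = settings["missing"];   // throws KeyNotFoundException
}
catch (KeyNotFoundException)
{
    // handled here, so the app never crashes, but the debugger still reports
    // "A first chance exception of type '...KeyNotFoundException' occurred"
}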
