Converting from C: fputc and fwrite in C#?

Question: In order to write a C# interface to libespeak, I need to convert the callback SynthCallback to C#.
See the C code below.
You might need this for reference:
https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
http://www-mmsp.ece.mcgill.ca/documents/audioformats/wave/wave.html
Basically,
espeak_Initialize
espeak_SetSynthCallback(SynthCallback);
espeak_SetParameter(espeakRATE, 510, 0);
espeak_Synth(".", 20, 0, POS_CHARACTER, 0, 0, NULL, NULL);
are DllImport-ed functions, and I already have them working asynchronously, without a file.
Now I want to get the synchronous version working with files, but I have a little problem:
The callback function is
static int SynthCallback(short *wav, int numsamples, espeak_EVENT *events)
First, I need to create a delegate so that I can pass this function to the C dll/so.
That isn't a problem, but if I turn short *wav into a System.IntPtr, how do I write the data to a file?
In other words: can somebody help me convert fwrite, fputc, and Write4Bytes into proper C#?
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <espeak/speak_lib.h>

// gcc -o mine speak.cpp -I/usr/include/espeak/ -lespeak

FILE *f_wavfile = NULL;

static int SynthCallback(short *wav, int numsamples, espeak_EVENT *events);

// Write 4 bytes to a file, least significant first
static void Write4Bytes(FILE *f, int value)
{
    int ix;
    for(ix=0; ix<4; ix++)
    {
        fputc(value & 0xff, f);
        value = value >> 8;
    }
}

int OpenWavFile(char *path, int rate)
{
    static unsigned char wave_hdr[44] = {
        'R','I','F','F',0x24,0xf0,0xff,0x7f,'W','A','V','E','f','m','t',' ',
        0x10,0,0,0,1,0,1,0, 9,0x3d,0,0,0x12,0x7a,0,0,
        2,0,0x10,0,'d','a','t','a', 0x00,0xf0,0xff,0x7f
    };

    if(path == NULL)
        return(2);
    if(path[0] == 0)
        return(0);

    if(strcmp(path,"stdout")==0)
        f_wavfile = stdout;
    else
        f_wavfile = fopen(path,"wb");

    if(f_wavfile == NULL)
    {
        fprintf(stderr,"Can't write to: '%s'\n",path);
        return(1);
    }

    fwrite(wave_hdr, 1, 24, f_wavfile);
    Write4Bytes(f_wavfile, rate);
    Write4Bytes(f_wavfile, rate * 2);
    fwrite(&wave_hdr[32], 1, 12, f_wavfile);
    return(0);
} // end of OpenWavFile

static void CloseWavFile()
{
    unsigned int pos;

    if((f_wavfile==NULL) || (f_wavfile == stdout))
        return;

    fflush(f_wavfile);
    pos = ftell(f_wavfile);

    fseek(f_wavfile, 4, SEEK_SET);
    Write4Bytes(f_wavfile, pos - 8);

    fseek(f_wavfile, 40, SEEK_SET);
    Write4Bytes(f_wavfile, pos - 44);

    fclose(f_wavfile);
} // end of CloseWavFile

int main()
{
    char buf[22050];
    int i = 0;
    memset(&buf, 0, sizeof(buf));

    // OpenWavFile((char*) "test.wav", 22050);
    int SampleRate = espeak_Initialize(AUDIO_OUTPUT_SYNCHRONOUS, 300, NULL, 0);
    OpenWavFile((char*) "test.wav", SampleRate);
    espeak_SetSynthCallback(SynthCallback);
    //espeak_SetParameter(espeakRATE, 510, 0);
    //espeak_SetParameter(espeakRANGE, 75, 0);

    for (i=0; i < 9; i++)
    {
        /*
        espeak_ERROR espeak_Synth(
            const void *text,
            size_t size,
            unsigned int position,
            espeak_POSITION_TYPE position_type,
            unsigned int end_position,
            unsigned int flags,
            unsigned int* unique_identifier,
            void* user_data);
        */
        //espeak_POSITION_TYPE.POS_CHARACTER
        espeak_Synth("test", 10, 0, POS_CHARACTER, 0, 0, NULL, NULL);
        fwrite(buf, 1, 5512, f_wavfile);
        espeak_Synth(".", 20, 0, POS_CHARACTER, 0, 0, NULL, NULL);
        fwrite(buf, 1, 22050, f_wavfile);
    }

    CloseWavFile();
}

static int SynthCallback(short *wav, int numsamples, espeak_EVENT *events)
{
    if (wav == NULL)
        return 0;
    if (numsamples > 0)
    {
        fwrite(wav, numsamples * 2, 1, f_wavfile);
    }
    return 0;
}

FileStream sink;

Int32 SynthCallback(IntPtr wav, Int32 numsamples, IntPtr events)
{
    // Copy the samples out of unmanaged memory as 16-bit values...
    var wavm = new Int16[numsamples];
    Marshal.Copy(wav, wavm, 0, numsamples);
    // ...and do something with wavm.

    // Or copy them as raw bytes and write them straight to the file:
    var wavbytes = new Byte[numsamples * 2];
    Marshal.Copy(wav, wavbytes, 0, numsamples * 2);
    sink.Write(wavbytes, 0, numsamples * 2);

    return 0; // the original C callback also returns 0 to continue synthesis
}
For writing 32-bit integers, you can either use BitConverter.GetBytes with FileStream.Write, or a BinaryWriter.

You could P/Invoke CreateFile/WriteFile to write the IntPtr directly.
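To make those pieces concrete, here is a minimal sketch of how the delegate, the callback, and the 4-byte writes could fit together. The library name, the calling convention, and the exact delegate shape are assumptions read off the C header above, not a tested binding:

using System;
using System.IO;
using System.Runtime.InteropServices;

class EspeakWav
{
    // Matches: int SynthCallback(short *wav, int numsamples, espeak_EVENT *events)
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    delegate int SynthCallbackDelegate(IntPtr wav, int numsamples, IntPtr events);

    // Library name assumed; on Windows this would be the espeak DLL instead.
    [DllImport("libespeak.so.1", CallingConvention = CallingConvention.Cdecl)]
    static extern void espeak_SetSynthCallback(SynthCallbackDelegate callback);

    static BinaryWriter wavWriter;

    // Keep a live reference so the GC cannot collect the delegate while
    // the native library still holds a function pointer to it.
    static SynthCallbackDelegate keepAlive;

    public static void Register(BinaryWriter writer)
    {
        wavWriter = writer;
        keepAlive = SynthCallback;
        espeak_SetSynthCallback(keepAlive);
    }

    static int SynthCallback(IntPtr wav, int numsamples, IntPtr events)
    {
        if (wav == IntPtr.Zero)
            return 0;
        // Equivalent of fwrite(wav, numsamples * 2, 1, f_wavfile):
        var bytes = new byte[numsamples * 2];
        Marshal.Copy(wav, bytes, 0, bytes.Length);
        wavWriter.Write(bytes);
        return 0; // 0 = continue synthesis
    }

    // Write4Bytes equivalent: BinaryWriter already writes an Int32
    // least significant byte first.
    static void Write4Bytes(BinaryWriter w, int value) => w.Write(value);
}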

Related

Destination array was not long enough. Check destIndex and length, and the array's lower bounds?

Destination array was not long enough. Check destIndex and length, and the array's lower bounds <- what is this error, and how can I fix it?
The error occurs after I added Array.Copy. I have no idea what's wrong.
static byte[] sendData = new byte[5];
static int sendCount = 0;

try
{
    Console.Write("->");
    string text = Console.ReadLine();
    foreach(string s in text.Split(' '))
    {
        if(null != s && "" != s)
        {
            sendData[sendCount++] = Convert.ToByte(s, 16);
        }
    }

    byte[] LRC = new byte[1];
    LRC = BitConverter.GetBytes((Int16)sendData[2] ^ sendData[3] ^ sendData[4]);

    byte[] hexData = new byte[sendData.Length + LRC.Length];
    Array.Copy(sendData, 0, hexData, 0, 5);             // <-- error occurs at this point
    Array.Copy(LRC, 0, hexData, hexData.Length + 1, 1); // <-- or at this point

    port.Write(hexData, 0, hexData.Length);

    Array.Clear(sendData, 0, sendData.Length);
    Array.Clear(LRC, 0, LRC.Length);
    Array.Clear(hexData, 0, hexData.Length);
    sendCount = 0;
}
catch(Exception e)
{
    Console.WriteLine(e.Message);
    Console.WriteLine("Check Data");
}
To answer the concrete question, here is the signature of the copy method:

public static void Copy(
    Array sourceArray, int sourceIndex,
    Array destinationArray, int destinationIndex,
    int length);

In your case:

Array.Copy(LRC, 0, hexData, hexData.Length + 1, 1);

So you are trying to copy bytes into hexData starting at index hexData.Length + 1, which is out of its boundaries.
I have no idea what you are trying to implement, but you should make sure that the destination index during the copy is at most hexData.Length - 1 (also taking the copy length into account).
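For illustration, here is one way both copies could be written so they stay in bounds. This is only a sketch of the apparent intent (append a single LRC byte after the five data bytes); treating the LRC as one byte is an assumption, since BitConverter.GetBytes actually returns a multi-byte array:

// sendData holds 5 data bytes; append one LRC byte after them.
byte lrc = (byte)(sendData[2] ^ sendData[3] ^ sendData[4]);

byte[] hexData = new byte[sendData.Length + 1];       // 6 bytes total
Array.Copy(sendData, 0, hexData, 0, sendData.Length); // fills indices 0..4
hexData[hexData.Length - 1] = lrc;                    // index 5, the last valid index

port.Write(hexData, 0, hexData.Length);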

C# endianness and structure

I'm trying to get a packet from a microcontroller into a C# program.
I defined the same structure in both places. I calculate the CRC32 of (sizeof(packet) - 4 bytes) and place it in mydata_t.crc32 ...
struct mydata_t
{
    public byte cmd;
    public UInt32 param;
    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 10)]
    public Char[] str_buf;
    public UInt32 crc32;
}

private byte[] getBytes(mydata_t str)
{
    int size = Marshal.SizeOf(str);
    byte[] arr = new byte[size];
    IntPtr ptr = Marshal.AllocHGlobal(size);
    Marshal.StructureToPtr(str, ptr, true);
    Marshal.Copy(ptr, arr, 0, size);
    Marshal.FreeHGlobal(ptr);
    return arr;
}

private mydata_t fromBytes(byte[] arr)
{
    mydata_t str = new mydata_t();
    str.str_buf = new char[10];
    int size = Marshal.SizeOf(str);
    IntPtr ptr = Marshal.AllocHGlobal(size);
    Marshal.Copy(arr, 0, ptr, size);
    str = (mydata_t)Marshal.PtrToStructure(ptr, str.GetType());
    Marshal.FreeHGlobal(ptr);
    return str;
}

public static UInt32 xcrc32(byte[] buf, int len)
{
    UInt32 crc = DefaultSeed;
    UInt32 counter = 0;
    while (len-- > 0)
    {
        crc = (crc << 8) ^ defaultTable[((crc >> 24) ^ buf[counter]) & 255];
        counter++;
    }
    return crc;
}
My CRC function takes bytes and a length, so when I receive data (in bytes) I convert it to the structure using fromBytes(byte[] arr).
The problem: when I convert the structure to a byte array, the calculated CRC is wrong. I believe it's because of the endianness of the data types in C#? How do I solve this issue?
Here is what I send from the microcontroller:
mydata_t datablockTX;
datablockTX.cmd = 2;
datablockTX.param = 0x98765432;
memcpy(datablockTX.str_buf,"hellohell",10);
datablockTX.crc32 = xcrc32((char*) &datablockTX,sizeof(datablockTX) - sizeof(uint32_t));
usb_write_buf((uint8_t*) &datablockTX,sizeof(datablockTX));
Here is what I received and printed:
Data Received:
CMD: 2
param: 2557891634
str: hellohell
crc: 658480750
and then the problem:
public bool ValidateCRC()
{
    bool myvalidate;
    UInt32 mycrc_val;
    byte[] mybytes = getBytes(myblock);
    mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, mybytes.Length - sizeof(UInt32));
    //mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, 1);
    myvalidate = (mycrc_val == myblock.crc32);
    Console.WriteLine("c#:" + mycrc_val + " - MCU:" + myblock.crc32 + " - bool:" + myvalidate);
    return myvalidate;
}
This is what it prints to the console:
c#:667986744 - SAM:658480750 - bool:False
I tried this on the MCU:
mydata_t datablockTX;
datablockTX.cmd = 2;
datablockTX.param = 0x98765432;
memcpy(datablockTX.str_buf,"hellohello",10);
//datablockTX.crc32 = xcrc32((char*) &datablockTX,sizeof(datablockTX) - sizeof(uint32_t));
datablockTX.crc32 = xcrc32((char*) &datablockTX, 5);
usb_write_buf((uint8_t*) &datablockTX,sizeof(datablockTX));
Here is what I received:
CMD: 2
param: 2557891634
str: hellohello
crc: 1993296691
In C#:
byte[] mybytes = getBytes(myblock);
//mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, mybytes.Length- sizeof(UInt32));
mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, 5);
myvalidate = (mycrc_val == myblock.crc32);
Console.WriteLine("c#:" + mycrc_val + " - MCU:" + myblock.crc32 + " - bool:" + myvalidate);
Console:
c#:146416248 - MCU:1993296691 - bool:False
Change

public static UInt32 xcrc32(byte[] buf, int len)
{
    UInt32 crc = DefaultSeed;

to

    UInt32 crc = 0xff1fff1f;

The DefaultSeed you are using is wrong. (If you want to know: I brute-forced it, trying all 4 billion possible seeds.) It works for both CRCs you gave.
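Applied to the function from the question, the corrected routine would look like this (defaultTable is the questioner's existing CRC lookup table, unchanged):

public static UInt32 xcrc32(byte[] buf, int len)
{
    // Seed recovered by brute force; it matches the MCU firmware's initial value.
    UInt32 crc = 0xff1fff1f;
    UInt32 counter = 0;
    while (len-- > 0)
    {
        crc = (crc << 8) ^ defaultTable[((crc >> 24) ^ buf[counter]) & 255];
        counter++;
    }
    return crc;
}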

AMD Gpu Memory dll

I'm writing a C++ DLL for use in a C# application.
The DLL will check the total GPU memory and the usage of the GPU memory.
I have created three methods. The first one initializes GLEW and other OpenGL stuff, the second reads the total memory of the GPU, and the last one reads the GPU usage.
The initialize and total-memory methods work, but with the last one I get problems. When I call the method it stops; when I debug it, I can set a breakpoint on the delete[] ids; line without any problem, but it never returns on the return available line (it does not get there). When I remove the delete[] ids line I get an error:
'Run-Time Check Failure #2 - Stack around the variable 'nCurAvailMemoryInKB' was corrupted.'. Am I doing something wrong in reading the usage of the GPU memory?
__declspec(dllexport) float getAvailableMemory()
{
    int available = -1;
    if (wglGetGPUIDsAMD && wglGetGPUInfoAMD)
    {
        UINT n = wglGetGPUIDsAMD(0, 0);
        UINT * ids = new UINT[n];
        wglGetGPUIDsAMD(n, ids);
        GLint nCurAvailMemoryInKB = 0;
        glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI,
                      &nCurAvailMemoryInKB);
        available = nCurAvailMemoryInKB;
        delete[] ids;
    }
    return available;
}
I created a test caller for the DLL in C#:
class Program
{
    [DllImport("AmdLib.dll")]
    public static extern bool init();

    [DllImport("AmdLib.dll")]
    public static extern int getTotalMemory();

    [DllImport("AmdLib.dll")]
    public static extern float getAvailableMemory();

    static void Main(string[] args)
    {
        init();
        Console.WriteLine("Total");
        Console.WriteLine(getTotalMemory());
        Console.WriteLine("Available");
        Console.WriteLine(getAvailableMemory());
    }
}
And the full C++ DLL source looks like:
#include <stdio.h>
#include <windows.h>
#include <GL/glew.h>
#include <GL/wglew.h>
#include <assert.h>
#include <vector>
#include <string>

using namespace std;

extern "C"
{
    static HGLRC ctx = NULL;

    __declspec(dllexport) bool init()
    {
        HWND hwnd = NULL;
        HINSTANCE hinstance = (HINSTANCE)GetModuleHandle(NULL);

        WNDCLASSA window_class;
        window_class.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC | CS_GLOBALCLASS;
        window_class.lpfnWndProc = DefWindowProc;
        window_class.cbClsExtra = 0;
        window_class.cbWndExtra = 0;
        window_class.hInstance = hinstance;
        window_class.hIcon = NULL;
        window_class.hCursor = LoadCursor(NULL, IDC_ARROW);
        window_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
        window_class.lpszMenuName = NULL;
        window_class.lpszClassName = "test_class";
        ATOM atom = RegisterClassA(&window_class);

        hwnd = CreateWindowA("test_class", "htest",
                             WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
                             1, 1, 1, 1, NULL, NULL, hinstance, NULL);
        if (hwnd == NULL) {
            DWORD err = GetLastError();
            return false;
        }

        HDC hDC = GetDC(hwnd);
        if (hDC == NULL) {
            return false;
        }

        PIXELFORMATDESCRIPTOR const pfd =
        {
            sizeof(PIXELFORMATDESCRIPTOR),
            1,
            PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_TYPE_RGBA,
            0,
            0, 0, 0, 0, 0, 0,
            0,
            0,
            0,
            0, 0, 0, 0,
            0,
            0,
            0,
            PFD_MAIN_PLANE,
            0,
            0, 0, 0
        };

        int pixel_format = ChoosePixelFormat(hDC, &pfd);
        SetPixelFormat(hDC, pixel_format, &pfd);

        ctx = wglCreateContext(hDC);
        if (ctx) {
            if (!wglMakeCurrent(hDC, ctx)) {
                return false;
            }
        }
        ReleaseDC(hwnd, hDC);

        GLenum glew = glewInit();
        return true;
    }

    static void check_gl_error()
    {
        GLenum error = glGetError();
        assert(error == GL_NO_ERROR);
    }

    __declspec(dllexport) int getTotalMemory()
    {
        if (wglGetGPUIDsAMD && wglGetGPUInfoAMD)
        {
            UINT n = wglGetGPUIDsAMD(0, 0);
            UINT * ids = new UINT[n];
            UINT total_mem_mb = 0;
            wglGetGPUIDsAMD(n, ids);
            wglGetGPUInfoAMD(ids[0], WGL_GPU_RAM_AMD, GL_UNSIGNED_INT,
                             sizeof(UINT), &total_mem_mb);
            delete[] ids;
            return total_mem_mb;
        }
        return -1;
    }

    __declspec(dllexport) float getAvailableMemory()
    {
        int available = -1;
        if (wglGetGPUIDsAMD && wglGetGPUInfoAMD)
        {
            UINT n = wglGetGPUIDsAMD(0, 0);
            UINT * ids = new UINT[n];
            wglGetGPUIDsAMD(n, ids);
            GLint nCurAvailMemoryInKB = 0;
            glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI,
                          &nCurAvailMemoryInKB);
            available = nCurAvailMemoryInKB;
            //delete[] ids;
        }
        return available;
    }
}
Since I don't have an ATI card to test with, off the top of my head I'd guess the first wglGetGPUIDsAMD call returns 0, you allocate a 0-length array (which works), and at the end you try to delete it (which throws). Somewhere in between you overwrite the memory around that pointer with data (thus corrupting the guards and making VS throw).
Looking at what you're actually doing with that array, and with the knowledge of how many GPUs you have: you never use either of them. You can literally delete both calls to wglGetGPUIDsAMD and the array allocation/deallocation, and just call glGetIntegerv.

P/Invoke from C to C# without knowing size of array

Right now in my code I have the structure declared like this, with the size fixed at 16, known at compile time.
struct CONSOLE_SCREEN_BUFFER_INFOEX
{
    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)]
    public int[] ColorTable;
}
but what I need is to be able to have this structure:
struct CONSOLE_SCREEN_BUFFER_INFOEX
{
    int arraySize;
    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 0)]
    public int[] ColorTable;
}
I would get arraySize from the C function's response, initialize the ColorTable array with the proper size, and put the result of the response into ColorTable.
Not sure if it's possible, just doing investigation right now, and any comments are very welcome.
You can do this easily enough with some manual marshalling using the Marshal class. For example:
[DllImport(#"MyLib.dll")]
private static extern void Foo(IntPtr structPtr);
private static IntPtr StructPtrFromColorTable(int[] colorTable)
{
int size = sizeof(int) + colorTable.Length*sizeof(int);
IntPtr structPtr = Marshal.AllocHGlobal(size);
Marshal.WriteInt32(structPtr, colorTable.Length);
Marshal.Copy(colorTable, 0, structPtr + sizeof(int), colorTable.Length);
return structPtr;
}
private static int[] ColorTableFromStructPtr(IntPtr structPtr)
{
int len = Marshal.ReadInt32(structPtr);
int[] result = new int[len];
Marshal.Copy(structPtr + sizeof(int), result, 0, len);
return result;
}
static void Main(string[] args)
{
int[] colorTable = new int[] { 1, 2, 3 };
IntPtr structPtr = StructPtrFromColorTable(colorTable);
try
{
Foo(structPtr);
colorTable = ColorTableFromStructPtr(structPtr);
}
finally
{
Marshal.FreeHGlobal(structPtr);
}
}

Converting byte arrays (from readfile) to string

So, I am using ReadFile from kernel32 to read a file. Here is my code for reading files with the help of SetFilePointer and ReadFile.
public long ReadFileMe(IntPtr filehandle, int startpos, int length, byte[] outdata)
{
    IntPtr filea = IntPtr.Zero;
    long ntruelen = GetFileSize(filehandle, filea);
    int nRequestStart;
    uint nRequestLen;
    uint nApproxLength;
    int a = 0;

    if (ntruelen <= -1)
    {
        return -1;
    }
    else if (ntruelen == 0)
    {
        return -2;
    }

    if (startpos > ntruelen)
    {
        return -3;
    }
    else if (length <= 0)
    {
        return -5;
    }
    else if (length > ntruelen)
    {
        return -6;
    }
    else
    {
        nRequestStart = startpos;
        nRequestLen = (uint)length;
        outdata = new byte[nRequestLen - 1];
        SetFilePointer(filehandle, (nRequestStart - 1), ref a, 0);
        ReadFile(filehandle, outdata, nRequestLen, out nApproxLength, IntPtr.Zero);
        return nApproxLength; // just for telling how many bytes were read in this function
    }
}
When I used this function, it worked (for another purpose), so this code is tested and works.
But the main problem is that I now need to convert outdata (the parameter the function puts the bytes into) to a string.
I tried using Encoding.Unicode and so on (all the UTF encodings), but it doesn't work.
Try the Encoding.GetString(Byte[], Int32, Int32) method; it decodes a sequence of bytes from the specified byte array into a string.
Hmm... Encoding.<name_of_encoding>.GetString should work.
Try something like this:
var convertedBuffer = Encoding.Convert(
    Encoding.GetEncoding( /* name of encoding */ ), Encoding.UTF8, outdata);
var str = Encoding.UTF8.GetString(convertedBuffer);
UPDATE:
And what about this?

using (var streamReader = new StreamReader(@"C:\test.txt", true))
{
    var currentEncoding = streamReader.CurrentEncoding.EncodingName;
    Console.WriteLine(currentEncoding);
}
You might need to add the out modifier to the outdata parameter:
Passing Arrays Using ref and out
public long ReadFileMe(IntPtr filehandle, int startpos, int length, out byte[] outdata)
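Putting the suggestions together, a minimal sketch of the call site could look like the following. The out modifier lets the buffer allocated inside ReadFileMe reach the caller, and Encoding.Unicode here is only an assumption; use whatever encoding the file was actually written with:

byte[] outdata;
long bytesRead = ReadFileMe(filehandle, 1, length, out outdata);
if (bytesRead > 0)
{
    // Decode only the bytes that were actually read.
    string text = Encoding.Unicode.GetString(outdata, 0, (int)bytesRead);
    Console.WriteLine(text);
}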
