I'm trying to send a packet from a microcontroller to a C# program.
I defined the same structure in both places. I calculate the CRC32 over (sizeof(packet) - 4) bytes and place it in mydata_t.crc32 ...
struct mydata_t
{
    public byte cmd;
    public UInt32 param;
    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 10)]
    public Char[] str_buf;
    public UInt32 crc32;
}
private byte[] getBytes(mydata_t str)
{
    int size = Marshal.SizeOf(str);
    byte[] arr = new byte[size];
    IntPtr ptr = Marshal.AllocHGlobal(size);
    Marshal.StructureToPtr(str, ptr, true);
    Marshal.Copy(ptr, arr, 0, size);
    Marshal.FreeHGlobal(ptr);
    return arr;
}
private mydata_t fromBytes(byte[] arr)
{
    mydata_t str = new mydata_t();
    str.str_buf = new char[10];
    int size = Marshal.SizeOf(str);
    IntPtr ptr = Marshal.AllocHGlobal(size);
    Marshal.Copy(arr, 0, ptr, size);
    str = (mydata_t)Marshal.PtrToStructure(ptr, str.GetType());
    Marshal.FreeHGlobal(ptr);
    return str;
}
public static UInt32 xcrc32(byte[] buf, int len)
{
    UInt32 crc = DefaultSeed;
    UInt32 counter = 0;
    while (len-- > 0)
    {
        crc = (crc << 8) ^ defaultTable[((crc >> 24) ^ buf[counter]) & 255];
        counter++;
    }
    return crc;
}
My CRC function takes a byte array and a length, so when I receive the data as bytes I convert it back into the structure with fromBytes(byte[] arr).
The problem:
When I convert the structure to a byte array, the calculated CRC is wrong. I believe it's because of the endianness of the data types in C#? How do I solve this issue?
Here is what I send from the microcontroller:
mydata_t datablockTX;
datablockTX.cmd = 2;
datablockTX.param = 0x98765432;
memcpy(datablockTX.str_buf,"hellohell",10);
datablockTX.crc32 = xcrc32((char*) &datablockTX,sizeof(datablockTX) - sizeof(uint32_t));
usb_write_buf((uint8_t*) &datablockTX,sizeof(datablockTX));
Here is what I received and printed:
Data Received:
CMD: 2
param: 2557891634
str: hellohell
crc: 658480750
And then the problem:

public bool ValidateCRC()
{
    bool myvalidate;
    UInt32 mycrc_val;
    byte[] mybytes = getBytes(myblock);
    mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, mybytes.Length - sizeof(UInt32));
    //mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, 1);
    myvalidate = (mycrc_val == myblock.crc32);
    Console.WriteLine("c#:" + mycrc_val + " - MCU:" + myblock.crc32 + " - bool:" + myvalidate);
    return myvalidate;
}
This is what it prints to the console:
c#:667986744 - SAM:658480750 - bool:False
I then tried this on the MCU:
mydata_t datablockTX;
datablockTX.cmd = 2;
datablockTX.param = 0x98765432;
memcpy(datablockTX.str_buf,"hellohello",10);
//datablockTX.crc32 = xcrc32((char*) &datablockTX,sizeof(datablockTX) - sizeof(uint32_t));
datablockTX.crc32 = xcrc32((char*) &datablockTX, 5);
usb_write_buf((uint8_t*) &datablockTX,sizeof(datablockTX));
Here is what I received:
CMD: 2
param: 2557891634
str: hellohello
crc: 1993296691
In C#:
byte[] mybytes = getBytes(myblock);
//mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, mybytes.Length- sizeof(UInt32));
mycrc_val = Crc32.Crc32Algorithm.xcrc32(mybytes, 5);
myvalidate = (mycrc_val == myblock.crc32);
Console.WriteLine("c#:" + mycrc_val + " - MCU:" + myblock.crc32 + " - bool:" + myvalidate);
Console:
c#:146416248 - MCU:1993296691 - bool:False
Change

public static UInt32 xcrc32(byte[] buf, int len)
{
    UInt32 crc = DefaultSeed;

to

    UInt32 crc = 0xff1fff1f;

The DefaultSeed you are using is wrong. (If you want to know how I found it: I brute-forced it and tried all 4 billion possible seeds.) It works for both CRCs you gave.
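For reference, here is a rough sketch of that kind of brute force (my own illustration, not the exact code used for this answer). It assumes an xcrc32 overload that takes the seed as a parameter, plus one packet payload and its CRC captured from the MCU:

public static UInt32 xcrc32(byte[] buf, int len, UInt32 seed)
{
    UInt32 crc = seed;
    for (int i = 0; i < len; i++)
        crc = (crc << 8) ^ defaultTable[((crc >> 24) ^ buf[i]) & 255];
    return crc;
}

// Try every 32-bit seed until the computed CRC matches the one the MCU sent.
public static UInt32? FindSeed(byte[] payload, int len, UInt32 expectedCrc)
{
    for (UInt32 seed = 0; ; seed++)
    {
        if (xcrc32(payload, len, seed) == expectedCrc)
            return seed;
        if (seed == UInt32.MaxValue)
            return null; // no seed reproduces the expected CRC
    }
}

Fed the packet bytes and CRC values from the question, a loop like this eventually lands on the 0xff1fff1f seed above.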
So basically, here is how I do it in C++:

enum ServerOpcode : uint16_t {
    SERVER_AUTH_CONNECTION_RESPONSE = 0x001,
    SERVER_LOGIN_REQUEST_RESPONSE = 0x002,
    SERVER_NUM_MSG_TYPES = 0x003,
};
uint8_t* Vibranium::Packet::PreparePacket(ServerOpcode& serverOpcode, flatbuffers::FlatBufferBuilder& builder) {
    size_t size = builder.GetSize();
    uint8_t* buf = builder.GetBufferPointer();
    uint8_t* actualBuffer = new uint8_t[size + 2];
    actualBuffer[1] = (serverOpcode >> 8);
    actualBuffer[0] = (serverOpcode & 0xFF);
    memcpy(actualBuffer + 2, buf, size);
    return actualBuffer;
}
I know that uint16_t is exactly 2 bytes, and that is why I add +2.
Can someone give an example in C# of how I can cast the ByteBuffer to byte[] and then prefix it with:
public enum ServerOpcode : ushort
{
    SERVER_AUTH_CONNECTION_RESPONSE = 0x001,
    SERVER_LOGIN_REQUEST_RESPONSE = 0x002,
    SERVER_NUM_MSG_TYPES = 0x003,
}
In C# I found out that the equivalent of uint16_t is ushort. So my question is: how can I convert the ByteBuffer into a byte[] and prefix it with a ushort?
Can anyone post an answer showing an equivalent of PreparePacket in C#?
P.S.
Note that I am familiar with the file_identifier, but I would like to do this manually. I hope someone can provide an example in C#.
Following is the solution:
public static Byte[] PrependServerOpcode(ByteBuffer byteBuffer, ServerOpcode code)
{
    var originalArray = byteBuffer.ToArray(0, byteBuffer.Length);
    byte[] buffer = new byte[originalArray.Length + 2];

    // Opcode is written high byte first here; note that the C++ PreparePacket above
    // writes the low byte at index 0, so swap these two lines if the wire formats must match.
    buffer[0] = (byte)((ushort)code / 0x0100);
    buffer[1] = (byte)code;

    Array.Copy(originalArray, 0, buffer, 2, originalArray.Length);
    return buffer;
}
public enum ServerOpcode : ushort
{
    SERVER_AUTH_CONNECTION_RESPONSE = 0x001,
    SERVER_LOGIN_REQUEST_RESPONSE = 0x002,
    SERVER_NUM_MSG_TYPES = 0x003
}
Or, alternatively:

public static ByteBuffer PrependServerOpcode(ByteBuffer byteBuffer, ServerOpcode code)
{
    var originalArray = byteBuffer.ToArray(0, byteBuffer.Length);
    byte[] buffer = new byte[originalArray.Length + 2];
    buffer[0] = (byte)((ushort)code / 0x0100);
    buffer[1] = (byte)code;
    Array.Copy(originalArray, 0, buffer, 2, originalArray.Length);
    return new ByteBuffer(buffer);
}
Usage:

static void Main(string[] args)
{
    var bb = new ByteBuffer(new byte[] { 0x01 });
    var result = PrependServerOpcode(bb, ServerOpcode.SERVER_NUM_MSG_TYPES);
}
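As a side note (not part of the original answer), the manual shift/divide can also be replaced with BitConverter. The sketch below keeps the same ByteBuffer and ServerOpcode types; note that BitConverter.GetBytes((ushort)code) emits the low byte first on little-endian .NET targets, which matches the byte order of the C++ PreparePacket rather than the order used in the solution above:

public static byte[] PrependServerOpcodeLE(ByteBuffer byteBuffer, ServerOpcode code)
{
    var payload = byteBuffer.ToArray(0, byteBuffer.Length);
    byte[] prefix = BitConverter.GetBytes((ushort)code); // low byte first on little-endian machines

    byte[] buffer = new byte[payload.Length + prefix.Length];
    Buffer.BlockCopy(prefix, 0, buffer, 0, prefix.Length);
    Buffer.BlockCopy(payload, 0, buffer, prefix.Length, payload.Length);
    return buffer;
}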
I'm trying to read some values from process memory with multiple pointers/offsets in my console .NET app, but I'm getting the wrong last 3 values. I don't know what I'm doing wrong; I've been checking the code and trying different approaches for hours, but I still get the same results.
I'm reading these values from a 64-bit process.
Here's a preview from my app and Cheat Engine at the same time (Cheat Engine contains the correct values).
Here is my code for reading these pointers:
Memory.OpenProcess(Data.Core.ProcessID);
Data.Core.GameBase = (uint)Memory.BaseAddress("Game.dll");

uint Num0 = Memory.ReadInt((int)Data.Core.GameBase + (int)Data.Core.Offsets.Animation);
uint Num1 = Memory.ReadInt((int)Num0 + (int)Data.Core.Offsets.P1);
uint Num2 = Memory.ReadInt((int)Num1 + (int)Data.Core.Offsets.P2);
uint Num3 = Memory.ReadInt((int)Num2 + (int)Data.Core.Offsets.P3);
uint Num4 = Memory.ReadInt((int)Num3 + (int)Data.Core.Offsets.P4);
uint Num5 = Memory.ReadInt((int)Num4 + (int)Data.Core.Offsets.P5);
ReadInt function:

public uint ReadInt(int iMemoryAddress)
{
    byte[] bBuffer = new byte[4];
    IntPtr lpNumberOfBytesRead;
    if (Mapi.ReadProcessMemory(this._hReadProcess, (IntPtr)iMemoryAddress, bBuffer, 4U, out lpNumberOfBytesRead) == 0)
        return 0;
    return BitConverter.ToUInt32(bBuffer, 0);
}
Also:

public uint ReadInt(int Address)
{
    OpenProcessMemory();
    int BytesRead = 0;
    byte[] Data = new byte[4];
    ReadProcessMemory((int)PHandle, Address, Data, 4, ref BytesRead);
    CloseProcessMemory();
    return BitConverter.ToUInt32(Data, 0);
}
Offsets enum:

public enum Offsets : uint
{
    Animation = 0x1494198,
    P1 = 0x68,
    P2 = 0x70,
    P3 = 0x28,
    P4 = 0x378,
    P5 = 0x522,
}
Win API:

[DllImport("kernel32.dll")]
public static extern int ReadProcessMemory(IntPtr hProcess, IntPtr lpBaseAddress, [In, Out] byte[] bBuffer, uint size, out IntPtr lpNumberOfBytesRead);
I've tried adding the pointers and offsets using IntPtr / uint / int / Int32 for each pointer + offset, but I still get the same weird values at the end.
I don't think I can get much further on my own.
If the target process is x64, then you need to compile for x64 as well, and you should use IntPtr for all pointers, offsets and addresses to ensure they are wide enough to hold 64-bit addresses.
For walking pointer chains you should use this function, which dereferences each pointer and then adds the offset for you:
public static IntPtr FindDMAAddy(IntPtr hProc, IntPtr ptr, int[] offsets)
{
    var buffer = new byte[IntPtr.Size];
    foreach (int i in offsets)
    {
        // Read the pointer stored at the current address, then add the next offset.
        ReadProcessMemory(hProc, ptr, buffer, buffer.Length, out var read);
        ptr = (IntPtr.Size == 4)
            ? IntPtr.Add(new IntPtr(BitConverter.ToInt32(buffer, 0)), i)
            : IntPtr.Add(new IntPtr(BitConverter.ToInt64(buffer, 0)), i);
    }
    return ptr;
}
var ammoAddr = FindDMAAddy(hProc, (IntPtr)(modBase + 0x10f4f4), new int[] { 0x374, 0x14, 0 });
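Once the chain is resolved, one more read at the final address gives the actual value. A small sketch, assuming the same ReadProcessMemory P/Invoke that FindDMAAddy uses and a 4-byte value at the target address:

var valueBuffer = new byte[4];
ReadProcessMemory(hProc, ammoAddr, valueBuffer, valueBuffer.Length, out var bytesRead);
uint ammo = BitConverter.ToUInt32(valueBuffer, 0); // interpret the 4 bytes as an unsigned int
Console.WriteLine("Ammo: " + ammo);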
Given these integers:
public uint ServerSequenceNumber;
public uint Reserved1;
public uint Reserved2;
public byte Reserved3;
public byte TotalPlayers;
What's the best way to create a byte[] from them? If all their values were 1, the resulting array would be:
00000000000000000000000000000001 00000000000000000000000000000001 00000000000000000000000000000001 00000001 00000001
This should do what you're looking for. BitConverter returns a byte array in the endianness of the processor being used; x86 processors are little-endian, which places the least significant byte first.
int value;
byte[] bytes = BitConverter.GetBytes(value);
Array.Reverse(bytes);
byte[] result = bytes;
If you don't know which processor the app is going to run on, I suggest using:
int value;
byte[] bytes = BitConverter.GetBytes(value);
if (BitConverter.IsLittleEndian)
{
    Array.Reverse(bytes);
}
byte[] result = bytes;
How's this?
byte[] bytes = new byte[14];
int i = 0;
foreach (uint num in new uint[] { ServerSequenceNumber, Reserved1, Reserved2 })
{
    bytes[i] = (byte)(num >> 24);
    bytes[i + 1] = (byte)(num >> 16);
    bytes[i + 2] = (byte)(num >> 8);
    bytes[i + 3] = (byte)num;
    i += 4;
}
bytes[12] = Reserved3;
bytes[13] = TotalPlayers;
Expanding on @Robert's answer, I created a simple class that makes things neater when you're doing lots of concatenations:
class ByteJoiner
{
    private int i;

    public byte[] Bytes { get; private set; }

    public ByteJoiner(int totalBytes)
    {
        i = 0;
        Bytes = new byte[totalBytes];
    }

    public void Add(byte input)
    {
        // BitConverter.GetBytes has no byte overload (it would bind to the short
        // overload and append two bytes), so copy the single byte directly.
        Add(new[] { input });
    }

    public void Add(uint input)
    {
        Add(BitConverter.GetBytes(input));
    }

    public void Add(ushort input)
    {
        Add(BitConverter.GetBytes(input));
    }

    public void Add(byte[] input)
    {
        System.Buffer.BlockCopy(input, 0, Bytes, i, input.Length);
        i += input.Length;
    }
}
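Usage, applied to the fields from the question (a sketch; note that the uint and ushort overloads go through BitConverter.GetBytes, so on a little-endian machine the multi-byte fields are written least-significant byte first, not in the big-endian layout shown in the question):

var joiner = new ByteJoiner(14);
joiner.Add(ServerSequenceNumber); // 4 bytes
joiner.Add(Reserved1);            // 4 bytes
joiner.Add(Reserved2);            // 4 bytes
joiner.Add(Reserved3);            // 1 byte
joiner.Add(TotalPlayers);         // 1 byte
byte[] result = joiner.Bytes;     // 14 bytes total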
I have the 4 bytes that represent an integer stored in 2 separate byte arrays. I would like to convert these into an Int32 WITHOUT copying them to a third byte array and reading that with a MemoryStream.
The reason the data is split across two byte arrays is that this is a simplified example of my issue, which involves huge amounts of data that cannot fit into a single byte array.
Is there any way to achieve this? I do not wish to concatenate the two byte arrays into a third because of the performance implications, which are critical to me.
Moon
You can use a struct layout like this:

[StructLayout(LayoutKind.Explicit, Size = 4)]
struct UnionInt32Value
{
    [FieldOffset(0)] public byte byte1;
    [FieldOffset(1)] public byte byte2;
    [FieldOffset(2)] public byte byte3;
    [FieldOffset(3)] public byte byte4;
    [FieldOffset(0)] public Int32 iVal;
}
Assign your bytes in the correct order, then read your Int32 from iVal.
EDIT: Sample code
using System;
using System.Runtime.InteropServices;

namespace Test
{
    class Program
    {
        [StructLayout(LayoutKind.Explicit, Size = 4)]
        struct UnionInt32Value
        {
            [FieldOffset(0)] public byte byte1;
            [FieldOffset(1)] public byte byte2;
            [FieldOffset(2)] public byte byte3;
            [FieldOffset(3)] public byte byte4;
            [FieldOffset(0)] public Int32 iVal;
        }

        public static void Main(string[] args)
        {
            UnionInt32Value v = new UnionInt32Value();
            v.byte1 = 1;
            v.byte2 = 0;
            v.byte3 = 0;
            v.byte4 = 0;
            Console.WriteLine("this is one " + v.iVal);

            v.byte1 = 0xff;
            v.byte2 = 0xff;
            v.byte3 = 0xff;
            v.byte4 = 0xff;
            Console.WriteLine("this is minus one " + v.iVal);

            Console.Write("Press any key to continue . . . ");
            Console.ReadKey(true);
        }
    }
}
Something like this?

int x = (array1[index] << 16) + array2[index];

Of course, you didn't specify a language, but that's the gist of it: combine the high half from one array with the low half from the other.
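Applied literally to the question's two byte arrays, the same idea looks like this (a sketch that assumes the first two bytes of the integer sit at the end of array1 and the last two at the start of array2; adjust the indices and byte order to match how the data is actually laid out):

int value = (array1[array1.Length - 2] << 24)
          | (array1[array1.Length - 1] << 16)
          | (array2[0] << 8)
          |  array2[1];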
The BitConverter class is intended for this:
byte[] parts = { byte1, byte2, byte3, byte4 };
int value = BitConverter.ToInt32(parts, 0);
You can use BitConverter twice, like:
byte[] bytes0 = new byte[] { 255, 255 };
byte[] bytes1 = new byte[] { 0, 0 };
int res = BitConverter.ToInt16(bytes0, 0) << 16;
res |= BitConverter.ToUInt16(bytes1, 0);
Which yields -65536 (0b11111111 11111111 00000000 00000000)
If your integer parts aren't at position 0 in the arrays, just change the offsets passed to ToInt16/ToUInt16.
A little helper method:
public static class BitConverterExt
{
    public static int ToInt32(byte[] arr0, int index0, byte[] arr1, int index1)
    {
        int partRes = BitConverter.ToInt16(arr1, index1) << 16;
        return partRes | BitConverter.ToUInt16(arr0, index0);
    }
}
Usage:
byte[] bytes0 = new byte[] { 0x0, 0xA };
byte[] bytes1 = new byte[] { 0x64, 0xFF };
int res = BitConverterExt.ToInt32(bytes0, 0, bytes1, 0);
//Res -10221056 (0xFF640A00)
If I understand correctly, you are having trouble reading across the boundary of the two arrays. If so, this routine will read an integer anywhere in the two arrays, even if it spans both of them.
int ReadInteger(byte[] array1, byte[] array2, int offset)
{
    if (offset < 0 || (offset + 4) > (array1.Length + array2.Length))
        throw new ArgumentOutOfRangeException();

    if (offset <= (array1.Length - 4))
        return BitConverter.ToInt32(array1, offset);
    else if (offset >= array1.Length)
        return BitConverter.ToInt32(array2, offset - array1.Length);
    else
    {
        var buffer = new byte[4];
        var numFirst = array1.Length - offset;
        Array.Copy(array1, offset, buffer, 0, numFirst);
        Array.Copy(array2, 0, buffer, numFirst, 4 - numFirst);
        return BitConverter.ToInt32(buffer, 0);
    }
}
Note: depending on how your integers are stored, you might want to change the order in which bytes are copied.
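For example (a sketch, not part of the routine above): if the integers were stored big-endian, the four bytes could be combined with shifts instead of BitConverter, so the result does not depend on the host's endianness:

static int ToInt32BigEndian(byte b0, byte b1, byte b2, byte b3)
{
    // b0 is the most significant byte in the stored (big-endian) order.
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3;
}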
Below is a code sample I got from an online resource. It is supposed to work on the full framework, but when I build it for a C# smart device it throws an out-of-memory exception. Does anybody know how I can fix it for the Compact Framework? The out-of-memory exception occurs on the second call to VerQueryValue, which is the last one.
Thanks.
[DllImport("coredll.dll")]
public static extern bool VerQueryValue(byte[] buffer, string subblock, out IntPtr blockbuffer, out uint len);
[DllImport("coredll.dll")]
public static extern bool VerQueryValue(byte[] pBlock, string pSubBlock, out string pValue, out uint len);
//
private static void GetAssemblyVersion()
{
    string filename = @"\Windows\MyLibrary.dll";
    if (File.Exists(filename))
    {
        try
        {
            int handle = 0;
            Int32 size = 0;
            size = GetFileVersionInfoSize(filename, out handle);
            if (size > 0)
            {
                bool retValue;
                byte[] buffer = new byte[size];
                retValue = GetFileVersionInfo(filename, handle, size, buffer);
                if (retValue == true)
                {
                    bool success = false;
                    IntPtr blockbuffer = IntPtr.Zero;
                    uint len = 0;
                    //success = VerQueryValue(buffer, "\\", out blockbuffer, out len);
                    success = VerQueryValue(buffer, @"\VarFileInfo\Translation", out blockbuffer, out len);
                    if (success)
                    {
                        int p = (int)blockbuffer;
                        // Reads a 16-bit signed integer from unmanaged memory
                        int j = Marshal.ReadInt16((IntPtr)p);
                        p += 2;
                        // Reads a 16-bit signed integer from unmanaged memory
                        int k = Marshal.ReadInt16((IntPtr)p);
                        string sb = string.Format("{0:X4}{1:X4}", j, k);
                        string spv = @"\StringFileInfo\" + sb + @"\ProductVersion";
                        string versionInfo;
                        VerQueryValue(buffer, spv, out versionInfo, out len);
                    }
                }
            }
        }
        catch (Exception err)
        {
            string error = err.Message;
        }
    }
}
After adding these two statements:
Int32 dwVerMinor = j & 0xffff;
Int32 dwVerBuild = k & 0xffff;
it's able to retrieve the DLL version.
Here's an implementation:
using DWORD = System.UInt32;

public static class NativeFile
{
    public struct NativeFileInfo
    {
        public Version Version;
        public NameValueCollection StringTable;
    }

    public unsafe static NativeFileInfo GetFileInfo(string path)
    {
        if (!File.Exists(path))
        {
            throw new FileNotFoundException();
        }

        IntPtr handle;
        var size = GetFileVersionInfoSize(path, out handle);
        var buffer = Marshal.AllocHGlobal(size);

        try
        {
            if (!GetFileVersionInfo(path, handle, size, buffer))
            {
                throw new Win32Exception(Marshal.GetLastWin32Error());
            }

            IntPtr pVersion;
            int versionLength;
            VerQueryValue(buffer, "\\", out pVersion, out versionLength);

            var versionInfo = (VS_FIXEDFILEINFO)Marshal.PtrToStructure(pVersion, typeof(VS_FIXEDFILEINFO));
            var version = new Version((int)versionInfo.dwFileVersionMS >> 16,
                                      (int)versionInfo.dwFileVersionMS & 0xFFFF,
                                      (int)versionInfo.dwFileVersionLS >> 16,
                                      (int)versionInfo.dwFileVersionLS & 0xFFFF);

            // move to the string table and parse
            var pStringTable = ((byte*)pVersion.ToPointer()) + versionLength;
            var strings = ParseStringTable(pStringTable, size - versionLength);

            return new NativeFileInfo
            {
                Version = version,
                StringTable = strings
            };
        }
        finally
        {
            Marshal.FreeHGlobal(buffer);
        }
    }
    private unsafe static NameValueCollection ParseStringTable(byte* pStringTable, int length)
    {
        NameValueCollection nvc = new NameValueCollection();

        byte* p = pStringTable;
        short stringFileInfoLength = (short)*p;
        byte* end = pStringTable + length;
        p += (2 + 2 + 2); // length + valuelength + type

        // verify key
        var key = Marshal.PtrToStringUni(new IntPtr(p), 14);
        if (key != "StringFileInfo") throw new ArgumentException();

        // move past the key to the first string table
        p += 30;
        short stringTableLength = (short)*p;
        p += (2 + 2 + 2); // length + valuelength + type

        // get locale info
        key = Marshal.PtrToStringUni(new IntPtr(p), 8);

        // move to the first string
        p += 18;

        while (p < end)
        {
            short stringLength = (short)*p;
            p += 2;
            short valueChars = (short)*p;
            p += 2;
            short type = (short)*p;
            p += 2;

            if (stringLength == 0) break;

            if ((valueChars == 0) || (type != 1))
            {
                p += stringLength;
                continue;
            }

            var keyLength = stringLength - (valueChars * 2) - 6;
            key = Marshal.PtrToStringUni(new IntPtr(p), keyLength / 2).TrimEnd('\0');
            p += keyLength;

            var value = Marshal.PtrToStringUni(new IntPtr(p), valueChars).TrimEnd('\0');
            p += valueChars * 2;

            if ((int)p % 4 != 0) p += 2;

            nvc.Add(key, value);
        }

        return nvc;
    }
    private const string COREDLL = "coredll.dll";

    [DllImport(COREDLL, SetLastError = true)]
    private static extern int GetFileVersionInfoSize(string lptstrFilename, out IntPtr lpdwHandle);

    [DllImport(COREDLL, SetLastError = true)]
    private static extern bool GetFileVersionInfo(string lptstrFilename, IntPtr dwHandle, int dwLen, IntPtr lpData);

    [DllImport(COREDLL, SetLastError = true)]
    private static extern bool VerQueryValue(IntPtr pBlock, string lpSubBlock, out IntPtr lplpBuffer, out int puLen);

    [StructLayout(LayoutKind.Sequential)]
    private struct VS_FIXEDFILEINFO
    {
        public DWORD dwSignature;
        public DWORD dwStrucVersion;
        public DWORD dwFileVersionMS;
        public DWORD dwFileVersionLS;
        public DWORD dwProductVersionMS;
        public DWORD dwProductVersionLS;
        public DWORD dwFileFlagsMask;
        public DWORD dwFileFlags;
        public FileOS dwFileOS;
        public FileType dwFileType;
        public DWORD dwFileSubtype;
        public DWORD dwFileDateMS;
        public DWORD dwFileDateLS;
    };

    public enum FileOS : uint
    {
        Unknown = 0x00000000,
        DOS = 0x00010000,
        OS2_16 = 0x00020000,
        OS2_32 = 0x00030000,
        NT = 0x00040000,
        WindowsCE = 0x00050000,
    }

    public enum FileType : uint
    {
        Unknown = 0x00,
        Application = 0x01,
        DLL = 0x02,
        Driver = 0x03,
        Font = 0x04,
        VXD = 0x05,
        StaticLib = 0x07
    }
}
And an example of usage:
class Program
{
    static void Main(string[] args)
    {
        string target = @"\FlashFX Disk\ARMv4i\conmanclient2.exe";
        var version = NativeFile.GetFileInfo(target);

        Console.WriteLine(string.Format("File: {0}", Path.GetFileName(target)));
        Console.WriteLine(string.Format("Version: {0}", version.Version.ToString(4)));

        foreach (var key in version.StringTable.AllKeys)
        {
            Console.WriteLine(string.Format("{0}: {1}", key, version.StringTable[key]));
        }

        Console.ReadLine();
    }
}