C++ & C# Call CryptEncrypt and CryptDecrypt

Hello, I am trying to encrypt and decrypt between C++ and C#. I have made a DLL project using CryptEncrypt from wincrypt. Here is the code I used:
C++
#include <Windows.h>
#include <stdio.h>
extern "C" __declspec(dllexport)
BOOL EncryptData(char* szData, char* szPassword, char* szErroror, BYTE* pData, BYTE* pDataLen, BOOL Encrypt)
{
    HANDLE hSourceFile = INVALID_HANDLE_VALUE;
    HANDLE hDestinationFile = INVALID_HANDLE_VALUE;
    HCRYPTPROV hProv = NULL;
    HCRYPTKEY hKey = NULL;
    HCRYPTKEY hXchgKey = NULL;
    HCRYPTHASH hHash = NULL;
    PBYTE pbKeyBlob = NULL;
    DWORD dwKeyBlobLen;
    PBYTE pbBuffer = NULL;
    DWORD dwBlockLen = 0;
    DWORD dwBufferLen = 0;
    DWORD dwCount = 0;
    bool bRet = true;
    int len = strlen(szData);
    if (!CryptAcquireContext(&hProv, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, 0))
    {
        if (!CryptAcquireContext(&hProv, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, CRYPT_NEWKEYSET))
            return false;
    }
    // Create a hash object.
    if (!CryptCreateHash(hProv, CALG_MD5, 0, 0, &hHash))
        return false;
    // Hash in the password data.
    if (!CryptHashData(hHash, (BYTE*)szPassword, strlen(szPassword), 0))
        return false;
    // Derive a session key from the hash object.
    if (!CryptDeriveKey(hProv, CALG_RC4, hHash, 0x00800000, &hKey))
        return false;
    dwBlockLen = 1000 - 1000 % 8;
    // Allocate memory.
    if ((pbBuffer = (BYTE*)malloc(dwBufferLen)) == NULL)
        return false;
    if (Encrypt)
    {
        if (!CryptEncrypt(hKey, 0, false, 0, pbBuffer, &dwCount, dwBufferLen))
        {
            DWORD dwError = GetLastError();
            strcpy(szErroror, "CryptEncrypt Failed Error ");
            char szError[10];
            memset(szError, 0, 10);
            sprintf(szError, "%d", dwError);
            strcat(szErroror, szError);
            bRet = false;
        }
    }
    else
    {
        if (!CryptDecrypt(hKey, 0, false, 0, pbBuffer, &dwCount))
        {
            DWORD dwError = GetLastError();
            strcpy(szErroror, "CryptDecrypt Failed Error ");
            char szError[10];
            memset(szError, 0, 10);
            sprintf(szError, "%d", dwError);
            strcat(szErroror, szError);
            bRet = false;
        }
    }
    char szDataLen[16];
    memset(szDataLen, 0, 16);
    sprintf(szDataLen, "%d", dwCount);
    memcpy(pDataLen, szDataLen, 16);
    BYTE* pMyData = (BYTE*)malloc(len);
    memset(pMyData, 0, len);
    memcpy(pData, pbBuffer, len);
    // Free memory.
    if (pbKeyBlob) free(pbKeyBlob);
    if (pbBuffer) free(pbBuffer);
    if (pMyData) free(pMyData);
    // Destroy session key.
    if (hKey) CryptDestroyKey(hKey);
    // Release key exchange key handle.
    if (hXchgKey) CryptDestroyKey(hXchgKey);
    // Destroy hash object.
    if (hHash) CryptDestroyHash(hHash);
    // Release provider handle.
    if (hProv) CryptReleaseContext(hProv, 0);
    return bRet;
}
And the C# project calling the DLL and the function:
using System;
using System.Runtime.InteropServices;
using System.Text;

namespace ConsoleApp1
{
    class Program
    {
        [DllImport("Project3.dll", CallingConvention = CallingConvention.Cdecl)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EncryptData(byte[] szData, char[] szPassword, StringBuilder sbError, byte[] pData, byte[] pDataLen, bool Encrypt);

        static void Main(string[] args)
        {
            StringBuilder sbError = new StringBuilder(255);
            byte[] szData = Encoding.ASCII.GetBytes("datatest");
            char[] szPassword = ("test").ToCharArray();
            byte[] pData = new byte[1008];
            byte[] pDataLen = new byte[16];
            Console.WriteLine("Encrypt");
            bool bRet = EncryptData(szData, szPassword, sbError, pData, pDataLen, true);
            Console.WriteLine(Encoding.ASCII.GetString(pData));
            Console.WriteLine("Decrypt");
            bool bRet2 = EncryptData(szData, szPassword, sbError, pData, pDataLen, false);
            Console.WriteLine(Encoding.ASCII.GetString(pData));
            Console.ReadKey();
        }
    }
}
But I have a problem with Decrypt.
Output:
Encrypt ??r c?
Decrypt datatestx?
Encrypt 1?8V?6N5l???
Decrypt
d ?? -
As you can see, the decrypted output is wrong. What could be wrong?

First of all, your C/C++ code uses deprecated Windows functions; secondly, there are a lot of "insecure" C runtime functions used, like strcpy, strcat, sprintf, etc.; thirdly, not all of the code guards against buffer coding errors. The C code as-is does not compile in any recent Visual Studio compiler without significant corrections.
Please try fixing all the errors, simplify the code, and post it again.
On the C# side, the data buffers that are passed to the C code are not pinned and can be moved by the GC at any time. Due to their small size, they could be eagerly promoted from Gen 0 to Gen 1 by the GC, which causes a memory move operation and invalidates the pointer to the buffer. To put it simply, the pointers to all of the buffers may be invalid at the moment they are read by the C code.
On the C# side you could do the following:
[DllImport("Project3.dll", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.Bool)]
public static unsafe extern bool EncryptData(
byte* szData, char* szPassword, StringBuilder sbError, byte* pData, byte* pDataLen, bool Encrypt);
static unsafe void Main(string[] args)
{
StringBuilder sbError = new StringBuilder(255);
byte[] szDataBuff = Encoding.ASCII.GetBytes("datatest");
char[] szPasswordBuff = ("test").ToCharArray();
byte[] pDataBuff = new byte[1008];
byte[] pDataLenBuff = new byte[16];
fixed (byte* szData = szDataBuff)
fixed (char* szPassword = szPasswordBuff)
fixed (byte* pData = pDataBuff)
fixed (byte* pDataLen = pDataLenBuff)
{
Console.WriteLine("Encrypt");
bool bRet = EncryptData(szData, szPassword, sbError, pData, pDataLen, true);
Console.WriteLine("Encrypted: {0}", bRet);
Console.WriteLine(Marshal.PtrToStringAnsi((IntPtr)pData));
Console.WriteLine("Decrypt");
bool bRet2 = EncryptData(szData, szPassword, sbError, pData, pDataLen, false);
Console.WriteLine("Derypted: {0}", bRet2);
Console.WriteLine(Marshal.PtrToStringAnsi((IntPtr)pData));
}
Console.ReadKey();
}
There are other approaches possible, but this one seems the most straightforward and simple.
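For example, a GCHandle-based variant achieves the same pinning without unsafe pointers. A minimal sketch (the CallNative placeholder stands in for the real P/Invoke call and is an assumption here):
using System;
using System.Runtime.InteropServices;

class PinnedBufferExample
{
    // Placeholder for the real native call; only the pinned address matters here.
    static void CallNative(IntPtr pData, int length) { /* native work happens here */ }

    static void Main()
    {
        byte[] pDataBuff = new byte[1008];
        // Pin the array so the GC cannot move it while native code holds its address.
        GCHandle handle = GCHandle.Alloc(pDataBuff, GCHandleType.Pinned);
        try
        {
            CallNative(handle.AddrOfPinnedObject(), pDataBuff.Length);
        }
        finally
        {
            handle.Free(); // always unpin, otherwise the array stays pinned for its lifetime
        }
    }
}
Unlike fixed, a GCHandle can outlive a single scope, which also makes it suitable for buffers that native code keeps using after the call returns.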

Related

Heap corruption when converting IntPtr in C# to char* in a C++ DLL

I'm trying to pass an array to a DLL and modify the values in it. It goes well and the values in the array change. But the problem is, it causes heap corruption: sometimes it throws an AccessViolationException, other times the program crashes at random.
The function in the DLL, in C++:
MCUPROTOCOL_API int funMcuReadEEprom(const char bank, unsigned char page, char* EEprom, int DataSize)
{
    st_PROTOCOL stResponsePackage;
    RS232_AT_COMMAND Command;
    memset(Command.szCommand, 0x00, sizeof(Command.szCommand));
    unsigned char Package[8] = { 0x00 };
    unsigned char ResponsePackage[32] = { 0x00 };
    unsigned char Data[2] = { 0x00 };
    int State;
    int ResponsePackageLen;
    Data[0] = bank;
    Data[1] = page;
    fun_ProducePackage(TEST_READ_SOFTWARE_VERSION, Data, Package);
    memcpy(Command.szCommand, Package, sizeof(Package));
    Command.dwLength = sizeof(Package);
    Uart.WriteByte(Command);
    Sleep(200);
    State = Uart.ReadByte((char *)ResponsePackage, &ResponsePackageLen);
    if (ERROR_SUCCESS != State)
    {
        return FAIL_UART;
    }
    State = fun_AnalyzeResponsePackage(ResponsePackageLen, ResponsePackage, &stResponsePackage);
    if (ERROR_SUCCESS != State)
    {
        return State;
    }
    //memcpy(EEprom, stResponsePackage.data, DataSize);
    return SUCCESS;
}
C#:
[DllImport("McuProtocol.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern int funMcuReadEEprom(byte bank, byte page, IntPtr EEprom, int DataSize);
and I call it like this:
byte page = 129;
byte bank = 2;
string TransferStr = "";
IntPtr SoftwareVersion;
byte[] transfer = new byte[20];
SoftwareVersion = Marshal.StringToCoTaskMemAuto(TransferStr);
McuProtocolApi.funMcuRegister(int.Parse(Info.UartCom.Substring(3)), int.Parse(Info.UartBaud));
McuProtocolApi.funMcuReadEEprom(bank, page, SoftwareVersion, SoftwareVersionSize);
McuProtocolApi.funMcuRelease();
transfer = System.Text.Encoding.Unicode.GetBytes(Marshal.PtrToStringAuto(SoftwareVersion, SoftwareVersionSize / 2));
TransferStr = System.Text.Encoding.UTF8.GetString(transfer);
StrSW = TransferStr;
If I comment out funMcuReadEEprom, the program runs fine; otherwise, when it reaches that line, the program crashes.
EDIT 1:
I reworked the code following the suggestions in the comments, and the problem still exists.
byte page = 128;
byte bank = 2;
string TransferStr;
IntPtr SoftwareVersion = Marshal.AllocHGlobal(100);
byte[] transfer = new byte[20];
McuProtocolApi.funMcuRegister(int.Parse(Info.UartCom.Substring(3)), int.Parse(Info.UartBaud));
McuProtocolApi.funMcuReadEEprom(bank, page, SoftwareVersion, SoftwareVersionSize);
McuProtocolApi.funMcuRelease();
transfer = System.Text.Encoding.Unicode.GetBytes(Marshal.PtrToStringAuto(SoftwareVersion, SoftwareVersionSize / 2));
TransferStr = System.Text.Encoding.UTF8.GetString(transfer);
StrSW = TransferStr;
Marshal.FreeHGlobal(SoftwareVersion);
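For comparison, here is a sketch of a simpler marshaling pattern (not the poster's final fix): declare the parameter as byte[] and let the P/Invoke marshaller pin the buffer for the duration of the call, sized to DataSize so the native memcpy cannot overrun it. The ASCII decoding and the 0-means-success convention are assumptions:
using System;
using System.Runtime.InteropServices;
using System.Text;

class McuExample
{
    // Same export as above, but declared with byte[]; the marshaller pins the
    // array while the native call runs, so no manual allocation is needed.
    [DllImport("McuProtocol.dll", CallingConvention = CallingConvention.Cdecl)]
    public static extern int funMcuReadEEprom(byte bank, byte page, byte[] EEprom, int DataSize);

    static string ReadEEpromString(byte bank, byte page, int dataSize)
    {
        byte[] buffer = new byte[dataSize]; // at least DataSize bytes, so the native memcpy fits
        int state = funMcuReadEEprom(bank, page, buffer, buffer.Length);
        if (state != 0) // assumes 0 is SUCCESS, matching the C code above
            throw new InvalidOperationException("funMcuReadEEprom failed: " + state);
        return Encoding.ASCII.GetString(buffer).TrimEnd('\0'); // the encoding is an assumption
    }
}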

Calling C++ function with LPStr return value from C#

I have a 64-bit C++ DLL which contains a function that returns an LPSTR. I would like to call this function in C#. The function declaration looks like this:
__declspec(dllexport) LPSTR __stdcall function(int16_t error_code);
In my C# code I have tried the following:
[DllImport(#"<PathToInterface.dll>", EntryPoint = "function")]
[return: MarshalAs(UnmanagedType.LPStr)]
public static extern string function(Int16 error_code);
And then in the program:
string ErrorMessage = "";
ErrorMessage = function(-10210);
I know that the function itself is good, as I can call it from another program (written in LabVIEW, FWIW). But when I execute the C# program, it just exits with error code 0x80000003; I can't even try/catch the exception.
How do I call this function properly?
As a side note: I do have other functions in this DLL that use LPStr as parameters, which I can call without a problem. It is only the two functions that return LPStr that cause problems.
How do I call this function properly?
As interop? You can't ... it is also error-prone in plain C++.
You should rather do it like this:
extern "C" __declspec(dllexport) int __stdcall function(int16_t error_code,
LPSTR buffer, size_t size)
{
LPCSTR format = "error: %i";
size_t req = _scprintf(format, error_code); // check for require size
if (req > size) //buffer size is too small
{
return req; //return required size
}
sprintf_s(buffer, size, format, error_code); //fill buffer
return 0;
}
And usage
class Program
{
    static void Main(string[] args)
    {
        short error_code = -10210;
        var ret = function(error_code, null, 0); // ask for the required size of the buffer
        var sb = new StringBuilder(ret); // create a large enough buffer
        ret = function(error_code, sb, sb.Capacity + 1); // call again
        var error_desc = sb.ToString(); // get the result
        Console.WriteLine(error_desc);
        Console.ReadKey();
    }

    [DllImport("TestDll.dll", EntryPoint = "function", CharSet = CharSet.Ansi)]
    public static extern int function(short error_code, StringBuilder sb, int size);
}
Usage in C++:
typedef int (__stdcall *function)(int16_t error_code, LPSTR buffer, size_t size);

int main()
{
    auto handle = LoadLibrary(L"TestDll.dll");
    auto proc = (function)GetProcAddress(handle, "_function@12");
    // of course it can also be done via linking
    int16_t error_code = 333;
    const int ret = proc(error_code, NULL, 0); // check for size
    CHAR* buffer = new CHAR[ret + 1];
    //CHAR buffer[200]; // could allocate on the stack instead,
    //but the size has to be a constant value and may be too small
    proc(error_code, buffer, ret + 1); // call again
    MessageBoxA(0, buffer, "Return", 0); // show result
    delete[] buffer; // free the buffer!
    FreeLibrary(handle);
    return 0;
}

C# Reading process memory returning wrong values

I'm trying to read some values from process memory with multiple pointers/offsets in my console .NET app, but I'm getting the wrong values for the last three. I don't know what I'm doing wrong; I've been checking the code and trying different approaches for hours, but I still get the same results.
I'm reading these values from a 64-bit process.
Here's a preview from my app and Cheat Engine at the same time (Cheat Engine contains the correct values).
Here is my code for reading these pointers:
Memory.OpenProcess(Data.Core.ProcessID);
Data.Core.GameBase = (uint)Memory.BaseAddress("Game.dll");
uint Num0 = Memory.ReadInt((int)Data.Core.GameBase + (int)Data.Core.Offsets.Animation);
uint Num1 = Memory.ReadInt((int)Num0 + (int)Data.Core.Offsets.P1);
uint Num2 = Memory.ReadInt((int)Num1 + (int)Data.Core.Offsets.P2);
uint Num3 = Memory.ReadInt((int)Num2 + (int)Data.Core.Offsets.P3);
uint Num4 = Memory.ReadInt((int)Num3 + (int)Data.Core.Offsets.P4);
uint Num5 = Memory.ReadInt((int)Num4 + (int)Data.Core.Offsets.P5);
ReadInt function:
public uint ReadInt(int iMemoryAddress)
{
    byte[] bBuffer = new byte[4];
    IntPtr lpNumberOfBytesRead;
    if (Mapi.ReadProcessMemory(this._hReadProcess, (IntPtr)iMemoryAddress,
        bBuffer, 4U, out lpNumberOfBytesRead) == 0)
        return 0;
    return BitConverter.ToUInt32(bBuffer, 0);
}
Also:
public uint ReadInt(int Address)
{
    OpenProcessMemory();
    int BytesRead = 0;
    byte[] Data = new byte[4];
    ReadProcessMemory((int)PHandle, Address, Data, 4, ref BytesRead);
    CloseProcessMemory();
    return BitConverter.ToUInt32(Data, 0);
}
Offsets enum:
public enum Offsets : uint
{
    Animation = 0x1494198,
    P1 = 0x68,
    P2 = 0x70,
    P3 = 0x28,
    P4 = 0x378,
    P5 = 0x522,
}
Win API:
[DllImport("kernel32.dll")]
public static extern int ReadProcessMemory(IntPtr hProcess, IntPtr lpBaseAddress,
    [In, Out] byte[] bBuffer, uint size, out IntPtr lpNumberOfBytesRead);
I've tried adding the pointers and offsets using IntPtr / uint / int / Int32 for each pointer+offset, but I still get the same weird values at the end.
I don't think I can do much more than this on my own.
If the target process is x64, then you need to compile for x64 as well, and you should use IntPtr for all pointers, offsets, and addresses to ensure they are wide enough to hold 64-bit addresses.
For walking pointer chains you should use this function, which dereferences each pointer and then adds the offset for you.
public static IntPtr FindDMAAddy(IntPtr hProc, IntPtr ptr, int[] offsets)
{
    var buffer = new byte[IntPtr.Size];
    foreach (int i in offsets)
    {
        ReadProcessMemory(hProc, ptr, buffer, buffer.Length, out var read);
        ptr = (IntPtr.Size == 4)
            ? IntPtr.Add(new IntPtr(BitConverter.ToInt32(buffer, 0)), i)
            : IntPtr.Add(new IntPtr(BitConverter.ToInt64(buffer, 0)), i);
    }
    return ptr;
}
var ammoAddr = FindDMAAddy(hProc, (IntPtr)(modBase + 0x10f4f4), new int[] { 0x374, 0x14, 0 });
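FindDMAAddy assumes a ReadProcessMemory declaration shaped like the call above (byte[] buffer, int length, IntPtr bytes-read); a matching import might look like this, with SetLastError added for diagnostics:
using System;
using System.Runtime.InteropServices;

static class Native
{
    // Signature chosen to match the call in FindDMAAddy exactly.
    [DllImport("kernel32.dll", SetLastError = true)]
    public static extern bool ReadProcessMemory(
        IntPtr hProcess,
        IntPtr lpBaseAddress,
        [Out] byte[] lpBuffer,
        int dwSize,
        out IntPtr lpNumberOfBytesRead);
}
Note that the question's import declares the size parameter as uint, so it would not compile against the answer's call, which passes buffer.Length (an int).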

How to use extern "C" dll function taking char** as an argument in C# application?

I have a DLL with this function:
extern "C" int doJob(char** buffer);
Its usage with C++ looks like this:
char* buf;
int status = doJob(&buf);
What definition should I have for this function in C#?
How can I use this function in C#?
One of the possible patterns is:
[DllImport("containsdojob.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern Int32 doJob(out IntPtr buffer);
[DllImport("containsdojob.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern void freeMemory(IntPtr buffer);
and
IntPtr buffer = IntPtr.Zero;
string str = null;
try
{
    doJob(out buffer);
    if (buffer != IntPtr.Zero)
    {
        str = Marshal.PtrToStringAnsi(buffer);
    }
}
finally
{
    if (buffer != IntPtr.Zero)
    {
        freeMemory(buffer);
    }
}
Note that you'll need a freeMemory method to free the memory allocated by doJob.
There are other possible patterns, for example one based on BSTR and SysAllocString, which is easier to implement on the C# side (but more difficult to implement on the C side).
The "pattern" for using BSTR:
C-side:
char *str = "Foo"; // your string
int len = strlen(str);
int wslen = MultiByteToWideChar(CP_ACP, 0, str, len, 0, 0);
BSTR bstr = SysAllocStringLen(NULL, wslen);
MultiByteToWideChar(CP_ACP, 0, str, len, bstr, wslen);
// bstr is the returned string
C#-side:
[DllImport("containsdojob.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern Int32 doJob([MarshalAs(UnmanagedType.BStr)] out string buffer);
string str;
doJob(out str);
The memory is automatically handled (freed) by the CLR.
If you are using Visual C++ you can even
char *str = "Foo"; // your string
_bstr_t bstrt(str);
BSTR bstr = bstrt.Detach();
// bstr is the returned string
Or, on the C side, you could use one of the two allocators whose memory can be freed on the C# side: LocalAlloc or CoTaskMemAlloc:
char *str = "Foo"; // your string
char *buf = (char*)LocalAlloc(LMEM_FIXED, strlen(str) + 1);
// or char *buf = (char*)CoTaskMemAlloc(strlen(str) + 1);
strcpy(buf, str);
// buf is the returned string
Then you use the first example, but instead of calling
freeMemory(buffer);
you call:
Marshal.FreeHGlobal(buffer); // for LocalAlloc
or
Marshal.FreeCoTaskMem(buffer); // for CoTaskMemAlloc
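Putting that together, the earlier try/finally usage becomes the following sketch (doJob is the same import as above; this assumes the C side allocated with LocalAlloc):
IntPtr buffer = IntPtr.Zero;
string str = null;
try
{
    doJob(out buffer);
    if (buffer != IntPtr.Zero)
    {
        str = Marshal.PtrToStringAnsi(buffer);
    }
}
finally
{
    if (buffer != IntPtr.Zero)
    {
        Marshal.FreeHGlobal(buffer);      // pairs with LocalAlloc(LMEM_FIXED, ...)
        // Marshal.FreeCoTaskMem(buffer); // use this instead for CoTaskMemAlloc
    }
}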

PInvoke problem

This is the signature of the native c method:
bool nativeMethod1(unsigned char *arrayIn,
                   unsigned int arrayInSize,
                   unsigned char *arrayOut,
                   unsigned int *arrayOutSize);
I have no idea why arrayOutSize is a pointer to an unsigned int rather than an unsigned int itself.
This is how I invoke it from C#:
byte[] arrayIn = Encoding.UTF8.GetBytes(source);
uint arrayInSize = (uint)arrayIn.Length;
byte[] arrayOut = new byte[100];
uint[] arrayOutSize = new uint[1];
arrayOutSize[0] = (uint)arrayOut.Length;
fixed (byte* ptrIn = arrayIn, ptrOut = arrayOut)
{
    if (nativeMethod1(ptrIn, arrayInSize, ptrOut, arrayOutSize))
    {
        Console.WriteLine("True");
    }
    else
    {
        Console.WriteLine("False");
    }
}
And the DllImport declaration:
[DllImport(@"IcaCert.dll", EntryPoint = "CreateCert2", CallingConvention = CallingConvention.Cdecl)]
public unsafe static extern bool CreateCert2WithArrays(
    byte* data, uint dataSize,
    byte* result, uint[] resultSize);
According to the documentation, the native method should fill arrayOut with values depending on arrayIn. If its size is less than needed, it returns false; true otherwise. I figured out that 850 elements are needed in arrayOut. So when I create a new byte[100] array, the function should return false, but it always returns true. Why?
You don't need unsafe code and fixed here. The standard P/Invoke marshaller is more than up to the task:
[DllImport(#"IcaCert.dll", EntryPoint = "CreateCert2", CallingConvention = CallingConvention.Cdecl)]
public static extern bool CreateCert2WithArrays(
byte[] arrayIn,
uint arrayInSize,
byte[] arrayOut,
ref uint arrayOutSize
);
byte[] arrayIn = Encoding.UTF8.GetBytes(source);
uint arrayInSize = (uint)arrayIn.Length;
uint arrayOutSize = 0;
CreateCert2WithArrays(arrayIn, arrayInSize, null, ref arrayOutSize);
byte[] arrayOut = new byte[arrayOutSize];
CreateCert2WithArrays(arrayIn, arrayInSize, arrayOut, ref arrayOutSize);
I don't know for sure what the protocol of the function is, but it is normal for such functions to be able to receive NULL if the output array has size 0.
I don't think an array is what you're looking for. It's a pointer to the size of the array, not a pointer to an array of sizes. Try this:
[DllImport(#"IcaCert.dll", EntryPoint = "CreateCert2", CallingConvention = CallingConvention.Cdecl)]
public unsafe static extern bool CreateCert2WithArrays(
byte* data, uint dataSize,
byte* result, ref uint resultSize);
byte[] arrayIn = Encoding.UTF8.GetBytes(source);
uint arrayInSize = (uint)arrayIn.Length;
byte[] arrayOut = new byte[100];
uint arrayOutSize = (uint)arrayOut.Length;
fixed (byte* ptrIn = arrayIn, ptrOut = arrayOut)
{
    CreateCert2WithArrays(ptrIn, arrayInSize, ptrOut, ref arrayOutSize);
}
arrayOut = new byte[(int)arrayOutSize];
fixed (byte* ptrIn = arrayIn, ptrOut = arrayOut)
{
    CreateCert2WithArrays(ptrIn, arrayInSize, ptrOut, ref arrayOutSize);
}
