Cannot decrypt with IV as plain text AES - C#

I have logic that works perfectly in C# for encrypting and decrypting text using AES CBC with a 128-bit key.
Now I have a problem: the other party cannot decrypt the text, and neither can this site:
https://www.devglan.com/online-tools/aes-encryption-decryption
How can I get the IV into a form that can be used to decrypt outside of C#?
I tried
stream = encryptionInfo.InversionVectorText.ToMemoryStream(Encoding.ASCII);
also
stream = encryptionInfo.InversionVectorText.ToMemoryStream(Encoding.UTF8);
Neither of these gives me a value that I can paste into the site above and have it work.
The other side wants me to send the IV in plain text.
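One option I am considering (not yet verified against the site above) is to send the IV as Base64 or hex text rather than trying to read the raw bytes back as characters, e.g.:
// Illustrative only - encode the raw IV bytes as printable text
var ivBase64 = Convert.ToBase64String(encryptionInfo.InversionVector);
var ivHex = BitConverter.ToString(encryptionInfo.InversionVector).Replace("-", "");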
My full code is below for reference:
public static class SecurityExtensions
{
private static Aes GetAes(string keyText, byte[] iv)
{
var key = keyText.ToByteArray();
var result = Aes.Create();
result.Mode = CipherMode.CBC;
result.KeySize = 128;
if (iv.Length > 0)
{
result.IV = iv;
}
result.Key = key;
return result;
}
private static string GenerateRandomCryptoString(int length, string charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890")
{
using (var crypto = new System.Security.Cryptography.RNGCryptoServiceProvider())
{
var result = crypto.GenerateRandomCryptoString(length, charset);
return result;
}
}
private static string GenerateRandomCryptoString(this RNGCryptoServiceProvider random,
int length,
string charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890") =>
RandomString(random.GetBytes, length, charset.ToCharArray());
private static string RandomString(Action<byte[]> fillRandomBuffer, int length, char[] charset)
{
var maxIdx = charset.Length;
var chars = new char[length];
var randomBuffer = new byte[length * 4];
fillRandomBuffer(randomBuffer);
for (var i = 0; i < length; i++)
chars[i] = charset[BitConverter.ToUInt32(randomBuffer, i * 4) % maxIdx];
var result = new string(chars);
return result;
}
public static AesEncryptionInfo EncryptWithAes(this string plainText, string keyText)
{
//Generate an IV made up of only alphanumeric characters to avoid encoding/decoding issues
var ivText = GenerateRandomCryptoString(16);
var iv = Encoding.ASCII.GetBytes(ivText);
var aesAlg = GetAes(keyText, iv);
var encryptor = aesAlg.CreateEncryptor(aesAlg.Key, aesAlg.IV);
byte[] encrypted;
using (var msEncrypt = new MemoryStream())
{
using (var csEncrypt = new CryptoStream(msEncrypt, encryptor, CryptoStreamMode.Write))
{
using (var swEncrypt = new StreamWriter(csEncrypt))
{
swEncrypt.Write(plainText);
}
encrypted = msEncrypt.ToArray();
}
}
aesAlg.Dispose();
var result = new AesEncryptionInfo(encrypted, aesAlg.IV);
return result;
}
public static string DecryptFromAes(this byte[] cipherText, string keyText, byte[] iv)
{
string result;
var aesAlg = GetAes(keyText, iv);
var decryptor = aesAlg.CreateDecryptor(aesAlg.Key, aesAlg.IV);
// Create the streams used for decryption.
using (MemoryStream msDecrypt = new MemoryStream(cipherText))
{
using (CryptoStream csDecrypt = new CryptoStream(msDecrypt, decryptor, CryptoStreamMode.Read))
{
using (StreamReader srDecrypt = new StreamReader(csDecrypt))
{
result = srDecrypt.ReadToEnd();
}
}
}
return result;
}
}
public class AesEncryptionInfo
{
public AesEncryptionInfo(byte[] encrypted, byte[] inversionVector)
{
InversionVector = inversionVector;
Encrypted = encrypted;
}
public byte[] InversionVector { get; set; }
public byte[] Encrypted { get; set; }
public string InversionVectorText => Encoding.Default.GetString(InversionVector);
}
The other side are adamant they do not want to do any kind of decoding, which is obviously not a good approach!
So I have some code that generates a random 16-character string of letters and digits. This definitely generates a string of 16 characters.
When I decrypt this using the logic above, the text is decrypted correctly, but I also see random characters at the start. I don't know why this happens.

Related

AES Padding is Invalid And Cannot Be Removed

I am using AES cryptography to encrypt and decrypt values in my project. My code works almost every time, but sometimes I get a "Padding is invalid and cannot be removed" error. My project is an ASP.NET Core 3.1 project and it's published on IIS Server 8.5.
As said in the "Padding is invalid and cannot be removed?" question asked 9 years ago, my keys and salts are always 128 bits and the padding mode is always set to PKCS#7, like this: aes.Padding = PaddingMode.PKCS7;.
But sometimes I get this error. After debugging my code with the same key, salt and value I don't get any error, and my code works fine for another 10 hours or so. I have no idea why my code behaves like this, and I couldn't find any solution.
My Constructor:
public void KriptoAlgoritmasiniAyarla(string password, string salt, SymmetricAlgorithm algorithm)
{
if (password == null) throw new ArgumentNullException(nameof(password));
if (salt == null) throw new ArgumentNullException(nameof(salt));
DeriveBytes rgb = new Rfc2898DeriveBytes(password, Encoding.Unicode.GetBytes(salt));
var rgbKey = rgb.GetBytes(algorithm.KeySize >> 3);
var rgbIv = rgb.GetBytes(algorithm.BlockSize >> 3);
_sifreleyici = algorithm.CreateEncryptor(rgbKey, rgbIv);
_desifreleyici = algorithm.CreateDecryptor(rgbKey, rgbIv);
}
My encryption code:
public byte[] ByteDizisineSifrele(string plainText)
{
try
{
byte[] encrypted;
// Create a new AesManaged.
using (AesManaged aes = new AesManaged())
{
aes.Padding = PaddingMode.PKCS7;
// Create MemoryStream
using (MemoryStream ms = new MemoryStream())
{
// Create crypto stream using the CryptoStream class. This class is the key to encryption
// and encrypts and decrypts data from any given stream. In this case, we will pass a memory stream
// to encrypt
using (CryptoStream cs = new CryptoStream(ms, _sifreleyici, CryptoStreamMode.Write))
{
// Create StreamWriter and write data to a stream
using (StreamWriter sw = new StreamWriter(cs))
sw.Write(plainText);
encrypted = ms.ToArray();
}
}
}
// Return encrypted data
return encrypted;
}
catch (Exception exp)
{
throw exp;
}
}
My decryption code:
public string ByteDizisiDesifreEt(byte[] cipherText)
{
try
{
string plaintext = null;
// Create AesManaged
using (AesManaged aes = new AesManaged())
{
aes.Padding = PaddingMode.PKCS7;
// Create the streams used for decryption.
using (MemoryStream ms = new MemoryStream(cipherText))
{
// Create crypto stream
using (CryptoStream cs = new CryptoStream(ms, _desifreleyici, CryptoStreamMode.Read))
{
// Read crypto stream
using (StreamReader reader = new StreamReader(cs))
plaintext = reader.ReadToEnd();
}
}
}
return plaintext;
}
catch (Exception exp)
{
throw exp;
}
}
Probably because you are reusing the same ICryptoTransform objects (_sifreleyici and _desifreleyici). At some point, a transform object can't be reused anymore, which is why the interface has a property to determine that: ICryptoTransform.CanReuseTransform.
Consequently, you need to check this property and recreate the objects when you get false.
Example
private readonly byte[] Key, IV;
public void KriptoAlgoritmasiniAyarla(
string password,
string salt,
SymmetricAlgorithm algorithm)
{
// ...
Key = // Get the key..
IV = // Get the IV..
}
private ICryptoTransform encryptor;
private ICryptoTransform Encryptor
{
get
{
if (encryptor == null || !encryptor.CanReuseTransform)
{
encryptor?.Dispose();
encryptor = Algorithm.CreateEncryptor(Key, IV);
}
return encryptor;
}
}
private ICryptoTransform decryptor;
private ICryptoTransform Decryptor
{
get
{
if (decryptor == null || !decryptor.CanReuseTransform)
{
decryptor?.Dispose();
decryptor = Algorithm.CreateDecryptor(Key, IV);
}
return decryptor;
}
}
Then use these two properties in the related methods to create the CryptoStream.
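For example, the encryption method might then look something like this (a sketch only, reusing the posted method body with the Encryptor property swapped in for the cached field):
public byte[] ByteDizisineSifrele(string plainText)
{
    using (MemoryStream ms = new MemoryStream())
    {
        // The Encryptor property recreates the transform when CanReuseTransform is false
        using (CryptoStream cs = new CryptoStream(ms, Encryptor, CryptoStreamMode.Write))
        using (StreamWriter sw = new StreamWriter(cs))
            sw.Write(plainText);
        return ms.ToArray();
    }
}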
Alternative
I'd like to propose the code below as an alternative that can be used with the classes that derive from the SymmetricAlgorithm abstract class.
public class SymmetricCrypto<T> : IDisposable where T : SymmetricAlgorithm, new()
{
private readonly T Algorithm = new T();
public SymmetricCrypto()
{
Algorithm.GenerateKey();
Algorithm.GenerateIV();
}
public SymmetricCrypto(byte[] key, byte[] iv)
{
Algorithm.Key = key;
Algorithm.IV = iv;
}
public SymmetricCrypto(string pass)
{
var bytes = Encoding.UTF8.GetBytes(pass);
var rfc = new Rfc2898DeriveBytes(pass,
new SHA256Managed().ComputeHash(bytes), 1000);
Algorithm.Key = rfc.GetBytes(Algorithm.LegalKeySizes[0].MaxSize / 8);
Algorithm.IV = rfc.GetBytes(Algorithm.LegalBlockSizes[0].MinSize / 8);
}
public SymmetricCrypto(byte[] pass)
{
var rfc = new Rfc2898DeriveBytes(pass,
new SHA256Managed().ComputeHash(pass), 1000);
Algorithm.Key = rfc.GetBytes(Algorithm.LegalKeySizes[0].MaxSize / 8);
Algorithm.IV = rfc.GetBytes(Algorithm.LegalBlockSizes[0].MinSize / 8);
}
public byte[] Encrypt(string input) =>
Transform(Encoding.UTF8.GetBytes(input), Algorithm.CreateEncryptor());
public string Decrypt(byte[] input) =>
Encoding.UTF8.GetString(Transform(input, Algorithm.CreateDecryptor()));
private byte[] Transform(byte[] input, ICryptoTransform cryptoTrans)
{
using (var ms = new MemoryStream())
using (var cs = new CryptoStream(ms, cryptoTrans, CryptoStreamMode.Write))
{
cs.Write(input, 0, input.Length);
cs.FlushFinalBlock();
return ms.ToArray();
}
}
public void Dispose() => Algorithm.Dispose();
}
Usage:
void SomeCaller()
{
using (var crypt = new SymmetricCrypto<AesManaged>("password"))
{
var bytes = crypt.Encrypt("Plain Text....");
// ...
var plainText = crypt.Decrypt(bytes);
// ...
}
}

Extra null characters when decrypting AES-CBC-PKCS7 with BouncyCastle

I need to implement AES encryption in two different projects, but one must use the standard .NET crypto libraries and the other must use BouncyCastle. Both are C# code. The relevant methods are as follows:
.NET:
internal class NETAesCryptor : IAesCryptor
{
public Tuple<byte[], byte[]> Encrypt(string plaintext, byte[] key)
{
byte[] ciphertext, iv;
using (var aes_provider = new AesCryptoServiceProvider())
{
aes_provider.Padding = PaddingMode.PKCS7;
aes_provider.GenerateIV();
iv = aes_provider.IV;
var encryptor = aes_provider.CreateEncryptor(key, iv);
using (var ms = new MemoryStream())
{
using (var cs = new CryptoStream(ms, encryptor, CryptoStreamMode.Write))
{
using (var sw = new StreamWriter(cs))
{
sw.Write(plaintext);
}
ciphertext = ms.ToArray();
}
}
}
var result = new Tuple<byte[], byte[]>(ciphertext, iv);
return result;
}
public string Decrypt(byte[] ciphertext, byte[] iv, byte[] key)
{
string plaintext;
using (var aes_provider = new AesCryptoServiceProvider())
{
aes_provider.Padding = PaddingMode.PKCS7;
aes_provider.IV = iv;
var decryptor = aes_provider.CreateDecryptor(key, iv);
using (var ms = new MemoryStream(ciphertext))
{
using (var cs = new CryptoStream(ms, decryptor, CryptoStreamMode.Read))
{
using (var sr = new StreamReader(cs))
{
plaintext = sr.ReadToEnd();
}
}
}
}
return plaintext;
}
}
Bouncycastle:
internal class BCAesCryptor : IAesCryptor
{
private SecureRandom _r;
public BCAesCryptor()
{
_r = new SecureRandom();
}
public Tuple<byte[], byte[]> Encrypt(string plaintext, byte[] key)
{
var plaintext_bytes = Encoding.UTF8.GetBytes(plaintext);
var iv = GenerateRandomBytes(16);
var engine = new AesEngine();
var cbc_cipher = new CbcBlockCipher(engine);
var cipher = new PaddedBufferedBlockCipher(cbc_cipher, new Pkcs7Padding());
var key_param = new KeyParameter(key);
var key_param_with_iv = new ParametersWithIV(key_param, iv);
cipher.Init(true, key_param_with_iv);
var ciphertext = new byte[cipher.GetOutputSize(plaintext_bytes.Length)];
var length = cipher.ProcessBytes(plaintext_bytes, ciphertext, 0);
cipher.DoFinal(ciphertext, length);
var result = new Tuple<byte[], byte[]>(ciphertext, iv);
return result;
}
public string Decrypt(byte[] ciphertext, byte[] iv, byte[] key)
{
var engine = new AesEngine();
var cbc_cipher = new CbcBlockCipher(engine);
var cipher = new PaddedBufferedBlockCipher(cbc_cipher, new Pkcs7Padding());
var key_param = new KeyParameter(key);
var key_param_with_iv = new ParametersWithIV(key_param, iv);
cipher.Init(false, key_param_with_iv);
var plaintext = new byte[cipher.GetOutputSize(ciphertext.Length)];
var length = cipher.ProcessBytes(ciphertext, plaintext, 0);
cipher.DoFinal(plaintext, length);
var result = Encoding.UTF8.GetString(plaintext);
return result;
}
private byte[] GenerateRandomBytes(int length = 16)
{
var result = new byte[length];
_r.NextBytes(result);
return result;
}
}
Encryption/decryption between .NET methods works OK, and Bouncycastle encryption/.NET decryption also works OK. But for some reason, Bouncycastle decryption adds a variable number of \0 characters at the end of the plaintext, and I don't know why this is happening.
Test code I'm using:
[TestClass]
public class AesCryptorTests
{
private byte[] _key;
private string _plaintext;
public AesCryptorTests()
{
_key = GenerateRandomBytes();
_plaintext = "Lorem ipsum dolor sit amet";
}
[TestMethod]
public void TestMethod2()
{
var bc = new BCAesCryptor();
var net = new NETAesCryptor();
var result = net.Encrypt(_plaintext, _key);
var new_plaintext = bc.Decrypt(result.Item1, result.Item2, _key);
Assert.AreEqual(_plaintext, new_plaintext);
}
private byte[] GenerateRandomBytes(int cantidad = 16)
{
var result = new byte[cantidad];
using (var r = new RNGCryptoServiceProvider())
{
r.GetBytes(result);
}
return result;
}
}
In the previous test, the decryption returns Lorem ipsum dolor sit amet\0\0\0\0\0\0 instead of the plaintext.
Any advice/comment would be greatly appreciated.
Bouncy Castle can only guess the output size of the plaintext message in advance during the call to GetOutputSize. It cannot know how many padding bytes are used, because those are only available after decryption. It would have to partially decrypt the ciphertext to know the amount of padding, and that's taking it a step too far. Therefore you just get an estimate on the high side, so that the maximum possible number of bytes still fits in your newly created buffer.
You'll need the return values of ProcessBytes and DoFinal to see the actual number of bytes that are decrypted from the ciphertext (in the input buffer and internal buffer) when the methods are called. DoFinal decrypts the last block(s) and then removes the padding from the final block, so only at that point is the size of the (remaining) plaintext known.
What you're currently seeing as zero-valued bytes are just the unused bytes of the buffer, since the plaintext is smaller than the value returned by GetOutputSize.
Of course, this is all hidden in the streaming code of the .NET sample, where ReadToEnd is required to do some advanced buffering (probably using a MemoryStream internally itself).
Following instructions from Maarten Bodewes, the final working code is as follows:
public string Decrypt(byte[] ciphertext, byte[] iv, byte[] key)
{
var engine = new AesEngine();
var cbc_cipher = new CbcBlockCipher(engine);
var cipher = new PaddedBufferedBlockCipher(cbc_cipher, new Pkcs7Padding());
var key_param = new KeyParameter(key);
var key_param_with_iv = new ParametersWithIV(key_param, iv);
cipher.Init(false, key_param_with_iv);
var decryption_buffer = new byte[cipher.GetOutputSize(ciphertext.Length)];
var initial_length = cipher.ProcessBytes(ciphertext, decryption_buffer, 0);
var last_bytes = cipher.DoFinal(decryption_buffer, initial_length);
var total_bytes = initial_length + last_bytes;
var plaintext = new byte[total_bytes];
Array.Copy(decryption_buffer, plaintext, total_bytes);
var result = Encoding.UTF8.GetString(plaintext);
return result;
}
Note that the length of the plaintext is now calculated with the integer outputs of the decryption methods, and a simple array copy is able to create a plaintext without extra characters.

zeros_Padding result different output

Why do I get the wrong results?
Is PKCS7 padding not supported by Crypto++?
I would like to know what I need to do to get matching results.
I am assuming the IV value is delivered correctly between the two sides.
// c# code
private byte[] _iv;
private readonly string key = "7794b12op901252bfcea66d6f0521212";
public string decrypt(string Input)
{
string str = "";
RijndaelManaged managed = new RijndaelManaged();
managed.KeySize = 128;
managed.BlockSize = 128;
managed.Mode = CipherMode.CBC;
managed.Padding = PaddingMode.Zeros;
managed.Key = Encoding.UTF8.GetBytes(this.key);
managed.IV = this._iv;
try
{
ICryptoTransform transform = managed.CreateDecryptor();
byte[] bytes = null;
using (MemoryStream stream = new MemoryStream())
{
using (CryptoStream stream2 = new CryptoStream(stream, transform, CryptoStreamMode.Write))
{
byte[] buffer = Convert.FromBase64String(Input);
stream2.Write(buffer, 0, buffer.Length);
}
bytes = stream.ToArray();
}
str = Encoding.ASCII.GetString(bytes);
}
catch (Exception)
{
}
return str;
}
public string encrypt(string Input)
{
RijndaelManaged managed = new RijndaelManaged();
managed.KeySize = 128;
managed.BlockSize = 128;
managed.Mode = CipherMode.CBC;
managed.Padding = PaddingMode.Zeros;
managed.Key = Encoding.ASCII.GetBytes(this.key);
managed.GenerateIV();
this._iv = managed.IV;
ICryptoTransform transform = managed.CreateEncryptor(managed.Key, managed.IV);
byte[] inArray = null;
using (MemoryStream stream = new MemoryStream())
{
using (CryptoStream stream2 = new CryptoStream(stream, transform, CryptoStreamMode.Write))
{
byte[] bytes = Encoding.UTF8.GetBytes(Input);
stream2.Write(bytes, 0, bytes.Length);
}
inArray = stream.ToArray();
}
return Convert.ToBase64String(inArray);
}
Below is the Qt5 code (details omitted).
Qt code:
QString aeskey = "7794b12op901252bfcea66d6f0521212";
QString _iv;
void Cipher::GenerateIV()
{
AutoSeededRandomPool rnd;
byte iv3[AES::BLOCKSIZE];
rnd.GenerateBlock(iv3, AES::BLOCKSIZE);
QByteArray out((char*)iv3, AES::BLOCKSIZE);
_iv = out.toBase64();
}
QString Cipher::AESencrypt(QString Qstr_in)
{
string str_in = Qstr_in.toStdString();
string key = aeskey.toStdString();
GenerateIV();
string iv = _iv.toStdString();
string str_out;
CBC_Mode<AES>::Encryption encryption;
encryption.SetKeyWithIV((byte*)key.c_str(), key.length(), (byte*)iv.c_str());
StringSource encryptor(str_in, true,
new StreamTransformationFilter(encryption,
new Base64Encoder(
new StringSink(str_out)
// ,StreamTransformationFilter::PKCS_PADDING
,StreamTransformationFilter::ZEROS_PADDING
)
)
);
return QString::fromStdString(str_out);
}
QString Cipher::AESdecrypt(QString Qstr_in)
{
string str_in = Qstr_in.toStdString();
string key = aeskey.toStdString();
string iv = _iv.toStdString();
string str_out;
CBC_Mode<AES>::Decryption decryption;
decryption.SetKeyWithIV((byte*)key.c_str(), key.length(), (byte*)iv.c_str());
StringSource decryptor(str_in, true,
new Base64Decoder(
new StreamTransformationFilter(decryption,
new StringSink(str_out)
// ,StreamTransformationFilter::PKCS_PADDING
,StreamTransformationFilter::DEFAULT_PADDING
)
)
);
return QString::fromStdString(str_out);
}
I don't really understand what your question is and I can't comment yet, so here's what I think:
ICryptoTransform transform = managed.CreateEncryptor(managed.Key, managed.IV);
ICryptoTransform transform = managed.CreateDecryptor();
Both need the key and IV, or at least they need to be the same....
Also, you used Rijndael in one place and AES in the other. You could use AES in your C# code too.
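For instance, a minimal sketch using Aes on the C# side (keyBytes and ivBytes here are placeholders for whatever raw bytes both sides agree on):
using (Aes aes = Aes.Create())
{
    aes.Mode = CipherMode.CBC;
    aes.Padding = PaddingMode.Zeros;
    aes.Key = keyBytes; // same raw key bytes on both sides
    aes.IV = ivBytes;   // same raw 16-byte IV on both sides
    ICryptoTransform encryptor = aes.CreateEncryptor(aes.Key, aes.IV);
    ICryptoTransform decryptor = aes.CreateDecryptor(aes.Key, aes.IV);
}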
A couple things jump out... In C# code, you do this:
private readonly string key = "7794b12op901252bfcea66d6f0521212";
...
managed.Key = Encoding.UTF8.GetBytes(this.key);
In Crypto++ code, you do this:
QString aeskey = "7794b12op901252bfcea66d6f0521212";
...
string key = aeskey.toStdString();
You need to HexDecode the string in Crypto++.
Also, GenerateIV Base64 encodes on the Qt side of things:
AutoSeededRandomPool rnd;
byte iv3[AES::BLOCKSIZE];
rnd.GenerateBlock(iv3, AES::BLOCKSIZE);
QByteArray out((char*)iv3, AES::BLOCKSIZE);
_iv = out.toBase64();
But C# uses a byte[] (presumably not Base64 encoded):
private byte[] _iv;
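If the IV really is exchanged as a Base64 string, it has to be decoded back to the 16 raw bytes before use - a sketch, with ivBase64 standing in for the text received from the Qt side:
// Decode the Base64 text back into raw bytes; the IV must be 16 raw bytes, not the Base64 characters
byte[] ivBytes = Convert.FromBase64String(ivBase64);
managed.IV = ivBytes;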

What in the Salt causes it to fail in decryption

The code below and in the Fiddle isn't for production, it is for educational purposes. I do not want to fix anything, as I have a viable solution. However, I would like to know why:
var password = "password";
var salt = Encoding.ASCII.GetBytes(password.Length.ToString());
var secret = new PasswordDeriveBytes(password, salt);
When the above is used, the FixedEncryptor method below will work.
// Valid:
public static string FixedEncryptor(string content)
{
var cipher = new RijndaelManaged();
var plain = Encoding.Unicode.GetBytes(content);
var key = new PasswordDeriveBytes(password, salt);
using (var encrypt = cipher.CreateEncryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream())
using (var crypto = new CryptoStream(stream, encrypt, CryptoStreamMode.Write))
{
crypto.Write(plain, 0, plain.Length);
crypto.FlushFinalBlock();
return Convert.ToBase64String(stream.ToArray());
}
}
However, if you implement:
var secret = new PasswordDeriveBytes("password",
Encoding.ASCII.GetBytes("password"));
The code will suddenly produce:
Run-time exception (line 70): Padding is invalid and cannot be
removed.
Stack Trace:
[System.Security.Cryptography.CryptographicException: Padding is
invalid and cannot be removed.] at Crypt.Decryptor(String content):
line 70 at Program.Main(): line 17
As denoted in the following method:
// Invalid:
public static string Encryptor(string content)
{
var cipher = new RijndaelManaged();
var plain = Encoding.Unicode.GetBytes(content);
var key = new PasswordDeriveBytes("password", Encoding.ASCII.GetBytes("password"));
using (var encrypt = cipher.CreateEncryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream())
using (var crypto = new CryptoStream(stream, encrypt, CryptoStreamMode.Write))
{
crypto.Write(plain, 0, plain.Length);
crypto.FlushFinalBlock();
return Convert.ToBase64String(stream.ToArray());
}
}
So why can one successfully decrypt, while the other doesn't decrypt correctly and produces the above error?
A Fiddle with a small example is here.
From your posted code example, your problem comes from the fact that you are using two different salts.
In FixedEncryptor you use a salt of
Encoding.ASCII.GetBytes(password.Length.ToString());
That encodes to a byte array equal to { 56 }. This is because Length returns 8, calling ToString() on that returns the string "8", and converting that to ASCII gives the byte value 56.
In Encryptor you use a salt of
Encoding.ASCII.GetBytes("password")
That encodes to a byte array equal to { 112, 97, 115, 115, 119, 111, 114, 100 }, which is the ASCII values of the characters "p", "a", "s", "s", "w", "o", "r", and "d".
The problem you are running into is that you only use { 56 } in your decrypt function, so it comes down to your encrypt function and your decrypt function using two different salts.
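A quick illustrative check of the two salts (not part of the original code):
var saltA = Encoding.ASCII.GetBytes("password".Length.ToString()); // { 56 }
var saltB = Encoding.ASCII.GetBytes("password");                   // { 112, 97, 115, 115, 119, 111, 114, 100 }
Console.WriteLine(string.Join(", ", saltA));
Console.WriteLine(string.Join(", ", saltB));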
If I make a new Decryptor that uses the same salt and password as Encryptor, then make a separate FixedDecryptor to match the salt of FixedEncryptor, everything works fine:
public class Program
{
public static void Main()
{
var message = "Hello World!";
var fixedCipherText = Crypt.FixedEncryptor(message);
var cipherText = Crypt.Encryptor(message);
Console.WriteLine(cipherText);
Console.WriteLine(fixedCipherText);
var plainText = Crypt.Decryptor(cipherText);
var fixedPlainText = Crypt.FixedDecryptor(fixedCipherText);
Console.WriteLine(plainText);
Console.WriteLine(fixedPlainText);
}
}
public static class Crypt
{
private const string password = "password";
private readonly static byte[] salt = Encoding.ASCII.GetBytes(password.Length.ToString());
public static string FixedEncryptor(string content)
{
var cipher = new RijndaelManaged();
var plain = Encoding.Unicode.GetBytes(content);
var key = new PasswordDeriveBytes(password, salt);
using (var encrypt = cipher.CreateEncryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream())
using (var crypto = new CryptoStream(stream, encrypt, CryptoStreamMode.Write))
{
crypto.Write(plain, 0, plain.Length);
crypto.FlushFinalBlock();
return Convert.ToBase64String(stream.ToArray());
}
}
public static string Encryptor(string content)
{
var cipher = new RijndaelManaged();
var plain = Encoding.Unicode.GetBytes(content);
var key = new PasswordDeriveBytes("password", Encoding.ASCII.GetBytes("password"));
using (var encrypt = cipher.CreateEncryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream())
using (var crypto = new CryptoStream(stream, encrypt, CryptoStreamMode.Write))
{
crypto.Write(plain, 0, plain.Length);
crypto.FlushFinalBlock();
return Convert.ToBase64String(stream.ToArray());
}
}
public static string FixedDecryptor(string content)
{
var cipher = new RijndaelManaged();
var encrypted = Convert.FromBase64String(content);
var key = new PasswordDeriveBytes(password, salt);
using (var decryptor = cipher.CreateDecryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream(encrypted))
using (var crypto = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
{
byte[] plain = new byte[encrypted.Length];
int decrypted = crypto.Read(plain, 0, plain.Length);
string data = Encoding.Unicode.GetString(plain, 0, decrypted);
return data;
}
}
public static string Decryptor(string content)
{
var cipher = new RijndaelManaged();
var encrypted = Convert.FromBase64String(content);
var key = new PasswordDeriveBytes("password", Encoding.ASCII.GetBytes("password"));
using (var decryptor = cipher.CreateDecryptor(key.GetBytes(32), key.GetBytes(16)))
using (var stream = new MemoryStream(encrypted))
using (var crypto = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
{
byte[] plain = new byte[encrypted.Length];
int decrypted = crypto.Read(plain, 0, plain.Length);
string data = Encoding.Unicode.GetString(plain, 0, decrypted);
return data;
}
}
}
Fiddle of the code.
However, this still is not the "correct" way to do things. See Sine Nomen's answer.
First of all, the method by which you generate a salt is not secure at all; secondly, PasswordDeriveBytes is deprecated and you should be looking at its successor, Rfc2898DeriveBytes.
Try something like the following - note that this requires a few using statements: System, System.IO, System.Security.Cryptography and System.Text.
Simply encrypt the data with Encrypt(PlainText, Password) and decrypt it again with Decrypt(EncryptedData, Password). The salt is rolled into the encrypted data as the first 16 bytes and it is completely random for each encryption/decryption round.
This code is part of my own open source password manager.
/*
* Encryption/Decryption, based on AES256 and PBKDF2
*/
public string Encrypt (string plainText, string passPhrase, bool fast_encrypt = false)
{
string result;
using (Rijndael algR = Rijndael.Create ()) {
RNGCryptoServiceProvider rngC = new RNGCryptoServiceProvider ();
byte[] iv = new byte[16];
rngC.GetBytes (iv);
Rfc2898DeriveBytes derived = new Rfc2898DeriveBytes (passPhrase, iv, fast_encrypt ? 10 : 3000);
algR.KeySize = 256;
algR.BlockSize = 128;
algR.Key = derived.GetBytes (32);
algR.IV = iv;
using (MemoryStream memoryStream = new MemoryStream ()) {
memoryStream.Write (iv, 0, 16);
using (CryptoStream cryptoStreamEncrypt = new CryptoStream (memoryStream, algR.CreateEncryptor (algR.Key, algR.IV), CryptoStreamMode.Write)) {
using (StreamWriter streamWriterEncrypt = new StreamWriter (cryptoStreamEncrypt)) {
streamWriterEncrypt.Write (plainText);
}
}
result = Convert.ToBase64String (memoryStream.ToArray ());
}
}
return result;
}
public string Decrypt (string cipherText, string passPhrase, bool fast_decrypt = false)
{
string result;
using (Rijndael algR = Rijndael.Create ()) {
using (MemoryStream memoryStream = new MemoryStream (Convert.FromBase64String (cipherText))) {
byte[] iv = new byte[16];
memoryStream.Read (iv, 0, 16);
Rfc2898DeriveBytes derived = new Rfc2898DeriveBytes (passPhrase, iv, fast_decrypt ? 10 : 3000);
algR.KeySize = 256;
algR.BlockSize = 128;
algR.Key = derived.GetBytes (32);
algR.IV = iv;
using (CryptoStream cryptoStreamDecrypt = new CryptoStream (memoryStream, algR.CreateDecryptor (algR.Key, algR.IV), CryptoStreamMode.Read)) {
using (StreamReader streamReaderDecrypt = new StreamReader (cryptoStreamDecrypt)) {
result = streamReaderDecrypt.ReadToEnd ();
}
}
}
}
return result;
}
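Usage might look like this (a sketch, called from inside the same class that defines these two methods; the pass phrase is just an example):
string encrypted = Encrypt("my secret data", "my pass phrase");
string decrypted = Decrypt(encrypted, "my pass phrase");
// decrypted == "my secret data"; the random 16-byte IV/salt travels as the first 16 bytes of the Base64 payload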

Rijndael Padding Error

Hello, I am trying to encrypt/decrypt a string via Rijndael.
I simply can't figure out why the decryption blows up. I always end up with an incorrect-padding error. One thing that throws me off is the result of my encryption, which I return as a hex string. It has a length of 14 bytes. In my decryption function, the same byte array ends up having 16 bytes upon conversion from hex.
Any help would be appreciated:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace rjandal
{
class Program
{
static void Main(string[] args)
{
string DataForEncrypting = "this is a test";
string key = string.Empty;
string iv = string.Empty;
using (System.Security.Cryptography.RijndaelManaged rmt = new System.Security.Cryptography.RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = System.Security.Cryptography.CipherMode.CBC;
rmt.Padding = System.Security.Cryptography.PaddingMode.ISO10126;
rmt.GenerateKey();
rmt.GenerateIV();
key = Convert.ToBase64String(rmt.Key);
iv = Convert.ToBase64String(rmt.IV);
}
string encryptedData = _encrypt(DataForEncrypting, key, iv);
string unencryptedData = _decrypt(key, iv, HexString2Ascii(encryptedData));
Console.WriteLine(unencryptedData);
Console.WriteLine(encryptedData);
Console.ReadKey();
}
private static string _encrypt(string value, string key, string initVector)
{
byte[] buffer = ASCIIEncoding.ASCII.GetBytes(value);
byte[] encBuffer;
using (System.Security.Cryptography.RijndaelManaged rmt = new System.Security.Cryptography.RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = System.Security.Cryptography.CipherMode.CBC;
rmt.Padding = System.Security.Cryptography.PaddingMode.ISO10126;
encBuffer = rmt.CreateEncryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector)).TransformFinalBlock(buffer, 0, buffer.Length);
}
string encryptValue = ConvertToHex(ASCIIEncoding.ASCII.GetString(encBuffer));
return encryptValue;
}
private static string _decrypt(string key, string initVector, string value)
{
byte[] hexBuffer = ASCIIEncoding.ASCII.GetBytes(value);
byte[] decBuffer;
using (System.Security.Cryptography.RijndaelManaged rmt = new System.Security.Cryptography.RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = System.Security.Cryptography.CipherMode.CBC;
rmt.Padding = System.Security.Cryptography.PaddingMode.ISO10126;
decBuffer = rmt.CreateDecryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector)).TransformFinalBlock(hexBuffer, 0, hexBuffer.Length);
}
return System.Text.ASCIIEncoding.ASCII.GetString(decBuffer);
}
private static string ConvertToHex(string asciiString)
{
string hex = "";
foreach (char c in asciiString)
{
int tmp = c;
hex += String.Format("{0:x2}", (uint)System.Convert.ToUInt32(tmp.ToString()));
}
return hex;
}
private static string HexString2Ascii(string hexString)
{
StringBuilder sb = new StringBuilder();
for (int i = 0; i <= hexString.Length - 2; i += 2)
{
sb.Append(Convert.ToString(Convert.ToChar(Int32.Parse(hexString.Substring(i, 2), System.Globalization.NumberStyles.HexNumber))));
}
return sb.ToString();
}
}
}
You're doing way too much conversion between text and data, basically. Look at this, for example:
string encryptValue = ConvertToHex(ASCIIEncoding.ASCII.GetString(encBuffer));
Once you've got an ASCII string, why would you need to convert that into hex? It's already text! But by then you'll already have lost the data. Unless you really need it in hex (in which case follow Adam's suggestion and change your HexToAscii method to take a byte[] instead of a string) you should just use Convert.ToBase64String:
string encryptValue = Convert.ToBase64String(encBuffer);
Use Convert.FromBase64String at the other end when decrypting. You can then get rid of your hex methods completely.
Oh, and in general I wouldn't use Encoding.ASCII to start with... I'd almost always use Encoding.UTF8 instead. Currently you'll fail to encrypt (correctly) any strings containing non-ASCII characters such as accents.
Here's a rejigged version of your test program, with a few of those changes made. Note that the names "cipher text" and "plain text" are in terms of encryption... they're still binary data rather than text!
using System;
using System.Security.Cryptography;
using System.Text;
class Program
{
static void Main(string[] args)
{
string DataForEncrypting = "this is a test";
string key = string.Empty;
string iv = string.Empty;
using (RijndaelManaged rmt = new RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = CipherMode.CBC;
rmt.Padding = PaddingMode.ISO10126;
rmt.GenerateKey();
rmt.GenerateIV();
key = Convert.ToBase64String(rmt.Key);
iv = Convert.ToBase64String(rmt.IV);
}
string encryptedData = _encrypt(DataForEncrypting, key, iv);
string unencryptedData = _decrypt(key, iv, encryptedData);
Console.WriteLine(unencryptedData);
Console.WriteLine(encryptedData);
Console.ReadKey();
}
private static string _encrypt(string value, string key, string initVector)
{
using (RijndaelManaged rmt = new RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = CipherMode.CBC;
rmt.Padding = PaddingMode.ISO10126;
byte[] plainText = Encoding.UTF8.GetBytes(value);
byte[] cipherText = rmt.CreateEncryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector))
.TransformFinalBlock(plainText, 0, plainText.Length);
return Convert.ToBase64String(cipherText);
}
}
private static string _decrypt(string key, string initVector, string value)
{
using (RijndaelManaged rmt = new RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = CipherMode.CBC;
rmt.Padding = PaddingMode.ISO10126;
byte[] cipherText = Convert.FromBase64String(value);
byte[] plainText = rmt.CreateDecryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector))
.TransformFinalBlock(cipherText, 0, cipherText.Length);
return Encoding.UTF8.GetString(plainText);
}
}
}
You shouldn't be using ASCII character encoding as an intermediate step; you should change your functions that go from hex to ASCII (and back again) to go from a byte[] to hex (and back again) instead.
private static string ConvertToHex(byte[] data)
{
string hex = "";
foreach (byte b in data)
{
hex += b.ToString("X2");
}
return hex;
}
private static byte[] HexString2ByteArray(string hexString)
{
byte[] output = new byte[hexString.Length / 2];
for (int i = 0; i <= hexString.Length - 2; i += 2)
{
output[i/2] = Convert.ToByte(hexString.Substring(i, 2), 16);
}
return output;
}
As a side note, is there a reason that you're looking for a hex representation of the array versus something more compact like Base64? You're using Base64 in your example to transfer the key and IV, so I'm just curious about what makes you want to return the encrypted data as hex here.
In any case, here's something that should work for you:
private static string _encrypt(string value, string key, string initVector)
{
byte[] buffer = Encoding.Unicode.GetBytes(value);
byte[] encBuffer;
using (System.Security.Cryptography.RijndaelManaged rmt = new System.Security.Cryptography.RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = System.Security.Cryptography.CipherMode.CBC;
rmt.Padding = System.Security.Cryptography.PaddingMode.ISO10126;
encBuffer = rmt.CreateEncryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector)).TransformFinalBlock(buffer, 0, buffer.Length);
}
string encryptValue = ConvertToHex(encBuffer);
return encryptValue;
}
private static string _decrypt(string key, string initVector, string value)
{
byte[] hexBuffer = HexString2ByteArray(value);
byte[] decBuffer;
using (System.Security.Cryptography.RijndaelManaged rmt = new System.Security.Cryptography.RijndaelManaged())
{
rmt.KeySize = 256;
rmt.BlockSize = 128;
rmt.Mode = System.Security.Cryptography.CipherMode.CBC;
rmt.Padding = System.Security.Cryptography.PaddingMode.ISO10126;
decBuffer = rmt.CreateDecryptor(Convert.FromBase64String(key),
Convert.FromBase64String(initVector)).TransformFinalBlock(hexBuffer, 0, hexBuffer.Length);
}
return Encoding.Unicode.GetString(decBuffer);
}
You can avoid the issues with decrypting/encrypting via System.Text.Encoding, and avoid the Base64 workaround, by adding a few methods that completely bypass Microsoft's lossy conversions in System.Text.Encoding and let you encrypt the real bytes in memory without any translation.
Since using these, I have avoided the padding errors caused by the System.Text.Encoding methods, without using Base64 conversions either.
private static Byte[] GetBytes(String SomeString)
{
Char[] SomeChars = SomeString.ToCharArray();
Int32 Size = SomeChars.Length * 2;
List<Byte> TempList = new List<Byte>(Size);
foreach (Char Character in SomeChars)
{
TempList.AddRange(BitConverter.GetBytes(Character));
}
return TempList.ToArray();
}
private static String GetString(Byte[] ByteArray)
{
Int32 Size = ByteArray.Length / 2;
List<Char> TempList = new List<Char>(Size);
for (Int32 i = 0; i < ByteArray.Length; i += 2)
{
TempList.Add(BitConverter.ToChar(ByteArray, i));
}
return new String(TempList.ToArray());
}
And here is how they are used with encryption:
private static String Encrypt(String Test1, Byte[] Key, Byte[] IV)
{
Byte[] Encrypted;
using (AesCryptoServiceProvider AesMan = new AesCryptoServiceProvider())
{
AesMan.Mode = CipherMode.CBC;
AesMan.Padding = PaddingMode.ISO10126;
ICryptoTransform EncThis = AesMan.CreateEncryptor(Key, IV);
using (MemoryStream msEncrypt = new MemoryStream())
{
using (CryptoStream csEncrypt = new CryptoStream(msEncrypt, EncThis, CryptoStreamMode.Write))
{
using (StreamWriter swEncrypt = new StreamWriter(csEncrypt))
{
//Write all data to the stream.
swEncrypt.Write(Test1);
}
Encrypted = msEncrypt.ToArray();
}
}
};
return GetString(Encrypted);
}
private static String Decrypt(String Data, Byte[] Key, Byte[] IV)
{
String Decrypted;
using (AesCryptoServiceProvider AesMan = new AesCryptoServiceProvider())
{
AesMan.Mode = CipherMode.CBC;
AesMan.Padding = PaddingMode.ISO10126;
ICryptoTransform EncThis = AesMan.CreateDecryptor(Key, IV);
using (MemoryStream msDecrypt = new MemoryStream(GetBytes(Data)))
{
using (CryptoStream csDecrypt = new CryptoStream(msDecrypt, EncThis, CryptoStreamMode.Read))
{
using (StreamReader srDecrypt = new StreamReader(csDecrypt))
{
// Read the decrypted bytes from the decrypting stream
// and place them in a string.
Decrypted = srDecrypt.ReadToEnd();
}
}
}
}
return Decrypted;
}
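A possible round-trip usage sketch (generating the key and IV here with AesCryptoServiceProvider is an assumption; any valid AES key/IV pair would do):
using (AesCryptoServiceProvider aes = new AesCryptoServiceProvider())
{
    aes.GenerateKey();
    aes.GenerateIV();
    String cipherText = Encrypt("this is a test", aes.Key, aes.IV);
    String plainText = Decrypt(cipherText, aes.Key, aes.IV);
    // plainText == "this is a test"; note that cipherText holds raw UTF-16 code units, not printable text
}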
