
Libraries: Move to Userland/Libraries/

Andreas Kling 2021-01-12 12:17:30 +01:00
parent dc28c07fa5
commit 13d7c09125
1857 changed files with 266 additions and 274 deletions


@@ -0,0 +1,429 @@
/*
* Copyright (c) 2020, Ali Mohammad Pur <ali.mpfard@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <AK/StringBuilder.h>
#include <LibCrypto/Cipher/AES.h>
namespace Crypto {
namespace Cipher {
template<typename T>
constexpr u32 get_key(T pt)
{
return ((u32)(pt)[0] << 24) ^ ((u32)(pt)[1] << 16) ^ ((u32)(pt)[2] << 8) ^ ((u32)(pt)[3]);
}
constexpr void swap_keys(u32* keys, size_t i, size_t j)
{
u32 temp = keys[i];
keys[i] = keys[j];
keys[j] = temp;
}
String AESCipherBlock::to_string() const
{
StringBuilder builder;
for (size_t i = 0; i < BlockSizeInBits / 8; ++i)
builder.appendf("%02x", m_data[i]);
return builder.build();
}
String AESCipherKey::to_string() const
{
StringBuilder builder;
for (size_t i = 0; i < (rounds() + 1) * 4; ++i)
builder.appendf("%02x", m_rd_keys[i]);
return builder.build();
}
void AESCipherKey::expand_encrypt_key(ReadonlyBytes user_key, size_t bits)
{
u32* round_key;
u32 temp;
size_t i { 0 };
ASSERT(!user_key.is_null());
ASSERT(is_valid_key_size(bits));
ASSERT(user_key.size() == bits / 8);
round_key = round_keys();
if (bits == 128) {
m_rounds = 10;
} else if (bits == 192) {
m_rounds = 12;
} else {
m_rounds = 14;
}
round_key[0] = get_key(user_key.data());
round_key[1] = get_key(user_key.data() + 4);
round_key[2] = get_key(user_key.data() + 8);
round_key[3] = get_key(user_key.data() + 12);
if (bits == 128) {
for (;;) {
temp = round_key[3];
// clang-format off
round_key[4] = round_key[0] ^
(AESTables::Encode2[(temp >> 16) & 0xff] & 0xff000000) ^
(AESTables::Encode3[(temp >> 8) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(temp ) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(temp >> 24) ] & 0x000000ff) ^ AESTables::RCON[i];
// clang-format on
round_key[5] = round_key[1] ^ round_key[4];
round_key[6] = round_key[2] ^ round_key[5];
round_key[7] = round_key[3] ^ round_key[6];
++i;
if (i == 10)
break;
round_key += 4;
}
return;
}
round_key[4] = get_key(user_key.data() + 16);
round_key[5] = get_key(user_key.data() + 20);
if (bits == 192) {
for (;;) {
temp = round_key[5];
// clang-format off
round_key[6] = round_key[0] ^
(AESTables::Encode2[(temp >> 16) & 0xff] & 0xff000000) ^
(AESTables::Encode3[(temp >> 8) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(temp ) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(temp >> 24) ] & 0x000000ff) ^ AESTables::RCON[i];
// clang-format on
round_key[7] = round_key[1] ^ round_key[6];
round_key[8] = round_key[2] ^ round_key[7];
round_key[9] = round_key[3] ^ round_key[8];
++i;
if (i == 8)
break;
round_key[10] = round_key[4] ^ round_key[9];
round_key[11] = round_key[5] ^ round_key[10];
round_key += 6;
}
return;
}
round_key[6] = get_key(user_key.data() + 24);
round_key[7] = get_key(user_key.data() + 28);
if (true) { // bits == 256
for (;;) {
temp = round_key[7];
// clang-format off
round_key[8] = round_key[0] ^
(AESTables::Encode2[(temp >> 16) & 0xff] & 0xff000000) ^
(AESTables::Encode3[(temp >> 8) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(temp ) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(temp >> 24) ] & 0x000000ff) ^ AESTables::RCON[i];
// clang-format on
round_key[9] = round_key[1] ^ round_key[8];
round_key[10] = round_key[2] ^ round_key[9];
round_key[11] = round_key[3] ^ round_key[10];
++i;
if (i == 7)
break;
temp = round_key[11];
// clang-format off
round_key[12] = round_key[4] ^
(AESTables::Encode2[(temp >> 24) ] & 0xff000000) ^
(AESTables::Encode3[(temp >> 16) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(temp >> 8) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(temp ) & 0xff] & 0x000000ff) ;
// clang-format on
round_key[13] = round_key[5] ^ round_key[12];
round_key[14] = round_key[6] ^ round_key[13];
round_key[15] = round_key[7] ^ round_key[14];
round_key += 8;
}
return;
}
}
void AESCipherKey::expand_decrypt_key(ReadonlyBytes user_key, size_t bits)
{
u32* round_key;
expand_encrypt_key(user_key, bits);
round_key = round_keys();
// reorder round keys
for (size_t i = 0, j = 4 * rounds(); i < j; i += 4, j -= 4) {
swap_keys(round_key, i, j);
swap_keys(round_key, i + 1, j + 1);
swap_keys(round_key, i + 2, j + 2);
swap_keys(round_key, i + 3, j + 3);
}
// apply inverse mix-column to middle rounds
for (size_t i = 1; i < rounds(); ++i) {
round_key += 4;
// clang-format off
round_key[0] =
AESTables::Decode0[AESTables::Encode1[(round_key[0] >> 24) ] & 0xff] ^
AESTables::Decode1[AESTables::Encode1[(round_key[0] >> 16) & 0xff] & 0xff] ^
AESTables::Decode2[AESTables::Encode1[(round_key[0] >> 8) & 0xff] & 0xff] ^
AESTables::Decode3[AESTables::Encode1[(round_key[0] ) & 0xff] & 0xff] ;
round_key[1] =
AESTables::Decode0[AESTables::Encode1[(round_key[1] >> 24) ] & 0xff] ^
AESTables::Decode1[AESTables::Encode1[(round_key[1] >> 16) & 0xff] & 0xff] ^
AESTables::Decode2[AESTables::Encode1[(round_key[1] >> 8) & 0xff] & 0xff] ^
AESTables::Decode3[AESTables::Encode1[(round_key[1] ) & 0xff] & 0xff] ;
round_key[2] =
AESTables::Decode0[AESTables::Encode1[(round_key[2] >> 24) ] & 0xff] ^
AESTables::Decode1[AESTables::Encode1[(round_key[2] >> 16) & 0xff] & 0xff] ^
AESTables::Decode2[AESTables::Encode1[(round_key[2] >> 8) & 0xff] & 0xff] ^
AESTables::Decode3[AESTables::Encode1[(round_key[2] ) & 0xff] & 0xff] ;
round_key[3] =
AESTables::Decode0[AESTables::Encode1[(round_key[3] >> 24) ] & 0xff] ^
AESTables::Decode1[AESTables::Encode1[(round_key[3] >> 16) & 0xff] & 0xff] ^
AESTables::Decode2[AESTables::Encode1[(round_key[3] >> 8) & 0xff] & 0xff] ^
AESTables::Decode3[AESTables::Encode1[(round_key[3] ) & 0xff] & 0xff] ;
// clang-format on
}
}
void AESCipher::encrypt_block(const AESCipherBlock& in, AESCipherBlock& out)
{
u32 s0, s1, s2, s3, t0, t1, t2, t3;
size_t r { 0 };
const auto& enc_key = key();
const auto* round_keys = enc_key.round_keys();
s0 = get_key(in.bytes().offset_pointer(0)) ^ round_keys[0];
s1 = get_key(in.bytes().offset_pointer(4)) ^ round_keys[1];
s2 = get_key(in.bytes().offset_pointer(8)) ^ round_keys[2];
s3 = get_key(in.bytes().offset_pointer(12)) ^ round_keys[3];
r = enc_key.rounds() >> 1;
// apply the first |rounds() - 1| rounds
for (;;) {
// clang-format off
t0 = AESTables::Encode0[(s0 >> 24) ] ^
AESTables::Encode1[(s1 >> 16) & 0xff] ^
AESTables::Encode2[(s2 >> 8) & 0xff] ^
AESTables::Encode3[(s3 ) & 0xff] ^ round_keys[4];
t1 = AESTables::Encode0[(s1 >> 24) ] ^
AESTables::Encode1[(s2 >> 16) & 0xff] ^
AESTables::Encode2[(s3 >> 8) & 0xff] ^
AESTables::Encode3[(s0 ) & 0xff] ^ round_keys[5];
t2 = AESTables::Encode0[(s2 >> 24) ] ^
AESTables::Encode1[(s3 >> 16) & 0xff] ^
AESTables::Encode2[(s0 >> 8) & 0xff] ^
AESTables::Encode3[(s1 ) & 0xff] ^ round_keys[6];
t3 = AESTables::Encode0[(s3 >> 24) ] ^
AESTables::Encode1[(s0 >> 16) & 0xff] ^
AESTables::Encode2[(s1 >> 8) & 0xff] ^
AESTables::Encode3[(s2 ) & 0xff] ^ round_keys[7];
// clang-format on
round_keys += 8;
--r;
if (r == 0)
break;
// clang-format off
s0 = AESTables::Encode0[(t0 >> 24) ] ^
AESTables::Encode1[(t1 >> 16) & 0xff] ^
AESTables::Encode2[(t2 >> 8) & 0xff] ^
AESTables::Encode3[(t3 ) & 0xff] ^ round_keys[0];
s1 = AESTables::Encode0[(t1 >> 24) ] ^
AESTables::Encode1[(t2 >> 16) & 0xff] ^
AESTables::Encode2[(t3 >> 8) & 0xff] ^
AESTables::Encode3[(t0 ) & 0xff] ^ round_keys[1];
s2 = AESTables::Encode0[(t2 >> 24) ] ^
AESTables::Encode1[(t3 >> 16) & 0xff] ^
AESTables::Encode2[(t0 >> 8) & 0xff] ^
AESTables::Encode3[(t1 ) & 0xff] ^ round_keys[2];
s3 = AESTables::Encode0[(t3 >> 24) ] ^
AESTables::Encode1[(t0 >> 16) & 0xff] ^
AESTables::Encode2[(t1 >> 8) & 0xff] ^
AESTables::Encode3[(t2 ) & 0xff] ^ round_keys[3];
// clang-format on
}
// apply the last round and put the encrypted data into out
// clang-format off
s0 = (AESTables::Encode2[(t0 >> 24) ] & 0xff000000) ^
(AESTables::Encode3[(t1 >> 16) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(t2 >> 8) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(t3 ) & 0xff] & 0x000000ff) ^ round_keys[0];
out.put(0, s0);
s1 = (AESTables::Encode2[(t1 >> 24) ] & 0xff000000) ^
(AESTables::Encode3[(t2 >> 16) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(t3 >> 8) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(t0 ) & 0xff] & 0x000000ff) ^ round_keys[1];
out.put(4, s1);
s2 = (AESTables::Encode2[(t2 >> 24) ] & 0xff000000) ^
(AESTables::Encode3[(t3 >> 16) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(t0 >> 8) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(t1 ) & 0xff] & 0x000000ff) ^ round_keys[2];
out.put(8, s2);
s3 = (AESTables::Encode2[(t3 >> 24) ] & 0xff000000) ^
(AESTables::Encode3[(t0 >> 16) & 0xff] & 0x00ff0000) ^
(AESTables::Encode0[(t1 >> 8) & 0xff] & 0x0000ff00) ^
(AESTables::Encode1[(t2 ) & 0xff] & 0x000000ff) ^ round_keys[3];
out.put(12, s3);
// clang-format on
}
void AESCipher::decrypt_block(const AESCipherBlock& in, AESCipherBlock& out)
{
u32 s0, s1, s2, s3, t0, t1, t2, t3;
size_t r { 0 };
const auto& dec_key = key();
const auto* round_keys = dec_key.round_keys();
s0 = get_key(in.bytes().offset_pointer(0)) ^ round_keys[0];
s1 = get_key(in.bytes().offset_pointer(4)) ^ round_keys[1];
s2 = get_key(in.bytes().offset_pointer(8)) ^ round_keys[2];
s3 = get_key(in.bytes().offset_pointer(12)) ^ round_keys[3];
r = dec_key.rounds() >> 1;
// apply the first |rounds() - 1| rounds
for (;;) {
// clang-format off
t0 = AESTables::Decode0[(s0 >> 24) ] ^
AESTables::Decode1[(s3 >> 16) & 0xff] ^
AESTables::Decode2[(s2 >> 8) & 0xff] ^
AESTables::Decode3[(s1 ) & 0xff] ^ round_keys[4];
t1 = AESTables::Decode0[(s1 >> 24) ] ^
AESTables::Decode1[(s0 >> 16) & 0xff] ^
AESTables::Decode2[(s3 >> 8) & 0xff] ^
AESTables::Decode3[(s2 ) & 0xff] ^ round_keys[5];
t2 = AESTables::Decode0[(s2 >> 24) ] ^
AESTables::Decode1[(s1 >> 16) & 0xff] ^
AESTables::Decode2[(s0 >> 8) & 0xff] ^
AESTables::Decode3[(s3 ) & 0xff] ^ round_keys[6];
t3 = AESTables::Decode0[(s3 >> 24) ] ^
AESTables::Decode1[(s2 >> 16) & 0xff] ^
AESTables::Decode2[(s1 >> 8) & 0xff] ^
AESTables::Decode3[(s0 ) & 0xff] ^ round_keys[7];
// clang-format on
round_keys += 8;
--r;
if (r == 0)
break;
// clang-format off
s0 = AESTables::Decode0[(t0 >> 24) ] ^
AESTables::Decode1[(t3 >> 16) & 0xff] ^
AESTables::Decode2[(t2 >> 8) & 0xff] ^
AESTables::Decode3[(t1 ) & 0xff] ^ round_keys[0];
s1 = AESTables::Decode0[(t1 >> 24) ] ^
AESTables::Decode1[(t0 >> 16) & 0xff] ^
AESTables::Decode2[(t3 >> 8) & 0xff] ^
AESTables::Decode3[(t2 ) & 0xff] ^ round_keys[1];
s2 = AESTables::Decode0[(t2 >> 24) ] ^
AESTables::Decode1[(t1 >> 16) & 0xff] ^
AESTables::Decode2[(t0 >> 8) & 0xff] ^
AESTables::Decode3[(t3 ) & 0xff] ^ round_keys[2];
s3 = AESTables::Decode0[(t3 >> 24) ] ^
AESTables::Decode1[(t2 >> 16) & 0xff] ^
AESTables::Decode2[(t1 >> 8) & 0xff] ^
AESTables::Decode3[(t0 ) & 0xff] ^ round_keys[3];
// clang-format on
}
// apply the last round and put the decrypted data into out
// clang-format off
s0 = ((u32)AESTables::Decode4[(t0 >> 24) ] << 24) ^
((u32)AESTables::Decode4[(t3 >> 16) & 0xff] << 16) ^
((u32)AESTables::Decode4[(t2 >> 8) & 0xff] << 8) ^
((u32)AESTables::Decode4[(t1 ) & 0xff] ) ^ round_keys[0];
out.put(0, s0);
s1 = ((u32)AESTables::Decode4[(t1 >> 24) ] << 24) ^
((u32)AESTables::Decode4[(t0 >> 16) & 0xff] << 16) ^
((u32)AESTables::Decode4[(t3 >> 8) & 0xff] << 8) ^
((u32)AESTables::Decode4[(t2 ) & 0xff] ) ^ round_keys[1];
out.put(4, s1);
s2 = ((u32)AESTables::Decode4[(t2 >> 24) ] << 24) ^
((u32)AESTables::Decode4[(t1 >> 16) & 0xff] << 16) ^
((u32)AESTables::Decode4[(t0 >> 8) & 0xff] << 8) ^
((u32)AESTables::Decode4[(t3 ) & 0xff] ) ^ round_keys[2];
out.put(8, s2);
s3 = ((u32)AESTables::Decode4[(t3 >> 24) ] << 24) ^
((u32)AESTables::Decode4[(t2 >> 16) & 0xff] << 16) ^
((u32)AESTables::Decode4[(t1 >> 8) & 0xff] << 8) ^
((u32)AESTables::Decode4[(t0 ) & 0xff] ) ^ round_keys[3];
out.put(12, s3);
// clang-format on
}
void AESCipherBlock::overwrite(ReadonlyBytes bytes)
{
auto data = bytes.data();
auto length = bytes.size();
ASSERT(length <= this->data_size());
this->bytes().overwrite(0, data, length);
if (length < this->data_size()) {
switch (padding_mode()) {
case PaddingMode::Null:
// fill with zeros
__builtin_memset(m_data + length, 0, this->data_size() - length);
break;
case PaddingMode::CMS:
// fill with the length of the padding bytes
__builtin_memset(m_data + length, this->data_size() - length, this->data_size() - length);
break;
case PaddingMode::RFC5246:
// fill with the length of the padding bytes minus one
__builtin_memset(m_data + length, this->data_size() - length - 1, this->data_size() - length);
break;
default:
// FIXME: We should handle the rest of the common padding modes
ASSERT_NOT_REACHED();
break;
}
}
}
}
}
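
For reference, the get_key() helper above packs four bytes into a u32 most-significant-byte first, and CipherBlock::put() (in the header below) writes a u32 back out in the same order. A minimal standalone sketch of that round trip, using hypothetical local stand-ins for the two helpers:

#include <cstdint>
#include <cstdio>

// Stand-in for get_key(): pack four bytes, most significant byte first.
static uint32_t pack_be(const uint8_t* p)
{
    return ((uint32_t)p[0] << 24) ^ ((uint32_t)p[1] << 16) ^ ((uint32_t)p[2] << 8) ^ (uint32_t)p[3];
}

// Stand-in for CipherBlock::put() with a 4-byte value: unpack most significant byte first.
static void unpack_be(uint32_t value, uint8_t* out)
{
    out[0] = (uint8_t)(value >> 24);
    out[1] = (uint8_t)(value >> 16);
    out[2] = (uint8_t)(value >> 8);
    out[3] = (uint8_t)value;
}

int main()
{
    const uint8_t bytes[4] = { 0x01, 0x23, 0x45, 0x67 };
    uint32_t word = pack_be(bytes); // 0x01234567
    uint8_t round_trip[4];
    unpack_be(word, round_trip); // { 0x01, 0x23, 0x45, 0x67 } again
    printf("%08x %02x%02x%02x%02x\n", (unsigned)word, round_trip[0], round_trip[1], round_trip[2], round_trip[3]);
    return 0;
}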

File diff suppressed because it is too large.


@@ -0,0 +1,139 @@
/*
* Copyright (c) 2020, Ali Mohammad Pur <ali.mpfard@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <AK/Optional.h>
#include <AK/Span.h>
#include <AK/Types.h>
namespace Crypto {
namespace Cipher {
enum class Intent {
Encryption,
Decryption,
};
enum class PaddingMode {
CMS, // RFC 1423
RFC5246, // very similar to CMS, but filled with |length - 1| instead of |length|
Null,
// FIXME: We do not implement these yet
Bit,
Random,
Space,
ZeroLength,
};
template<typename B, typename T>
class Cipher;
struct CipherBlock {
public:
explicit CipherBlock(PaddingMode mode)
: m_padding_mode(mode)
{
}
static size_t block_size() { ASSERT_NOT_REACHED(); }
virtual ReadonlyBytes bytes() const = 0;
virtual void overwrite(ReadonlyBytes) = 0;
virtual void overwrite(const u8* data, size_t size) { overwrite({ data, size }); }
virtual void apply_initialization_vector(const u8* ivec) = 0;
PaddingMode padding_mode() const { return m_padding_mode; }
void set_padding_mode(PaddingMode mode) { m_padding_mode = mode; }
template<typename T>
void put(size_t offset, T value)
{
ASSERT(offset + sizeof(T) <= bytes().size());
auto* ptr = bytes().offset_pointer(offset);
auto index { 0 };
ASSERT(sizeof(T) <= 4);
if constexpr (sizeof(T) > 3)
ptr[index++] = (u8)(value >> 24);
if constexpr (sizeof(T) > 2)
ptr[index++] = (u8)(value >> 16);
if constexpr (sizeof(T) > 1)
ptr[index++] = (u8)(value >> 8);
ptr[index] = (u8)value;
}
private:
virtual Bytes bytes() = 0;
PaddingMode m_padding_mode;
};
struct CipherKey {
virtual ReadonlyBytes bytes() const = 0;
static bool is_valid_key_size(size_t) { return false; }
virtual ~CipherKey() { }
protected:
virtual void expand_encrypt_key(ReadonlyBytes user_key, size_t bits) = 0;
virtual void expand_decrypt_key(ReadonlyBytes user_key, size_t bits) = 0;
size_t bits { 0 };
};
template<typename KeyT = CipherKey, typename BlockT = CipherBlock>
class Cipher {
public:
using KeyType = KeyT;
using BlockType = BlockT;
explicit Cipher<KeyT, BlockT>(PaddingMode mode)
: m_padding_mode(mode)
{
}
virtual const KeyType& key() const = 0;
virtual KeyType& key() = 0;
static size_t block_size() { return BlockType::block_size(); }
PaddingMode padding_mode() const { return m_padding_mode; }
virtual void encrypt_block(const BlockType& in, BlockType& out) = 0;
virtual void decrypt_block(const BlockType& in, BlockType& out) = 0;
virtual String class_name() const = 0;
private:
PaddingMode m_padding_mode;
};
}
}
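
The padding comments above (CMS vs. RFC5246) and the corresponding fills in AESCipherBlock::overwrite() become clearer with concrete bytes. A standalone sketch, using a hypothetical pad_block() helper that mirrors those memsets for a 16-byte block holding 11 bytes of data:

#include <cstdint>
#include <cstdio>
#include <cstring>

// Hypothetical helper mirroring the padding memsets in AESCipherBlock::overwrite().
static void pad_block(uint8_t* block, size_t block_size, size_t data_length, bool rfc5246)
{
    size_t padding = block_size - data_length;
    // CMS (RFC 1423) fills with the padding length; RFC5246 fills with (padding length - 1).
    uint8_t fill = rfc5246 ? (uint8_t)(padding - 1) : (uint8_t)padding;
    memset(block + data_length, fill, padding);
}

static void dump(const uint8_t* block, size_t size)
{
    for (size_t i = 0; i < size; ++i)
        printf("%02x ", block[i]);
    printf("\n");
}

int main()
{
    uint8_t block[16];
    memset(block, 0xaa, 11); // 11 bytes of "data"

    pad_block(block, sizeof(block), 11, false);
    dump(block, sizeof(block)); // ends in 05 05 05 05 05 (CMS)

    pad_block(block, sizeof(block), 11, true);
    dump(block, sizeof(block)); // ends in 04 04 04 04 04 (RFC5246)
    return 0;
}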


@@ -0,0 +1,142 @@
/*
* Copyright (c) 2020, Ali Mohammad Pur <ali.mpfard@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <AK/String.h>
#include <AK/StringBuilder.h>
#include <AK/StringView.h>
#include <LibCrypto/Cipher/Mode/Mode.h>
namespace Crypto {
namespace Cipher {
template<typename T>
class CBC : public Mode<T> {
public:
constexpr static size_t IVSizeInBits = 128;
virtual ~CBC() { }
template<typename... Args>
explicit constexpr CBC<T>(Args... args)
: Mode<T>(args...)
{
}
virtual String class_name() const override
{
StringBuilder builder;
builder.append(this->cipher().class_name());
builder.append("_CBC");
return builder.build();
}
virtual size_t IV_length() const override { return IVSizeInBits / 8; }
virtual void encrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}, Bytes* ivec_out = nullptr) override
{
auto length = in.size();
if (length == 0)
return;
auto& cipher = this->cipher();
// FIXME: We should have two of these encrypt/decrypt functions that
// we SFINAE out based on whether the Cipher mode needs an ivec
ASSERT(!ivec.is_empty());
const auto* iv = ivec.data();
m_cipher_block.set_padding_mode(cipher.padding_mode());
size_t offset { 0 };
auto block_size = cipher.block_size();
while (length >= block_size) {
m_cipher_block.overwrite(in.slice(offset, block_size));
m_cipher_block.apply_initialization_vector(iv);
cipher.encrypt_block(m_cipher_block, m_cipher_block);
ASSERT(offset + block_size <= out.size());
__builtin_memcpy(out.offset(offset), m_cipher_block.bytes().data(), block_size);
iv = out.offset(offset);
length -= block_size;
offset += block_size;
}
if (length > 0) {
m_cipher_block.overwrite(in.slice(offset, length));
m_cipher_block.apply_initialization_vector(iv);
cipher.encrypt_block(m_cipher_block, m_cipher_block);
ASSERT(offset + block_size <= out.size());
__builtin_memcpy(out.offset(offset), m_cipher_block.bytes().data(), block_size);
iv = out.offset(offset);
}
if (ivec_out)
__builtin_memcpy(ivec_out->data(), iv, min(IV_length(), ivec_out->size()));
}
virtual void decrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}) override
{
auto length = in.size();
if (length == 0)
return;
auto& cipher = this->cipher();
ASSERT(!ivec.is_empty());
const auto* iv = ivec.data();
auto block_size = cipher.block_size();
// if the data is not block-aligned, it is not correctly encrypted data
// FIXME (ponder): Should we simply decrypt as much as we can?
ASSERT(length % block_size == 0);
m_cipher_block.set_padding_mode(cipher.padding_mode());
size_t offset { 0 };
while (length > 0) {
auto* slice = in.offset(offset);
m_cipher_block.overwrite(slice, block_size);
cipher.decrypt_block(m_cipher_block, m_cipher_block);
m_cipher_block.apply_initialization_vector(iv);
auto decrypted = m_cipher_block.bytes();
ASSERT(offset + decrypted.size() <= out.size());
__builtin_memcpy(out.offset(offset), decrypted.data(), decrypted.size());
iv = slice;
length -= block_size;
offset += block_size;
}
out = out.slice(0, offset);
this->prune_padding(out);
}
private:
typename T::BlockType m_cipher_block {};
};
}
}
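
The encrypt() loop above is standard CBC chaining: each plaintext block is XORed with the previous ciphertext block (the IV for the first block) before being encrypted. A minimal standalone sketch of that dataflow, with a toy "block cipher" (a fixed XOR, purely illustrative and not AES) standing in for cipher.encrypt_block():

#include <cstdint>
#include <cstdio>
#include <cstring>

constexpr size_t block_size = 16;

// Toy stand-in for a real block cipher's encrypt_block().
static void toy_encrypt_block(uint8_t* block)
{
    for (size_t i = 0; i < block_size; ++i)
        block[i] ^= 0x5a;
}

// CBC chaining as in CBC<T>::encrypt(): XOR with the previous ciphertext block, then encrypt.
static void cbc_encrypt(const uint8_t* in, uint8_t* out, size_t length, const uint8_t* iv)
{
    uint8_t chain[block_size];
    memcpy(chain, iv, block_size);
    for (size_t offset = 0; offset + block_size <= length; offset += block_size) {
        uint8_t block[block_size];
        for (size_t i = 0; i < block_size; ++i)
            block[i] = in[offset + i] ^ chain[i]; // apply_initialization_vector()
        toy_encrypt_block(block);                 // cipher.encrypt_block()
        memcpy(out + offset, block, block_size);
        memcpy(chain, block, block_size);         // iv = out.offset(offset)
    }
}

int main()
{
    uint8_t iv[block_size] = {};
    uint8_t plaintext[32];
    memset(plaintext, 'A', sizeof(plaintext)); // two identical plaintext blocks
    uint8_t ciphertext[32];
    cbc_encrypt(plaintext, ciphertext, sizeof(plaintext), iv);
    // The two ciphertext blocks differ even though the plaintext blocks are identical,
    // which is exactly what the chaining is for.
    for (auto byte : ciphertext)
        printf("%02x", byte);
    printf("\n");
    return 0;
}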


@@ -0,0 +1,209 @@
/*
* Copyright (c) 2020, Peter Elliott <pelliott@ualberta.ca>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <AK/String.h>
#include <AK/StringBuilder.h>
#include <AK/StringView.h>
#include <LibCrypto/Cipher/Mode/Mode.h>
namespace Crypto {
namespace Cipher {
/*
* Heads up: CTR is a *family* of modes, because the "counter" function is
* implementation-defined. This makes interoperability a pain in the neurons.
* Here are several contradicting(!) interpretations:
*
* "The counter can be *any function* which produces a sequence which is
* guaranteed not to repeat for a long time, although an actual increment-by-one
* counter is the simplest and most popular."
* The illustrations show that the first increment should happen *after* the first
* round. I call this variant BIGINT_INCR_0.
* The AESAVS goes a step further and requires only that "counters" do not
* repeat, leaving the method of counting completely open.
* See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_(CTR)
* See: https://csrc.nist.gov/csrc/media/projects/cryptographic-algorithm-validation-program/documents/aes/aesavs.pdf
*
* BIGINT_INCR_0 is the behavior of the OpenSSL command "openssl enc -aes-128-ctr",
* and the behavior of CRYPTO_ctr128_encrypt(). OpenSSL is not alone in the
* assumption that BIGINT_INCR_0 is all there is; even some NIST
* specification/survey(?) doesn't consider counting any other way.
* See: https://github.com/openssl/openssl/blob/33388b44b67145af2181b1e9528c381c8ea0d1b6/crypto/modes/ctr128.c#L71
* See: http://www.cryptogrium.com/aes-ctr.html
* See: https://web.archive.org/web/20150226072817/http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/ctr/ctr-spec.pdf
*
* "[T]he successive counter blocks are derived by applying an incrementing
* function."
* It defines a *family* of functions called "Standard Incrementing Function"
* which only increment the lower-m bits, for some number 0<m<=blocksize.
* The included test vectors suggest that the first increment should happen
* *after* the first round. I call this INT32_INCR_0, or in general INTm_INCR_0.
* This in particular is the behavior of CRYPTO_ctr128_encrypt_ctr32() in OpenSSL.
* See: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38a.pdf
* See: https://github.com/openssl/openssl/blob/33388b44b67145af2181b1e9528c381c8ea0d1b6/crypto/modes/ctr128.c#L147
*
* The python package "cryptography" and RFC 3686 (which appears among the
* first online search results when searching for "AES CTR 128 test vector")
* share a peculiar interpretation of CTR mode: the counter is incremented *before*
* the first round. RFC 3686 does not consider any other interpretation. I call
* this variant BIGINT_INCR_1.
* See: https://tools.ietf.org/html/rfc3686.html#section-6
* See: https://cryptography.io/en/latest/development/test-vectors/#symmetric-ciphers
*
* And finally, because the method is left open, a different increment could be
* used, for example little endian, or host endian, or mixed endian. Or any crazy
* LFSR with a sufficiently large period. That is the reason for the constant part
* "INCR" in the previous counters.
*
* Due to this plethora of mutually-incompatible counters,
* the method of counting should be a template parameter.
* This currently implements BIGINT_INCR_0, which means perfect
* interoperability with openssl. The test vectors from RFC 3686 just need to be
* incremented by 1.
* TODO: Implement other counters?
*/
struct IncrementInplace {
void operator()(Bytes& in) const
{
for (size_t i = in.size(); i > 0;) {
--i;
if (in[i] == (u8)-1) {
in[i] = 0;
} else {
in[i]++;
break;
}
}
}
};
template<typename T, typename IncrementFunctionType = IncrementInplace>
class CTR : public Mode<T> {
public:
constexpr static size_t IVSizeInBits = 128;
virtual ~CTR() { }
// Must intercept `Intent`, because AES must always be set to
// Encryption, even when decrypting AES-CTR.
// TODO: How to deal with ciphers that take different arguments?
// FIXME: Add back the default intent parameter once clang-11 is the default in GitHub Actions.
// Once added back, remove the parameter where it's constructed in get_random_bytes in Kernel/Random.h.
template<typename KeyType, typename... Args>
explicit constexpr CTR(const KeyType& user_key, size_t key_bits, Intent, Args... args)
: Mode<T>(user_key, key_bits, Intent::Encryption, args...)
{
}
virtual String class_name() const override
{
StringBuilder builder;
builder.append(this->cipher().class_name());
builder.append("_CTR");
return builder.build();
}
virtual size_t IV_length() const override { return IVSizeInBits / 8; }
virtual void encrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}, Bytes* ivec_out = nullptr) override
{
// Our interpretation of "ivec" is what AES-CTR
// would define as nonce + IV + 4 zero bytes.
this->encrypt_or_stream(&in, out, ivec, ivec_out);
}
void key_stream(Bytes& out, const Bytes& ivec = {}, Bytes* ivec_out = nullptr)
{
this->encrypt_or_stream(nullptr, out, ivec, ivec_out);
}
virtual void decrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}) override
{
// XOR (and thus CTR) is the most symmetric mode.
this->encrypt(in, out, ivec);
}
private:
u8 m_ivec_storage[IVSizeInBits / 8];
typename T::BlockType m_cipher_block {};
protected:
constexpr static IncrementFunctionType increment {};
void encrypt_or_stream(const ReadonlyBytes* in, Bytes& out, ReadonlyBytes ivec, Bytes* ivec_out = nullptr)
{
size_t length;
if (in) {
ASSERT(in->size() <= out.size());
length = in->size();
if (length == 0)
return;
} else {
length = out.size();
}
auto& cipher = this->cipher();
// FIXME: We should have two of these encrypt/decrypt functions that
// we SFINAE out based on whether the Cipher mode needs an ivec
ASSERT(!ivec.is_empty());
ASSERT(ivec.size() >= IV_length());
m_cipher_block.set_padding_mode(cipher.padding_mode());
__builtin_memcpy(m_ivec_storage, ivec.data(), IV_length());
Bytes iv { m_ivec_storage, IV_length() };
size_t offset { 0 };
auto block_size = cipher.block_size();
while (length > 0) {
m_cipher_block.overwrite(iv.slice(0, block_size));
cipher.encrypt_block(m_cipher_block, m_cipher_block);
if (in) {
m_cipher_block.apply_initialization_vector(in->data() + offset);
}
auto write_size = min(block_size, length);
ASSERT(offset + write_size <= out.size());
__builtin_memcpy(out.offset(offset), m_cipher_block.bytes().data(), write_size);
increment(iv);
length -= write_size;
offset += write_size;
}
if (ivec_out)
__builtin_memcpy(ivec_out->data(), iv.data(), min(ivec_out->size(), IV_length()));
}
};
}
}
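
The counter discussion above settles on BIGINT_INCR_0: the whole 16-byte block is treated as one big-endian integer and incremented after each keystream block. A standalone sketch with the same carry behaviour as IncrementInplace, so the wrap-around is easy to see:

#include <cstdint>
#include <cstdio>

// Same carry logic as IncrementInplace: increment the block as one big-endian integer.
static void increment_be(uint8_t* block, size_t size)
{
    for (size_t i = size; i > 0;) {
        --i;
        if (block[i] == 0xff) {
            block[i] = 0; // carry into the next more significant byte
        } else {
            block[i]++;
            break;
        }
    }
}

int main()
{
    // A 16-byte counter block ending in ff ff: two increments show the carry.
    uint8_t counter[16] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff };
    increment_be(counter, sizeof(counter)); // ... 00 01 00 00
    increment_be(counter, sizeof(counter)); // ... 00 01 00 01
    for (auto byte : counter)
        printf("%02x ", byte);
    printf("\n");
    return 0;
}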


@@ -0,0 +1,154 @@
/*
* Copyright (c) 2020, Ali Mohammad Pur <ali.mpfard@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <AK/OwnPtr.h>
#include <AK/String.h>
#include <AK/StringBuilder.h>
#include <AK/StringView.h>
#include <LibCrypto/Authentication/GHash.h>
#include <LibCrypto/Cipher/Mode/CTR.h>
#include <LibCrypto/Verification.h>
namespace Crypto {
namespace Cipher {
using IncrementFunction = IncrementInplace;
template<typename T>
class GCM : public CTR<T, IncrementFunction> {
public:
constexpr static size_t IVSizeInBits = 128;
virtual ~GCM() { }
template<typename... Args>
explicit constexpr GCM<T>(Args... args)
: CTR<T>(args...)
{
static_assert(T::BlockSizeInBits == 128u, "GCM Mode is only available for 128-bit Ciphers");
__builtin_memset(m_auth_key_storage, 0, block_size);
typename T::BlockType key_block(m_auth_key_storage, block_size);
this->cipher().encrypt_block(key_block, key_block);
key_block.bytes().copy_to(m_auth_key);
m_ghash = make<Authentication::GHash>(m_auth_key);
}
virtual String class_name() const override
{
StringBuilder builder;
builder.append(this->cipher().class_name());
builder.append("_GCM");
return builder.build();
}
virtual size_t IV_length() const override { return IVSizeInBits / 8; }
// FIXME: This overload throws away the auth stuff, think up a better way to return more than a single bytebuffer.
virtual void encrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}, Bytes* = nullptr) override
{
ASSERT(!ivec.is_empty());
static ByteBuffer dummy;
encrypt(in, out, ivec, dummy, dummy);
}
virtual void decrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}) override
{
encrypt(in, out, ivec);
}
void encrypt(const ReadonlyBytes& in, Bytes out, const ReadonlyBytes& iv_in, const ReadonlyBytes& aad, Bytes tag)
{
auto iv_buf = ByteBuffer::copy(iv_in.data(), iv_in.size());
auto iv = iv_buf.bytes();
// Increment the IV for block 0
CTR<T>::increment(iv);
typename T::BlockType block0;
block0.overwrite(iv);
this->cipher().encrypt_block(block0, block0);
// Skip past block 0
CTR<T>::increment(iv);
if (in.is_empty())
CTR<T>::key_stream(out, iv);
else
CTR<T>::encrypt(in, out, iv);
auto auth_tag = m_ghash->process(aad, out);
block0.apply_initialization_vector(auth_tag.data);
block0.bytes().copy_to(tag);
}
VerificationConsistency decrypt(ReadonlyBytes in, Bytes out, ReadonlyBytes iv_in, ReadonlyBytes aad, ReadonlyBytes tag)
{
auto iv_buf = ByteBuffer::copy(iv_in.data(), iv_in.size());
auto iv = iv_buf.bytes();
// Increment the IV for block 0
CTR<T>::increment(iv);
typename T::BlockType block0;
block0.overwrite(iv);
this->cipher().encrypt_block(block0, block0);
// Skip past block 0
CTR<T>::increment(iv);
auto auth_tag = m_ghash->process(aad, in);
block0.apply_initialization_vector(auth_tag.data);
auto test_consistency = [&] {
if (block0.block_size() != tag.size() || __builtin_memcmp(block0.bytes().data(), tag.data(), tag.size()) != 0)
return VerificationConsistency::Inconsistent;
return VerificationConsistency::Consistent;
};
// FIXME: This block needs constant-time comparisons.
if (in.is_empty()) {
out = {};
return test_consistency();
}
CTR<T>::encrypt(in, out, iv);
return test_consistency();
}
private:
static constexpr auto block_size = T::BlockType::BlockSizeInBits / 8;
u8 m_auth_key_storage[block_size];
Bytes m_auth_key { m_auth_key_storage, block_size };
OwnPtr<Authentication::GHash> m_ghash;
};
}
}
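
In the GCM code above, the incoming ivec is incremented once to form the counter block for "block 0" (whose encryption masks the GHASH output to produce the tag), then incremented again before the CTR keystream for the data starts. Assuming the layout described in the CTR comment (nonce + IV + 4 zero bytes, 16 bytes total), a small standalone sketch of what those two increments produce:

#include <cstdint>
#include <cstdio>
#include <cstring>

// Big-endian increment, as in IncrementInplace (see CTR above).
static void increment_be(uint8_t* block, size_t size)
{
    for (size_t i = size; i > 0;) {
        --i;
        if (block[i] == 0xff) {
            block[i] = 0;
        } else {
            block[i]++;
            break;
        }
    }
}

int main()
{
    // Assumed layout: a 12-byte nonce followed by a 32-bit counter initialized to zero.
    uint8_t counter_block[16] = {};
    const uint8_t nonce[12] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 };
    memcpy(counter_block, nonce, sizeof(nonce));

    increment_be(counter_block, sizeof(counter_block)); // counter = 1: encrypted to mask the GHASH tag
    increment_be(counter_block, sizeof(counter_block)); // counter = 2: first keystream block for the data

    for (auto byte : counter_block)
        printf("%02x ", byte);
    printf("\n");
    return 0;
}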


@@ -0,0 +1,118 @@
/*
* Copyright (c) 2020, Ali Mohammad Pur <ali.mpfard@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <AK/ByteBuffer.h>
#include <AK/Span.h>
#include <AK/StdLibExtras.h>
#include <LibCrypto/Cipher/Cipher.h>
namespace Crypto {
namespace Cipher {
template<typename T>
class Mode {
public:
virtual ~Mode() { }
virtual void encrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}, Bytes* ivec_out = nullptr) = 0;
virtual void decrypt(ReadonlyBytes in, Bytes& out, ReadonlyBytes ivec = {}) = 0;
virtual size_t IV_length() const = 0;
const T& cipher() const { return m_cipher; }
ByteBuffer create_aligned_buffer(size_t input_size) const
{
size_t remainder = (input_size + T::block_size()) % T::block_size();
if (remainder == 0)
return ByteBuffer::create_uninitialized(input_size);
else
return ByteBuffer::create_uninitialized(input_size + T::block_size() - remainder);
}
virtual String class_name() const = 0;
T& cipher() { return m_cipher; }
protected:
virtual void prune_padding(Bytes& data)
{
auto size = data.size();
switch (m_cipher.padding_mode()) {
case PaddingMode::CMS: {
auto maybe_padding_length = data[size - 1];
if (maybe_padding_length >= T::block_size()) {
// cannot be padding (the entire block cannot be padding)
return;
}
for (auto i = size - maybe_padding_length; i < size; ++i) {
if (data[i] != maybe_padding_length) {
// not padding, part of data
return;
}
}
data = data.slice(0, size - maybe_padding_length);
break;
}
case PaddingMode::RFC5246: {
auto maybe_padding_length = data[size - 1];
// FIXME: If we want constant-time operations, this loop should not stop
for (auto i = size - maybe_padding_length - 1; i < size; ++i) {
if (data[i] != maybe_padding_length) {
// note that this is likely invalid padding
return;
}
}
data = data.slice(0, size - maybe_padding_length - 1);
break;
}
case PaddingMode::Null: {
while (data[size - 1] == 0)
--size;
data = data.slice(0, size);
break;
}
default:
// FIXME: support other padding modes
ASSERT_NOT_REACHED();
break;
}
}
// FIXME: Somehow add a reference version of this
template<typename... Args>
Mode(Args... args)
: m_cipher(args...)
{
}
private:
T m_cipher;
};
}
}
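
create_aligned_buffer() above rounds the requested size up to the next multiple of the cipher's block size (the extra "+ T::block_size()" inside the modulo cancels out, so the remainder is just input_size % block_size). A quick standalone check of that arithmetic for a 16-byte block size, with a hypothetical aligned_size() helper mirroring the computation:

#include <cstddef>
#include <cstdio>
#include <initializer_list>

// Mirrors Mode::create_aligned_buffer()'s size computation for a given block size.
static size_t aligned_size(size_t input_size, size_t block_size)
{
    size_t remainder = (input_size + block_size) % block_size; // same as input_size % block_size
    if (remainder == 0)
        return input_size;
    return input_size + block_size - remainder;
}

int main()
{
    // With a 16-byte block cipher: 0 -> 0, 1 -> 16, 16 -> 16, 17 -> 32, 30 -> 32.
    for (size_t input : { 0, 1, 16, 17, 30 })
        printf("%zu -> %zu\n", input, aligned_size(input, 16));
    return 0;
}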