
LibGfx: Move all image loaders and writers to a subdirectory

Lucas CHOLLET, 2023-03-21 14:58:06 -04:00 (committed by Andreas Kling)
parent 752f06f228
commit 496b7ffb2b
73 changed files with 129 additions and 129 deletions

File diff suppressed because it is too large.


@@ -0,0 +1,47 @@
/*
* Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ICOLoader.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct BMPLoadingContext;
class ICOImageDecoderPlugin;
class BMPImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<BMPImageDecoderPlugin>> create_as_included_in_ico(Badge<ICOImageDecoderPlugin>, ReadonlyBytes);
enum class IncludedInICO {
Yes,
No,
};
virtual ~BMPImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
bool sniff_dib();
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
BMPImageDecoderPlugin(u8 const*, size_t, IncludedInICO included_in_ico = IncludedInICO::No);
OwnPtr<BMPLoadingContext> m_context;
};
}


@@ -0,0 +1,177 @@
/*
* Copyright (c) 2020, Ben Jilks <benjyjilks@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibGfx/Bitmap.h>
#include <LibGfx/ImageFormats/BMPWriter.h>
namespace Gfx {
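// Emits integers into a raw byte buffer in little-endian order, which is how the BMP format stores them.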
class OutputStreamer {
public:
OutputStreamer(u8* data)
: m_data(data)
{
}
void write_u8(u8 i)
{
*(m_data++) = i;
}
void write_u16(u16 i)
{
*(m_data++) = i & 0xFF;
*(m_data++) = (i >> 8) & 0xFF;
}
void write_u32(u32 i)
{
write_u16(i & 0xFFFF);
write_u16((i >> 16) & 0xFFFF);
}
void write_i32(i32 i)
{
write_u32(static_cast<u32>(i));
}
private:
u8* m_data;
};
static ErrorOr<ByteBuffer> write_pixel_data(Bitmap const& bitmap, int pixel_row_data_size, int bytes_per_pixel, bool include_alpha_channel)
{
int image_size = pixel_row_data_size * bitmap.height();
auto buffer = TRY(ByteBuffer::create_uninitialized(image_size));
int current_row = 0;
for (int y = bitmap.physical_height() - 1; y >= 0; --y) {
auto* row = buffer.data() + (pixel_row_data_size * current_row++);
for (int x = 0; x < bitmap.physical_width(); x++) {
auto pixel = bitmap.get_pixel(x, y);
row[x * bytes_per_pixel + 0] = pixel.blue();
row[x * bytes_per_pixel + 1] = pixel.green();
row[x * bytes_per_pixel + 2] = pixel.red();
if (include_alpha_channel)
row[x * bytes_per_pixel + 3] = pixel.alpha();
}
}
return buffer;
}
ErrorOr<ByteBuffer> BMPWriter::encode(Bitmap const& bitmap, Options options)
{
return BMPWriter().dump(bitmap, options);
}
ByteBuffer BMPWriter::compress_pixel_data(ByteBuffer pixel_data, BMPWriter::Compression compression)
{
switch (compression) {
case BMPWriter::Compression::BI_BITFIELDS:
case BMPWriter::Compression::BI_RGB:
return pixel_data;
}
VERIFY_NOT_REACHED();
}
ErrorOr<ByteBuffer> BMPWriter::dump(Bitmap const& bitmap, Options options)
{
Options::DibHeader dib_header = options.dib_header;
auto icc_data = options.icc_data;
if (icc_data.has_value() && dib_header < Options::DibHeader::V5)
return Error::from_string_literal("can only embed ICC profiles in v5+ bmps");
switch (dib_header) {
case Options::DibHeader::Info:
m_compression = Compression::BI_RGB;
m_bytes_per_pixel = 3;
m_include_alpha_channel = false;
break;
case Options::DibHeader::V3:
case Options::DibHeader::V4:
case Options::DibHeader::V5:
m_compression = Compression::BI_BITFIELDS;
m_bytes_per_pixel = 4;
m_include_alpha_channel = true;
}
const size_t file_header_size = 14;
size_t header_size = file_header_size + (u32)dib_header;
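// BMP rows are padded to a multiple of 4 bytes: round the row size in bits up to whole 32-bit words, then convert to bytes.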
int pixel_row_data_size = (m_bytes_per_pixel * 8 * bitmap.width() + 31) / 32 * 4;
int image_size = pixel_row_data_size * bitmap.height();
auto buffer = TRY(ByteBuffer::create_uninitialized(header_size));
auto pixel_data = TRY(write_pixel_data(bitmap, pixel_row_data_size, m_bytes_per_pixel, m_include_alpha_channel));
pixel_data = compress_pixel_data(move(pixel_data), m_compression);
size_t icc_profile_size = 0;
if (icc_data.has_value())
icc_profile_size = icc_data->size();
size_t pixel_data_offset = header_size + icc_profile_size;
size_t file_size = pixel_data_offset + pixel_data.size();
OutputStreamer streamer(buffer.data());
streamer.write_u8('B');
streamer.write_u8('M');
streamer.write_u32(file_size);
streamer.write_u32(0);
streamer.write_u32(pixel_data_offset);
streamer.write_u32((u32)dib_header); // Header size
streamer.write_i32(bitmap.width()); // ImageWidth
streamer.write_i32(bitmap.height()); // ImageHeight
streamer.write_u16(1); // Planes
streamer.write_u16(m_bytes_per_pixel * 8); // BitsPerPixel
streamer.write_u32((u32)m_compression); // Compression
streamer.write_u32(image_size); // ImageSize
streamer.write_i32(0); // XpixelsPerMeter
streamer.write_i32(0); // YpixelsPerMeter
streamer.write_u32(0); // TotalColors
streamer.write_u32(0); // ImportantColors
if (dib_header >= Options::DibHeader::V3) {
streamer.write_u32(0x00ff0000); // Red bitmask
streamer.write_u32(0x0000ff00); // Green bitmask
streamer.write_u32(0x000000ff); // Blue bitmask
streamer.write_u32(0xff000000); // Alpha bitmask
}
if (dib_header >= Options::DibHeader::V4) {
if (icc_data.has_value())
streamer.write_u32(0x4D424544); // Colorspace EMBEDDED
else
streamer.write_u32(0); // Colorspace CALIBRATED_RGB
for (int i = 0; i < 12; i++) {
streamer.write_u32(0); // Endpoints and gamma
}
}
if (dib_header >= Options::DibHeader::V5) {
streamer.write_u32(4); // Rendering intent IMAGES / Perceptual.
if (icc_data.has_value()) {
streamer.write_u32((u32)dib_header); // Profile data (relative to file_header_size)
streamer.write_u32(icc_data->size()); // Profile size
} else {
streamer.write_u32(0); // Profile data
streamer.write_u32(0); // Profile size
}
streamer.write_u32(0); // Reserved
}
if (icc_data.has_value())
TRY(buffer.try_append(icc_data.value()));
TRY(buffer.try_append(pixel_data));
return buffer;
}
}


@@ -0,0 +1,51 @@
/*
* Copyright (c) 2020, Ben Jilks <benjyjilks@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/ByteBuffer.h>
namespace Gfx {
class Bitmap;
// This is not a nested struct to work around https://llvm.org/PR36684
struct BMPWriterOptions {
enum class DibHeader : u32 {
Info = 40,
V3 = 56,
V4 = 108,
V5 = 124,
};
DibHeader dib_header = DibHeader::V5;
Optional<ReadonlyBytes> icc_data;
};
class BMPWriter {
public:
using Options = BMPWriterOptions;
static ErrorOr<ByteBuffer> encode(Bitmap const&, Options options = Options {});
private:
BMPWriter() = default;
ErrorOr<ByteBuffer> dump(Bitmap const&, Options options);
enum class Compression : u32 {
BI_RGB = 0,
BI_BITFIELDS = 3,
};
static ByteBuffer compress_pixel_data(ByteBuffer, Compression);
Compression m_compression { Compression::BI_BITFIELDS };
int m_bytes_per_pixel { 4 };
bool m_include_alpha_channel { true };
};
}


@@ -0,0 +1,710 @@
/*
* Copyright (c) 2021, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Debug.h>
#include <AK/DeprecatedString.h>
#include <AK/Endian.h>
#include <AK/Error.h>
#include <AK/MemoryStream.h>
#include <AK/StringBuilder.h>
#include <AK/Try.h>
#include <AK/Vector.h>
#include <LibGfx/ImageFormats/DDSLoader.h>
#include <fcntl.h>
#include <math.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>
namespace Gfx {
struct DDSLoadingContext {
enum State {
NotDecoded = 0,
Error,
BitmapDecoded,
};
State state { State::NotDecoded };
u8 const* data { nullptr };
size_t data_size { 0 };
DDSHeader header;
DDSHeaderDXT10 header10;
RefPtr<Gfx::Bitmap> bitmap;
void dump_debug();
};
static constexpr u32 create_four_cc(char c0, char c1, char c2, char c3)
{
return c0 | c1 << 8 | c2 << 16 | c3 << 24;
}
static u64 get_width(DDSHeader header, size_t mipmap_level)
{
if (mipmap_level >= header.mip_map_count) {
return header.width;
}
return header.width >> mipmap_level;
}
static u64 get_height(DDSHeader header, size_t mipmap_level)
{
if (mipmap_level >= header.mip_map_count) {
return header.height;
}
return header.height >> mipmap_level;
}
static constexpr bool has_bitmask(DDSPixelFormat format, u32 r, u32 g, u32 b, u32 a)
{
return format.r_bit_mask == r && format.g_bit_mask == g && format.b_bit_mask == b && format.a_bit_mask == a;
}
static DXGIFormat get_format(DDSPixelFormat format)
{
if ((format.flags & PixelFormatFlags::DDPF_RGB) == PixelFormatFlags::DDPF_RGB) {
switch (format.rgb_bit_count) {
case 32: {
if (has_bitmask(format, 0x000000FF, 0x0000FF00, 0x00FF0000, 0xFF000000))
return DXGI_FORMAT_R8G8B8A8_UNORM;
if (has_bitmask(format, 0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000))
return DXGI_FORMAT_B8G8R8A8_UNORM;
if (has_bitmask(format, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000))
return DXGI_FORMAT_B8G8R8X8_UNORM;
if (has_bitmask(format, 0x3FF00000, 0x000FFC00, 0x000003FF, 0xC0000000))
return DXGI_FORMAT_R10G10B10A2_UNORM;
if (has_bitmask(format, 0x0000FFFF, 0xFFFF0000, 0x00000000, 0x00000000))
return DXGI_FORMAT_R16G16_UNORM;
if (has_bitmask(format, 0xFFFFFFFF, 0x00000000, 0x00000000, 0x00000000))
return DXGI_FORMAT_R32_FLOAT;
break;
}
case 24:
break;
case 16: {
if (has_bitmask(format, 0x7C00, 0x03E0, 0x001F, 0x8000))
return DXGI_FORMAT_B5G5R5A1_UNORM;
if (has_bitmask(format, 0xF800, 0x07E0, 0x001F, 0x0000))
return DXGI_FORMAT_B5G6R5_UNORM;
if (has_bitmask(format, 0xF800, 0x07E0, 0x001F, 0x0000))
return DXGI_FORMAT_B5G6R5_UNORM;
if (has_bitmask(format, 0x0F00, 0x00F0, 0x000F, 0xF000))
return DXGI_FORMAT_B4G4R4A4_UNORM;
if (has_bitmask(format, 0x00FF, 0x0000, 0x0000, 0xFF00))
return DXGI_FORMAT_R8G8_UNORM;
if (has_bitmask(format, 0xFFFF, 0x0000, 0x0000, 0x0000))
return DXGI_FORMAT_R16_UNORM;
break;
}
case 8: {
if (has_bitmask(format, 0xFF, 0x00, 0x00, 0x00))
return DXGI_FORMAT_R8_UNORM;
break;
}
}
} else if ((format.flags & PixelFormatFlags::DDPF_LUMINANCE) == PixelFormatFlags::DDPF_LUMINANCE) {
switch (format.rgb_bit_count) {
case 16: {
if (has_bitmask(format, 0xFFFF, 0x0000, 0x0000, 0x0000))
return DXGI_FORMAT_R16_UNORM;
if (has_bitmask(format, 0x00FF, 0x0000, 0x0000, 0xFF00))
return DXGI_FORMAT_R8G8_UNORM;
break;
}
case 8: {
if (has_bitmask(format, 0xFF, 0x00, 0x00, 0x00))
return DXGI_FORMAT_R8_UNORM;
// Some writers mistakenly write this as 8 bpp.
if (has_bitmask(format, 0x00FF, 0x0000, 0x0000, 0xFF00))
return DXGI_FORMAT_R8G8_UNORM;
break;
}
}
} else if ((format.flags & PixelFormatFlags::DDPF_ALPHA) == PixelFormatFlags::DDPF_ALPHA) {
if (format.rgb_bit_count == 8)
return DXGI_FORMAT_A8_UNORM;
} else if ((format.flags & PixelFormatFlags::DDPF_BUMPDUDV) == PixelFormatFlags::DDPF_BUMPDUDV) {
switch (format.rgb_bit_count) {
case 32: {
if (has_bitmask(format, 0x000000FF, 0x0000FF00, 0x00FF0000, 0xFF000000))
return DXGI_FORMAT_R8G8B8A8_SNORM;
if (has_bitmask(format, 0x0000FFFF, 0xFFFF0000, 0x00000000, 0x00000000))
return DXGI_FORMAT_R16G16_SNORM;
break;
}
case 16: {
if (has_bitmask(format, 0x00FF, 0xFF00, 0x0000, 0x0000))
return DXGI_FORMAT_R8G8_SNORM;
break;
}
}
} else if ((format.flags & PixelFormatFlags::DDPF_FOURCC) == PixelFormatFlags::DDPF_FOURCC) {
if (format.four_cc == create_four_cc('D', 'X', 'T', '1'))
return DXGI_FORMAT_BC1_UNORM;
if (format.four_cc == create_four_cc('D', 'X', 'T', '2'))
return DXGI_FORMAT_BC2_UNORM;
if (format.four_cc == create_four_cc('D', 'X', 'T', '3'))
return DXGI_FORMAT_BC2_UNORM;
if (format.four_cc == create_four_cc('D', 'X', 'T', '4'))
return DXGI_FORMAT_BC3_UNORM;
if (format.four_cc == create_four_cc('D', 'X', 'T', '5'))
return DXGI_FORMAT_BC3_UNORM;
if (format.four_cc == create_four_cc('A', 'T', 'I', '1'))
return DXGI_FORMAT_BC4_UNORM;
if (format.four_cc == create_four_cc('B', 'C', '4', 'U'))
return DXGI_FORMAT_BC4_UNORM;
if (format.four_cc == create_four_cc('B', 'C', '4', 'S'))
return DXGI_FORMAT_BC4_SNORM;
if (format.four_cc == create_four_cc('A', 'T', 'I', '2'))
return DXGI_FORMAT_BC5_UNORM;
if (format.four_cc == create_four_cc('B', 'C', '5', 'U'))
return DXGI_FORMAT_BC5_UNORM;
if (format.four_cc == create_four_cc('B', 'C', '5', 'S'))
return DXGI_FORMAT_BC5_SNORM;
if (format.four_cc == create_four_cc('R', 'G', 'B', 'G'))
return DXGI_FORMAT_R8G8_B8G8_UNORM;
if (format.four_cc == create_four_cc('G', 'R', 'G', 'B'))
return DXGI_FORMAT_G8R8_G8B8_UNORM;
if (format.four_cc == create_four_cc('Y', 'U', 'Y', '2'))
return DXGI_FORMAT_YUY2;
switch (format.four_cc) {
case 36:
return DXGI_FORMAT_R16G16B16A16_UNORM;
case 110:
return DXGI_FORMAT_R16G16B16A16_SNORM;
case 111:
return DXGI_FORMAT_R16_FLOAT;
case 112:
return DXGI_FORMAT_R16G16_FLOAT;
case 113:
return DXGI_FORMAT_R16G16B16A16_FLOAT;
case 114:
return DXGI_FORMAT_R32_FLOAT;
case 115:
return DXGI_FORMAT_R32G32_FLOAT;
case 116:
return DXGI_FORMAT_R32G32B32A32_FLOAT;
}
}
return DXGI_FORMAT_UNKNOWN;
}
static ErrorOr<void> decode_dx5_alpha_block(Stream& stream, DDSLoadingContext& context, u64 bitmap_x, u64 bitmap_y)
{
auto color0 = TRY(stream.read_value<LittleEndian<u8>>());
auto color1 = TRY(stream.read_value<LittleEndian<u8>>());
auto code0 = TRY(stream.read_value<LittleEndian<u8>>());
auto code1 = TRY(stream.read_value<LittleEndian<u8>>());
auto code2 = TRY(stream.read_value<LittleEndian<u8>>());
auto code3 = TRY(stream.read_value<LittleEndian<u8>>());
auto code4 = TRY(stream.read_value<LittleEndian<u8>>());
auto code5 = TRY(stream.read_value<LittleEndian<u8>>());
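// The 16 pixels each use a 3-bit alpha index, packed into the six code bytes (48 bits total).
// codes[i] combines byte i with the following byte so an index that straddles a byte boundary
// can be extracted below with a single shift.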
u32 codes[6] = { 0 };
codes[0] = code0 + 256 * (code1 + 256);
codes[1] = code1 + 256 * (code2 + 256);
codes[2] = code2 + 256 * (code3 + 256);
codes[3] = code3 + 256 * (code4 + 256);
codes[4] = code4 + 256 * code5;
codes[5] = code5;
u32 color[8] = { 0 };
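// Build the 8-entry alpha palette from the two endpoints: when color0 > color1 the remaining six
// values are interpolated in 1/7 steps, otherwise four values are interpolated in 1/5 steps and
// the last two entries are fixed at 0 (transparent) and 255 (opaque).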
if (color0 > 128) {
color[0] = color0;
}
if (color1 > 128) {
color[1] = color1;
}
if (color0 > color1) {
color[2] = (6 * color[0] + 1 * color[1]) / 7;
color[3] = (5 * color[0] + 2 * color[1]) / 7;
color[4] = (4 * color[0] + 3 * color[1]) / 7;
color[5] = (3 * color[0] + 4 * color[1]) / 7;
color[6] = (2 * color[0] + 5 * color[1]) / 7;
color[7] = (1 * color[0] + 6 * color[1]) / 7;
} else {
color[2] = (4 * color[0] + 1 * color[1]) / 5;
color[3] = (3 * color[0] + 2 * color[1]) / 5;
color[4] = (2 * color[0] + 3 * color[1]) / 5;
color[5] = (1 * color[0] + 4 * color[1]) / 5;
color[6] = 0;
color[7] = 255;
}
for (size_t y = 0; y < 4; y++) {
for (size_t x = 0; x < 4; x++) {
u8 index = 3 * (4 * y + x);
u8 bit_location = floor(index / 8.0);
u8 adjusted_index = index - (bit_location * 8);
u8 code = (codes[bit_location] >> adjusted_index) & 7;
u8 alpha = color[code];
Color color = Color(0, 0, 0, alpha);
context.bitmap->set_pixel(bitmap_x + x, bitmap_y + y, color);
}
}
return {};
}
static ErrorOr<void> decode_dx3_alpha_block(Stream& stream, DDSLoadingContext& context, u64 bitmap_x, u64 bitmap_y)
{
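// BC2/DXT3 stores explicit alpha: 4 bits per pixel for the 4x4 block, split here into two
// little-endian 32-bit halves. Each 4-bit value is expanded to 8 bits by multiplying by 17 (0xF * 17 == 0xFF).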
auto a0 = TRY(stream.read_value<LittleEndian<u8>>());
auto a1 = TRY(stream.read_value<LittleEndian<u8>>());
auto a2 = TRY(stream.read_value<LittleEndian<u8>>());
auto a3 = TRY(stream.read_value<LittleEndian<u8>>());
auto a4 = TRY(stream.read_value<LittleEndian<u8>>());
auto a5 = TRY(stream.read_value<LittleEndian<u8>>());
auto a6 = TRY(stream.read_value<LittleEndian<u8>>());
auto a7 = TRY(stream.read_value<LittleEndian<u8>>());
u64 alpha_0 = a0 + 256u * (a1 + 256u * (a2 + 256u * (a3 + 256u)));
u64 alpha_1 = a4 + 256u * (a5 + 256u * (a6 + 256u * a7));
for (size_t y = 0; y < 4; y++) {
for (size_t x = 0; x < 4; x++) {
u8 code = 4 * (4 * y + x);
if (code >= 32) {
code = code - 32;
u8 alpha = ((alpha_1 >> code) & 0x0F) * 17;
Color color = Color(0, 0, 0, alpha);
context.bitmap->set_pixel(bitmap_x + x, bitmap_y + y, color);
} else {
u8 alpha = ((alpha_0 >> code) & 0x0F) * 17;
Color color = Color(0, 0, 0, alpha);
context.bitmap->set_pixel(bitmap_x + x, bitmap_y + y, color);
}
}
}
return {};
}
static void unpack_rbg_565(u32 rgb, u8* output)
{
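// Expand the 5- and 6-bit channels to 8 bits by replicating their high bits into the low bits.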
u8 r = (rgb >> 11) & 0x1F;
u8 g = (rgb >> 5) & 0x3F;
u8 b = rgb & 0x1F;
output[0] = (r << 3) | (r >> 2);
output[1] = (g << 2) | (g >> 4);
output[2] = (b << 3) | (b >> 2);
output[3] = 255;
}
static ErrorOr<void> decode_color_block(Stream& stream, DDSLoadingContext& context, bool dxt1, u64 bitmap_x, u64 bitmap_y)
{
auto c0_low = TRY(stream.read_value<LittleEndian<u8>>());
auto c0_high = TRY(stream.read_value<LittleEndian<u8>>());
auto c1_low = TRY(stream.read_value<LittleEndian<u8>>());
auto c1_high = TRY(stream.read_value<LittleEndian<u8>>());
auto codes_0 = TRY(stream.read_value<LittleEndian<u8>>());
auto codes_1 = TRY(stream.read_value<LittleEndian<u8>>());
auto codes_2 = TRY(stream.read_value<LittleEndian<u8>>());
auto codes_3 = TRY(stream.read_value<LittleEndian<u8>>());
u64 code = codes_0 + 256 * (codes_1 + 256 * (codes_2 + 256 * codes_3));
u32 color_0 = c0_low + (c0_high * 256);
u32 color_1 = c1_low + (c1_high * 256);
u8 rgba[4][4];
unpack_rbg_565(color_0, rgba[0]);
unpack_rbg_565(color_1, rgba[1]);
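// Derive the block's 4-color palette from the two endpoints: if color_0 > color_1 both extra
// entries are interpolated thirds, otherwise the third entry is the midpoint and the fourth is
// transparent black for DXT1 (opaque black otherwise).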
if (color_0 > color_1) {
for (size_t i = 0; i < 3; i++) {
rgba[2][i] = (2 * rgba[0][i] + rgba[1][i]) / 3;
rgba[3][i] = (rgba[0][i] + 2 * rgba[1][i]) / 3;
}
rgba[2][3] = 255;
rgba[3][3] = 255;
} else {
for (size_t i = 0; i < 3; i++) {
rgba[2][i] = (rgba[0][i] + rgba[1][i]) / 2;
rgba[3][i] = 0;
}
rgba[2][3] = 255;
rgba[3][3] = dxt1 ? 0 : 255;
}
size_t i = 0;
for (size_t y = 0; y < 4; y++) {
for (size_t x = 0; x < 4; x++) {
u8 code_byte = (code >> (i * 2)) & 3;
u8 r = rgba[code_byte][0];
u8 g = rgba[code_byte][1];
u8 b = rgba[code_byte][2];
u8 a = dxt1 ? rgba[code_byte][3] : context.bitmap->get_pixel(bitmap_x + x, bitmap_y + y).alpha();
Color color = Color(r, g, b, a);
context.bitmap->set_pixel(bitmap_x + x, bitmap_y + y, color);
i++;
}
}
return {};
}
static ErrorOr<void> decode_dxt(Stream& stream, DDSLoadingContext& context, DXGIFormat format, u64 width, u64 y)
{
if (format == DXGI_FORMAT_BC1_UNORM) {
for (size_t x = 0; x < width; x += 4) {
TRY(decode_color_block(stream, context, true, x, y));
}
}
if (format == DXGI_FORMAT_BC2_UNORM) {
for (size_t x = 0; x < width; x += 4) {
TRY(decode_dx3_alpha_block(stream, context, x, y));
TRY(decode_color_block(stream, context, false, x, y));
}
}
if (format == DXGI_FORMAT_BC3_UNORM) {
for (size_t x = 0; x < width; x += 4) {
TRY(decode_dx5_alpha_block(stream, context, x, y));
TRY(decode_color_block(stream, context, false, x, y));
}
}
return {};
}
static ErrorOr<void> decode_bitmap(Stream& stream, DDSLoadingContext& context, DXGIFormat format, u64 width, u64 height)
{
Vector<u32> dxt_formats = { DXGI_FORMAT_BC1_UNORM, DXGI_FORMAT_BC2_UNORM, DXGI_FORMAT_BC3_UNORM };
if (dxt_formats.contains_slow(format)) {
for (u64 y = 0; y < height; y += 4) {
TRY(decode_dxt(stream, context, format, width, y));
}
}
// FIXME: Support more encodings (ATI, YUV, RAW, etc...).
return {};
}
static ErrorOr<void> decode_dds(DDSLoadingContext& context)
{
// All valid DDS files are at least 128 bytes long.
if (context.data_size < 128) {
dbgln_if(DDS_DEBUG, "File is too short for DDS");
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("File is too short for DDS");
}
FixedMemoryStream stream { ReadonlyBytes { context.data, context.data_size } };
auto magic = TRY(stream.read_value<u32>());
if (magic != create_four_cc('D', 'D', 'S', ' ')) {
dbgln_if(DDS_DEBUG, "Missing magic number");
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("Missing magic number");
}
context.header = TRY(stream.read_value<DDSHeader>());
if (context.header.size != 124) {
dbgln_if(DDS_DEBUG, "Header size is malformed");
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("Header size is malformed");
}
if (context.header.pixel_format.size != 32) {
dbgln_if(DDS_DEBUG, "Pixel format size is malformed");
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("Pixel format size is malformed");
}
if ((context.header.pixel_format.flags & PixelFormatFlags::DDPF_FOURCC) == PixelFormatFlags::DDPF_FOURCC) {
if (context.header.pixel_format.four_cc == create_four_cc('D', 'X', '1', '0')) {
if (context.data_size < 148) {
dbgln_if(DDS_DEBUG, "DX10 header is too short");
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("DX10 header is too short");
}
context.header10 = TRY(stream.read_value<DDSHeaderDXT10>());
}
}
if constexpr (DDS_DEBUG) {
context.dump_debug();
}
DXGIFormat format = get_format(context.header.pixel_format);
Vector<u32> supported_formats = { DXGI_FORMAT_BC1_UNORM, DXGI_FORMAT_BC2_UNORM, DXGI_FORMAT_BC3_UNORM };
if (!supported_formats.contains_slow(format)) {
dbgln_if(DDS_DEBUG, "Format of type {} is not supported at the moment", static_cast<u32>(format));
context.state = DDSLoadingContext::State::Error;
return Error::from_string_literal("Format type is not supported at the moment");
}
// We support parsing mipmaps, but we only care about the largest one :^) (At least for now)
if (size_t mipmap_level = 0; mipmap_level < max(context.header.mip_map_count, 1u)) {
u64 width = get_width(context.header, mipmap_level);
u64 height = get_height(context.header, mipmap_level);
context.bitmap = TRY(Bitmap::create(BitmapFormat::BGRA8888, { width, height }));
TRY(decode_bitmap(stream, context, format, width, height));
}
context.state = DDSLoadingContext::State::BitmapDecoded;
return {};
}
void DDSLoadingContext::dump_debug()
{
StringBuilder builder;
builder.append("\nDDS:\n"sv);
builder.appendff("\tHeader Size: {}\n", header.size);
builder.append("\tFlags:"sv);
if ((header.flags & DDSFlags::DDSD_CAPS) == DDSFlags::DDSD_CAPS)
builder.append(" DDSD_CAPS"sv);
if ((header.flags & DDSFlags::DDSD_HEIGHT) == DDSFlags::DDSD_HEIGHT)
builder.append(" DDSD_HEIGHT"sv);
if ((header.flags & DDSFlags::DDSD_WIDTH) == DDSFlags::DDSD_WIDTH)
builder.append(" DDSD_WIDTH"sv);
if ((header.flags & DDSFlags::DDSD_PITCH) == DDSFlags::DDSD_PITCH)
builder.append(" DDSD_PITCH"sv);
if ((header.flags & DDSFlags::DDSD_PIXELFORMAT) == DDSFlags::DDSD_PIXELFORMAT)
builder.append(" DDSD_PIXELFORMAT"sv);
if ((header.flags & DDSFlags::DDSD_MIPMAPCOUNT) == DDSFlags::DDSD_MIPMAPCOUNT)
builder.append(" DDSD_MIPMAPCOUNT"sv);
if ((header.flags & DDSFlags::DDSD_LINEARSIZE) == DDSFlags::DDSD_LINEARSIZE)
builder.append(" DDSD_LINEARSIZE"sv);
if ((header.flags & DDSFlags::DDSD_DEPTH) == DDSFlags::DDSD_DEPTH)
builder.append(" DDSD_DEPTH"sv);
builder.append("\n"sv);
builder.appendff("\tHeight: {}\n", header.height);
builder.appendff("\tWidth: {}\n", header.width);
builder.appendff("\tPitch: {}\n", header.pitch);
builder.appendff("\tDepth: {}\n", header.depth);
builder.appendff("\tMipmap Count: {}\n", header.mip_map_count);
builder.append("\tCaps:"sv);
if ((header.caps1 & Caps1Flags::DDSCAPS_COMPLEX) == Caps1Flags::DDSCAPS_COMPLEX)
builder.append(" DDSCAPS_COMPLEX"sv);
if ((header.caps1 & Caps1Flags::DDSCAPS_MIPMAP) == Caps1Flags::DDSCAPS_MIPMAP)
builder.append(" DDSCAPS_MIPMAP"sv);
if ((header.caps1 & Caps1Flags::DDSCAPS_TEXTURE) == Caps1Flags::DDSCAPS_TEXTURE)
builder.append(" DDSCAPS_TEXTURE"sv);
builder.append("\n"sv);
builder.append("\tCaps2:"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP) == Caps2Flags::DDSCAPS2_CUBEMAP)
builder.append(" DDSCAPS2_CUBEMAP"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEX) == Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEX)
builder.append(" DDSCAPS2_CUBEMAP_POSITIVEX"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEX) == Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEX)
builder.append(" DDSCAPS2_CUBEMAP_NEGATIVEX"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEY) == Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEY)
builder.append(" DDSCAPS2_CUBEMAP_POSITIVEY"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEY) == Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEY)
builder.append(" DDSCAPS2_CUBEMAP_NEGATIVEY"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEZ) == Caps2Flags::DDSCAPS2_CUBEMAP_POSITIVEZ)
builder.append(" DDSCAPS2_CUBEMAP_POSITIVEZ"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEZ) == Caps2Flags::DDSCAPS2_CUBEMAP_NEGATIVEZ)
builder.append(" DDSCAPS2_CUBEMAP_NEGATIVEZ"sv);
if ((header.caps2 & Caps2Flags::DDSCAPS2_VOLUME) == Caps2Flags::DDSCAPS2_VOLUME)
builder.append(" DDSCAPS2_VOLUME"sv);
builder.append("\n"sv);
builder.append("Pixel Format:\n"sv);
builder.appendff("\tStruct Size: {}\n", header.pixel_format.size);
builder.append("\tFlags:"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_ALPHAPIXELS) == PixelFormatFlags::DDPF_ALPHAPIXELS)
builder.append(" DDPF_ALPHAPIXELS"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_ALPHA) == PixelFormatFlags::DDPF_ALPHA)
builder.append(" DDPF_ALPHA"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_FOURCC) == PixelFormatFlags::DDPF_FOURCC)
builder.append(" DDPF_FOURCC"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_PALETTEINDEXED8) == PixelFormatFlags::DDPF_PALETTEINDEXED8)
builder.append(" DDPF_PALETTEINDEXED8"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_RGB) == PixelFormatFlags::DDPF_RGB)
builder.append(" DDPF_RGB"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_YUV) == PixelFormatFlags::DDPF_YUV)
builder.append(" DDPF_YUV"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_LUMINANCE) == PixelFormatFlags::DDPF_LUMINANCE)
builder.append(" DDPF_LUMINANCE"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_BUMPDUDV) == PixelFormatFlags::DDPF_BUMPDUDV)
builder.append(" DDPF_BUMPDUDV"sv);
if ((header.pixel_format.flags & PixelFormatFlags::DDPF_NORMAL) == PixelFormatFlags::DDPF_NORMAL)
builder.append(" DDPF_NORMAL"sv);
builder.append("\n"sv);
builder.append("\tFour CC: "sv);
builder.appendff("{:c}", (header.pixel_format.four_cc >> (8 * 0)) & 0xFF);
builder.appendff("{:c}", (header.pixel_format.four_cc >> (8 * 1)) & 0xFF);
builder.appendff("{:c}", (header.pixel_format.four_cc >> (8 * 2)) & 0xFF);
builder.appendff("{:c}", (header.pixel_format.four_cc >> (8 * 3)) & 0xFF);
builder.append("\n"sv);
builder.appendff("\tRGB Bit Count: {}\n", header.pixel_format.rgb_bit_count);
builder.appendff("\tR Bit Mask: {}\n", header.pixel_format.r_bit_mask);
builder.appendff("\tG Bit Mask: {}\n", header.pixel_format.g_bit_mask);
builder.appendff("\tB Bit Mask: {}\n", header.pixel_format.b_bit_mask);
builder.appendff("\tA Bit Mask: {}\n", header.pixel_format.a_bit_mask);
builder.append("DDS10:\n"sv);
builder.appendff("\tFormat: {}\n", static_cast<u32>(header10.format));
builder.append("\tResource Dimension:"sv);
if ((header10.resource_dimension & ResourceDimensions::DDS_DIMENSION_UNKNOWN) == ResourceDimensions::DDS_DIMENSION_UNKNOWN)
builder.append(" DDS_DIMENSION_UNKNOWN"sv);
if ((header10.resource_dimension & ResourceDimensions::DDS_DIMENSION_BUFFER) == ResourceDimensions::DDS_DIMENSION_BUFFER)
builder.append(" DDS_DIMENSION_BUFFER"sv);
if ((header10.resource_dimension & ResourceDimensions::DDS_DIMENSION_TEXTURE1D) == ResourceDimensions::DDS_DIMENSION_TEXTURE1D)
builder.append(" DDS_DIMENSION_TEXTURE1D"sv);
if ((header10.resource_dimension & ResourceDimensions::DDS_DIMENSION_TEXTURE2D) == ResourceDimensions::DDS_DIMENSION_TEXTURE2D)
builder.append(" DDS_DIMENSION_TEXTURE2D"sv);
if ((header10.resource_dimension & ResourceDimensions::DDS_DIMENSION_TEXTURE3D) == ResourceDimensions::DDS_DIMENSION_TEXTURE3D)
builder.append(" DDS_DIMENSION_TEXTURE3D"sv);
builder.append("\n"sv);
builder.appendff("\tArray Size: {}\n", header10.array_size);
builder.append("\tMisc Flags:"sv);
if ((header10.misc_flag & MiscFlags::DDS_RESOURCE_MISC_TEXTURECUBE) == MiscFlags::DDS_RESOURCE_MISC_TEXTURECUBE)
builder.append(" DDS_RESOURCE_MISC_TEXTURECUBE"sv);
builder.append("\n"sv);
builder.append("\tMisc Flags 2:"sv);
if ((header10.misc_flag2 & Misc2Flags::DDS_ALPHA_MODE_UNKNOWN) == Misc2Flags::DDS_ALPHA_MODE_UNKNOWN)
builder.append(" DDS_ALPHA_MODE_UNKNOWN"sv);
if ((header10.misc_flag2 & Misc2Flags::DDS_ALPHA_MODE_STRAIGHT) == Misc2Flags::DDS_ALPHA_MODE_STRAIGHT)
builder.append(" DDS_ALPHA_MODE_STRAIGHT"sv);
if ((header10.misc_flag2 & Misc2Flags::DDS_ALPHA_MODE_PREMULTIPLIED) == Misc2Flags::DDS_ALPHA_MODE_PREMULTIPLIED)
builder.append(" DDS_ALPHA_MODE_PREMULTIPLIED"sv);
if ((header10.misc_flag2 & Misc2Flags::DDS_ALPHA_MODE_OPAQUE) == Misc2Flags::DDS_ALPHA_MODE_OPAQUE)
builder.append(" DDS_ALPHA_MODE_OPAQUE"sv);
if ((header10.misc_flag2 & Misc2Flags::DDS_ALPHA_MODE_CUSTOM) == Misc2Flags::DDS_ALPHA_MODE_CUSTOM)
builder.append(" DDS_ALPHA_MODE_CUSTOM"sv);
builder.append("\n"sv);
dbgln("{}", builder.to_deprecated_string());
}
DDSImageDecoderPlugin::DDSImageDecoderPlugin(u8 const* data, size_t size)
{
m_context = make<DDSLoadingContext>();
m_context->data = data;
m_context->data_size = size;
}
DDSImageDecoderPlugin::~DDSImageDecoderPlugin() = default;
IntSize DDSImageDecoderPlugin::size()
{
if (m_context->state == DDSLoadingContext::State::Error)
return {};
if (m_context->state == DDSLoadingContext::State::BitmapDecoded)
return { m_context->header.width, m_context->header.height };
return {};
}
void DDSImageDecoderPlugin::set_volatile()
{
if (m_context->bitmap)
m_context->bitmap->set_volatile();
}
bool DDSImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->bitmap)
return false;
return m_context->bitmap->set_nonvolatile(was_purged);
}
bool DDSImageDecoderPlugin::initialize()
{
// The header is always at least 128 bytes, so if the file is smaller, it can't be a DDS.
return m_context->data_size > 128
&& m_context->data[0] == 0x44
&& m_context->data[1] == 0x44
&& m_context->data[2] == 0x53
&& m_context->data[3] == 0x20;
}
bool DDSImageDecoderPlugin::sniff(ReadonlyBytes data)
{
// The header is always at least 128 bytes, so if the file is smaller, it can't be a DDS.
return data.size() > 128
&& data.data()[0] == 0x44
&& data.data()[1] == 0x44
&& data.data()[2] == 0x53
&& data.data()[3] == 0x20;
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> DDSImageDecoderPlugin::create(ReadonlyBytes data)
{
return adopt_nonnull_own_or_enomem(new (nothrow) DDSImageDecoderPlugin(data.data(), data.size()));
}
bool DDSImageDecoderPlugin::is_animated()
{
return false;
}
size_t DDSImageDecoderPlugin::loop_count()
{
return 0;
}
size_t DDSImageDecoderPlugin::frame_count()
{
return 1;
}
ErrorOr<ImageFrameDescriptor> DDSImageDecoderPlugin::frame(size_t index)
{
if (index > 0)
return Error::from_string_literal("DDSImageDecoderPlugin: Invalid frame index");
if (m_context->state == DDSLoadingContext::State::Error)
return Error::from_string_literal("DDSImageDecoderPlugin: Decoding failed");
if (m_context->state < DDSLoadingContext::State::BitmapDecoded) {
TRY(decode_dds(*m_context));
}
VERIFY(m_context->bitmap);
return ImageFrameDescriptor { m_context->bitmap, 0 };
}
ErrorOr<Optional<ReadonlyBytes>> DDSImageDecoderPlugin::icc_data()
{
return OptionalNone {};
}
}


@@ -0,0 +1,269 @@
/*
* Copyright (c) 2021, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
enum MiscFlags : u32 {
DDS_RESOURCE_MISC_TEXTURECUBE = 0x4,
};
enum Misc2Flags : u32 {
DDS_ALPHA_MODE_UNKNOWN = 0x0,
DDS_ALPHA_MODE_STRAIGHT = 0x1,
DDS_ALPHA_MODE_PREMULTIPLIED = 0x2,
DDS_ALPHA_MODE_OPAQUE = 0x3,
DDS_ALPHA_MODE_CUSTOM = 0x4,
};
enum Caps1Flags : u32 {
DDSCAPS_COMPLEX = 0x8,
DDSCAPS_TEXTURE = 0x1000,
DDSCAPS_MIPMAP = 0x400000,
};
enum Caps2Flags : u32 {
DDSCAPS2_CUBEMAP = 0x200,
DDSCAPS2_CUBEMAP_POSITIVEX = 0x400,
DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800,
DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000,
DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000,
DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000,
DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000,
DDSCAPS2_VOLUME = 0x200000,
};
enum ResourceDimensions : u32 {
DDS_DIMENSION_UNKNOWN,
DDS_DIMENSION_BUFFER,
DDS_DIMENSION_TEXTURE1D = 2,
DDS_DIMENSION_TEXTURE2D = 3,
DDS_DIMENSION_TEXTURE3D = 4,
};
enum DXGIFormat : u32 {
DXGI_FORMAT_UNKNOWN = 0,
DXGI_FORMAT_R32G32B32A32_TYPELESS,
DXGI_FORMAT_R32G32B32A32_FLOAT,
DXGI_FORMAT_R32G32B32A32_UINT,
DXGI_FORMAT_R32G32B32A32_SINT,
DXGI_FORMAT_R32G32B32_TYPELESS,
DXGI_FORMAT_R32G32B32_FLOAT,
DXGI_FORMAT_R32G32B32_UINT,
DXGI_FORMAT_R32G32B32_SINT,
DXGI_FORMAT_R16G16B16A16_TYPELESS,
DXGI_FORMAT_R16G16B16A16_FLOAT,
DXGI_FORMAT_R16G16B16A16_UNORM,
DXGI_FORMAT_R16G16B16A16_UINT,
DXGI_FORMAT_R16G16B16A16_SNORM,
DXGI_FORMAT_R16G16B16A16_SINT,
DXGI_FORMAT_R32G32_TYPELESS,
DXGI_FORMAT_R32G32_FLOAT,
DXGI_FORMAT_R32G32_UINT,
DXGI_FORMAT_R32G32_SINT,
DXGI_FORMAT_R32G8X24_TYPELESS,
DXGI_FORMAT_D32_FLOAT_S8X24_UINT,
DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS,
DXGI_FORMAT_X32_TYPELESS_G8X24_UINT,
DXGI_FORMAT_R10G10B10A2_TYPELESS,
DXGI_FORMAT_R10G10B10A2_UNORM,
DXGI_FORMAT_R10G10B10A2_UINT,
DXGI_FORMAT_R11G11B10_FLOAT,
DXGI_FORMAT_R8G8B8A8_TYPELESS,
DXGI_FORMAT_R8G8B8A8_UNORM,
DXGI_FORMAT_R8G8B8A8_UNORM_SRGB,
DXGI_FORMAT_R8G8B8A8_UINT,
DXGI_FORMAT_R8G8B8A8_SNORM,
DXGI_FORMAT_R8G8B8A8_SINT,
DXGI_FORMAT_R16G16_TYPELESS,
DXGI_FORMAT_R16G16_FLOAT,
DXGI_FORMAT_R16G16_UNORM,
DXGI_FORMAT_R16G16_UINT,
DXGI_FORMAT_R16G16_SNORM,
DXGI_FORMAT_R16G16_SINT,
DXGI_FORMAT_R32_TYPELESS,
DXGI_FORMAT_D32_FLOAT,
DXGI_FORMAT_R32_FLOAT,
DXGI_FORMAT_R32_UINT,
DXGI_FORMAT_R32_SINT,
DXGI_FORMAT_R24G8_TYPELESS,
DXGI_FORMAT_D24_UNORM_S8_UINT,
DXGI_FORMAT_R24_UNORM_X8_TYPELESS,
DXGI_FORMAT_X24_TYPELESS_G8_UINT,
DXGI_FORMAT_R8G8_TYPELESS,
DXGI_FORMAT_R8G8_UNORM,
DXGI_FORMAT_R8G8_UINT,
DXGI_FORMAT_R8G8_SNORM,
DXGI_FORMAT_R8G8_SINT,
DXGI_FORMAT_R16_TYPELESS,
DXGI_FORMAT_R16_FLOAT,
DXGI_FORMAT_D16_UNORM,
DXGI_FORMAT_R16_UNORM,
DXGI_FORMAT_R16_UINT,
DXGI_FORMAT_R16_SNORM,
DXGI_FORMAT_R16_SINT,
DXGI_FORMAT_R8_TYPELESS,
DXGI_FORMAT_R8_UNORM,
DXGI_FORMAT_R8_UINT,
DXGI_FORMAT_R8_SNORM,
DXGI_FORMAT_R8_SINT,
DXGI_FORMAT_A8_UNORM,
DXGI_FORMAT_R1_UNORM,
DXGI_FORMAT_R9G9B9E5_SHAREDEXP,
DXGI_FORMAT_R8G8_B8G8_UNORM,
DXGI_FORMAT_G8R8_G8B8_UNORM,
DXGI_FORMAT_BC1_TYPELESS,
DXGI_FORMAT_BC1_UNORM,
DXGI_FORMAT_BC1_UNORM_SRGB,
DXGI_FORMAT_BC2_TYPELESS,
DXGI_FORMAT_BC2_UNORM,
DXGI_FORMAT_BC2_UNORM_SRGB,
DXGI_FORMAT_BC3_TYPELESS,
DXGI_FORMAT_BC3_UNORM,
DXGI_FORMAT_BC3_UNORM_SRGB,
DXGI_FORMAT_BC4_TYPELESS,
DXGI_FORMAT_BC4_UNORM,
DXGI_FORMAT_BC4_SNORM,
DXGI_FORMAT_BC5_TYPELESS,
DXGI_FORMAT_BC5_UNORM,
DXGI_FORMAT_BC5_SNORM,
DXGI_FORMAT_B5G6R5_UNORM,
DXGI_FORMAT_B5G5R5A1_UNORM,
DXGI_FORMAT_B8G8R8A8_UNORM,
DXGI_FORMAT_B8G8R8X8_UNORM,
DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM,
DXGI_FORMAT_B8G8R8A8_TYPELESS,
DXGI_FORMAT_B8G8R8A8_UNORM_SRGB,
DXGI_FORMAT_B8G8R8X8_TYPELESS,
DXGI_FORMAT_B8G8R8X8_UNORM_SRGB,
DXGI_FORMAT_BC6H_TYPELESS,
DXGI_FORMAT_BC6H_UF16,
DXGI_FORMAT_BC6H_SF16,
DXGI_FORMAT_BC7_TYPELESS,
DXGI_FORMAT_BC7_UNORM,
DXGI_FORMAT_BC7_UNORM_SRGB,
DXGI_FORMAT_AYUV,
DXGI_FORMAT_Y410,
DXGI_FORMAT_Y416,
DXGI_FORMAT_NV12,
DXGI_FORMAT_P010,
DXGI_FORMAT_P016,
DXGI_FORMAT_420_OPAQUE,
DXGI_FORMAT_YUY2,
DXGI_FORMAT_Y210,
DXGI_FORMAT_Y216,
DXGI_FORMAT_NV11,
DXGI_FORMAT_AI44,
DXGI_FORMAT_IA44,
DXGI_FORMAT_P8,
DXGI_FORMAT_A8P8,
DXGI_FORMAT_B4G4R4A4_UNORM,
DXGI_FORMAT_P208,
DXGI_FORMAT_V208,
DXGI_FORMAT_V408,
DXGI_FORMAT_SAMPLER_FEEDBACK_MIN_MIP_OPAQUE,
DXGI_FORMAT_SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE,
DXGI_FORMAT_FORCE_UINT
};
enum DDSFlags : u32 {
DDSD_CAPS = 0x1,
DDSD_HEIGHT = 0x2,
DDSD_WIDTH = 0x4,
DDSD_PITCH = 0x8,
DDSD_PIXELFORMAT = 0x1000,
DDSD_MIPMAPCOUNT = 0x20000,
DDSD_LINEARSIZE = 0x80000,
DDSD_DEPTH = 0x800000,
};
enum PixelFormatFlags : u32 {
DDPF_ALPHAPIXELS = 0x1,
DDPF_ALPHA = 0x2,
DDPF_FOURCC = 0x4,
DDPF_PALETTEINDEXED8 = 0x20,
DDPF_RGB = 0x40,
DDPF_YUV = 0x200,
DDPF_LUMINANCE = 0x20000,
DDPF_BUMPDUDV = 0x80000,
DDPF_NORMAL = 0x80000000,
};
struct [[gnu::packed]] DDSPixelFormat {
u32 size {};
u32 flags {};
u32 four_cc {};
u32 rgb_bit_count {};
u32 r_bit_mask {};
u32 g_bit_mask {};
u32 b_bit_mask {};
u32 a_bit_mask {};
};
struct [[gnu::packed]] DDSHeader {
u32 size {};
u32 flags {};
u32 height {};
u32 width {};
u32 pitch {};
u32 depth {};
u32 mip_map_count {};
u32 reserved[11];
DDSPixelFormat pixel_format;
u32 caps1 {};
u32 caps2 {};
u32 caps3 {};
u32 caps4 {};
u32 reserved2 {};
};
struct [[gnu::packed]] DDSHeaderDXT10 {
DXGIFormat format {};
u32 resource_dimension {};
u32 misc_flag {};
u32 array_size {};
u32 misc_flag2 {};
};
struct DDSLoadingContext;
class DDSImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~DDSImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
DDSImageDecoderPlugin(u8 const*, size_t);
OwnPtr<DDSLoadingContext> m_context;
};
}
template<>
struct AK::Traits<Gfx::DDSHeader> : public AK::GenericTraits<Gfx::DDSHeader> {
static constexpr bool is_trivially_serializable() { return true; }
};
template<>
struct AK::Traits<Gfx::DDSHeaderDXT10> : public AK::GenericTraits<Gfx::DDSHeaderDXT10> {
static constexpr bool is_trivially_serializable() { return true; }
};


@@ -0,0 +1,673 @@
/*
* Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Array.h>
#include <AK/Debug.h>
#include <AK/Endian.h>
#include <AK/Error.h>
#include <AK/IntegralMath.h>
#include <AK/Memory.h>
#include <AK/MemoryStream.h>
#include <AK/Try.h>
#include <LibGfx/ImageFormats/GIFLoader.h>
#include <string.h>
namespace Gfx {
// Row strides and offsets for each interlace pass.
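// Pass 1 covers rows 0, 8, 16, ..., pass 2 rows 4, 12, ..., pass 3 rows 2, 6, ..., and pass 4
// the remaining odd rows.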
static constexpr Array<int, 4> INTERLACE_ROW_STRIDES = { 8, 8, 4, 2 };
static constexpr Array<int, 4> INTERLACE_ROW_OFFSETS = { 0, 4, 2, 1 };
struct GIFImageDescriptor {
u16 x { 0 };
u16 y { 0 };
u16 width { 0 };
u16 height { 0 };
bool use_global_color_map { true };
bool interlaced { false };
Color color_map[256];
u8 lzw_min_code_size { 0 };
Vector<u8> lzw_encoded_bytes;
// Fields from optional graphic control extension block
enum DisposalMethod : u8 {
None = 0,
InPlace = 1,
RestoreBackground = 2,
RestorePrevious = 3,
};
DisposalMethod disposal_method { None };
u8 transparency_index { 0 };
u16 duration { 0 };
bool transparent { false };
bool user_input { false };
const IntRect rect() const
{
return { this->x, this->y, this->width, this->height };
}
};
struct LogicalScreen {
u16 width;
u16 height;
Color color_map[256];
};
struct GIFLoadingContext {
enum State {
NotDecoded = 0,
FrameDescriptorsLoaded,
FrameComplete,
};
State state { NotDecoded };
enum ErrorState {
NoError = 0,
FailedToDecodeAllFrames,
FailedToDecodeAnyFrame,
FailedToLoadFrameDescriptors,
};
ErrorState error_state { NoError };
u8 const* data { nullptr };
size_t data_size { 0 };
LogicalScreen logical_screen {};
u8 background_color_index { 0 };
Vector<NonnullOwnPtr<GIFImageDescriptor>> images {};
size_t loops { 1 };
RefPtr<Gfx::Bitmap> frame_buffer;
size_t current_frame { 0 };
RefPtr<Gfx::Bitmap> prev_frame_buffer;
};
enum class GIFFormat {
GIF87a,
GIF89a,
};
static ErrorOr<GIFFormat> decode_gif_header(Stream& stream)
{
static auto valid_header_87 = "GIF87a"sv;
static auto valid_header_89 = "GIF89a"sv;
Array<u8, 6> header;
TRY(stream.read_until_filled(header));
if (header.span() == valid_header_87.bytes())
return GIFFormat::GIF87a;
if (header.span() == valid_header_89.bytes())
return GIFFormat::GIF89a;
return Error::from_string_literal("GIF header unknown");
}
class LZWDecoder {
private:
static constexpr int max_code_size = 12;
public:
explicit LZWDecoder(Vector<u8> const& lzw_bytes, u8 min_code_size)
: m_lzw_bytes(lzw_bytes)
, m_code_size(min_code_size)
, m_original_code_size(min_code_size)
, m_table_capacity(AK::exp2<u32>(min_code_size))
{
init_code_table();
}
u16 add_control_code()
{
const u16 control_code = m_code_table.size();
m_code_table.append(Vector<u8> {});
m_original_code_table.append(Vector<u8> {});
if (m_code_table.size() >= m_table_capacity && m_code_size < max_code_size) {
++m_code_size;
++m_original_code_size;
m_table_capacity *= 2;
}
return control_code;
}
void reset()
{
m_code_table.clear();
m_code_table.extend(m_original_code_table);
m_code_size = m_original_code_size;
m_table_capacity = AK::exp2<u32>(m_code_size);
m_output.clear();
}
ErrorOr<u16> next_code()
{
size_t current_byte_index = m_current_bit_index / 8;
if (current_byte_index >= m_lzw_bytes.size()) {
return Error::from_string_literal("LZWDecoder tries to read ouf of bounds");
}
// Extract the code bits using a 32-bit mask to cover the possibility that if
// the current code size > 9 bits then the code can span 3 bytes.
u8 current_bit_offset = m_current_bit_index % 8;
u32 mask = (u32)(m_table_capacity - 1) << current_bit_offset;
// Make a padded copy of the final bytes in the data to ensure we don't read past the end.
if (current_byte_index + sizeof(mask) > m_lzw_bytes.size()) {
u8 padded_last_bytes[sizeof(mask)] = { 0 };
for (int i = 0; current_byte_index + i < m_lzw_bytes.size(); ++i) {
padded_last_bytes[i] = m_lzw_bytes[current_byte_index + i];
}
u32 const* addr = (u32 const*)&padded_last_bytes;
m_current_code = (*addr & mask) >> current_bit_offset;
} else {
u32 tmp_word;
memcpy(&tmp_word, &m_lzw_bytes.at(current_byte_index), sizeof(u32));
m_current_code = (tmp_word & mask) >> current_bit_offset;
}
if (m_current_code > m_code_table.size()) {
dbgln_if(GIF_DEBUG, "Corrupted LZW stream, invalid code: {} at bit index {}, code table size: {}",
m_current_code,
m_current_bit_index,
m_code_table.size());
return Error::from_string_literal("Corrupted LZW stream, invalid code");
} else if (m_current_code == m_code_table.size() && m_output.is_empty()) {
dbgln_if(GIF_DEBUG, "Corrupted LZW stream, valid new code but output buffer is empty: {} at bit index {}, code table size: {}",
m_current_code,
m_current_bit_index,
m_code_table.size());
return Error::from_string_literal("Corrupted LZW stream, valid new code but output buffer is empty");
}
m_current_bit_index += m_code_size;
return m_current_code;
}
Vector<u8>& get_output()
{
VERIFY(m_current_code <= m_code_table.size());
if (m_current_code < m_code_table.size()) {
Vector<u8> new_entry = m_output;
m_output = m_code_table.at(m_current_code);
new_entry.append(m_output[0]);
extend_code_table(new_entry);
} else if (m_current_code == m_code_table.size()) {
VERIFY(!m_output.is_empty());
m_output.append(m_output[0]);
extend_code_table(m_output);
}
return m_output;
}
private:
void init_code_table()
{
m_code_table.ensure_capacity(m_table_capacity);
for (u16 i = 0; i < m_table_capacity; ++i) {
m_code_table.unchecked_append({ (u8)i });
}
m_original_code_table = m_code_table;
}
void extend_code_table(Vector<u8> const& entry)
{
if (entry.size() > 1 && m_code_table.size() < 4096) {
m_code_table.append(entry);
if (m_code_table.size() >= m_table_capacity && m_code_size < max_code_size) {
++m_code_size;
m_table_capacity *= 2;
}
}
}
Vector<u8> const& m_lzw_bytes;
int m_current_bit_index { 0 };
Vector<Vector<u8>> m_code_table {};
Vector<Vector<u8>> m_original_code_table {};
u8 m_code_size { 0 };
u8 m_original_code_size { 0 };
u32 m_table_capacity { 0 };
u16 m_current_code { 0 };
Vector<u8> m_output {};
};
static void copy_frame_buffer(Bitmap& dest, Bitmap const& src)
{
VERIFY(dest.size_in_bytes() == src.size_in_bytes());
memcpy(dest.scanline(0), src.scanline(0), dest.size_in_bytes());
}
static void clear_rect(Bitmap& bitmap, IntRect const& rect, Color color)
{
auto intersection_rect = rect.intersected(bitmap.rect());
if (intersection_rect.is_empty())
return;
ARGB32* dst = bitmap.scanline(intersection_rect.top()) + intersection_rect.left();
const size_t dst_skip = bitmap.pitch() / sizeof(ARGB32);
for (int i = intersection_rect.height() - 1; i >= 0; --i) {
fast_u32_fill(dst, color.value(), intersection_rect.width());
dst += dst_skip;
}
}
static ErrorOr<void> decode_frame(GIFLoadingContext& context, size_t frame_index)
{
if (frame_index >= context.images.size()) {
return Error::from_string_literal("frame_index size too high");
}
if (context.state >= GIFLoadingContext::State::FrameComplete && frame_index == context.current_frame) {
return {};
}
size_t start_frame = context.current_frame + 1;
if (context.state < GIFLoadingContext::State::FrameComplete) {
start_frame = 0;
context.frame_buffer = TRY(Bitmap::create(BitmapFormat::BGRA8888, { context.logical_screen.width, context.logical_screen.height }));
context.prev_frame_buffer = TRY(Bitmap::create(BitmapFormat::BGRA8888, { context.logical_screen.width, context.logical_screen.height }));
} else if (frame_index < context.current_frame) {
start_frame = 0;
}
for (size_t i = start_frame; i <= frame_index; ++i) {
auto& image = context.images.at(i);
auto const previous_image_disposal_method = i > 0 ? context.images.at(i - 1)->disposal_method : GIFImageDescriptor::DisposalMethod::None;
if (i == 0) {
context.frame_buffer->fill(Color::Transparent);
} else if (i > 0 && image->disposal_method == GIFImageDescriptor::DisposalMethod::RestorePrevious
&& previous_image_disposal_method != GIFImageDescriptor::DisposalMethod::RestorePrevious) {
// This marks the start of a run of frames that once disposed should be restored to the
// previous underlying image contents. Therefore we make a copy of the current frame
// buffer so that it can be restored later.
copy_frame_buffer(*context.prev_frame_buffer, *context.frame_buffer);
}
if (previous_image_disposal_method == GIFImageDescriptor::DisposalMethod::RestoreBackground) {
// Note: RestoreBackground could be interpreted either as restoring the underlying
// background of the entire image (e.g. container element's background-color), or the
// background color of the GIF itself. It appears that all major browsers and most other
// GIF decoders adhere to the former interpretation, therefore we will do the same by
// clearing the entire frame buffer to transparent.
clear_rect(*context.frame_buffer, context.images[i - 1]->rect(), Color::Transparent);
} else if (i > 0 && previous_image_disposal_method == GIFImageDescriptor::DisposalMethod::RestorePrevious) {
// Previous frame indicated that once disposed, it should be restored to *its* previous
// underlying image contents, therefore we restore the saved previous frame buffer.
copy_frame_buffer(*context.frame_buffer, *context.prev_frame_buffer);
}
if (image->lzw_min_code_size > 8)
return Error::from_string_literal("LZW minimum code size is greater than 8");
LZWDecoder decoder(image->lzw_encoded_bytes, image->lzw_min_code_size);
// Add GIF-specific control codes
int const clear_code = decoder.add_control_code();
int const end_of_information_code = decoder.add_control_code();
auto const& color_map = image->use_global_color_map ? context.logical_screen.color_map : image->color_map;
int pixel_index = 0;
int row = 0;
int interlace_pass = 0;
while (true) {
ErrorOr<u16> code = decoder.next_code();
if (code.is_error()) {
dbgln_if(GIF_DEBUG, "Unexpectedly reached end of gif frame data");
return code.release_error();
}
if (code.value() == clear_code) {
decoder.reset();
continue;
}
if (code.value() == end_of_information_code)
break;
if (!image->width)
continue;
auto colors = decoder.get_output();
for (auto const& color : colors) {
auto c = color_map[color];
int x = pixel_index % image->width + image->x;
int y = row + image->y;
if (context.frame_buffer->rect().contains(x, y) && (!image->transparent || color != image->transparency_index)) {
context.frame_buffer->set_pixel(x, y, c);
}
++pixel_index;
if (pixel_index % image->width == 0) {
if (image->interlaced) {
if (interlace_pass < 4) {
if (row + INTERLACE_ROW_STRIDES[interlace_pass] >= image->height) {
++interlace_pass;
if (interlace_pass < 4)
row = INTERLACE_ROW_OFFSETS[interlace_pass];
} else {
row += INTERLACE_ROW_STRIDES[interlace_pass];
}
}
} else {
++row;
}
}
}
}
context.current_frame = i;
context.state = GIFLoadingContext::State::FrameComplete;
}
return {};
}
static ErrorOr<void> load_gif_frame_descriptors(GIFLoadingContext& context)
{
if (context.data_size < 32)
return Error::from_string_literal("Size too short for GIF frame descriptors");
FixedMemoryStream stream { { context.data, context.data_size } };
TRY(decode_gif_header(stream));
context.logical_screen.width = TRY(stream.read_value<LittleEndian<u16>>());
context.logical_screen.height = TRY(stream.read_value<LittleEndian<u16>>());
if (context.logical_screen.width > maximum_width_for_decoded_images || context.logical_screen.height > maximum_height_for_decoded_images) {
dbgln("This GIF is too large for comfort: {}x{}", context.logical_screen.width, context.logical_screen.height);
return Error::from_string_literal("This GIF is too large for comfort");
}
auto gcm_info = TRY(stream.read_value<u8>());
context.background_color_index = TRY(stream.read_value<u8>());
[[maybe_unused]] auto pixel_aspect_ratio = TRY(stream.read_value<u8>());
u8 bits_per_pixel = (gcm_info & 7) + 1;
int color_map_entry_count = 1;
for (int i = 0; i < bits_per_pixel; ++i)
color_map_entry_count *= 2;
for (int i = 0; i < color_map_entry_count; ++i) {
u8 r = TRY(stream.read_value<u8>());
u8 g = TRY(stream.read_value<u8>());
u8 b = TRY(stream.read_value<u8>());
context.logical_screen.color_map[i] = { r, g, b };
}
NonnullOwnPtr<GIFImageDescriptor> current_image = make<GIFImageDescriptor>();
for (;;) {
u8 sentinel = TRY(stream.read_value<u8>());
if (sentinel == '!') {
u8 extension_type = TRY(stream.read_value<u8>());
u8 sub_block_length = 0;
Vector<u8> sub_block {};
for (;;) {
sub_block_length = TRY(stream.read_value<u8>());
if (sub_block_length == 0)
break;
TRY(sub_block.try_resize(sub_block.size() + sub_block_length));
TRY(stream.read_until_filled(sub_block.span().slice_from_end(sub_block_length)));
}
if (extension_type == 0xF9) {
if (sub_block.size() != 4) {
dbgln_if(GIF_DEBUG, "Unexpected graphic control size");
continue;
}
u8 disposal_method = (sub_block[0] & 0x1C) >> 2;
current_image->disposal_method = (GIFImageDescriptor::DisposalMethod)disposal_method;
u8 user_input = (sub_block[0] & 0x2) >> 1;
current_image->user_input = user_input == 1;
u8 transparent = sub_block[0] & 1;
current_image->transparent = transparent == 1;
u16 duration = sub_block[1] + ((u16)sub_block[2] << 8);
current_image->duration = duration;
current_image->transparency_index = sub_block[3];
}
if (extension_type == 0xFF) {
if (sub_block.size() != 14) {
dbgln_if(GIF_DEBUG, "Unexpected application extension size: {}", sub_block.size());
continue;
}
if (sub_block[11] != 1) {
dbgln_if(GIF_DEBUG, "Unexpected application extension format");
continue;
}
u16 loops = sub_block[12] + (sub_block[13] << 8);
context.loops = loops;
}
continue;
}
if (sentinel == ',') {
context.images.append(move(current_image));
auto& image = context.images.last();
image->x = TRY(stream.read_value<LittleEndian<u16>>());
image->y = TRY(stream.read_value<LittleEndian<u16>>());
image->width = TRY(stream.read_value<LittleEndian<u16>>());
image->height = TRY(stream.read_value<LittleEndian<u16>>());
auto packed_fields = TRY(stream.read_value<u8>());
image->use_global_color_map = !(packed_fields & 0x80);
image->interlaced = (packed_fields & 0x40) != 0;
if (!image->use_global_color_map) {
size_t local_color_table_size = AK::exp2<size_t>((packed_fields & 7) + 1);
for (size_t i = 0; i < local_color_table_size; ++i) {
u8 r = TRY(stream.read_value<u8>());
u8 g = TRY(stream.read_value<u8>());
u8 b = TRY(stream.read_value<u8>());
image->color_map[i] = { r, g, b };
}
}
image->lzw_min_code_size = TRY(stream.read_value<u8>());
u8 lzw_encoded_bytes_expected = 0;
for (;;) {
lzw_encoded_bytes_expected = TRY(stream.read_value<u8>());
if (lzw_encoded_bytes_expected == 0)
break;
Array<u8, 256> buffer;
TRY(stream.read_until_filled(buffer.span().trim(lzw_encoded_bytes_expected)));
for (int i = 0; i < lzw_encoded_bytes_expected; ++i) {
image->lzw_encoded_bytes.append(buffer[i]);
}
}
current_image = make<GIFImageDescriptor>();
continue;
}
if (sentinel == ';') {
break;
}
return Error::from_string_literal("Unexpected sentinel");
}
context.state = GIFLoadingContext::State::FrameDescriptorsLoaded;
return {};
}
GIFImageDecoderPlugin::GIFImageDecoderPlugin(u8 const* data, size_t size)
{
m_context = make<GIFLoadingContext>();
m_context->data = data;
m_context->data_size = size;
}
GIFImageDecoderPlugin::~GIFImageDecoderPlugin() = default;
IntSize GIFImageDecoderPlugin::size()
{
if (m_context->error_state == GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors) {
return {};
}
if (m_context->state < GIFLoadingContext::State::FrameDescriptorsLoaded) {
if (load_gif_frame_descriptors(*m_context).is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors;
return {};
}
}
return { m_context->logical_screen.width, m_context->logical_screen.height };
}
void GIFImageDecoderPlugin::set_volatile()
{
if (m_context->frame_buffer) {
m_context->frame_buffer->set_volatile();
}
}
bool GIFImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->frame_buffer)
return false;
return m_context->frame_buffer->set_nonvolatile(was_purged);
}
bool GIFImageDecoderPlugin::initialize()
{
FixedMemoryStream stream { { m_context->data, m_context->data_size } };
return !decode_gif_header(stream).is_error();
}
bool GIFImageDecoderPlugin::sniff(ReadonlyBytes data)
{
FixedMemoryStream stream { data };
return !decode_gif_header(stream).is_error();
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> GIFImageDecoderPlugin::create(ReadonlyBytes data)
{
return adopt_nonnull_own_or_enomem(new (nothrow) GIFImageDecoderPlugin(data.data(), data.size()));
}
bool GIFImageDecoderPlugin::is_animated()
{
if (m_context->error_state != GIFLoadingContext::ErrorState::NoError) {
return false;
}
if (m_context->state < GIFLoadingContext::State::FrameDescriptorsLoaded) {
if (load_gif_frame_descriptors(*m_context).is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors;
return false;
}
}
return m_context->images.size() > 1;
}
size_t GIFImageDecoderPlugin::loop_count()
{
if (m_context->error_state != GIFLoadingContext::ErrorState::NoError) {
return 0;
}
if (m_context->state < GIFLoadingContext::State::FrameDescriptorsLoaded) {
if (load_gif_frame_descriptors(*m_context).is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors;
return 0;
}
}
return m_context->loops;
}
size_t GIFImageDecoderPlugin::frame_count()
{
if (m_context->error_state != GIFLoadingContext::ErrorState::NoError) {
return 1;
}
if (m_context->state < GIFLoadingContext::State::FrameDescriptorsLoaded) {
if (load_gif_frame_descriptors(*m_context).is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors;
return 1;
}
}
return m_context->images.size();
}
ErrorOr<ImageFrameDescriptor> GIFImageDecoderPlugin::frame(size_t index)
{
if (m_context->error_state >= GIFLoadingContext::ErrorState::FailedToDecodeAnyFrame) {
return Error::from_string_literal("GIFImageDecoderPlugin: Decoding failed");
}
if (m_context->state < GIFLoadingContext::State::FrameDescriptorsLoaded) {
if (auto result = load_gif_frame_descriptors(*m_context); result.is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToLoadFrameDescriptors;
return result.release_error();
}
}
if (m_context->error_state == GIFLoadingContext::ErrorState::NoError) {
if (auto result = decode_frame(*m_context, index); result.is_error()) {
if (m_context->state < GIFLoadingContext::State::FrameComplete) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToDecodeAnyFrame;
return result.release_error();
}
if (auto result = decode_frame(*m_context, 0); result.is_error()) {
m_context->error_state = GIFLoadingContext::ErrorState::FailedToDecodeAnyFrame;
return result.release_error();
}
m_context->error_state = GIFLoadingContext::ErrorState::FailedToDecodeAllFrames;
}
}
ImageFrameDescriptor frame {};
frame.image = TRY(m_context->frame_buffer->clone());
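// GIF frame delays are stored in centiseconds; convert to milliseconds and fall back to 100 ms
// for very short delays, as many renderers do.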
frame.duration = m_context->images[index]->duration * 10;
if (frame.duration <= 10) {
frame.duration = 100;
}
return frame;
}
ErrorOr<Optional<ReadonlyBytes>> GIFImageDecoderPlugin::icc_data()
{
return OptionalNone {};
}
}


@@ -0,0 +1,39 @@
/*
* Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/Bitmap.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct GIFLoadingContext;
class GIFImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~GIFImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
GIFImageDecoderPlugin(u8 const*, size_t);
OwnPtr<GIFLoadingContext> m_context;
};
}

View file

@ -0,0 +1,281 @@
/*
* Copyright (c) 2020, Paul Roukema <roukemap@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/ByteBuffer.h>
#include <AK/Debug.h>
#include <AK/MemoryStream.h>
#include <AK/Types.h>
#include <LibGfx/ImageFormats/BMPLoader.h>
#include <LibGfx/ImageFormats/ICOLoader.h>
#include <LibGfx/ImageFormats/PNGLoader.h>
#include <string.h>
namespace Gfx {
// FIXME: This is in little-endian order. Maybe need a NetworkOrdered<T> equivalent eventually.
struct ICONDIR {
u16 must_be_0 = 0;
u16 must_be_1 = 0;
u16 image_count = 0;
};
static_assert(AssertSize<ICONDIR, 6>());
struct ICONDIRENTRY {
u8 width;
u8 height;
u8 color_count;
u8 reserved_0;
u16 planes;
u16 bits_per_pixel;
u32 size;
u32 offset;
};
static_assert(AssertSize<ICONDIRENTRY, 16>());
};
template<>
class AK::Traits<Gfx::ICONDIR> : public GenericTraits<Gfx::ICONDIR> {
public:
static constexpr bool is_trivially_serializable() { return true; }
};
template<>
class AK::Traits<Gfx::ICONDIRENTRY> : public GenericTraits<Gfx::ICONDIRENTRY> {
public:
static constexpr bool is_trivially_serializable() { return true; }
};
namespace Gfx {
struct ICOImageDescriptor {
u16 width;
u16 height;
u16 bits_per_pixel;
size_t offset;
size_t size;
RefPtr<Gfx::Bitmap> bitmap;
};
struct ICOLoadingContext {
enum State {
NotDecoded = 0,
Error,
DirectoryDecoded,
BitmapDecoded
};
State state { NotDecoded };
u8 const* data { nullptr };
size_t data_size { 0 };
Vector<ICOImageDescriptor> images;
size_t largest_index;
};
static ErrorOr<size_t> decode_ico_header(Stream& stream)
{
auto header = TRY(stream.read_value<ICONDIR>());
if (header.must_be_0 != 0 || header.must_be_1 != 1)
return Error::from_string_literal("Invalid ICO header");
return { header.image_count };
}
static ErrorOr<ICOImageDescriptor> decode_ico_direntry(Stream& stream)
{
auto entry = TRY(stream.read_value<ICONDIRENTRY>());
ICOImageDescriptor desc = { entry.width, entry.height, entry.bits_per_pixel, entry.offset, entry.size, nullptr };
if (desc.width == 0)
desc.width = 256;
if (desc.height == 0)
desc.height = 256;
return { desc };
}
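// The scan below prefers the largest pixel area and, when areas tie, the
// highest bit depth: given two 256x256 entries at 8 bpp and 32 bpp, for
// example, the 32 bpp entry is selected.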
static size_t find_largest_image(ICOLoadingContext const& context)
{
size_t max_area = 0;
size_t index = 0;
size_t largest_index = 0;
u16 max_bits_per_pixel = 0;
for (auto const& desc : context.images) {
if (static_cast<size_t>(desc.width) * static_cast<size_t>(desc.height) >= max_area) {
if (desc.bits_per_pixel > max_bits_per_pixel) {
max_area = desc.width * desc.height;
largest_index = index;
max_bits_per_pixel = desc.bits_per_pixel;
}
}
++index;
}
return largest_index;
}
static ErrorOr<void> load_ico_directory(ICOLoadingContext& context)
{
FixedMemoryStream stream { { context.data, context.data_size } };
auto image_count = TRY(decode_ico_header(stream));
if (image_count == 0)
return Error::from_string_literal("ICO file has no images");
for (size_t i = 0; i < image_count; ++i) {
auto desc = TRY(decode_ico_direntry(stream));
if (desc.offset + desc.size < desc.offset // detect integer overflow
|| (desc.offset + desc.size) > context.data_size) {
dbgln_if(ICO_DEBUG, "load_ico_directory: offset: {} size: {} doesn't fit in ICO size: {}", desc.offset, desc.size, context.data_size);
return Error::from_string_literal("ICO size too large");
}
dbgln_if(ICO_DEBUG, "load_ico_directory: index {} width: {} height: {} offset: {} size: {}", i, desc.width, desc.height, desc.offset, desc.size);
TRY(context.images.try_append(desc));
}
context.largest_index = find_largest_image(context);
context.state = ICOLoadingContext::State::DirectoryDecoded;
return {};
}
ErrorOr<void> ICOImageDecoderPlugin::load_ico_bitmap(ICOLoadingContext& context, Optional<size_t> index)
{
if (context.state < ICOLoadingContext::State::DirectoryDecoded)
TRY(load_ico_directory(context));
size_t real_index = context.largest_index;
if (index.has_value())
real_index = index.value();
if (real_index >= context.images.size())
return Error::from_string_literal("Index out of bounds");
ICOImageDescriptor& desc = context.images[real_index];
if (PNGImageDecoderPlugin::sniff({ context.data + desc.offset, desc.size })) {
auto png_decoder = TRY(PNGImageDecoderPlugin::create({ context.data + desc.offset, desc.size }));
if (png_decoder->initialize()) {
auto decoded_png_frame = TRY(png_decoder->frame(0));
if (!decoded_png_frame.image) {
dbgln_if(ICO_DEBUG, "load_ico_bitmap: failed to load PNG encoded image index: {}", real_index);
return Error::from_string_literal("Encoded image not null");
}
desc.bitmap = decoded_png_frame.image;
return {};
}
return Error::from_string_literal("Couldn't initialize PNG Decoder");
} else {
auto bmp_decoder = TRY(BMPImageDecoderPlugin::create_as_included_in_ico({}, { context.data + desc.offset, desc.size }));
// NOTE: We don't initialize a BMP decoder in the usual way, but rather
// we just create an object and try to sniff for a frame when it's included
// inside an ICO image.
if (bmp_decoder->sniff_dib()) {
auto decoded_bmp_frame = TRY(bmp_decoder->frame(0));
if (!decoded_bmp_frame.image) {
dbgln_if(ICO_DEBUG, "load_ico_bitmap: failed to load BMP encoded image index: {}", real_index);
return Error::from_string_literal("Encoded image not null");
}
desc.bitmap = decoded_bmp_frame.image;
} else {
dbgln_if(ICO_DEBUG, "load_ico_bitmap: encoded image not supported at index: {}", real_index);
return Error::from_string_literal("Encoded image not supported");
}
return {};
}
}
bool ICOImageDecoderPlugin::sniff(ReadonlyBytes data)
{
FixedMemoryStream stream { data };
return !decode_ico_header(stream).is_error();
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> ICOImageDecoderPlugin::create(ReadonlyBytes data)
{
return adopt_nonnull_own_or_enomem(new (nothrow) ICOImageDecoderPlugin(data.data(), data.size()));
}
ICOImageDecoderPlugin::ICOImageDecoderPlugin(u8 const* data, size_t size)
{
m_context = make<ICOLoadingContext>();
m_context->data = data;
m_context->data_size = size;
}
ICOImageDecoderPlugin::~ICOImageDecoderPlugin() = default;
IntSize ICOImageDecoderPlugin::size()
{
if (m_context->state == ICOLoadingContext::State::Error) {
return {};
}
if (m_context->state < ICOLoadingContext::State::DirectoryDecoded) {
if (load_ico_directory(*m_context).is_error()) {
m_context->state = ICOLoadingContext::State::Error;
return {};
}
m_context->state = ICOLoadingContext::State::DirectoryDecoded;
}
return { m_context->images[m_context->largest_index].width, m_context->images[m_context->largest_index].height };
}
void ICOImageDecoderPlugin::set_volatile()
{
if (m_context->images[0].bitmap)
m_context->images[0].bitmap->set_volatile();
}
bool ICOImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->images[0].bitmap)
return false;
return m_context->images[0].bitmap->set_nonvolatile(was_purged);
}
bool ICOImageDecoderPlugin::initialize()
{
FixedMemoryStream stream { { m_context->data, m_context->data_size } };
return !decode_ico_header(stream).is_error();
}
bool ICOImageDecoderPlugin::is_animated()
{
return false;
}
size_t ICOImageDecoderPlugin::loop_count()
{
return 0;
}
size_t ICOImageDecoderPlugin::frame_count()
{
return 1;
}
ErrorOr<ImageFrameDescriptor> ICOImageDecoderPlugin::frame(size_t index)
{
if (index > 0)
return Error::from_string_literal("ICOImageDecoderPlugin: Invalid frame index");
if (m_context->state == ICOLoadingContext::State::Error)
return Error::from_string_literal("ICOImageDecoderPlugin: Decoding failed");
if (m_context->state < ICOLoadingContext::State::BitmapDecoded) {
// NOTE: This forces the chunk decoding to happen.
auto maybe_error = load_ico_bitmap(*m_context, {});
if (maybe_error.is_error()) {
m_context->state = ICOLoadingContext::State::Error;
return Error::from_string_literal("ICOImageDecoderPlugin: Decoding failed");
}
m_context->state = ICOLoadingContext::State::BitmapDecoded;
}
VERIFY(m_context->images[m_context->largest_index].bitmap);
return ImageFrameDescriptor { m_context->images[m_context->largest_index].bitmap, 0 };
}
ErrorOr<Optional<ReadonlyBytes>> ICOImageDecoderPlugin::icc_data()
{
return OptionalNone {};
}
}

View file

@ -0,0 +1,39 @@
/*
* Copyright (c) 2020, Paul Roukema <roukemap@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct ICOLoadingContext;
class ICOImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~ICOImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
ICOImageDecoderPlugin(u8 const*, size_t);
static ErrorOr<void> load_ico_bitmap(ICOLoadingContext& context, Optional<size_t> index);
OwnPtr<ICOLoadingContext> m_context;
};
}

View file

@ -0,0 +1,101 @@
/*
* Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/LexicalPath.h>
#include <LibGfx/ImageFormats/BMPLoader.h>
#include <LibGfx/ImageFormats/DDSLoader.h>
#include <LibGfx/ImageFormats/GIFLoader.h>
#include <LibGfx/ImageFormats/ICOLoader.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/ImageFormats/JPEGLoader.h>
#include <LibGfx/ImageFormats/PBMLoader.h>
#include <LibGfx/ImageFormats/PGMLoader.h>
#include <LibGfx/ImageFormats/PNGLoader.h>
#include <LibGfx/ImageFormats/PPMLoader.h>
#include <LibGfx/ImageFormats/QOILoader.h>
#include <LibGfx/ImageFormats/TGALoader.h>
#include <LibGfx/ImageFormats/WebPLoader.h>
namespace Gfx {
struct ImagePluginInitializer {
bool (*sniff)(ReadonlyBytes) = nullptr;
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> (*create)(ReadonlyBytes) = nullptr;
};
static constexpr ImagePluginInitializer s_initializers[] = {
{ PNGImageDecoderPlugin::sniff, PNGImageDecoderPlugin::create },
{ GIFImageDecoderPlugin::sniff, GIFImageDecoderPlugin::create },
{ BMPImageDecoderPlugin::sniff, BMPImageDecoderPlugin::create },
{ PBMImageDecoderPlugin::sniff, PBMImageDecoderPlugin::create },
{ PGMImageDecoderPlugin::sniff, PGMImageDecoderPlugin::create },
{ PPMImageDecoderPlugin::sniff, PPMImageDecoderPlugin::create },
{ ICOImageDecoderPlugin::sniff, ICOImageDecoderPlugin::create },
{ JPEGImageDecoderPlugin::sniff, JPEGImageDecoderPlugin::create },
{ DDSImageDecoderPlugin::sniff, DDSImageDecoderPlugin::create },
{ QOIImageDecoderPlugin::sniff, QOIImageDecoderPlugin::create },
{ WebPImageDecoderPlugin::sniff, WebPImageDecoderPlugin::create },
};
struct ImagePluginWithMIMETypeInitializer {
ErrorOr<bool> (*validate_before_create)(ReadonlyBytes) = nullptr;
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> (*create)(ReadonlyBytes) = nullptr;
StringView mime_type;
};
static constexpr ImagePluginWithMIMETypeInitializer s_initializers_with_mime_type[] = {
{ TGAImageDecoderPlugin::validate_before_create, TGAImageDecoderPlugin::create, "image/x-targa"sv },
};
static OwnPtr<ImageDecoderPlugin> probe_and_sniff_for_appropriate_plugin(ReadonlyBytes bytes)
{
for (auto& plugin : s_initializers) {
auto sniff_result = plugin.sniff(bytes);
if (!sniff_result)
continue;
auto plugin_decoder = plugin.create(bytes).release_value_but_fixme_should_propagate_errors();
if (plugin_decoder->initialize())
return plugin_decoder;
}
return {};
}
static OwnPtr<ImageDecoderPlugin> probe_and_sniff_for_appropriate_plugin_with_known_mime_type(StringView mime_type, ReadonlyBytes bytes)
{
for (auto& plugin : s_initializers_with_mime_type) {
if (plugin.mime_type != mime_type)
continue;
auto validation_result = plugin.validate_before_create(bytes).release_value_but_fixme_should_propagate_errors();
if (!validation_result)
continue;
auto plugin_decoder = plugin.create(bytes).release_value_but_fixme_should_propagate_errors();
if (plugin_decoder->initialize())
return plugin_decoder;
}
return {};
}
RefPtr<ImageDecoder> ImageDecoder::try_create_for_raw_bytes(ReadonlyBytes bytes, Optional<DeprecatedString> mime_type)
{
OwnPtr<ImageDecoderPlugin> plugin = probe_and_sniff_for_appropriate_plugin(bytes);
if (!plugin) {
if (mime_type.has_value()) {
plugin = probe_and_sniff_for_appropriate_plugin_with_known_mime_type(mime_type.value(), bytes);
if (!plugin)
return {};
} else {
return {};
}
}
return adopt_ref_if_nonnull(new (nothrow) ImageDecoder(plugin.release_nonnull()));
}
ImageDecoder::ImageDecoder(NonnullOwnPtr<ImageDecoderPlugin> plugin)
: m_plugin(move(plugin))
{
}
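// A minimal usage sketch, assuming the caller has already read `bytes` from a
// file; the helper name is illustrative. The MIME type is only consulted when
// header sniffing fails, as it does for formats without a magic signature
// such as TGA.
static RefPtr<Bitmap> decode_first_frame(ReadonlyBytes bytes, Optional<DeprecatedString> mime_type = {})
{
auto decoder = ImageDecoder::try_create_for_raw_bytes(bytes, move(mime_type));
if (!decoder)
return {};
auto frame_or_error = decoder->frame(0);
if (frame_or_error.is_error())
return {};
return frame_or_error.value().image;
}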
}

View file

@ -0,0 +1,71 @@
/*
* Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/ByteBuffer.h>
#include <AK/OwnPtr.h>
#include <AK/RefCounted.h>
#include <AK/RefPtr.h>
#include <LibGfx/Bitmap.h>
#include <LibGfx/Size.h>
namespace Gfx {
class Bitmap;
static constexpr size_t maximum_width_for_decoded_images = 16384;
static constexpr size_t maximum_height_for_decoded_images = 16384;
struct ImageFrameDescriptor {
RefPtr<Bitmap> image;
int duration { 0 };
};
class ImageDecoderPlugin {
public:
virtual ~ImageDecoderPlugin() = default;
virtual IntSize size() = 0;
virtual void set_volatile() = 0;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) = 0;
virtual bool initialize() = 0;
virtual bool is_animated() = 0;
virtual size_t loop_count() = 0;
virtual size_t frame_count() = 0;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) = 0;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() = 0;
protected:
ImageDecoderPlugin() = default;
};
class ImageDecoder : public RefCounted<ImageDecoder> {
public:
static RefPtr<ImageDecoder> try_create_for_raw_bytes(ReadonlyBytes, Optional<DeprecatedString> mime_type = {});
~ImageDecoder() = default;
IntSize size() const { return m_plugin->size(); }
int width() const { return size().width(); }
int height() const { return size().height(); }
void set_volatile() { m_plugin->set_volatile(); }
[[nodiscard]] bool set_nonvolatile(bool& was_purged) { return m_plugin->set_nonvolatile(was_purged); }
bool is_animated() const { return m_plugin->is_animated(); }
size_t loop_count() const { return m_plugin->loop_count(); }
size_t frame_count() const { return m_plugin->frame_count(); }
ErrorOr<ImageFrameDescriptor> frame(size_t index) const { return m_plugin->frame(index); }
ErrorOr<Optional<ReadonlyBytes>> icc_data() const { return m_plugin->icc_data(); }
private:
explicit ImageDecoder(NonnullOwnPtr<ImageDecoderPlugin>);
mutable NonnullOwnPtr<ImageDecoderPlugin> m_plugin;
};
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,40 @@
/*
* Copyright (c) 2020, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/MemoryStream.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct JPEGLoadingContext;
// For the specification, see: https://www.w3.org/Graphics/JPEG/itu-t81.pdf
class JPEGImageDecoderPlugin : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~JPEGImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
JPEGImageDecoderPlugin(NonnullOwnPtr<FixedMemoryStream>);
OwnPtr<JPEGLoadingContext> m_context;
};
}

View file

@ -0,0 +1,67 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "PBMLoader.h"
#include "AK/Endian.h"
#include "PortableImageLoaderCommon.h"
#include "Userland/Libraries/LibGfx/Streamer.h"
#include <string.h>
namespace Gfx {
bool read_image_data(PBMLoadingContext& context, Streamer& streamer)
{
u8 byte;
Vector<Gfx::Color> color_data;
if (context.type == PBMLoadingContext::Type::ASCII) {
while (streamer.read(byte)) {
if (byte == '0') {
color_data.append(Color::White);
} else if (byte == '1') {
color_data.append(Color::Black);
}
}
} else if (context.type == PBMLoadingContext::Type::RAWBITS) {
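// In the raw variant each byte packs eight pixels, most significant bit first,
// and rows are padded to a byte boundary. For example, at the start of a
// 4-pixel-wide row the byte 0b10110000 expands to black, white, black, black
// and the remaining four bits are skipped as padding.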
size_t color_index = 0;
while (streamer.read(byte)) {
for (int i = 0; i < 8; i++) {
int val = byte & 0x80;
if (val == 0) {
color_data.append(Color::White);
} else {
color_data.append(Color::Black);
}
byte = byte << 1;
color_index++;
if (color_index % context.width == 0) {
break;
}
}
}
}
size_t context_size = (u32)context.width * (u32)context.height;
if (context_size != color_data.size()) {
dbgln("Not enough color data in image.");
return false;
}
if (!create_bitmap(context)) {
return false;
}
set_pixels(context, color_data);
context.state = PBMLoadingContext::State::Bitmap;
return true;
}
}

View file

@ -0,0 +1,26 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/StringView.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/ImageFormats/PortableImageMapLoader.h>
namespace Gfx {
struct PBM {
static constexpr auto ascii_magic_number = '1';
static constexpr auto binary_magic_number = '4';
static constexpr StringView image_type = "PBM"sv;
};
using PBMLoadingContext = PortableImageMapLoadingContext<PBM>;
using PBMImageDecoderPlugin = PortableImageDecoderPlugin<PBMLoadingContext>;
bool read_image_data(PBMLoadingContext& context, Streamer& streamer);
}

View file

@ -0,0 +1,68 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Endian.h>
#include <LibGfx/ImageFormats/PGMLoader.h>
#include <LibGfx/ImageFormats/PortableImageLoaderCommon.h>
#include <LibGfx/Streamer.h>
#include <string.h>
namespace Gfx {
static void set_adjusted_pixels(PGMLoadingContext& context, Vector<Gfx::Color> const& color_data)
{
size_t index = 0;
for (size_t y = 0; y < context.height; ++y) {
for (size_t x = 0; x < context.width; ++x) {
Color color = color_data.at(index);
if (context.format_details.max_val < 255) {
color = adjust_color(context.format_details.max_val, color);
}
context.bitmap->set_pixel(x, y, color);
++index;
}
}
}
bool read_image_data(PGMLoadingContext& context, Streamer& streamer)
{
Vector<Gfx::Color> color_data;
if (context.type == PGMLoadingContext::Type::ASCII) {
u16 value;
while (true) {
if (!read_number(streamer, &value))
break;
if (!read_whitespace(context, streamer))
break;
color_data.append({ (u8)value, (u8)value, (u8)value });
}
} else if (context.type == PGMLoadingContext::Type::RAWBITS) {
u8 pixel;
while (streamer.read(pixel)) {
color_data.append({ pixel, pixel, pixel });
}
}
size_t context_size = (u32)context.width * (u32)context.height;
if (context_size != color_data.size()) {
dbgln("Not enough color data in image.");
return false;
}
if (!create_bitmap(context))
return false;
set_adjusted_pixels(context, color_data);
context.state = PGMLoadingContext::State::Bitmap;
return true;
}
}

View file

@ -0,0 +1,27 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/StringView.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/ImageFormats/PortableImageMapLoader.h>
namespace Gfx {
struct PGM {
static constexpr auto ascii_magic_number = '2';
static constexpr auto binary_magic_number = '5';
static constexpr StringView image_type = "PGM"sv;
u16 max_val { 0 };
};
using PGMLoadingContext = PortableImageMapLoadingContext<PGM>;
using PGMImageDecoderPlugin = PortableImageDecoderPlugin<PGMLoadingContext>;
bool read_image_data(PGMLoadingContext& context, Streamer& streamer);
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct PNGLoadingContext;
class PNGImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~PNGImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
PNGImageDecoderPlugin(u8 const*, size_t);
OwnPtr<PNGLoadingContext> m_context;
};
}

View file

@ -0,0 +1,59 @@
/*
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Array.h>
#include <AK/SIMD.h>
namespace Gfx::PNG {
// https://www.w3.org/TR/PNG/#5PNG-file-signature
static constexpr Array<u8, 8> header = { 0x89, 'P', 'N', 'G', 13, 10, 26, 10 };
// https://www.w3.org/TR/PNG/#6Colour-values
enum class ColorType : u8 {
Greyscale = 0,
Truecolor = 2, // RGB
IndexedColor = 3,
GreyscaleWithAlpha = 4,
TruecolorWithAlpha = 6,
};
// https://www.w3.org/TR/PNG/#9Filter-types
enum class FilterType : u8 {
None,
Sub,
Up,
Average,
Paeth,
};
// https://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
ALWAYS_INLINE u8 paeth_predictor(u8 a, u8 b, u8 c)
{
int p = a + b - c;
int pa = AK::abs(p - a);
int pb = AK::abs(p - b);
int pc = AK::abs(p - c);
if (pa <= pb && pa <= pc)
return a;
if (pb <= pc)
return b;
return c;
}
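// Worked example (illustrative values): for a = 50, b = 60, c = 40 the initial
// estimate is p = 50 + 60 - 40 = 70, giving pa = 20, pb = 10 and pc = 30, so
// the predictor returns b (the pixel above).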
ALWAYS_INLINE AK::SIMD::u8x4 paeth_predictor(AK::SIMD::u8x4 a, AK::SIMD::u8x4 b, AK::SIMD::u8x4 c)
{
return AK::SIMD::u8x4 {
paeth_predictor(a[0], b[0], c[0]),
paeth_predictor(a[1], b[1], c[1]),
paeth_predictor(a[2], b[2], c[2]),
paeth_predictor(a[3], b[3], c[3]),
};
}
};

View file

@ -0,0 +1,284 @@
/*
* Copyright (c) 2021, Pierre Hoffmeister
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2021, Aziz Berkay Yesilyurt <abyesilyurt@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Concepts.h>
#include <AK/FixedArray.h>
#include <AK/SIMDExtras.h>
#include <AK/String.h>
#include <LibCompress/Zlib.h>
#include <LibCrypto/Checksum/CRC32.h>
#include <LibGfx/Bitmap.h>
#include <LibGfx/ImageFormats/PNGWriter.h>
#pragma GCC diagnostic ignored "-Wpsabi"
namespace Gfx {
class PNGChunk {
using data_length_type = u32;
public:
explicit PNGChunk(String);
auto const& data() const { return m_data; };
String const& type() const { return m_type; };
ErrorOr<void> reserve(size_t bytes) { return m_data.try_ensure_capacity(bytes); }
template<typename T>
ErrorOr<void> add_as_big_endian(T);
ErrorOr<void> add_u8(u8);
ErrorOr<void> compress_and_add(ReadonlyBytes);
ErrorOr<void> add(ReadonlyBytes);
ErrorOr<void> store_type();
void store_data_length();
u32 crc();
private:
ByteBuffer m_data;
String m_type;
};
PNGChunk::PNGChunk(String type)
: m_type(move(type))
{
VERIFY(m_type.bytes().size() == 4);
// NOTE: These are MUST() because they should always be able to fit in m_data's inline capacity.
MUST(add_as_big_endian<data_length_type>(0));
MUST(store_type());
}
ErrorOr<void> PNGChunk::store_type()
{
TRY(add(type().bytes()));
return {};
}
void PNGChunk::store_data_length()
{
auto data_length = BigEndian<u32>(m_data.size() - sizeof(data_length_type) - m_type.bytes().size());
__builtin_memcpy(m_data.offset_pointer(0), &data_length, sizeof(u32));
}
u32 PNGChunk::crc()
{
u32 crc = Crypto::Checksum::CRC32({ m_data.offset_pointer(sizeof(data_length_type)), m_data.size() - sizeof(data_length_type) }).digest();
return crc;
}
ErrorOr<void> PNGChunk::compress_and_add(ReadonlyBytes uncompressed_bytes)
{
return add(TRY(Compress::ZlibCompressor::compress_all(uncompressed_bytes, Compress::ZlibCompressionLevel::Best)));
}
ErrorOr<void> PNGChunk::add(ReadonlyBytes bytes)
{
TRY(m_data.try_append(bytes));
return {};
}
template<typename T>
ErrorOr<void> PNGChunk::add_as_big_endian(T data)
{
auto data_out = AK::convert_between_host_and_big_endian(data);
TRY(m_data.try_append(&data_out, sizeof(T)));
return {};
}
ErrorOr<void> PNGChunk::add_u8(u8 data)
{
TRY(m_data.try_append(data));
return {};
}
ErrorOr<void> PNGWriter::add_chunk(PNGChunk& png_chunk)
{
png_chunk.store_data_length();
u32 crc = png_chunk.crc();
TRY(png_chunk.add_as_big_endian(crc));
TRY(m_data.try_append(png_chunk.data().data(), png_chunk.data().size()));
return {};
}
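// The resulting on-disk chunk layout is: a 4-byte big-endian length counting
// only the data field, the 4-byte chunk type, the data itself, and a 4-byte
// CRC computed over the type and data, which is why crc() skips the length
// field above.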
ErrorOr<void> PNGWriter::add_png_header()
{
TRY(m_data.try_append(PNG::header.data(), PNG::header.size()));
return {};
}
ErrorOr<void> PNGWriter::add_IHDR_chunk(u32 width, u32 height, u8 bit_depth, PNG::ColorType color_type, u8 compression_method, u8 filter_method, u8 interlace_method)
{
PNGChunk png_chunk { "IHDR"_short_string };
TRY(png_chunk.add_as_big_endian(width));
TRY(png_chunk.add_as_big_endian(height));
TRY(png_chunk.add_u8(bit_depth));
TRY(png_chunk.add_u8(to_underlying(color_type)));
TRY(png_chunk.add_u8(compression_method));
TRY(png_chunk.add_u8(filter_method));
TRY(png_chunk.add_u8(interlace_method));
TRY(add_chunk(png_chunk));
return {};
}
ErrorOr<void> PNGWriter::add_iCCP_chunk(ReadonlyBytes icc_data)
{
// https://www.w3.org/TR/png/#11iCCP
PNGChunk chunk { "iCCP"_short_string };
TRY(chunk.add("embedded profile"sv.bytes()));
TRY(chunk.add_u8(0)); // \0-terminate profile name
TRY(chunk.add_u8(0)); // compression method deflate
TRY(chunk.compress_and_add(icc_data));
TRY(add_chunk(chunk));
return {};
}
ErrorOr<void> PNGWriter::add_IEND_chunk()
{
PNGChunk png_chunk { "IEND"_short_string };
TRY(add_chunk(png_chunk));
return {};
}
union [[gnu::packed]] Pixel {
ARGB32 rgba { 0 };
struct {
u8 red;
u8 green;
u8 blue;
u8 alpha;
};
AK::SIMD::u8x4 simd;
ALWAYS_INLINE static AK::SIMD::u8x4 gfx_to_png(Pixel pixel)
{
swap(pixel.red, pixel.blue);
return pixel.simd;
}
};
static_assert(AssertSize<Pixel, 4>());
ErrorOr<void> PNGWriter::add_IDAT_chunk(Gfx::Bitmap const& bitmap)
{
PNGChunk png_chunk { "IDAT"_short_string };
TRY(png_chunk.reserve(bitmap.size_in_bytes()));
ByteBuffer uncompressed_block_data;
TRY(uncompressed_block_data.try_ensure_capacity(bitmap.size_in_bytes() + bitmap.height()));
auto dummy_scanline = TRY(FixedArray<Pixel>::create(bitmap.width()));
auto const* scanline_minus_1 = dummy_scanline.data();
for (int y = 0; y < bitmap.height(); ++y) {
auto* scanline = reinterpret_cast<Pixel const*>(bitmap.scanline(y));
struct Filter {
PNG::FilterType type;
ByteBuffer buffer {};
int sum = 0;
ErrorOr<void> append(u8 byte)
{
TRY(buffer.try_append(byte));
sum += static_cast<i8>(byte);
return {};
}
ErrorOr<void> append(AK::SIMD::u8x4 simd)
{
TRY(append(simd[0]));
TRY(append(simd[1]));
TRY(append(simd[2]));
TRY(append(simd[3]));
return {};
}
};
Filter none_filter { .type = PNG::FilterType::None };
TRY(none_filter.buffer.try_ensure_capacity(sizeof(Pixel) * bitmap.height()));
Filter sub_filter { .type = PNG::FilterType::Sub };
TRY(sub_filter.buffer.try_ensure_capacity(sizeof(Pixel) * bitmap.height()));
Filter up_filter { .type = PNG::FilterType::Up };
TRY(up_filter.buffer.try_ensure_capacity(sizeof(Pixel) * bitmap.height()));
Filter average_filter { .type = PNG::FilterType::Average };
TRY(average_filter.buffer.try_ensure_capacity(sizeof(ARGB32) * bitmap.height()));
Filter paeth_filter { .type = PNG::FilterType::Paeth };
TRY(paeth_filter.buffer.try_ensure_capacity(sizeof(ARGB32) * bitmap.height()));
auto pixel_x_minus_1 = Pixel::gfx_to_png(dummy_scanline[0]);
auto pixel_xy_minus_1 = Pixel::gfx_to_png(dummy_scanline[0]);
for (int x = 0; x < bitmap.width(); ++x) {
auto pixel = Pixel::gfx_to_png(scanline[x]);
auto pixel_y_minus_1 = Pixel::gfx_to_png(scanline_minus_1[x]);
TRY(none_filter.append(pixel));
TRY(sub_filter.append(pixel - pixel_x_minus_1));
TRY(up_filter.append(pixel - pixel_y_minus_1));
// The sum Orig(a) + Orig(b) shall be performed without overflow (using at least nine-bit arithmetic).
auto sum = AK::SIMD::to_u16x4(pixel_x_minus_1) + AK::SIMD::to_u16x4(pixel_y_minus_1);
auto average = AK::SIMD::to_u8x4(sum / 2);
TRY(average_filter.append(pixel - average));
TRY(paeth_filter.append(pixel - PNG::paeth_predictor(pixel_x_minus_1, pixel_y_minus_1, pixel_xy_minus_1)));
pixel_x_minus_1 = pixel;
pixel_xy_minus_1 = pixel_y_minus_1;
}
scanline_minus_1 = scanline;
// 12.8 Filter selection: https://www.w3.org/TR/PNG/#12Filter-selection
// For best compression of truecolour and greyscale images, the recommended approach
// is adaptive filtering in which a filter is chosen for each scanline.
// The following simple heuristic has performed well in early tests:
// compute the output scanline using all five filters, and select the filter that gives the smallest sum of absolute values of outputs.
// (Consider the output bytes as signed differences for this test.)
Filter& best_filter = none_filter;
if (abs(best_filter.sum) > abs(sub_filter.sum))
best_filter = sub_filter;
if (abs(best_filter.sum) > abs(up_filter.sum))
best_filter = up_filter;
if (abs(best_filter.sum) > abs(average_filter.sum))
best_filter = average_filter;
if (abs(best_filter.sum) > abs(paeth_filter.sum))
best_filter = paeth_filter;
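// For instance, with per-filter sums of +900 (None), -35 (Sub), +120 (Up),
// +60 (Average) and -40 (Paeth), Sub wins because |-35| is the smallest
// absolute sum; these figures are illustrative only.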
TRY(uncompressed_block_data.try_append(to_underlying(best_filter.type)));
TRY(uncompressed_block_data.try_append(best_filter.buffer));
}
TRY(png_chunk.compress_and_add(uncompressed_block_data));
TRY(add_chunk(png_chunk));
return {};
}
ErrorOr<ByteBuffer> PNGWriter::encode(Gfx::Bitmap const& bitmap, Options options)
{
PNGWriter writer;
TRY(writer.add_png_header());
TRY(writer.add_IHDR_chunk(bitmap.width(), bitmap.height(), 8, PNG::ColorType::TruecolorWithAlpha, 0, 0, 0));
if (options.icc_data.has_value())
TRY(writer.add_iCCP_chunk(options.icc_data.value()));
TRY(writer.add_IDAT_chunk(bitmap));
TRY(writer.add_IEND_chunk());
return ByteBuffer::copy(writer.m_data);
}
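// A minimal usage sketch, assuming `bitmap` already exists and `icc_bytes` is
// an optional profile supplied by the caller; both names are illustrative.
static ErrorOr<ByteBuffer> encode_png_with_profile(Bitmap const& bitmap, Optional<ReadonlyBytes> icc_bytes)
{
PNGWriter::Options options;
options.icc_data = icc_bytes;
return PNGWriter::encode(bitmap, options);
}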
}

View file

@ -0,0 +1,44 @@
/*
* Copyright (c) 2021, Pierre Hoffmeister
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Optional.h>
#include <AK/Vector.h>
#include <LibGfx/Forward.h>
#include <LibGfx/ImageFormats/PNGShared.h>
namespace Gfx {
class PNGChunk;
// This is not a nested struct to work around https://llvm.org/PR36684
struct PNGWriterOptions {
// Data for the iCCP chunk.
// FIXME: Allow writing cICP, sRGB, or gAMA instead too.
Optional<ReadonlyBytes> icc_data;
};
class PNGWriter {
public:
using Options = PNGWriterOptions;
static ErrorOr<ByteBuffer> encode(Gfx::Bitmap const&, Options options = Options {});
private:
PNGWriter() = default;
Vector<u8> m_data;
ErrorOr<void> add_chunk(PNGChunk&);
ErrorOr<void> add_png_header();
ErrorOr<void> add_IHDR_chunk(u32 width, u32 height, u8 bit_depth, PNG::ColorType color_type, u8 compression_method, u8 filter_method, u8 interlace_method);
ErrorOr<void> add_iCCP_chunk(ReadonlyBytes icc_data);
ErrorOr<void> add_IDAT_chunk(Gfx::Bitmap const&);
ErrorOr<void> add_IEND_chunk();
};
}

View file

@ -0,0 +1,72 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "PPMLoader.h"
#include "PortableImageLoaderCommon.h"
#include <AK/Endian.h>
#include <AK/LexicalPath.h>
#include <AK/ScopeGuard.h>
#include <AK/StringBuilder.h>
#include <LibGfx/Streamer.h>
#include <string.h>
namespace Gfx {
bool read_image_data(PPMLoadingContext& context, Streamer& streamer)
{
Vector<Gfx::Color> color_data;
color_data.ensure_capacity(context.width * context.height);
if (context.type == PPMLoadingContext::Type::ASCII) {
u16 red;
u16 green;
u16 blue;
while (true) {
if (!read_number(streamer, &red))
break;
if (!read_whitespace(context, streamer))
break;
if (!read_number(streamer, &green))
break;
if (!read_whitespace(context, streamer))
break;
if (!read_number(streamer, &blue))
break;
if (!read_whitespace(context, streamer))
break;
Color color { (u8)red, (u8)green, (u8)blue };
if (context.format_details.max_val < 255)
color = adjust_color(context.format_details.max_val, color);
color_data.append(color);
}
} else if (context.type == PPMLoadingContext::Type::RAWBITS) {
u8 pixel[3];
while (streamer.read_bytes(pixel, 3)) {
color_data.append({ pixel[0], pixel[1], pixel[2] });
}
}
if (context.width * context.height != color_data.size())
return false;
if (!create_bitmap(context)) {
return false;
}
set_pixels(context, color_data);
context.state = PPMLoadingContext::State::Bitmap;
return true;
}
}

View file

@ -0,0 +1,27 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/StringView.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/ImageFormats/PortableImageMapLoader.h>
namespace Gfx {
struct PPM {
static constexpr auto ascii_magic_number = '3';
static constexpr auto binary_magic_number = '6';
static constexpr StringView image_type = "PPM"sv;
u16 max_val { 0 };
};
using PPMLoadingContext = PortableImageMapLoadingContext<PPM>;
using PPMImageDecoderPlugin = PortableImageDecoderPlugin<PPMLoadingContext>;
bool read_image_data(PPMLoadingContext& context, Streamer& streamer);
}

View file

@ -0,0 +1,54 @@
/*
* Copyright (c) 2023, Lucas Chollet <lucas.chollet@serenityos.org >
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "PortableFormatWriter.h"
#include <AK/String.h>
namespace Gfx {
ErrorOr<ByteBuffer> PortableFormatWriter::encode(Bitmap const& bitmap, Options options)
{
ByteBuffer buffer;
// FIXME: Add support for PBM and PGM
TRY(add_header(buffer, options, bitmap.width(), bitmap.height(), 255));
TRY(add_pixels(buffer, options, bitmap));
return buffer;
}
ErrorOr<void> PortableFormatWriter::add_header(ByteBuffer& buffer, Options const& options, u32 width, u32 height, u32 maximal_value)
{
TRY(buffer.try_append(TRY(String::formatted("P{}\n", options.format == Options::Format::ASCII ? "3"sv : "6"sv)).bytes()));
TRY(buffer.try_append(TRY(String::formatted("# {}\n", options.comment)).bytes()));
TRY(buffer.try_append(TRY(String::formatted("{} {}\n", width, height)).bytes()));
TRY(buffer.try_append(TRY(String::formatted("{}\n", maximal_value)).bytes()));
return {};
}
ErrorOr<void> PortableFormatWriter::add_pixels(ByteBuffer& buffer, Options const& options, Bitmap const& bitmap)
{
for (int i = 0; i < bitmap.height(); ++i) {
for (int j = 0; j < bitmap.width(); ++j) {
auto color = bitmap.get_pixel(j, i);
if (options.format == Options::Format::ASCII) {
TRY(buffer.try_append(TRY(String::formatted("{} {} {}\t", color.red(), color.green(), color.blue())).bytes()));
} else {
TRY(buffer.try_append(color.red()));
TRY(buffer.try_append(color.green()));
TRY(buffer.try_append(color.blue()));
}
}
if (options.format == Options::Format::ASCII)
TRY(buffer.try_append('\n'));
}
return {};
}
}

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2023, Lucas Chollet <lucas.chollet@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/ByteBuffer.h>
#include <LibGfx/Bitmap.h>
namespace Gfx {
// This is not a nested struct to work around https://llvm.org/PR36684
struct PortableFormatWriterOptions {
enum class Format {
ASCII,
Raw,
};
Format format = Format::Raw;
StringView comment = "Generated with SerenityOS - LibGfx."sv;
};
class PortableFormatWriter {
public:
using Options = PortableFormatWriterOptions;
static ErrorOr<ByteBuffer> encode(Bitmap const&, Options options = Options {});
private:
PortableFormatWriter() = delete;
static ErrorOr<void> add_header(ByteBuffer&, Options const& options, u32 width, u32 height, u32 max_value);
static ErrorOr<void> add_pixels(ByteBuffer&, Options const& options, Bitmap const&);
};
}

View file

@ -0,0 +1,272 @@
/*
* Copyright (c) 2020, Hüseyin Aslıtürk <asliturk@hotmail.com>
* Copyright (c) 2020-2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Debug.h>
#include <AK/DeprecatedString.h>
#include <AK/Endian.h>
#include <AK/ScopeGuard.h>
#include <AK/StringBuilder.h>
#include <AK/Types.h>
#include <AK/Vector.h>
#include <LibGfx/Bitmap.h>
#include <LibGfx/Color.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/Streamer.h>
namespace Gfx {
static constexpr Color adjust_color(u16 max_val, Color color)
{
color.set_red((color.red() * 255) / max_val);
color.set_green((color.green() * 255) / max_val);
color.set_blue((color.blue() * 255) / max_val);
return color;
}
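// For example, with max_val = 15 a sample value of 7 is rescaled to
// (7 * 255) / 15 = 119.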
template<typename TValue>
static bool read_number(Streamer& streamer, TValue* value)
{
u8 byte {};
StringBuilder sb {};
while (streamer.read(byte)) {
if (byte == ' ' || byte == '\t' || byte == '\n' || byte == '\r') {
streamer.step_back();
break;
}
sb.append(byte);
}
auto const opt_value = sb.to_deprecated_string().to_uint();
if (!opt_value.has_value()) {
*value = 0;
return false;
}
*value = static_cast<u16>(opt_value.value());
return true;
}
template<typename TContext>
static bool read_comment([[maybe_unused]] TContext& context, Streamer& streamer)
{
bool exist = false;
u8 byte {};
while (streamer.read(byte)) {
if (byte == '#') {
exist = true;
} else if (byte == '\t' || byte == '\n') {
return exist;
}
}
return exist;
}
template<typename TContext>
static bool read_magic_number(TContext& context, Streamer& streamer)
{
if (context.state >= TContext::State::MagicNumber) {
return true;
}
if (!context.data || context.data_size < 2) {
context.state = TContext::State::Error;
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "There is no enough data for {}", TContext::FormatDetails::image_type);
return false;
}
u8 magic_number[2] {};
if (!streamer.read_bytes(magic_number, 2)) {
context.state = TContext::State::Error;
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "We can't read magic number for {}", TContext::FormatDetails::image_type);
return false;
}
if (magic_number[0] == 'P' && magic_number[1] == TContext::FormatDetails::ascii_magic_number) {
context.type = TContext::Type::ASCII;
context.state = TContext::State::MagicNumber;
return true;
}
if (magic_number[0] == 'P' && magic_number[1] == TContext::FormatDetails::binary_magic_number) {
context.type = TContext::Type::RAWBITS;
context.state = TContext::State::MagicNumber;
return true;
}
context.state = TContext::State::Error;
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "Magic number is not valid for {}{}{}", magic_number[0], magic_number[1], TContext::FormatDetails::image_type);
return false;
}
template<typename TContext>
static bool read_whitespace(TContext& context, Streamer& streamer)
{
bool exist = false;
u8 byte {};
while (streamer.read(byte)) {
if (byte == ' ' || byte == '\t' || byte == '\n' || byte == '\r') {
exist = true;
} else if (byte == '#') {
streamer.step_back();
read_comment(context, streamer);
} else {
streamer.step_back();
return exist;
}
}
return exist;
}
template<typename TContext>
static bool read_width(TContext& context, Streamer& streamer)
{
if (bool const result = read_number(streamer, &context.width);
!result || context.width == 0) {
return false;
}
context.state = TContext::State::Width;
return true;
}
template<typename TContext>
static bool read_height(TContext& context, Streamer& streamer)
{
if (bool const result = read_number(streamer, &context.height);
!result || context.height == 0) {
return false;
}
context.state = TContext::State::Height;
return true;
}
template<typename TContext>
static bool read_max_val(TContext& context, Streamer& streamer)
{
if (bool const result = read_number(streamer, &context.format_details.max_val);
!result || context.format_details.max_val == 0) {
return false;
}
if (context.format_details.max_val > 255) {
dbgln_if(PORTABLE_IMAGE_LOADER_DEBUG, "We can't parse 2 byte color for {}", TContext::FormatDetails::image_type);
context.state = TContext::State::Error;
return false;
}
context.state = TContext::State::Maxval;
return true;
}
template<typename TContext>
static bool create_bitmap(TContext& context)
{
auto bitmap_or_error = Bitmap::create(BitmapFormat::BGRx8888, { context.width, context.height });
if (bitmap_or_error.is_error()) {
context.state = TContext::State::Error;
return false;
}
context.bitmap = bitmap_or_error.release_value_but_fixme_should_propagate_errors();
return true;
}
template<typename TContext>
static void set_pixels(TContext& context, Vector<Gfx::Color> const& color_data)
{
size_t index = 0;
for (size_t y = 0; y < context.height; ++y) {
for (size_t x = 0; x < context.width; ++x) {
context.bitmap->set_pixel(x, y, color_data.at(index));
index++;
}
}
}
template<typename TContext>
static bool decode(TContext& context)
{
if (context.state >= TContext::State::Decoded)
return true;
auto error_guard = ArmedScopeGuard([&] {
context.state = TContext::State::Error;
});
Streamer streamer(context.data, context.data_size);
if (!read_magic_number(context, streamer))
return false;
if (!read_whitespace(context, streamer))
return false;
if (!read_width(context, streamer))
return false;
if (!read_whitespace(context, streamer))
return false;
if (!read_height(context, streamer))
return false;
if (context.width > maximum_width_for_decoded_images || context.height > maximum_height_for_decoded_images) {
dbgln("This portable network image is too large for comfort: {}x{}", context.width, context.height);
return false;
}
if (!read_whitespace(context, streamer))
return false;
if constexpr (requires { context.format_details.max_val; }) {
if (!read_max_val(context, streamer))
return false;
if (!read_whitespace(context, streamer))
return false;
}
if (!read_image_data(context, streamer))
return false;
error_guard.disarm();
context.state = TContext::State::Decoded;
return true;
}
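// For example, an ASCII PGM starting "P2\n2 2\n255\n0 255 255 0\n" is consumed
// in this order: magic number "P2", width 2, height 2, max_val 255, and then
// four grey samples handed to read_image_data().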
template<typename TContext>
static RefPtr<Gfx::Bitmap> load_impl(u8 const* data, size_t data_size)
{
TContext context {};
context.data = data;
context.data_size = data_size;
if (!decode(context)) {
return nullptr;
}
return context.bitmap;
}
template<typename TContext>
static RefPtr<Gfx::Bitmap> load_from_memory(u8 const* data, size_t length, DeprecatedString const& mmap_name)
{
auto bitmap = load_impl<TContext>(data, length);
if (bitmap)
bitmap->set_mmap_name(DeprecatedString::formatted("Gfx::Bitmap [{}] - Decoded {}: {}", bitmap->size(), TContext::FormatDetails::image_type, mmap_name));
return bitmap;
}
}

View file

@ -0,0 +1,194 @@
/*
* Copyright (c) 2020, Hüseyin ASLITÜRK <asliturk@hotmail.com>
* Copyright (c) 2022, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/RefPtr.h>
#include <AK/StringView.h>
#include <AK/Types.h>
#include <LibGfx/Bitmap.h>
#include <LibGfx/ImageFormats/PortableImageLoaderCommon.h>
namespace Gfx {
template<class TFormatDetails>
struct PortableImageMapLoadingContext {
using FormatDetails = TFormatDetails;
enum class Type {
Unknown,
ASCII,
RAWBITS
};
enum class State {
NotDecoded = 0,
Error,
MagicNumber,
Width,
Height,
Maxval,
Bitmap,
Decoded
};
Type type { Type::Unknown };
State state { State::NotDecoded };
u8 const* data { nullptr };
size_t data_size { 0 };
size_t width { 0 };
size_t height { 0 };
FormatDetails format_details {};
RefPtr<Gfx::Bitmap> bitmap;
};
template<typename TContext>
class PortableImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
PortableImageDecoderPlugin(u8 const*, size_t);
virtual ~PortableImageDecoderPlugin() override = default;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
OwnPtr<TContext> m_context;
};
template<typename TContext>
PortableImageDecoderPlugin<TContext>::PortableImageDecoderPlugin(u8 const* data, size_t size)
{
m_context = make<TContext>();
m_context->data = data;
m_context->data_size = size;
}
template<typename TContext>
IntSize PortableImageDecoderPlugin<TContext>::size()
{
if (m_context->state == TContext::State::Error)
return {};
if (m_context->state < TContext::State::Decoded) {
bool success = decode(*m_context);
if (!success)
return {};
}
return { m_context->width, m_context->height };
}
template<typename TContext>
void PortableImageDecoderPlugin<TContext>::set_volatile()
{
if (m_context->bitmap)
m_context->bitmap->set_volatile();
}
template<typename TContext>
bool PortableImageDecoderPlugin<TContext>::set_nonvolatile(bool& was_purged)
{
if (!m_context->bitmap)
return false;
return m_context->bitmap->set_nonvolatile(was_purged);
}
template<typename TContext>
bool PortableImageDecoderPlugin<TContext>::initialize()
{
using Context = TContext;
if (m_context->data_size < 2)
return false;
if (m_context->data[0] == 'P' && m_context->data[1] == Context::FormatDetails::ascii_magic_number)
return true;
if (m_context->data[0] == 'P' && m_context->data[1] == Context::FormatDetails::binary_magic_number)
return true;
return false;
}
template<typename TContext>
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> PortableImageDecoderPlugin<TContext>::create(ReadonlyBytes data)
{
return adopt_nonnull_own_or_enomem(new (nothrow) PortableImageDecoderPlugin<TContext>(data.data(), data.size()));
}
template<typename TContext>
bool PortableImageDecoderPlugin<TContext>::sniff(ReadonlyBytes data)
{
using Context = TContext;
if (data.size() < 2)
return false;
if (data.data()[0] == 'P' && data.data()[1] == Context::FormatDetails::ascii_magic_number)
return true;
if (data.data()[0] == 'P' && data.data()[1] == Context::FormatDetails::binary_magic_number)
return true;
return false;
}
template<typename TContext>
bool PortableImageDecoderPlugin<TContext>::is_animated()
{
return false;
}
template<typename TContext>
size_t PortableImageDecoderPlugin<TContext>::loop_count()
{
return 0;
}
template<typename TContext>
size_t PortableImageDecoderPlugin<TContext>::frame_count()
{
return 1;
}
template<typename TContext>
ErrorOr<ImageFrameDescriptor> PortableImageDecoderPlugin<TContext>::frame(size_t index)
{
if (index > 0)
return Error::from_string_literal("PortableImageDecoderPlugin: Invalid frame index");
if (m_context->state == TContext::State::Error)
return Error::from_string_literal("PortableImageDecoderPlugin: Decoding failed");
if (m_context->state < TContext::State::Decoded) {
bool success = decode(*m_context);
if (!success)
return Error::from_string_literal("PortableImageDecoderPlugin: Decoding failed");
}
VERIFY(m_context->bitmap);
return ImageFrameDescriptor { m_context->bitmap, 0 };
}
template<typename TContext>
ErrorOr<Optional<ReadonlyBytes>> PortableImageDecoderPlugin<TContext>::icc_data()
{
return OptionalNone {};
}
}

View file

@ -0,0 +1,266 @@
/*
* Copyright (c) 2021, Linus Groh <linusg@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Endian.h>
#include <AK/MemoryStream.h>
#include <LibGfx/Bitmap.h>
#include <LibGfx/ImageFormats/QOILoader.h>
namespace Gfx {
static constexpr auto QOI_MAGIC = "qoif"sv;
static constexpr u8 QOI_OP_RGB = 0b11111110;
static constexpr u8 QOI_OP_RGBA = 0b11111111;
static constexpr u8 QOI_OP_INDEX = 0b00000000;
static constexpr u8 QOI_OP_DIFF = 0b01000000;
static constexpr u8 QOI_OP_LUMA = 0b10000000;
static constexpr u8 QOI_OP_RUN = 0b11000000;
static constexpr u8 QOI_MASK_2 = 0b11000000;
static constexpr u8 END_MARKER[] = { 0, 0, 0, 0, 0, 0, 0, 1 };
static ErrorOr<QOIHeader> decode_qoi_header(Stream& stream)
{
auto header = TRY(stream.read_value<QOIHeader>());
if (StringView { header.magic, array_size(header.magic) } != QOI_MAGIC)
return Error::from_string_literal("Invalid QOI image: incorrect header magic");
header.width = AK::convert_between_host_and_big_endian(header.width);
header.height = AK::convert_between_host_and_big_endian(header.height);
return header;
}
static ErrorOr<Color> decode_qoi_op_rgb(Stream& stream, u8 first_byte, Color pixel)
{
VERIFY(first_byte == QOI_OP_RGB);
u8 bytes[3];
TRY(stream.read_until_filled({ &bytes, array_size(bytes) }));
// The alpha value remains unchanged from the previous pixel.
return Color { bytes[0], bytes[1], bytes[2], pixel.alpha() };
}
static ErrorOr<Color> decode_qoi_op_rgba(Stream& stream, u8 first_byte)
{
VERIFY(first_byte == QOI_OP_RGBA);
u8 bytes[4];
TRY(stream.read_until_filled({ &bytes, array_size(bytes) }));
return Color { bytes[0], bytes[1], bytes[2], bytes[3] };
}
static ErrorOr<u8> decode_qoi_op_index(Stream&, u8 first_byte)
{
VERIFY((first_byte & QOI_MASK_2) == QOI_OP_INDEX);
u8 index = first_byte & ~QOI_MASK_2;
VERIFY(index <= 63);
return index;
}
static ErrorOr<Color> decode_qoi_op_diff(Stream&, u8 first_byte, Color pixel)
{
VERIFY((first_byte & QOI_MASK_2) == QOI_OP_DIFF);
u8 dr = (first_byte & 0b00110000) >> 4;
u8 dg = (first_byte & 0b00001100) >> 2;
u8 db = (first_byte & 0b00000011);
VERIFY(dr <= 3 && dg <= 3 && db <= 3);
// Values are stored as unsigned integers with a bias of 2.
return Color {
static_cast<u8>(pixel.red() + static_cast<i8>(dr - 2)),
static_cast<u8>(pixel.green() + static_cast<i8>(dg - 2)),
static_cast<u8>(pixel.blue() + static_cast<i8>(db - 2)),
pixel.alpha(),
};
}
static ErrorOr<Color> decode_qoi_op_luma(Stream& stream, u8 first_byte, Color pixel)
{
VERIFY((first_byte & QOI_MASK_2) == QOI_OP_LUMA);
auto byte = TRY(stream.read_value<u8>());
u8 diff_green = (first_byte & ~QOI_MASK_2);
u8 dr_dg = (byte & 0b11110000) >> 4;
u8 db_dg = (byte & 0b00001111);
// Values are stored as unsigned integers with a bias of 32 for the green channel and a bias of 8 for the red and blue channel.
return Color {
static_cast<u8>(pixel.red() + static_cast<i8>((diff_green - 32) + (dr_dg - 8))),
static_cast<u8>(pixel.green() + static_cast<i8>(diff_green - 32)),
static_cast<u8>(pixel.blue() + static_cast<i8>((diff_green - 32) + (db_dg - 8))),
pixel.alpha(),
};
}
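// Worked example (illustrative bytes): a first byte of 0b10100011 encodes
// diff_green = 35, i.e. a green delta of +3; a second byte of 0xA6 gives
// dr_dg = 10 and db_dg = 6, so the red delta is 3 + (10 - 8) = +5 and the
// blue delta is 3 + (6 - 8) = +1.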
static ErrorOr<u8> decode_qoi_op_run(Stream&, u8 first_byte)
{
VERIFY((first_byte & QOI_MASK_2) == QOI_OP_RUN);
u8 run = first_byte & ~QOI_MASK_2;
// The run-length is stored with a bias of -1.
run += 1;
// Note that the run-lengths 63 and 64 (b111110 and b111111) are illegal as they are occupied by the QOI_OP_RGB and QOI_OP_RGBA tags.
if (run == 63 || run == 64)
return Error::from_string_literal("Invalid QOI image: illegal run length");
VERIFY(run >= 1 && run <= 62);
return run;
}
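// Worked example: a first byte of 0b11000101 carries the six low bits
// 0b000101 = 5, so after removing the -1 bias the run covers 6 pixels.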
static ErrorOr<void> decode_qoi_end_marker(Stream& stream)
{
u8 bytes[array_size(END_MARKER)];
TRY(stream.read_until_filled({ &bytes, array_size(bytes) }));
if (!stream.is_eof())
return Error::from_string_literal("Invalid QOI image: expected end of stream but more bytes are available");
if (memcmp(&END_MARKER, &bytes, array_size(bytes)) != 0)
return Error::from_string_literal("Invalid QOI image: incorrect end marker");
return {};
}
static ErrorOr<NonnullRefPtr<Bitmap>> decode_qoi_image(Stream& stream, u32 width, u32 height)
{
// FIXME: Why is Gfx::Bitmap's size signed? Makes no sense whatsoever.
if (width > NumericLimits<int>::max())
return Error::from_string_literal("Cannot create bitmap for QOI image of valid size, width exceeds maximum Gfx::Bitmap width");
if (height > NumericLimits<int>::max())
return Error::from_string_literal("Cannot create bitmap for QOI image of valid size, height exceeds maximum Gfx::Bitmap height");
auto bitmap = TRY(Bitmap::create(BitmapFormat::BGRA8888, { width, height }));
u8 run = 0;
Color pixel = { 0, 0, 0, 255 };
Color previous_pixels[64] {};
for (u32 y = 0; y < height; ++y) {
for (u32 x = 0; x < width; ++x) {
if (run > 0)
--run;
if (run == 0) {
auto first_byte = TRY(stream.read_value<u8>());
if (first_byte == QOI_OP_RGB)
pixel = TRY(decode_qoi_op_rgb(stream, first_byte, pixel));
else if (first_byte == QOI_OP_RGBA)
pixel = TRY(decode_qoi_op_rgba(stream, first_byte));
else if ((first_byte & QOI_MASK_2) == QOI_OP_INDEX)
pixel = previous_pixels[TRY(decode_qoi_op_index(stream, first_byte))];
else if ((first_byte & QOI_MASK_2) == QOI_OP_DIFF)
pixel = TRY(decode_qoi_op_diff(stream, first_byte, pixel));
else if ((first_byte & QOI_MASK_2) == QOI_OP_LUMA)
pixel = TRY(decode_qoi_op_luma(stream, first_byte, pixel));
else if ((first_byte & QOI_MASK_2) == QOI_OP_RUN)
run = TRY(decode_qoi_op_run(stream, first_byte));
else
return Error::from_string_literal("Invalid QOI image: unknown chunk tag");
}
auto index_position = (pixel.red() * 3 + pixel.green() * 5 + pixel.blue() * 7 + pixel.alpha() * 11) % 64;
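// For example, opaque black (0, 0, 0, 255) hashes to (255 * 11) % 64 = 53.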
previous_pixels[index_position] = pixel;
bitmap->set_pixel(x, y, pixel);
}
}
TRY(decode_qoi_end_marker(stream));
return { move(bitmap) };
}
QOIImageDecoderPlugin::QOIImageDecoderPlugin(NonnullOwnPtr<Stream> stream)
{
m_context = make<QOILoadingContext>();
m_context->stream = move(stream);
}
IntSize QOIImageDecoderPlugin::size()
{
if (m_context->state < QOILoadingContext::State::HeaderDecoded) {
// FIXME: This is a weird API (inherited from ImageDecoderPlugin), should probably propagate errors by returning ErrorOr<IntSize>.
// For the time being, ignore the result and rely on the context's state.
(void)decode_header_and_update_context(*m_context->stream);
}
if (m_context->state == QOILoadingContext::State::Error)
return {};
return { m_context->header.width, m_context->header.height };
}
void QOIImageDecoderPlugin::set_volatile()
{
if (m_context->bitmap)
m_context->bitmap->set_volatile();
}
bool QOIImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->bitmap)
return false;
return m_context->bitmap->set_nonvolatile(was_purged);
}
bool QOIImageDecoderPlugin::initialize()
{
return !decode_header_and_update_context(*m_context->stream).is_error();
}
bool QOIImageDecoderPlugin::sniff(ReadonlyBytes data)
{
FixedMemoryStream stream { { data.data(), data.size() } };
return !decode_qoi_header(stream).is_error();
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> QOIImageDecoderPlugin::create(ReadonlyBytes data)
{
auto stream = TRY(try_make<FixedMemoryStream>(data));
return adopt_nonnull_own_or_enomem(new (nothrow) QOIImageDecoderPlugin(move(stream)));
}
ErrorOr<ImageFrameDescriptor> QOIImageDecoderPlugin::frame(size_t index)
{
if (index > 0)
return Error::from_string_literal("Invalid frame index");
// No one should try to decode the frame again after an error was already returned.
VERIFY(m_context->state != QOILoadingContext::State::Error);
if (m_context->state == QOILoadingContext::State::NotDecoded) {
TRY(decode_header_and_update_context(*m_context->stream));
TRY(decode_image_and_update_context(*m_context->stream));
} else if (m_context->state == QOILoadingContext::State::HeaderDecoded) {
TRY(decode_image_and_update_context(*m_context->stream));
}
VERIFY(m_context->state == QOILoadingContext::State::ImageDecoded);
VERIFY(m_context->bitmap);
return ImageFrameDescriptor { m_context->bitmap, 0 };
}
ErrorOr<void> QOIImageDecoderPlugin::decode_header_and_update_context(Stream& stream)
{
VERIFY(m_context->state < QOILoadingContext::State::HeaderDecoded);
auto error_or_header = decode_qoi_header(stream);
if (error_or_header.is_error()) {
m_context->state = QOILoadingContext::State::Error;
return error_or_header.release_error();
}
m_context->state = QOILoadingContext::State::HeaderDecoded;
m_context->header = error_or_header.release_value();
return {};
}
ErrorOr<void> QOIImageDecoderPlugin::decode_image_and_update_context(Stream& stream)
{
VERIFY(m_context->state < QOILoadingContext::State::ImageDecoded);
auto error_or_bitmap = decode_qoi_image(stream, m_context->header.width, m_context->header.height);
if (error_or_bitmap.is_error()) {
m_context->state = QOILoadingContext::State::Error;
return error_or_bitmap.release_error();
}
m_context->state = QOILoadingContext::State::ImageDecoded;
m_context->bitmap = error_or_bitmap.release_value();
return {};
}
ErrorOr<Optional<ReadonlyBytes>> QOIImageDecoderPlugin::icc_data()
{
return OptionalNone {};
}
}

View file

@ -0,0 +1,70 @@
/*
* Copyright (c) 2021, Linus Groh <linusg@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Forward.h>
#include <LibGfx/Forward.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
// Decoder for the "Quite OK Image" format (v1.0).
// https://qoiformat.org/qoi-specification.pdf
struct [[gnu::packed]] QOIHeader {
char magic[4];
u32 width;
u32 height;
u8 channels;
u8 colorspace;
};
struct QOILoadingContext {
enum class State {
NotDecoded = 0,
HeaderDecoded,
ImageDecoded,
Error,
};
State state { State::NotDecoded };
OwnPtr<Stream> stream {};
QOIHeader header {};
RefPtr<Bitmap> bitmap;
};
class QOIImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~QOIImageDecoderPlugin() override = default;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override { return false; }
virtual size_t loop_count() override { return 0; }
virtual size_t frame_count() override { return 1; }
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
ErrorOr<void> decode_header_and_update_context(Stream&);
ErrorOr<void> decode_image_and_update_context(Stream&);
QOIImageDecoderPlugin(NonnullOwnPtr<Stream>);
OwnPtr<QOILoadingContext> m_context;
};
}
template<>
struct AK::Traits<Gfx::QOIHeader> : public GenericTraits<Gfx::QOIHeader> {
static constexpr bool is_trivially_serializable() { return true; }
};

View file

@ -0,0 +1,225 @@
/*
* Copyright (c) 2022, Olivier De Cannière <olivier.decanniere96@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "QOIWriter.h"
#include <AK/DeprecatedString.h>
#include <AK/Endian.h>
namespace Gfx {
static constexpr Array<u8, 4> qoi_magic_bytes = { 'q', 'o', 'i', 'f' };
static constexpr Array<u8, 8> qoi_end_marker = { 0, 0, 0, 0, 0, 0, 0, 1 };
enum class Colorspace {
sRGB,
Linear,
};
enum class Channels {
RGB,
RGBA,
};
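// NOTE: Encoding walks the bitmap row by row and emits, for each pixel, the shortest applicable
// QOI chunk: a run (QOI_OP_RUN), an index into the running array (QOI_OP_INDEX), a small
// difference from the previous pixel (QOI_OP_DIFF / QOI_OP_LUMA), or a full color value
// (QOI_OP_RGB / QOI_OP_RGBA).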
ErrorOr<ByteBuffer> QOIWriter::encode(Bitmap const& bitmap)
{
QOIWriter writer;
TRY(writer.add_header(bitmap.width(), bitmap.height(), Channels::RGBA, Colorspace::sRGB));
Color previous_pixel = { 0, 0, 0, 255 };
bool creating_run = false;
int run_length = 0;
for (auto y = 0; y < bitmap.height(); y++) {
for (auto x = 0; x < bitmap.width(); x++) {
auto pixel = bitmap.get_pixel(x, y);
// Check for at most 62 consecutive identical pixels.
if (pixel == previous_pixel) {
if (!creating_run) {
creating_run = true;
run_length = 0;
writer.insert_into_running_array(pixel);
}
run_length++;
// If the run reaches a maximum length of 62 or if this is the last pixel then create the chunk.
if (run_length == 62 || (y == bitmap.height() - 1 && x == bitmap.width() - 1)) {
TRY(writer.add_run_chunk(run_length));
creating_run = false;
}
continue;
}
// Run ended with the previous pixel. Create a chunk for it and continue processing this pixel.
if (creating_run) {
TRY(writer.add_run_chunk(run_length));
creating_run = false;
}
// Check if the pixel matches a pixel in the running array.
auto index = pixel_hash_function(pixel);
auto& array_pixel = writer.running_array[index];
if (array_pixel == pixel) {
TRY(writer.add_index_chunk(index));
previous_pixel = pixel;
continue;
}
writer.running_array[index] = pixel;
            // Check if the pixel can be expressed as a difference from the previous pixel.
if (pixel.alpha() == previous_pixel.alpha()) {
int red_difference = pixel.red() - previous_pixel.red();
int green_difference = pixel.green() - previous_pixel.green();
int blue_difference = pixel.blue() - previous_pixel.blue();
int relative_red_difference = red_difference - green_difference;
int relative_blue_difference = blue_difference - green_difference;
if (red_difference > -3 && red_difference < 2
&& green_difference > -3 && green_difference < 2
&& blue_difference > -3 && blue_difference < 2) {
TRY(writer.add_diff_chunk(red_difference, green_difference, blue_difference));
previous_pixel = pixel;
continue;
}
if (relative_red_difference > -9 && relative_red_difference < 8
&& green_difference > -33 && green_difference < 32
&& relative_blue_difference > -9 && relative_blue_difference < 8) {
TRY(writer.add_luma_chunk(relative_red_difference, green_difference, relative_blue_difference));
previous_pixel = pixel;
continue;
}
TRY(writer.add_rgb_chunk(pixel.red(), pixel.green(), pixel.blue()));
previous_pixel = pixel;
continue;
}
previous_pixel = pixel;
// Write full color values.
TRY(writer.add_rgba_chunk(pixel.red(), pixel.green(), pixel.blue(), pixel.alpha()));
}
}
TRY(writer.add_end_marker());
return ByteBuffer::copy(writer.m_data);
}
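// NOTE: The QOI header is 14 bytes: the "qoif" magic (4 bytes), width and height as big-endian
// u32 values (4 bytes each), a channel count byte, and a colorspace byte.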
ErrorOr<void> QOIWriter::add_header(u32 width, u32 height, Channels channels = Channels::RGBA, Colorspace color_space = Colorspace::sRGB)
{
// FIXME: Handle RGB and all linear channels.
if (channels == Channels::RGB || color_space == Colorspace::Linear)
TODO();
TRY(m_data.try_append(qoi_magic_bytes.data(), sizeof(qoi_magic_bytes)));
auto big_endian_width = AK::convert_between_host_and_big_endian(width);
TRY(m_data.try_append(bit_cast<u8*>(&big_endian_width), sizeof(width)));
auto big_endian_height = AK::convert_between_host_and_big_endian(height);
TRY(m_data.try_append(bit_cast<u8*>(&big_endian_height), sizeof(height)));
// Number of channels: 3 = RGB, 4 = RGBA.
TRY(m_data.try_append(4));
// Colorspace: 0 = sRGB, 1 = all linear channels.
TRY(m_data.try_append(color_space == Colorspace::sRGB ? 0 : 1));
return {};
}
ErrorOr<void> QOIWriter::add_rgb_chunk(u8 r, u8 g, u8 b)
{
constexpr static u8 rgb_tag = 0b1111'1110;
TRY(m_data.try_append(rgb_tag));
TRY(m_data.try_append(r));
TRY(m_data.try_append(g));
TRY(m_data.try_append(b));
return {};
}
ErrorOr<void> QOIWriter::add_rgba_chunk(u8 r, u8 g, u8 b, u8 a)
{
constexpr static u8 rgba_tag = 0b1111'1111;
TRY(m_data.try_append(rgba_tag));
TRY(m_data.try_append(r));
TRY(m_data.try_append(g));
TRY(m_data.try_append(b));
TRY(m_data.try_append(a));
return {};
}
ErrorOr<void> QOIWriter::add_index_chunk(unsigned int index)
{
constexpr static u8 index_tag = 0b0000'0000;
u8 chunk = index_tag | index;
TRY(m_data.try_append(chunk));
return {};
}
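// NOTE: A QOI_OP_DIFF chunk packs the 2-bit tag 0b01 and three 2-bit per-channel differences
// (each biased by 2) into one byte. For example, differences (dr, dg, db) = (-1, 0, +1)
// encode as 0b01'01'10'11 = 0x5B.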
ErrorOr<void> QOIWriter::add_diff_chunk(i8 red_difference, i8 green_difference, i8 blue_difference)
{
constexpr static u8 diff_tag = 0b0100'0000;
u8 bias = 2;
u8 red = red_difference + bias;
u8 green = green_difference + bias;
u8 blue = blue_difference + bias;
u8 chunk = diff_tag | (red << 4) | (green << 2) | blue;
TRY(m_data.try_append(chunk));
return {};
}
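// NOTE: A QOI_OP_LUMA chunk uses two bytes: the 2-bit tag 0b10 plus the green difference
// (biased by 32) in the first byte, and the red/blue differences relative to green (each
// biased by 8) in the second byte. For example, dg = -5, dr - dg = 3, db - dg = -2
// encode as 0x9B 0xB6.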
ErrorOr<void> QOIWriter::add_luma_chunk(i8 relative_red_difference, i8 green_difference, i8 relative_blue_difference)
{
constexpr static u8 luma_tag = 0b1000'0000;
u8 green_bias = 32;
u8 red_blue_bias = 8;
u8 chunk1 = luma_tag | (green_difference + green_bias);
u8 chunk2 = ((relative_red_difference + red_blue_bias) << 4) | (relative_blue_difference + red_blue_bias);
TRY(m_data.try_append(chunk1));
TRY(m_data.try_append(chunk2));
return {};
}
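// NOTE: A QOI_OP_RUN chunk stores run_length - 1 in the low 6 bits of a 0b11-tagged byte, so a
// run of 1 encodes as 0xC0 and the maximum run of 62 as 0xFD. Runs of 63 and 64 would collide
// with the QOI_OP_RGB (0xFE) and QOI_OP_RGBA (0xFF) tags, hence the 62-pixel cap.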
ErrorOr<void> QOIWriter::add_run_chunk(unsigned run_length)
{
constexpr static u8 run_tag = 0b1100'0000;
int bias = -1;
u8 chunk = run_tag | (run_length + bias);
TRY(m_data.try_append(chunk));
return {};
}
ErrorOr<void> QOIWriter::add_end_marker()
{
TRY(m_data.try_append(qoi_end_marker.data(), sizeof(qoi_end_marker)));
return {};
}
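// NOTE: This is the index function from the QOI specification: each channel gets a small prime
// weight and the sum is reduced modulo the 64-entry running array. For example,
// Color(10, 20, 30, 255) hashes to (30 + 100 + 210 + 2805) % 64 = 3145 % 64 = 9.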
u32 QOIWriter::pixel_hash_function(Color pixel)
{
return (pixel.red() * 3 + pixel.green() * 5 + pixel.blue() * 7 + pixel.alpha() * 11) % 64;
}
void QOIWriter::insert_into_running_array(Color pixel)
{
auto index = pixel_hash_function(pixel);
running_array[index] = pixel;
}
}

View file

@ -0,0 +1,40 @@
/*
* Copyright (c) 2022, Olivier De Cannière <olivier.decanniere96@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Error.h>
#include <AK/Vector.h>
#include <LibGfx/Bitmap.h>
namespace Gfx {
enum class Colorspace;
enum class Channels;
class QOIWriter {
public:
static ErrorOr<ByteBuffer> encode(Gfx::Bitmap const&);
private:
QOIWriter() = default;
Vector<u8> m_data;
ErrorOr<void> add_header(u32 width, u32 height, Channels, Colorspace);
ErrorOr<void> add_rgb_chunk(u8, u8, u8);
ErrorOr<void> add_rgba_chunk(u8, u8, u8, u8);
ErrorOr<void> add_index_chunk(u32 index);
ErrorOr<void> add_diff_chunk(i8 red_difference, i8 green_difference, i8 blue_difference);
ErrorOr<void> add_luma_chunk(i8 relative_red_difference, i8 green_difference, i8 relative_blue_difference);
ErrorOr<void> add_run_chunk(u32 run_length);
ErrorOr<void> add_end_marker();
Array<Color, 64> running_array;
static u32 pixel_hash_function(Color pixel);
void insert_into_running_array(Color pixel);
};
}

View file

@ -0,0 +1,370 @@
/*
* Copyright (c) 2022, Tom Needham <06needhamt@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Span.h>
#include <AK/StdLibExtraDetails.h>
#include <AK/String.h>
#include <LibGfx/ImageFormats/TGALoader.h>
namespace Gfx {
enum TGADataType : u8 {
None = 0,
UncompressedColorMapped = 1,
UncompressedRGB = 2,
UncompressedBlackAndWhite = 3,
RunLengthEncodedColorMapped = 9,
RunLengthEncodedRGB = 10,
CompressedBlackAndWhite = 11,
CompressedColorMapped = 32,
CompressedColorMappedFourPass = 33
};
struct [[gnu::packed]] TGAHeader {
u8 id_length;
u8 color_map_type;
TGADataType data_type_code;
i16 color_map_origin;
i16 color_map_length;
u8 color_map_depth;
i16 x_origin;
i16 y_origin;
u16 width;
u16 height;
u8 bits_per_pixel;
u8 image_descriptor;
};
static_assert(sizeof(TGAHeader) == 18);
union [[gnu::packed]] TGAPixel {
struct TGAColor {
u8 blue;
u8 green;
u8 red;
u8 alpha;
} components;
u32 data;
};
struct TGAPixelPacket {
bool raw;
u8 pixels_count;
};
static_assert(AssertSize<TGAPixel, 4>());
class TGAReader {
public:
TGAReader(ReadonlyBytes data)
: m_data(move(data))
{
}
TGAReader(ReadonlyBytes data, size_t index)
: m_data(move(data))
, m_index(index)
{
}
ALWAYS_INLINE u8 read_u8()
{
u8 value = m_data[m_index];
m_index++;
return value;
}
ALWAYS_INLINE i8 read_i8()
{
return static_cast<i8>(read_u8());
}
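    // NOTE: Multi-byte TGA header fields are stored little-endian, so the helpers below
    //       assemble values from the least significant byte to the most significant byte.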
    ALWAYS_INLINE u16 read_u16()
    {
        // NOTE: Assemble the value in two statements so the little-endian byte order
        //       does not depend on the evaluation order of the two read_u8() calls.
        u16 value = read_u8();
        value |= static_cast<u16>(read_u8()) << 8;
        return value;
    }
    ALWAYS_INLINE i16 read_i16()
    {
        return static_cast<i16>(read_u16());
    }
    ALWAYS_INLINE u32 read_u32()
    {
        u32 value = read_u16();
        value |= static_cast<u32>(read_u16()) << 16;
        return value;
    }
    ALWAYS_INLINE i32 read_i32()
    {
        return static_cast<i32>(read_u32());
    }
ALWAYS_INLINE TGAPixelPacket read_packet_type()
{
auto pixel_packet_type = read_u8();
auto pixel_packet = TGAPixelPacket();
pixel_packet.raw = !(pixel_packet_type & 0x80);
pixel_packet.pixels_count = (pixel_packet_type & 0x7f);
// NOTE: Run-length-encoded/Raw pixel packets cannot encode zero pixels,
// so value 0 stands for 1 pixel, 1 stands for 2, etc...
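        // For example, a packet-type byte of 0x85 introduces a run-length packet repeating the
        // following pixel 6 times, while 0x05 introduces a raw packet of 6 literal pixels.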
pixel_packet.pixels_count++;
return pixel_packet;
}
ALWAYS_INLINE TGAPixel read_pixel(u8 bits_per_pixel)
{
auto pixel = TGAPixel();
switch (bits_per_pixel) {
case 24:
pixel.components.blue = read_u8();
pixel.components.green = read_u8();
pixel.components.red = read_u8();
pixel.components.alpha = 0xFF;
return pixel;
case 32:
pixel.components.blue = read_u8();
pixel.components.green = read_u8();
pixel.components.red = read_u8();
pixel.components.alpha = read_u8();
return pixel;
default:
VERIFY_NOT_REACHED();
}
}
size_t index() const
{
return m_index;
}
ReadonlyBytes data() const
{
return m_data;
}
private:
ReadonlyBytes m_data;
size_t m_index { 0 };
};
struct TGALoadingContext {
TGAHeader header;
OwnPtr<TGAReader> reader = { nullptr };
RefPtr<Gfx::Bitmap> bitmap;
};
TGAImageDecoderPlugin::TGAImageDecoderPlugin(u8 const* file_data, size_t file_size)
{
m_context = make<TGALoadingContext>();
m_context->reader = make<TGAReader>(ReadonlyBytes { file_data, file_size });
}
TGAImageDecoderPlugin::~TGAImageDecoderPlugin() = default;
IntSize TGAImageDecoderPlugin::size()
{
return IntSize { m_context->header.width, m_context->header.height };
}
void TGAImageDecoderPlugin::set_volatile()
{
if (m_context->bitmap)
m_context->bitmap->set_volatile();
}
bool TGAImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->bitmap)
return false;
return m_context->bitmap->set_nonvolatile(was_purged);
}
bool TGAImageDecoderPlugin::decode_tga_header()
{
auto& reader = m_context->reader;
if (reader->data().size() < sizeof(TGAHeader))
return false;
m_context->header = TGAHeader();
m_context->header.id_length = reader->read_u8();
m_context->header.color_map_type = reader->read_u8();
m_context->header.data_type_code = static_cast<TGADataType>(reader->read_u8());
m_context->header.color_map_origin = reader->read_i16();
m_context->header.color_map_length = reader->read_i16();
m_context->header.color_map_depth = reader->read_u8();
m_context->header.x_origin = reader->read_i16();
m_context->header.y_origin = reader->read_i16();
m_context->header.width = reader->read_u16();
m_context->header.height = reader->read_u16();
m_context->header.bits_per_pixel = reader->read_u8();
m_context->header.image_descriptor = reader->read_u8();
auto bytes_remaining = reader->data().size() - reader->index();
// FIXME: Check for multiplication overflow!
if (m_context->header.data_type_code == TGADataType::UncompressedRGB && bytes_remaining < static_cast<size_t>(m_context->header.width * m_context->header.height * (m_context->header.bits_per_pixel / 8)))
return false;
if (m_context->header.bits_per_pixel < 8 || m_context->header.bits_per_pixel > 32)
return false;
return true;
}
bool TGAImageDecoderPlugin::initialize()
{
return decode_tga_header();
}
ErrorOr<bool> TGAImageDecoderPlugin::validate_before_create(ReadonlyBytes data)
{
if (data.size() < sizeof(TGAHeader))
return false;
TGAHeader const& header = *reinterpret_cast<TGAHeader const*>(data.data());
// FIXME: Check for multiplication overflow!
if (header.data_type_code == TGADataType::UncompressedRGB && data.size() < static_cast<size_t>(header.width * header.height * (header.bits_per_pixel / 8)))
return false;
if (header.bits_per_pixel < 8 || header.bits_per_pixel > 32)
return false;
return true;
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> TGAImageDecoderPlugin::create(ReadonlyBytes data)
{
return adopt_nonnull_own_or_enomem(new (nothrow) TGAImageDecoderPlugin(data.data(), data.size()));
}
bool TGAImageDecoderPlugin::is_animated()
{
return false;
}
size_t TGAImageDecoderPlugin::loop_count()
{
return 0;
}
size_t TGAImageDecoderPlugin::frame_count()
{
return 1;
}
ErrorOr<ImageFrameDescriptor> TGAImageDecoderPlugin::frame(size_t index)
{
auto bits_per_pixel = m_context->header.bits_per_pixel;
auto color_map = m_context->header.color_map_type;
auto data_type = m_context->header.data_type_code;
auto width = m_context->header.width;
auto height = m_context->header.height;
auto x_origin = m_context->header.x_origin;
auto y_origin = m_context->header.y_origin;
if (index != 0)
return Error::from_string_literal("TGAImageDecoderPlugin: frame index must be 0");
if (color_map > 1)
return Error::from_string_literal("TGAImageDecoderPlugin: Invalid color map type");
if (m_context->bitmap) {
return ImageFrameDescriptor { m_context->bitmap, 0 };
} else {
        // NOTE: Just to be on the safe side, if m_context->bitmap is nullptr, re-construct
        // the reader object. This ensures that if the bitmap was set volatile and has since
        // been purged, we can always regenerate it with a new call to this method.
VERIFY(m_context->reader);
m_context->reader = make<TGAReader>(m_context->reader->data(), sizeof(TGAHeader));
}
RefPtr<Gfx::Bitmap> bitmap;
switch (bits_per_pixel) {
case 24:
bitmap = TRY(Bitmap::create(BitmapFormat::BGRx8888, { m_context->header.width, m_context->header.height }));
break;
case 32:
bitmap = TRY(Bitmap::create(BitmapFormat::BGRA8888, { m_context->header.width, m_context->header.height }));
break;
default:
// FIXME: Implement other TGA bit depths
return Error::from_string_literal("TGAImageDecoderPlugin: Can only handle 24 and 32 bits per pixel");
}
// FIXME: Try to understand the Image origin (instead of X and Y origin coordinates)
// based on the Image descriptor, Field 5.6, bits 4 and 5.
    // NOTE: If the Y origin is negative, assume the generating software meant that the
    // Y origin is at the top of the picture.
    // At least this is the observed behavior when generating some pictures in GIMP.
if (y_origin < 0)
y_origin = height;
if (y_origin != 0 && y_origin != height)
return Error::from_string_literal("TGAImageDecoderPlugin: Can only handle Y origin which is 0 or the entire height");
if (x_origin != 0 && x_origin != width)
return Error::from_string_literal("TGAImageDecoderPlugin: Can only handle X origin which is 0 or the entire width");
switch (data_type) {
case TGADataType::UncompressedRGB: {
for (int row = 0; row < height; ++row) {
for (int col = 0; col < width; ++col) {
auto pixel = m_context->reader->read_pixel(bits_per_pixel);
auto actual_row = row;
if (y_origin < height)
actual_row = height - 1 - row;
auto actual_col = col;
if (x_origin > width)
actual_col = width - 1 - col;
bitmap->scanline(actual_row)[actual_col] = pixel.data;
}
}
break;
}
case TGADataType::RunLengthEncodedRGB: {
size_t pixel_index = 0;
size_t pixel_count = height * width;
while (pixel_index < pixel_count) {
auto packet_type = m_context->reader->read_packet_type();
VERIFY(packet_type.pixels_count > 0);
TGAPixel pixel = m_context->reader->read_pixel(bits_per_pixel);
auto max_pixel_index = min(pixel_index + packet_type.pixels_count, pixel_count);
for (size_t current_pixel_index = pixel_index; current_pixel_index < max_pixel_index; ++current_pixel_index) {
int row = current_pixel_index / width;
int col = current_pixel_index % width;
auto actual_row = row;
if (y_origin < height)
actual_row = height - 1 - row;
auto actual_col = col;
if (x_origin > width)
actual_col = width - 1 - col;
bitmap->scanline(actual_row)[actual_col] = pixel.data;
if (packet_type.raw && (current_pixel_index + 1) < max_pixel_index)
pixel = m_context->reader->read_pixel(bits_per_pixel);
}
pixel_index += packet_type.pixels_count;
}
break;
}
default:
// FIXME: Implement other TGA data types
return Error::from_string_literal("TGAImageDecoderPlugin: Can currently only handle the UncompressedRGB or CompressedRGB data type");
}
m_context->bitmap = bitmap;
return ImageFrameDescriptor { m_context->bitmap, 0 };
}
ErrorOr<Optional<ReadonlyBytes>> TGAImageDecoderPlugin::icc_data()
{
return OptionalNone {};
}
}

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2022, Tom Needham <06needhamt@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct TGALoadingContext;
class TGAImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static ErrorOr<bool> validate_before_create(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~TGAImageDecoderPlugin() override;
TGAImageDecoderPlugin(u8 const*, size_t);
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
bool decode_tga_header();
OwnPtr<TGALoadingContext> m_context;
};
}

View file

@ -0,0 +1,612 @@
/*
* Copyright (c) 2023, Nico Weber <thakis@chromium.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Debug.h>
#include <AK/Endian.h>
#include <AK/Format.h>
#include <AK/Vector.h>
#include <LibGfx/ImageFormats/WebPLoader.h>
// Overview: https://developers.google.com/speed/webp/docs/compression
// Container: https://developers.google.com/speed/webp/docs/riff_container
// Lossless format: https://developers.google.com/speed/webp/docs/webp_lossless_bitstream_specification
// Lossy format: https://datatracker.ietf.org/doc/html/rfc6386
namespace Gfx {
namespace {
struct FourCC {
constexpr FourCC(char const* name)
{
cc[0] = name[0];
cc[1] = name[1];
cc[2] = name[2];
cc[3] = name[3];
}
bool operator==(FourCC const&) const = default;
bool operator!=(FourCC const&) const = default;
char cc[4];
};
// https://developers.google.com/speed/webp/docs/riff_container#webp_file_header
struct WebPFileHeader {
FourCC riff;
LittleEndian<u32> file_size;
FourCC webp;
};
static_assert(AssertSize<WebPFileHeader, 12>());
struct ChunkHeader {
FourCC chunk_type;
LittleEndian<u32> chunk_size;
};
static_assert(AssertSize<ChunkHeader, 8>());
struct Chunk {
FourCC type;
ReadonlyBytes data;
};
struct VP8Header {
u8 version;
bool show_frame;
u32 size_of_first_partition;
u32 width;
u8 horizontal_scale;
u32 height;
u8 vertical_scale;
};
struct VP8LHeader {
u16 width;
u16 height;
bool is_alpha_used;
};
struct VP8XHeader {
bool has_icc;
bool has_alpha;
bool has_exif;
bool has_xmp;
bool has_animation;
u32 width;
u32 height;
};
struct ANIMChunk {
u32 background_color;
u16 loop_count;
};
}
struct WebPLoadingContext {
enum State {
NotDecoded = 0,
Error,
HeaderDecoded,
FirstChunkRead,
FirstChunkDecoded,
ChunksDecoded,
BitmapDecoded,
};
State state { State::NotDecoded };
ReadonlyBytes data;
ReadonlyBytes chunks_cursor;
Optional<IntSize> size;
RefPtr<Gfx::Bitmap> bitmap;
// Either 'VP8 ' (simple lossy file), 'VP8L' (simple lossless file), or 'VP8X' (extended file).
Optional<Chunk> first_chunk;
union {
VP8Header vp8_header;
VP8LHeader vp8l_header;
VP8XHeader vp8x_header;
};
// If first_chunk is not a VP8X chunk, then only image_data_chunk is set and all the other Chunks are not set.
// "For a still image, the image data consists of a single frame, which is made up of:
// An optional alpha subchunk.
// A bitstream subchunk."
Optional<Chunk> alpha_chunk; // 'ALPH'
Optional<Chunk> image_data_chunk; // Either 'VP8 ' or 'VP8L'.
Optional<Chunk> animation_header_chunk; // 'ANIM'
Vector<Chunk> animation_frame_chunks; // 'ANMF'
Optional<Chunk> iccp_chunk; // 'ICCP'
Optional<Chunk> exif_chunk; // 'EXIF'
Optional<Chunk> xmp_chunk; // 'XMP '
template<size_t N>
[[nodiscard]] class Error error(char const (&string_literal)[N])
{
state = WebPLoadingContext::State::Error;
return Error::from_string_literal(string_literal);
}
};
// https://developers.google.com/speed/webp/docs/riff_container#webp_file_header
static ErrorOr<void> decode_webp_header(WebPLoadingContext& context)
{
if (context.state >= WebPLoadingContext::HeaderDecoded)
return {};
if (context.data.size() < sizeof(WebPFileHeader))
return context.error("Missing WebP header");
auto& header = *bit_cast<WebPFileHeader const*>(context.data.data());
if (header.riff != FourCC("RIFF") || header.webp != FourCC("WEBP"))
return context.error("Invalid WebP header");
// "File Size: [...] The size of the file in bytes starting at offset 8. The maximum value of this field is 2^32 minus 10 bytes."
u32 const maximum_webp_file_size = 0xffff'ffff - 9;
if (header.file_size > maximum_webp_file_size)
return context.error("WebP header file size over maximum");
// "The file size in the header is the total size of the chunks that follow plus 4 bytes for the 'WEBP' FourCC.
// The file SHOULD NOT contain any data after the data specified by File Size.
// Readers MAY parse such files, ignoring the trailing data."
if (context.data.size() - 8 < header.file_size)
return context.error("WebP data too small for size in header");
if (context.data.size() - 8 > header.file_size) {
dbgln_if(WEBP_DEBUG, "WebP has {} bytes of data, but header needs only {}. Trimming.", context.data.size(), header.file_size + 8);
context.data = context.data.trim(header.file_size + 8);
}
context.state = WebPLoadingContext::HeaderDecoded;
return {};
}
// https://developers.google.com/speed/webp/docs/riff_container#riff_file_format
static ErrorOr<Chunk> decode_webp_chunk_header(WebPLoadingContext& context, ReadonlyBytes chunks)
{
if (chunks.size() < sizeof(ChunkHeader))
return context.error("Not enough data for WebP chunk header");
auto const& header = *bit_cast<ChunkHeader const*>(chunks.data());
dbgln_if(WEBP_DEBUG, "chunk {} size {}", header.chunk_type, header.chunk_size);
if (chunks.size() < sizeof(ChunkHeader) + header.chunk_size)
return context.error("Not enough data for WebP chunk");
return Chunk { header.chunk_type, { chunks.data() + sizeof(ChunkHeader), header.chunk_size } };
}
// https://developers.google.com/speed/webp/docs/riff_container#riff_file_format
static ErrorOr<Chunk> decode_webp_advance_chunk(WebPLoadingContext& context, ReadonlyBytes& chunks)
{
auto chunk = TRY(decode_webp_chunk_header(context, chunks));
// "Chunk Size: 32 bits (uint32)
// The size of the chunk in bytes, not including this field, the chunk identifier or padding.
// Chunk Payload: Chunk Size bytes
// The data payload. If Chunk Size is odd, a single padding byte -- that MUST be 0 to conform with RIFF -- is added."
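    // For example, a chunk with an odd payload size of 5 occupies 8 (header) + 5 (payload) + 1 (padding) = 14 bytes.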
chunks = chunks.slice(sizeof(ChunkHeader) + chunk.data.size());
if (chunk.data.size() % 2 != 0) {
if (chunks.is_empty())
return context.error("Missing data for padding byte");
if (*chunks.data() != 0)
return context.error("Padding byte is not 0");
chunks = chunks.slice(1);
}
return chunk;
}
// https://developers.google.com/speed/webp/docs/riff_container#simple_file_format_lossy
// https://datatracker.ietf.org/doc/html/rfc6386#section-19 "Annex A: Bitstream Syntax"
static ErrorOr<VP8Header> decode_webp_chunk_VP8_header(WebPLoadingContext& context, Chunk const& vp8_chunk)
{
VERIFY(vp8_chunk.type == FourCC("VP8 "));
if (vp8_chunk.data.size() < 10)
return context.error("WebPImageDecoderPlugin: 'VP8 ' chunk too small");
// FIXME: Eventually, this should probably call into LibVideo/VP8,
// and image decoders should move into LibImageDecoders which depends on both LibGfx and LibVideo.
// (LibVideo depends on LibGfx, so LibGfx can't depend on LibVideo itself.)
// https://datatracker.ietf.org/doc/html/rfc6386#section-4 "Overview of Compressed Data Format"
// "The decoder is simply presented with a sequence of compressed frames [...]
// The first frame presented to the decompressor is [...] a key frame. [...]
// [E]very compressed frame has three or more pieces. It begins with an uncompressed data chunk comprising 10 bytes in the case of key frames
u8 const* data = vp8_chunk.data.data();
// https://datatracker.ietf.org/doc/html/rfc6386#section-9.1 "Uncompressed Data Chunk"
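    // The 3-byte frame tag is little-endian: bit 0 is the frame type (0 = key frame),
    // bits 1-3 the version, bit 4 the show_frame flag, and bits 5-23 the size of the
    // first data partition.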
u32 frame_tag = data[0] | (data[1] << 8) | (data[2] << 16);
bool is_key_frame = (frame_tag & 1) == 0; // https://www.rfc-editor.org/errata/eid5534
u8 version = (frame_tag & 0xe) >> 1;
bool show_frame = (frame_tag & 0x10) != 0;
u32 size_of_first_partition = frame_tag >> 5;
if (!is_key_frame)
return context.error("WebPImageDecoderPlugin: 'VP8 ' chunk not a key frame");
// FIXME: !show_frame does not make sense in a webp file either, probably?
u32 start_code = data[3] | (data[4] << 8) | (data[5] << 16);
if (start_code != 0x2a019d) // https://www.rfc-editor.org/errata/eid7370
return context.error("WebPImageDecoderPlugin: 'VP8 ' chunk invalid start_code");
// "The scaling specifications for each dimension are encoded as follows.
// 0 | No upscaling (the most common case).
// 1 | Upscale by 5/4.
// 2 | Upscale by 5/3.
// 3 | Upscale by 2."
// This is a display-time operation and doesn't affect decoding.
u16 width_and_horizontal_scale = data[6] | (data[7] << 8);
u16 width = width_and_horizontal_scale & 0x3fff;
u8 horizontal_scale = width_and_horizontal_scale >> 14;
    u16 height_and_vertical_scale = data[8] | (data[9] << 8);
    u16 height = height_and_vertical_scale & 0x3fff;
    u8 vertical_scale = height_and_vertical_scale >> 14;
dbgln_if(WEBP_DEBUG, "version {}, show_frame {}, size_of_first_partition {}, width {}, horizontal_scale {}, height {}, vertical_scale {}",
version, show_frame, size_of_first_partition, width, horizontal_scale, height, vertical_scale);
return VP8Header { version, show_frame, size_of_first_partition, width, horizontal_scale, height, vertical_scale };
}
// https://developers.google.com/speed/webp/docs/riff_container#simple_file_format_lossless
// https://developers.google.com/speed/webp/docs/webp_lossless_bitstream_specification#7_overall_structure_of_the_format
static ErrorOr<VP8LHeader> decode_webp_chunk_VP8L_header(WebPLoadingContext& context, Chunk const& vp8l_chunk)
{
VERIFY(vp8l_chunk.type == FourCC("VP8L"));
// https://developers.google.com/speed/webp/docs/webp_lossless_bitstream_specification#3_riff_header
if (vp8l_chunk.data.size() < 5)
return context.error("WebPImageDecoderPlugin: VP8L chunk too small");
u8 const* data = vp8l_chunk.data.data();
u8 signature = data[0];
if (signature != 0x2f)
return context.error("WebPImageDecoderPlugin: VP8L chunk invalid signature");
// 14 bits width-1, 14 bits height-1, 1 bit alpha hint, 3 bit version_number.
u16 width = (data[1] | ((data[2] & 0x3f) << 8)) + 1;
u16 height = ((data[2] >> 6) | (data[3] << 2) | ((data[4] & 0xf) << 12)) + 1;
bool is_alpha_used = (data[4] & 0x10) != 0;
u8 version_number = (data[4] & 0xe0) >> 5;
dbgln_if(WEBP_DEBUG, "width {}, height {}, is_alpha_used {}, version_number {}",
width, height, is_alpha_used, version_number);
// "The version_number is a 3 bit code that must be set to 0. Any other value should be treated as an error. [AMENDED]"
if (version_number != 0)
return context.error("WebPImageDecoderPlugin: VP8L chunk invalid version_number");
return VP8LHeader { width, height, is_alpha_used };
}
static ErrorOr<VP8XHeader> decode_webp_chunk_VP8X(WebPLoadingContext& context, Chunk const& vp8x_chunk)
{
VERIFY(vp8x_chunk.type == FourCC("VP8X"));
// The VP8X chunk is documented at "Extended WebP file header:" at the end of
// https://developers.google.com/speed/webp/docs/riff_container#extended_file_format
if (vp8x_chunk.data.size() < 10)
return context.error("WebPImageDecoderPlugin: VP8X chunk too small");
u8 const* data = vp8x_chunk.data.data();
// 1 byte flags
// "Reserved (Rsv): 2 bits MUST be 0. Readers MUST ignore this field.
// ICC profile (I): 1 bit Set if the file contains an ICC profile.
// Alpha (L): 1 bit Set if any of the frames of the image contain transparency information ("alpha").
// Exif metadata (E): 1 bit Set if the file contains Exif metadata.
// XMP metadata (X): 1 bit Set if the file contains XMP metadata.
// Animation (A): 1 bit Set if this is an animated image. Data in 'ANIM' and 'ANMF' chunks should be used to control the animation.
// Reserved (R): 1 bit MUST be 0. Readers MUST ignore this field."
u8 flags = data[0];
bool has_icc = flags & 0x20;
bool has_alpha = flags & 0x10;
bool has_exif = flags & 0x8;
bool has_xmp = flags & 0x4;
bool has_animation = flags & 0x2;
// 3 bytes reserved
// 3 bytes width minus one
u32 width = (data[4] | (data[5] << 8) | (data[6] << 16)) + 1;
// 3 bytes height minus one
u32 height = (data[7] | (data[8] << 8) | (data[9] << 16)) + 1;
dbgln_if(WEBP_DEBUG, "flags 0x{:x} --{}{}{}{}{}{}, width {}, height {}",
flags,
has_icc ? " icc" : "",
has_alpha ? " alpha" : "",
has_exif ? " exif" : "",
has_xmp ? " xmp" : "",
has_animation ? " anim" : "",
(flags & 0x3e) == 0 ? " none" : "",
width, height);
return VP8XHeader { has_icc, has_alpha, has_exif, has_xmp, has_animation, width, height };
}
// https://developers.google.com/speed/webp/docs/riff_container#animation
static ErrorOr<ANIMChunk> decode_webp_chunk_ANIM(WebPLoadingContext& context, Chunk const& anim_chunk)
{
VERIFY(anim_chunk.type == FourCC("ANIM"));
if (anim_chunk.data.size() < 6)
return context.error("WebPImageDecoderPlugin: ANIM chunk too small");
u8 const* data = anim_chunk.data.data();
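    // Per the container spec, the background color is a 32-bit value in [Blue, Green, Red, Alpha]
    // byte order, followed by a 16-bit loop count (0 means loop infinitely).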
u32 background_color = (u32)data[0] | ((u32)data[1] << 8) | ((u32)data[2] << 16) | ((u32)data[3] << 24);
u16 loop_count = data[4] | (data[5] << 8);
return ANIMChunk { background_color, loop_count };
}
// https://developers.google.com/speed/webp/docs/riff_container#extended_file_format
static ErrorOr<void> decode_webp_extended(WebPLoadingContext& context, ReadonlyBytes chunks)
{
VERIFY(context.first_chunk->type == FourCC("VP8X"));
// FIXME: This isn't quite to spec, which says
// "All chunks SHOULD be placed in the same order as listed above.
// If a chunk appears in the wrong place, the file is invalid, but readers MAY parse the file, ignoring the chunks that are out of order."
auto store = [](auto& field, Chunk const& chunk) {
if (!field.has_value())
field = chunk;
};
while (!chunks.is_empty()) {
auto chunk = TRY(decode_webp_advance_chunk(context, chunks));
if (chunk.type == FourCC("ICCP"))
store(context.iccp_chunk, chunk);
else if (chunk.type == FourCC("ALPH"))
store(context.alpha_chunk, chunk);
else if (chunk.type == FourCC("ANIM"))
store(context.animation_header_chunk, chunk);
else if (chunk.type == FourCC("ANMF"))
TRY(context.animation_frame_chunks.try_append(chunk));
else if (chunk.type == FourCC("EXIF"))
store(context.exif_chunk, chunk);
else if (chunk.type == FourCC("XMP "))
store(context.xmp_chunk, chunk);
else if (chunk.type == FourCC("VP8 ") || chunk.type == FourCC("VP8L"))
store(context.image_data_chunk, chunk);
}
// Validate chunks.
// https://developers.google.com/speed/webp/docs/riff_container#animation
// "ANIM Chunk: [...] This chunk MUST appear if the Animation flag in the VP8X chunk is set. If the Animation flag is not set and this chunk is present, it MUST be ignored."
if (context.vp8x_header.has_animation && !context.animation_header_chunk.has_value())
return context.error("WebPImageDecoderPlugin: Header claims animation, but no ANIM chunk");
if (!context.vp8x_header.has_animation && context.animation_header_chunk.has_value()) {
dbgln_if(WEBP_DEBUG, "WebPImageDecoderPlugin: Header claims no animation, but ANIM chunk present. Ignoring ANIM chunk.");
context.animation_header_chunk.clear();
}
// "ANMF Chunk: [...] If the Animation flag is not set, then this chunk SHOULD NOT be present."
    if (!context.vp8x_header.has_animation && !context.animation_frame_chunks.is_empty()) {
dbgln_if(WEBP_DEBUG, "WebPImageDecoderPlugin: Header claims no animation, but ANMF chunks present. Ignoring ANMF chunks.");
context.animation_frame_chunks.clear();
}
// https://developers.google.com/speed/webp/docs/riff_container#alpha
// "A frame containing a 'VP8L' chunk SHOULD NOT contain this chunk."
// FIXME: Also check in ANMF chunks.
if (context.alpha_chunk.has_value() && context.image_data_chunk.has_value() && context.image_data_chunk->type == FourCC("VP8L")) {
dbgln_if(WEBP_DEBUG, "WebPImageDecoderPlugin: VP8L frames should not have ALPH chunks. Ignoring ALPH chunk.");
context.alpha_chunk.clear();
}
// https://developers.google.com/speed/webp/docs/riff_container#color_profile
// "This chunk MUST appear before the image data."
// FIXME: Doesn't check animated files.
if (context.iccp_chunk.has_value() && context.image_data_chunk.has_value() && context.iccp_chunk->data.data() > context.image_data_chunk->data.data())
return context.error("WebPImageDecoderPlugin: ICCP chunk is after image data");
context.state = WebPLoadingContext::State::ChunksDecoded;
return {};
}
static ErrorOr<void> read_webp_first_chunk(WebPLoadingContext& context)
{
if (context.state >= WebPLoadingContext::State::FirstChunkRead)
return {};
if (context.state < WebPLoadingContext::HeaderDecoded)
TRY(decode_webp_header(context));
context.chunks_cursor = context.data.slice(sizeof(WebPFileHeader));
auto first_chunk = TRY(decode_webp_advance_chunk(context, context.chunks_cursor));
if (first_chunk.type != FourCC("VP8 ") && first_chunk.type != FourCC("VP8L") && first_chunk.type != FourCC("VP8X"))
return context.error("WebPImageDecoderPlugin: Invalid first chunk type");
context.first_chunk = first_chunk;
context.state = WebPLoadingContext::State::FirstChunkRead;
if (first_chunk.type == FourCC("VP8 ") || first_chunk.type == FourCC("VP8L"))
context.image_data_chunk = first_chunk;
return {};
}
static ErrorOr<void> decode_webp_first_chunk(WebPLoadingContext& context)
{
if (context.state >= WebPLoadingContext::State::FirstChunkDecoded)
return {};
if (context.state < WebPLoadingContext::FirstChunkRead)
TRY(read_webp_first_chunk(context));
if (context.first_chunk->type == FourCC("VP8 ")) {
context.vp8_header = TRY(decode_webp_chunk_VP8_header(context, context.first_chunk.value()));
context.size = IntSize { context.vp8_header.width, context.vp8_header.height };
context.state = WebPLoadingContext::State::FirstChunkDecoded;
return {};
}
if (context.first_chunk->type == FourCC("VP8L")) {
context.vp8l_header = TRY(decode_webp_chunk_VP8L_header(context, context.first_chunk.value()));
context.size = IntSize { context.vp8l_header.width, context.vp8l_header.height };
context.state = WebPLoadingContext::State::FirstChunkDecoded;
return {};
}
VERIFY(context.first_chunk->type == FourCC("VP8X"));
context.vp8x_header = TRY(decode_webp_chunk_VP8X(context, context.first_chunk.value()));
context.size = IntSize { context.vp8x_header.width, context.vp8x_header.height };
context.state = WebPLoadingContext::State::FirstChunkDecoded;
return {};
}
static ErrorOr<void> decode_webp_chunks(WebPLoadingContext& context)
{
if (context.state >= WebPLoadingContext::State::ChunksDecoded)
return {};
if (context.state < WebPLoadingContext::FirstChunkDecoded)
TRY(decode_webp_first_chunk(context));
if (context.first_chunk->type == FourCC("VP8X"))
return decode_webp_extended(context, context.chunks_cursor);
context.state = WebPLoadingContext::State::ChunksDecoded;
return {};
}
WebPImageDecoderPlugin::WebPImageDecoderPlugin(ReadonlyBytes data, OwnPtr<WebPLoadingContext> context)
: m_context(move(context))
{
m_context->data = data;
}
WebPImageDecoderPlugin::~WebPImageDecoderPlugin() = default;
IntSize WebPImageDecoderPlugin::size()
{
if (m_context->state == WebPLoadingContext::State::Error)
return {};
if (m_context->state < WebPLoadingContext::State::FirstChunkDecoded) {
if (decode_webp_first_chunk(*m_context).is_error())
return {};
}
return m_context->size.value();
}
void WebPImageDecoderPlugin::set_volatile()
{
if (m_context->bitmap)
m_context->bitmap->set_volatile();
}
bool WebPImageDecoderPlugin::set_nonvolatile(bool& was_purged)
{
if (!m_context->bitmap)
return false;
return m_context->bitmap->set_nonvolatile(was_purged);
}
bool WebPImageDecoderPlugin::initialize()
{
return !decode_webp_header(*m_context).is_error();
}
bool WebPImageDecoderPlugin::sniff(ReadonlyBytes data)
{
WebPLoadingContext context;
context.data = data;
return !decode_webp_header(context).is_error();
}
ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> WebPImageDecoderPlugin::create(ReadonlyBytes data)
{
auto context = TRY(try_make<WebPLoadingContext>());
return adopt_nonnull_own_or_enomem(new (nothrow) WebPImageDecoderPlugin(data, move(context)));
}
bool WebPImageDecoderPlugin::is_animated()
{
if (m_context->state == WebPLoadingContext::State::Error)
return false;
if (m_context->state < WebPLoadingContext::State::FirstChunkDecoded) {
if (decode_webp_first_chunk(*m_context).is_error())
return false;
}
return m_context->first_chunk->type == FourCC("VP8X") && m_context->vp8x_header.has_animation;
}
size_t WebPImageDecoderPlugin::loop_count()
{
if (!is_animated())
return 0;
if (m_context->state < WebPLoadingContext::State::ChunksDecoded) {
if (decode_webp_chunks(*m_context).is_error())
return 0;
}
auto anim_or_error = decode_webp_chunk_ANIM(*m_context, m_context->animation_header_chunk.value());
    if (anim_or_error.is_error())
        return 0;
return anim_or_error.value().loop_count;
}
size_t WebPImageDecoderPlugin::frame_count()
{
if (!is_animated())
return 1;
if (m_context->state < WebPLoadingContext::State::ChunksDecoded) {
if (decode_webp_chunks(*m_context).is_error())
return 1;
}
return m_context->animation_frame_chunks.size();
}
ErrorOr<ImageFrameDescriptor> WebPImageDecoderPlugin::frame(size_t index)
{
if (index >= frame_count())
return Error::from_string_literal("WebPImageDecoderPlugin: Invalid frame index");
return Error::from_string_literal("WebPImageDecoderPlugin: decoding not yet implemented");
}
ErrorOr<Optional<ReadonlyBytes>> WebPImageDecoderPlugin::icc_data()
{
TRY(decode_webp_chunks(*m_context));
// FIXME: "If this chunk is not present, sRGB SHOULD be assumed."
return m_context->iccp_chunk.map([](auto iccp_chunk) { return iccp_chunk.data; });
}
}
template<>
struct AK::Formatter<Gfx::FourCC> : StandardFormatter {
ErrorOr<void> format(FormatBuilder& builder, Gfx::FourCC const& four_cc)
{
TRY(builder.put_padding('\'', 1));
TRY(builder.put_padding(four_cc.cc[0], 1));
TRY(builder.put_padding(four_cc.cc[1], 1));
TRY(builder.put_padding(four_cc.cc[2], 1));
TRY(builder.put_padding(four_cc.cc[3], 1));
TRY(builder.put_padding('\'', 1));
return {};
}
};

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2023, Nico Weber <thakis@chromium.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGfx/ImageFormats/ImageDecoder.h>
namespace Gfx {
struct WebPLoadingContext;
class WebPImageDecoderPlugin final : public ImageDecoderPlugin {
public:
static bool sniff(ReadonlyBytes);
static ErrorOr<NonnullOwnPtr<ImageDecoderPlugin>> create(ReadonlyBytes);
virtual ~WebPImageDecoderPlugin() override;
virtual IntSize size() override;
virtual void set_volatile() override;
[[nodiscard]] virtual bool set_nonvolatile(bool& was_purged) override;
virtual bool initialize() override;
virtual bool is_animated() override;
virtual size_t loop_count() override;
virtual size_t frame_count() override;
virtual ErrorOr<ImageFrameDescriptor> frame(size_t index) override;
virtual ErrorOr<Optional<ReadonlyBytes>> icc_data() override;
private:
WebPImageDecoderPlugin(ReadonlyBytes, OwnPtr<WebPLoadingContext>);
OwnPtr<WebPLoadingContext> m_context;
};
}