Compare commits

16 Commits (SHA1):

- bf1de9f8f0
- 82d5e0f8a4
- bbf945137a
- 54ed1dda27
- 34a9ec1b06
- a6ec69d276
- 02cbf3889f
- 6cf05cf305
- 938b8126b5
- d36c191351
- 1acb5a6177
- f2ccbf1724
- 5825c83235
- b742cb7cc7
- 2e6eeebdfc
- db0b5d571a
@@ -5,7 +5,7 @@ project(KImageFormats)

set (CMAKE_CXX_STANDARD 14)

include(FeatureSummary)
-find_package(ECM 5.75.0 NO_MODULE)
+find_package(ECM 5.78.0 NO_MODULE)
set_package_properties(ECM PROPERTIES TYPE REQUIRED DESCRIPTION "Extra CMake Modules." URL "https://commits.kde.org/extra-cmake-modules")
feature_summary(WHAT REQUIRED_PACKAGES_NOT_FOUND FATAL_ON_MISSING_REQUIRED_PACKAGES)

@@ -19,7 +19,7 @@ include(KDECMakeSettings)

include(CheckIncludeFiles)

-set(REQUIRED_QT_VERSION 5.12.0)
+set(REQUIRED_QT_VERSION 5.14.0)
find_package(Qt5Gui ${REQUIRED_QT_VERSION} REQUIRED NO_MODULE)

find_package(KF5Archive)

@@ -47,11 +47,18 @@ set_package_properties(OpenEXR PROPERTIES
    TYPE OPTIONAL
    PURPOSE "Required for the QImage plugin for OpenEXR images"
)

+find_package(libavif 0.8.2 CONFIG)
+set_package_properties(libavif PROPERTIES
+    TYPE OPTIONAL
+    PURPOSE "Required for the QImage plugin for AVIF images"
+)
+
add_definitions(-DQT_NO_FOREACH)
# 050d00 (5.13) triggers a BIC in qimageiohandler.h, in Qt 5.13, so do not enable that until we can require 5.14
# https://codereview.qt-project.org/c/qt/qtbase/+/279215
-add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050c00)
-add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054900)
+add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050e00)
+add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054B00)
add_subdirectory(src)
if (BUILD_TESTING)
    add_subdirectory(autotests)
@@ -20,6 +20,7 @@ The following image formats have read-only support:

 The following image formats have read and write support:

+- AV1 Image File Format (AVIF)
 - Encapsulated PostScript (eps)
 - Personal Computer Exchange (pcx)
 - SGI images (rgb, rgba, sgi, bw)
@@ -70,6 +70,12 @@ if (KF5Archive_FOUND)
    )
endif()

+if (TARGET avif)
+    kimageformats_read_tests(
+        avif
+    )
+endif()
+
# Allow some fuzziness when reading this formats, to allow for
# rounding errors (eg: in alpha blending).
kimageformats_read_tests(FUZZ 1
New binary test files:

- autotests/read/avif/bw.avif (629 B)
- autotests/read/avif/bw.png (743 B)
- autotests/read/avif/bwa.avif (823 B)
- autotests/read/avif/bwa.png (574 B)
- autotests/read/avif/rgb.avif (1.5 KiB)
- autotests/read/avif/rgb.png (1.0 KiB)
- autotests/read/avif/rgba.avif (2.2 KiB)
- autotests/read/avif/rgba.png (1.2 KiB)
- autotests/read/psd/16bit-rle.png (78 KiB)
- autotests/read/psd/16bit-rle.psd
- autotests/read/psd/argb16-raw-affinityphoto-1.8.5.png (983 B)
- autotests/read/psd/argb16-raw-affinityphoto-1.8.5.psd
- autotests/read/psd/rgb16-raw-affinityphoto-1.8.5.png (827 B)
- autotests/read/psd/rgb16-raw-affinityphoto-1.8.5.psd
@@ -36,14 +36,17 @@ static void writeImageData(const char *name, const QString &filename, const QIma
    }
}

-// allow each byte to be different by up to 1, to allow for rounding errors
+template<class Trait>
static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
{
+    Q_ASSERT(im1.format() == im2.format());
+    Q_ASSERT(im1.depth() == 24 || im1.depth() == 32 || im1.depth() == 64);
+
    const int height = im1.height();
    const int width = im1.width();
    for (int i = 0; i < height; ++i) {
-        const uchar *line1 = im1.scanLine(i);
-        const uchar *line2 = im2.scanLine(i);
+        const Trait *line1 = reinterpret_cast<const Trait*>(im1.scanLine(i));
+        const Trait *line2 = reinterpret_cast<const Trait*>(im2.scanLine(i));
        for (int j = 0; j < width; ++j) {
            if (line1[j] > line2[j]) {
                if (line1[j] - line2[j] > fuzziness)

@@ -57,6 +60,30 @@ static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
    return true;
}

+// allow each byte to be different by up to 1, to allow for rounding errors
+static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
+{
+    return (im1.depth() == 64) ? fuzzyeq<quint16>(im1, im2, fuzziness)
+                               : fuzzyeq<quint8>(im1, im2, fuzziness);
+}
+
+// Returns the original format if we support it, or the format
+// we prefer to use for `fuzzyeq()`.
+// We only support formats with 8-bit or 16-bit channels.
+// If that changes, don't forget to update `fuzzyeq()` too.
+static QImage::Format preferredFormat(QImage::Format fmt)
+{
+    switch (fmt) {
+    case QImage::Format_RGB32:
+    case QImage::Format_ARGB32:
+    case QImage::Format_RGBX64:
+    case QImage::Format_RGBA64:
+        return fmt;
+    default:
+        return QImage::Format_ARGB32;
+    }
+}
+
int main(int argc, char ** argv)
{
    QCoreApplication app(argc, argv);

@@ -168,19 +195,23 @@ int main(int argc, char ** argv)
                                << expImage.height() << "\n";
            ++failed;
        } else {
-            if (inputImage.format() != QImage::Format_ARGB32) {
+            QImage::Format inputFormat = preferredFormat(inputImage.format());
+            QImage::Format expFormat = preferredFormat(expImage.format());
+            QImage::Format cmpFormat = inputFormat == expFormat ? inputFormat : QImage::Format_ARGB32;
+
+            if (inputImage.format() != cmpFormat) {
                QTextStream(stdout) << "INFO : " << fi.fileName()
                                    << ": converting " << fi.fileName()
                                    << " from " << formatToString(inputImage.format())
-                                    << " to ARGB32\n";
-                inputImage = inputImage.convertToFormat(QImage::Format_ARGB32);
+                                    << " to " << formatToString(cmpFormat) << '\n';
+                inputImage = inputImage.convertToFormat(cmpFormat);
            }
-            if (expImage.format() != QImage::Format_ARGB32) {
+            if (expImage.format() != cmpFormat) {
                QTextStream(stdout) << "INFO : " << fi.fileName()
                                    << ": converting " << expfilename
                                    << " from " << formatToString(expImage.format())
-                                    << " to ARGB32\n";
-                expImage = expImage.convertToFormat(QImage::Format_ARGB32);
+                                    << " to " << formatToString(cmpFormat) << '\n';
+                expImage = expImage.convertToFormat(cmpFormat);
            }
            if (fuzzyeq(inputImage, expImage, fuzziness)) {
                QTextStream(stdout) << "PASS : " << fi.fileName() << "\n";
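A minimal standalone sketch (names hypothetical, not part of the patch) of the comparison-format negotiation introduced above: two 16-bit images are compared in 16-bit lanes via fuzzyeq<quint16>, while a mixed 8/16-bit pair falls back to ARGB32 so both sides share one representation.

#include <QImage>

// Hypothetical mirror of the test's format negotiation.
static QImage::Format comparisonFormat(QImage::Format a, QImage::Format b)
{
    const auto preferred = [](QImage::Format f) {
        switch (f) {
        case QImage::Format_RGB32:
        case QImage::Format_ARGB32:
        case QImage::Format_RGBX64:
        case QImage::Format_RGBA64:
            return f;                      // already a supported comparison format
        default:
            return QImage::Format_ARGB32;  // everything else is compared as 8-bit ARGB
        }
    };
    const QImage::Format fa = preferred(a);
    const QImage::Format fb = preferred(b);
    return fa == fb ? fa : QImage::Format_ARGB32;
}

int main()
{
    // RGBA64 vs RGBA64 stays 16-bit; RGBA64 vs RGB888 drops to ARGB32.
    const bool ok = comparisonFormat(QImage::Format_RGBA64, QImage::Format_RGBA64) == QImage::Format_RGBA64
                 && comparisonFormat(QImage::Format_RGBA64, QImage::Format_RGB888) == QImage::Format_ARGB32;
    return ok ? 0 : 1;
}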
@@ -24,6 +24,14 @@ endfunction()

##################################

+if (TARGET avif)
+    kimageformats_add_plugin(kimg_avif JSON "avif.json" SOURCES "avif.cpp")
+    target_link_libraries(kimg_avif "avif")
+    install(FILES avif.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
+endif()
+
+##################################
+
install(FILES dds-qt.desktop RENAME dds.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)

##################################
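Once kimg_avif is installed into the imageformats plugin directory, applications never reference the plugin directly; QImageReader selects it by format key or content. A minimal usage sketch, assuming the plugin is installed and a test.avif file exists (file name hypothetical):

#include <QGuiApplication>
#include <QImage>
#include <QImageReader>
#include <QDebug>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv); // image-format plugins are loaded from the application's plugin paths

    // The new plugin shows up in the global format list.
    qDebug() << "avif available:" << QImageReader::supportedImageFormats().contains(QByteArrayLiteral("avif"));

    QImageReader reader(QStringLiteral("test.avif")); // hypothetical input file
    const QImage image = reader.read();
    if (image.isNull()) {
        qDebug() << "read failed:" << reader.errorString();
        return 1;
    }
    qDebug() << "decoded" << image.size() << "as" << image.format();
    return 0;
}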
src/imageformats/avif.cpp (new file, 959 lines)
@@ -0,0 +1,959 @@
/*
    AV1 Image File Format (AVIF) support for QImage.

    SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>

    SPDX-License-Identifier: BSD-2-Clause
*/

#include <QtGlobal>
#include <QThread>

#include <QColorSpace>

#include "avif_p.h"


QAVIFHandler::QAVIFHandler() :
    m_parseState(ParseAvifNotParsed),
    m_quality(52),
    m_container_width(0),
    m_container_height(0),
    m_rawAvifData(AVIF_DATA_EMPTY),
    m_decoder(nullptr),
    m_must_jump_to_next_image(false)
{
}

QAVIFHandler::~QAVIFHandler()
{
    if (m_decoder) {
        avifDecoderDestroy(m_decoder);
    }
}

bool QAVIFHandler::canRead() const
{
    if (m_parseState == ParseAvifNotParsed && !canRead(device())) {
        return false;
    }

    if (m_parseState != ParseAvifError) {
        setFormat("avif");
        return true;
    }
    return false;
}

bool QAVIFHandler::canRead(QIODevice *device)
{
    if (!device) {
        return false;
    }
    QByteArray header = device->peek(144);
    if (header.size() < 12) {
        return false;
    }

    avifROData input;
    input.data = (const uint8_t *) header.constData();
    input.size = header.size();

    if (avifPeekCompatibleFileType(&input)) {
        return true;
    }
    return false;
}

bool QAVIFHandler::ensureParsed() const
{
    if (m_parseState == ParseAvifSuccess) {
        return true;
    }
    if (m_parseState == ParseAvifError) {
        return false;
    }

    QAVIFHandler *that = const_cast<QAVIFHandler *>(this);

    return that->ensureDecoder();
}

bool QAVIFHandler::ensureDecoder()
{
    if (m_decoder) {
        return true;
    }

    m_rawData = device()->readAll();

    m_rawAvifData.data = (const uint8_t *) m_rawData.constData();
    m_rawAvifData.size = m_rawData.size();

    if (avifPeekCompatibleFileType(&m_rawAvifData) == AVIF_FALSE) {
        m_parseState = ParseAvifError;
        return false;
    }


    m_decoder = avifDecoderCreate();

    avifResult decodeResult;

    decodeResult = avifDecoderSetIOMemory(m_decoder, m_rawAvifData.data, m_rawAvifData.size);
    if (decodeResult != AVIF_RESULT_OK) {
        qWarning("ERROR: avifDecoderSetIOMemory failed: %s\n", avifResultToString(decodeResult));

        avifDecoderDestroy(m_decoder);
        m_decoder = nullptr;
        m_parseState = ParseAvifError;
        return false;
    }

    decodeResult = avifDecoderParse(m_decoder);
    if (decodeResult != AVIF_RESULT_OK) {
        qWarning("ERROR: Failed to parse input: %s\n", avifResultToString(decodeResult));

        avifDecoderDestroy(m_decoder);
        m_decoder = nullptr;
        m_parseState = ParseAvifError;
        return false;
    }

    decodeResult = avifDecoderNextImage(m_decoder);

    if (decodeResult == AVIF_RESULT_OK) {

        m_container_width = m_decoder->image->width;
        m_container_height = m_decoder->image->height;

        if ((m_container_width > 32768) || (m_container_height > 32768)) {
            qWarning("AVIF image (%dx%d) is too large!", m_container_width, m_container_height);
            m_parseState = ParseAvifError;
            return false;
        }

        if ((m_container_width == 0) || (m_container_height == 0)) {
            qWarning("Empty image, nothing to decode");
            m_parseState = ParseAvifError;
            return false;
        }

        m_parseState = ParseAvifSuccess;
        if (decode_one_frame()) {
            return true;
        } else {
            m_parseState = ParseAvifError;
            return false;
        }
    } else {
        qWarning("ERROR: Failed to decode image: %s\n", avifResultToString(decodeResult));
    }

    avifDecoderDestroy(m_decoder);
    m_decoder = nullptr;
    m_parseState = ParseAvifError;
    return false;
}

bool QAVIFHandler::decode_one_frame()
{
    if (!ensureParsed()) {
        return false;
    }

    bool loadalpha;

    if (m_decoder->image->alphaPlane) {
        loadalpha = true;
    } else {
        loadalpha = false;
    }

    QImage::Format resultformat;

    if (m_decoder->image->depth > 8) {
        if (loadalpha) {
            resultformat = QImage::Format_RGBA64;
        } else {
            resultformat = QImage::Format_RGBX64;
        }
    } else {
        if (loadalpha) {
            resultformat = QImage::Format_RGBA8888;
        } else {
            resultformat = QImage::Format_RGB888;
        }
    }
    QImage result(m_decoder->image->width, m_decoder->image->height, resultformat);

    if (result.isNull()) {
        qWarning("Memory cannot be allocated");
        return false;
    }

    if (m_decoder->image->icc.data && (m_decoder->image->icc.size > 0)) {
        result.setColorSpace(QColorSpace::fromIccProfile(QByteArray::fromRawData((const char *) m_decoder->image->icc.data, (int) m_decoder->image->icc.size)));
        if (! result.colorSpace().isValid()) {
            qWarning("Invalid QColorSpace created from ICC!\n");
        }
    } else {

        avifColorPrimaries primaries_to_load;
        avifTransferCharacteristics trc_to_load;

        if ((m_decoder->image->colorPrimaries == 2 /* AVIF_COLOR_PRIMARIES_UNSPECIFIED */) ||
                (m_decoder->image->colorPrimaries == 0 /* AVIF_COLOR_PRIMARIES_UNKNOWN */)) {
            primaries_to_load = (avifColorPrimaries) 1; // AVIF_COLOR_PRIMARIES_BT709
        } else {
            primaries_to_load = m_decoder->image->colorPrimaries;
        }
        if ((m_decoder->image->transferCharacteristics == 2 /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */) ||
                (m_decoder->image->transferCharacteristics == 0 /* AVIF_TRANSFER_CHARACTERISTICS_UNKNOWN */)) {
            trc_to_load = (avifTransferCharacteristics) 13; // AVIF_TRANSFER_CHARACTERISTICS_SRGB
        } else {
            trc_to_load = m_decoder->image->transferCharacteristics;
        }

        float prim[8]; // outPrimaries: rX, rY, gX, gY, bX, bY, wX, wY
        avifColorPrimariesGetValues(primaries_to_load, prim);

        QPointF redPoint(prim[0], prim[1]);
        QPointF greenPoint(prim[2], prim[3]);
        QPointF bluePoint(prim[4], prim[5]);
        QPointF whitePoint(prim[6], prim[7]);


        QColorSpace::TransferFunction q_trc = QColorSpace::TransferFunction::Custom;
        float q_trc_gamma = 0.0f;

        switch (trc_to_load) {
        /* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
        case 4:
            q_trc = QColorSpace::TransferFunction::Gamma;
            q_trc_gamma = 2.2f;
            break;
        /* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
        case 5:
            q_trc = QColorSpace::TransferFunction::Gamma;
            q_trc_gamma = 2.8f;
            break;
        /* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
        case 8:
            q_trc = QColorSpace::TransferFunction::Linear;
            break;
        /* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
        case 13:
            q_trc = QColorSpace::TransferFunction::SRgb;
            break;
        default:
            qWarning("CICP colorPrimaries: %d, transferCharacteristics: %d\nThe colorspace is unsupported by this plug-in yet.",
                     m_decoder->image->colorPrimaries, m_decoder->image->transferCharacteristics);
            q_trc = QColorSpace::TransferFunction::SRgb;
            break;
        }

        if (q_trc != QColorSpace::TransferFunction::Custom) { //we create new colorspace using Qt
            switch (primaries_to_load) {
            /* AVIF_COLOR_PRIMARIES_BT709 */
            case 1:
                result.setColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, q_trc, q_trc_gamma));
                break;
            /* AVIF_COLOR_PRIMARIES_SMPTE432 */
            case 12:
                result.setColorSpace(QColorSpace(QColorSpace::Primaries::DciP3D65, q_trc, q_trc_gamma));
                break;
            default:
                result.setColorSpace(QColorSpace(whitePoint, redPoint, greenPoint, bluePoint, q_trc, q_trc_gamma));
                break;
            }
        }

        if (! result.colorSpace().isValid()) {
            qWarning("Invalid QColorSpace created from NCLX/CICP!\n");
        }
    }

    avifRGBImage rgb;
    avifRGBImageSetDefaults(&rgb, m_decoder->image);

    if (m_decoder->image->depth > 8) {
        rgb.depth = 16;
        rgb.format = AVIF_RGB_FORMAT_RGBA;

        if (!loadalpha) {
            rgb.ignoreAlpha = AVIF_TRUE;
            result.fill(Qt::black);
            if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
                resultformat = QImage::Format_Grayscale16;
            }
        }
    } else {
        rgb.depth = 8;
        if (loadalpha) {
            rgb.format = AVIF_RGB_FORMAT_RGBA;
            resultformat = QImage::Format_ARGB32;
        } else {
            rgb.format = AVIF_RGB_FORMAT_RGB;

            if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
                resultformat = QImage::Format_Grayscale8;
            } else {
                resultformat = QImage::Format_RGB32;
            }
        }
    }

    rgb.rowBytes = result.bytesPerLine();
    rgb.pixels = result.bits();

    avifResult res = avifImageYUVToRGB(m_decoder->image, &rgb);
    if (res != AVIF_RESULT_OK) {
        qWarning("ERROR in avifImageYUVToRGB: %s\n", avifResultToString(res));
        return false;
    }

    if (m_decoder->image->transformFlags & AVIF_TRANSFORM_CLAP) {
        if ((m_decoder->image->clap.widthD > 0) && (m_decoder->image->clap.heightD > 0) &&
                (m_decoder->image->clap.horizOffD > 0) && (m_decoder->image->clap.vertOffD > 0)) {
            int new_width, new_height, offx, offy;

            new_width = (int)((double)(m_decoder->image->clap.widthN) / (m_decoder->image->clap.widthD) + 0.5);
            if (new_width > result.width()) {
                new_width = result.width();
            }

            new_height = (int)((double)(m_decoder->image->clap.heightN) / (m_decoder->image->clap.heightD) + 0.5);
            if (new_height > result.height()) {
                new_height = result.height();
            }

            if (new_width > 0 && new_height > 0) {

                offx = ((double)((int32_t) m_decoder->image->clap.horizOffN)) / (m_decoder->image->clap.horizOffD) +
                       (result.width() - new_width) / 2.0 + 0.5;
                if (offx < 0) {
                    offx = 0;
                } else if (offx > (result.width() - new_width)) {
                    offx = result.width() - new_width;
                }

                offy = ((double)((int32_t) m_decoder->image->clap.vertOffN)) / (m_decoder->image->clap.vertOffD) +
                       (result.height() - new_height) / 2.0 + 0.5;
                if (offy < 0) {
                    offy = 0;
                } else if (offy > (result.height() - new_height)) {
                    offy = result.height() - new_height;
                }

                result = result.copy(offx, offy, new_width, new_height);
            }
        }

        else { //Zero values, we need to avoid 0 divide.
            qWarning("ERROR: Wrong values in avifCleanApertureBox\n");
        }
    }

    if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IROT) {
        QTransform transform;
        switch (m_decoder->image->irot.angle) {
        case 1:
            transform.rotate(-90);
            result = result.transformed(transform);
            break;
        case 2:
            transform.rotate(180);
            result = result.transformed(transform);
            break;
        case 3:
            transform.rotate(90);
            result = result.transformed(transform);
            break;
        }
    }

    if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IMIR) {
        switch (m_decoder->image->imir.axis) {
        case 0: //vertical
            result = result.mirrored(false, true);
            break;
        case 1: //horizontal
            result = result.mirrored(true, false);
            break;
        }
    }

    if (resultformat == result.format()) {
        m_current_image = result;
    } else {
        m_current_image = result.convertToFormat(resultformat);
    }

    m_must_jump_to_next_image = false;
    return true;
}

bool QAVIFHandler::read(QImage *image)
{
    if (!ensureParsed()) {
        return false;
    }

    if (m_must_jump_to_next_image) {
        jumpToNextImage();
    }

    *image = m_current_image;
    if (imageCount() >= 2) {
        m_must_jump_to_next_image = true;
    }
    return true;
}

bool QAVIFHandler::write(const QImage &image)
{
    if (image.format() == QImage::Format_Invalid) {
        qWarning("No image data to save");
        return false;
    }

    if ((image.width() > 32768) || (image.height() > 32768)) {
        qWarning("Image is too large");
        return false;
    }

    int maxQuantizer = AVIF_QUANTIZER_WORST_QUALITY * (100 - qBound(0, m_quality, 100)) / 100;
    int minQuantizer = 0;
    int maxQuantizerAlpha = 0;
    avifResult res;

    bool save_grayscale; //true - monochrome, false - colors
    int save_depth; //8 or 10bit per channel
    QImage::Format tmpformat; //format for temporary image

    avifImage *avif = nullptr;

    //grayscale detection
    switch (image.format()) {
    case QImage::Format_Mono:
    case QImage::Format_MonoLSB:
    case QImage::Format_Grayscale8:
    case QImage::Format_Grayscale16:
        save_grayscale = true;
        break;
    case QImage::Format_Indexed8:
        save_grayscale = image.isGrayscale();
        break;
    default:
        save_grayscale = false;
        break;
    }

    //depth detection
    switch (image.format()) {
    case QImage::Format_BGR30:
    case QImage::Format_A2BGR30_Premultiplied:
    case QImage::Format_RGB30:
    case QImage::Format_A2RGB30_Premultiplied:
    case QImage::Format_Grayscale16:
    case QImage::Format_RGBX64:
    case QImage::Format_RGBA64:
    case QImage::Format_RGBA64_Premultiplied:
        save_depth = 10;
        break;
    default:
        if (image.depth() > 32) {
            save_depth = 10;
        } else {
            save_depth = 8;
        }
        break;
    }

    //quality settings
    if (maxQuantizer > 20) {
        minQuantizer = maxQuantizer - 20;
        if (maxQuantizer > 40) { //we decrease quality of alpha channel here
            maxQuantizerAlpha = maxQuantizer - 40;
        }
    }

    if (save_grayscale && !image.hasAlphaChannel()) { //we are going to save grayscale image without alpha channel
        if (save_depth > 8) {
            tmpformat = QImage::Format_Grayscale16;
        } else {
            tmpformat = QImage::Format_Grayscale8;
        }
        QImage tmpgrayimage = image.convertToFormat(tmpformat);

        avif = avifImageCreate(tmpgrayimage.width(), tmpgrayimage.height(), save_depth, AVIF_PIXEL_FORMAT_YUV400);
        avifImageAllocatePlanes(avif, AVIF_PLANES_YUV);

        if (tmpgrayimage.colorSpace().isValid()) {
            avif->colorPrimaries = (avifColorPrimaries)1;
            avif->matrixCoefficients = (avifMatrixCoefficients)1;

            switch (tmpgrayimage.colorSpace().transferFunction()) {
            case QColorSpace::TransferFunction::Linear:
                /* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
                avif->transferCharacteristics = (avifTransferCharacteristics)8;
                break;
            case QColorSpace::TransferFunction::SRgb:
                /* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
                avif->transferCharacteristics = (avifTransferCharacteristics)13;
                break;
            default:
                /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
                break;
            }

        }

        if (save_depth > 8) { // QImage::Format_Grayscale16
            for (int y = 0; y < tmpgrayimage.height(); y++) {
                const uint16_t *src16bit = reinterpret_cast<const uint16_t *>(tmpgrayimage.constScanLine(y));
                uint16_t *dest16bit = reinterpret_cast<uint16_t *>(avif->yuvPlanes[0] + y * avif->yuvRowBytes[0]);
                for (int x = 0; x < tmpgrayimage.width(); x++) {
                    int tmp_pixelval = (int)(((float)(*src16bit) / 65535.0f) * 1023.0f + 0.5f); //downgrade to 10 bits
                    *dest16bit = qBound(0, tmp_pixelval, 1023);
                    dest16bit++;
                    src16bit++;
                }
            }
        } else { // QImage::Format_Grayscale8
            for (int y = 0; y < tmpgrayimage.height(); y++) {
                const uchar *src8bit = tmpgrayimage.constScanLine(y);
                uint8_t *dest8bit = avif->yuvPlanes[0] + y * avif->yuvRowBytes[0];
                for (int x = 0; x < tmpgrayimage.width(); x++) {
                    *dest8bit = *src8bit;
                    dest8bit++;
                    src8bit++;
                }
            }
        }

    } else { //we are going to save color image
        if (save_depth > 8) {
            if (image.hasAlphaChannel()) {
                tmpformat = QImage::Format_RGBA64;
            } else {
                tmpformat = QImage::Format_RGBX64;
            }
        } else { //8bit depth
            if (image.hasAlphaChannel()) {
                tmpformat = QImage::Format_RGBA8888;
            } else {
                tmpformat = QImage::Format_RGB888;
            }
        }

        QImage tmpcolorimage = image.convertToFormat(tmpformat);

        avifPixelFormat pixel_format = AVIF_PIXEL_FORMAT_YUV420;
        if (maxQuantizer < 20) {
            if (maxQuantizer < 10) {
                pixel_format = AVIF_PIXEL_FORMAT_YUV444; //best quality
            } else {
                pixel_format = AVIF_PIXEL_FORMAT_YUV422; //high quality
            }
        }

        avifMatrixCoefficients matrix_to_save = (avifMatrixCoefficients)1; //default for Qt 5.12 and 5.13;

        avifColorPrimaries primaries_to_save = (avifColorPrimaries)2;
        avifTransferCharacteristics transfer_to_save = (avifTransferCharacteristics)2;

        if (tmpcolorimage.colorSpace().isValid()) {
            switch (tmpcolorimage.colorSpace().primaries()) {
            case QColorSpace::Primaries::SRgb:
                /* AVIF_COLOR_PRIMARIES_BT709 */
                primaries_to_save = (avifColorPrimaries)1;
                /* AVIF_MATRIX_COEFFICIENTS_BT709 */
                matrix_to_save = (avifMatrixCoefficients)1;
                break;
            case QColorSpace::Primaries::DciP3D65:
                /* AVIF_NCLX_COLOUR_PRIMARIES_P3, AVIF_NCLX_COLOUR_PRIMARIES_SMPTE432 */
                primaries_to_save = (avifColorPrimaries)12;
                /* AVIF_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL */
                matrix_to_save = (avifMatrixCoefficients)12;
                break;
            default:
                /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
                primaries_to_save = (avifColorPrimaries)2;
                /* AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED */
                matrix_to_save = (avifMatrixCoefficients)2;
                break;
            }

            switch (tmpcolorimage.colorSpace().transferFunction()) {
            case QColorSpace::TransferFunction::Linear:
                /* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
                transfer_to_save = (avifTransferCharacteristics)8;
                break;
            case QColorSpace::TransferFunction::Gamma:
                if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.2f) < 0.1f) {
                    /* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
                    transfer_to_save = (avifTransferCharacteristics)4;
                } else if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.8f) < 0.1f) {
                    /* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
                    transfer_to_save = (avifTransferCharacteristics)5;
                } else {
                    /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
                    transfer_to_save = (avifTransferCharacteristics)2;
                }
                break;
            case QColorSpace::TransferFunction::SRgb:
                /* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
                transfer_to_save = (avifTransferCharacteristics)13;
                break;
            default:
                /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
                transfer_to_save = (avifTransferCharacteristics)2;
                break;
            }

            //in case primaries or trc were not identified
            if ((primaries_to_save == 2) ||
                    (transfer_to_save == 2)) {

                //upgrade image to higher bit depth
                if (save_depth == 8) {
                    save_depth = 10;
                    if (tmpcolorimage.hasAlphaChannel()) {
                        tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBA64);
                    } else {
                        tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBX64);
                    }
                }

                if ((primaries_to_save == 2) &&
                        (transfer_to_save != 2)) { //other primaries but known trc
                    primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
                    matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709

                    switch (transfer_to_save) {
                    case 8: // AVIF_TRANSFER_CHARACTERISTICS_LINEAR
                        tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::Linear));
                        break;
                    case 4: // AVIF_TRANSFER_CHARACTERISTICS_BT470M
                        tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.2f));
                        break;
                    case 5: // AVIF_TRANSFER_CHARACTERISTICS_BT470BG
                        tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.8f));
                        break;
                    default: // AVIF_TRANSFER_CHARACTERISTICS_SRGB + any other
                        tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
                        transfer_to_save = (avifTransferCharacteristics)13;
                        break;
                    }
                } else if ((primaries_to_save != 2) &&
                           (transfer_to_save == 2)) { //recognized primaries but other trc
                    transfer_to_save = (avifTransferCharacteristics)13;
                    tmpcolorimage.convertToColorSpace(tmpcolorimage.colorSpace().withTransferFunction(QColorSpace::TransferFunction::SRgb));
                } else { //unrecognized profile
                    primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
                    transfer_to_save = (avifTransferCharacteristics)13;
                    matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709
                    tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
                }
            }
        }
        avif = avifImageCreate(tmpcolorimage.width(), tmpcolorimage.height(), save_depth, pixel_format);
        avif->matrixCoefficients = matrix_to_save;

        avif->colorPrimaries = primaries_to_save;
        avif->transferCharacteristics = transfer_to_save;

        avifRGBImage rgb;
        avifRGBImageSetDefaults(&rgb, avif);
        rgb.rowBytes = tmpcolorimage.bytesPerLine();
        rgb.pixels = const_cast<uint8_t *>(tmpcolorimage.constBits());

        if (save_depth > 8) { //10bit depth
            rgb.depth = 16;

            if (tmpcolorimage.hasAlphaChannel()) {
                avif->alphaRange = AVIF_RANGE_FULL;
            } else {
                rgb.ignoreAlpha = AVIF_TRUE;
            }

            rgb.format = AVIF_RGB_FORMAT_RGBA;
        } else { //8bit depth
            rgb.depth = 8;

            if (tmpcolorimage.hasAlphaChannel()) {
                rgb.format = AVIF_RGB_FORMAT_RGBA;
                avif->alphaRange = AVIF_RANGE_FULL;
            } else {
                rgb.format = AVIF_RGB_FORMAT_RGB;
            }
        }

        res = avifImageRGBToYUV(avif, &rgb);
        if (res != AVIF_RESULT_OK) {
            qWarning("ERROR in avifImageRGBToYUV: %s\n", avifResultToString(res));
            return false;
        }
    }

    avifRWData raw = AVIF_DATA_EMPTY;
    avifEncoder *encoder = avifEncoderCreate();
    encoder->maxThreads = qBound(1, QThread::idealThreadCount(), 64);
    encoder->minQuantizer = minQuantizer;
    encoder->maxQuantizer = maxQuantizer;

    if (image.hasAlphaChannel()) {
        encoder->minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS;
        encoder->maxQuantizerAlpha = maxQuantizerAlpha;
    }

    encoder->speed = 8;

    res = avifEncoderWrite(encoder, avif, &raw);
    avifEncoderDestroy(encoder);
    avifImageDestroy(avif);

    if (res == AVIF_RESULT_OK) {
        qint64 status = device()->write((const char *)raw.data, raw.size);
        avifRWDataFree(&raw);

        if (status > 0) {
            return true;
        } else if (status == -1) {
            qWarning("Write error: %s\n", qUtf8Printable(device()->errorString()));
            return false;
        }
    } else {
        qWarning("ERROR: Failed to encode: %s\n", avifResultToString(res));
    }

    return false;
}


QVariant QAVIFHandler::option(ImageOption option) const
{
    if (!supportsOption(option) || !ensureParsed()) {
        return QVariant();
    }

    switch (option) {
    case Quality:
        return m_quality;
    case Size:
        return m_current_image.size();
    case Animation:
        if (imageCount() >= 2) {
            return true;
        } else {
            return false;
        }
    default:
        return QVariant();
    }
}

void QAVIFHandler::setOption(ImageOption option, const QVariant &value)
{
    switch (option) {
    case Quality:
        m_quality = value.toInt();
        if (m_quality > 100) {
            m_quality = 100;
        } else if (m_quality < 0) {
            m_quality = 52;
        }
        return;
    default:
        break;
    }
    QImageIOHandler::setOption(option, value);
}

bool QAVIFHandler::supportsOption(ImageOption option) const
{
    return option == Quality
           || option == Size
           || option == Animation;
}

int QAVIFHandler::imageCount() const
{
    if (!ensureParsed()) {
        return 0;
    }

    if (m_decoder->imageCount >= 1) {
        return m_decoder->imageCount;
    }
    return 0;
}

int QAVIFHandler::currentImageNumber() const
{
    if (m_parseState == ParseAvifNotParsed) {
        return -1;
    }

    if (m_parseState == ParseAvifError || !m_decoder) {
        return 0;
    }

    return m_decoder->imageIndex;
}

bool QAVIFHandler::jumpToNextImage()
{
    if (!ensureParsed()) {
        return false;
    }

    if (m_decoder->imageCount < 2) {
        return true;
    }

    if (m_decoder->imageIndex >= m_decoder->imageCount - 1) { //start from begining
        avifDecoderReset(m_decoder);
    }

    avifResult decodeResult = avifDecoderNextImage(m_decoder);

    if (decodeResult != AVIF_RESULT_OK) {
        qWarning("ERROR: Failed to decode Next image in sequence: %s\n", avifResultToString(decodeResult));
        m_parseState = ParseAvifError;
        return false;
    }

    if ((m_container_width != m_decoder->image->width) ||
            (m_container_height != m_decoder->image->height)) {
        qWarning("Decoded image sequence size (%dx%d) do not match first image size (%dx%d)!\n",
                 m_decoder->image->width, m_decoder->image->height,
                 m_container_width, m_container_height);

        m_parseState = ParseAvifError;
        return false;
    }

    if (decode_one_frame()) {
        return true;
    } else {
        m_parseState = ParseAvifError;
        return false;
    }

}

bool QAVIFHandler::jumpToImage(int imageNumber)
{
    if (!ensureParsed()) {
        return false;
    }

    if (m_decoder->imageCount < 2) { //not an animation
        if (imageNumber == 0) {
            return true;
        } else {
            return false;
        }
    }

    if (imageNumber < 0 || imageNumber >= m_decoder->imageCount) { //wrong index
        return false;
    }

    if (imageNumber == m_decoder->imageCount) { // we are here already
        return true;
    }

    avifResult decodeResult = avifDecoderNthImage(m_decoder, imageNumber);

    if (decodeResult != AVIF_RESULT_OK) {
        qWarning("ERROR: Failed to decode %d th Image in sequence: %s\n", imageNumber, avifResultToString(decodeResult));
        m_parseState = ParseAvifError;
        return false;
    }

    if ((m_container_width != m_decoder->image->width) ||
            (m_container_height != m_decoder->image->height)) {
        qWarning("Decoded image sequence size (%dx%d) do not match declared container size (%dx%d)!\n",
                 m_decoder->image->width, m_decoder->image->height,
                 m_container_width, m_container_height);

        m_parseState = ParseAvifError;
        return false;
    }

    if (decode_one_frame()) {
        return true;
    } else {
        m_parseState = ParseAvifError;
        return false;
    }
}

int QAVIFHandler::nextImageDelay() const
{
    if (!ensureParsed()) {
        return 0;
    }

    if (m_decoder->imageCount < 2) {
        return 0;
    }

    int delay_ms = 1000.0 * m_decoder->imageTiming.duration;
    if (delay_ms < 1) {
        delay_ms = 1;
    }
    return delay_ms;
}

int QAVIFHandler::loopCount() const
{
    if (!ensureParsed()) {
        return 0;
    }

    if (m_decoder->imageCount < 2) {
        return 0;
    }

    return 1;
}

QImageIOPlugin::Capabilities QAVIFPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
    if (format == "avif") {
        return Capabilities(CanRead | CanWrite);
    }

    if (format == "avifs") {
        return Capabilities(CanRead);
    }

    if (!format.isEmpty()) {
        return {};
    }
    if (!device->isOpen()) {
        return {};
    }

    Capabilities cap;
    if (device->isReadable() && QAVIFHandler::canRead(device)) {
        cap |= CanRead;
    }
    if (device->isWritable()) {
        cap |= CanWrite;
    }
    return cap;
}

QImageIOHandler *QAVIFPlugin::create(QIODevice *device, const QByteArray &format) const
{
    QImageIOHandler *handler = new QAVIFHandler;
    handler->setDevice(device);
    handler->setFormat(format);
    return handler;
}
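A minimal sketch of the write path from the application side, assuming the plugin is installed: QImageWriter's 0-100 quality setting is forwarded to the handler's Quality option and drives the quantizer computation above, and a 16-bit source image is stored with 10-bit depth. The output file name is hypothetical.

#include <QGuiApplication>
#include <QImage>
#include <QImageWriter>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    QImage image(256, 256, QImage::Format_RGBA64); // 16-bit input, saved as 10-bit AVIF
    image.fill(Qt::darkCyan);

    QImageWriter writer(QStringLiteral("out.avif"), QByteArrayLiteral("avif")); // hypothetical output path
    writer.setQuality(85); // higher quality -> lower max quantizer in the handler
    return writer.write(image) ? 0 : 1;
}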
src/imageformats/avif.desktop (new file, 7 lines)
@@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=avif
X-KDE-MimeType=image/avif
X-KDE-Read=true
X-KDE-Write=true
src/imageformats/avif.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
    "Keys": [ "avif", "avifs" ],
    "MimeTypes": [ "image/avif", "image/avif-sequence" ]
}
src/imageformats/avif_p.h (new file, 80 lines)
@@ -0,0 +1,80 @@
/*
    AV1 Image File Format (AVIF) support for QImage.

    SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>

    SPDX-License-Identifier: BSD-2-Clause
*/

#ifndef KIMG_AVIF_P_H
#define KIMG_AVIF_P_H

#include <QImage>
#include <QVariant>
#include <qimageiohandler.h>
#include <QImageIOPlugin>
#include <QByteArray>
#include <avif/avif.h>

class QAVIFHandler : public QImageIOHandler
{
public:
    QAVIFHandler();
    ~QAVIFHandler();

    bool canRead() const override;
    bool read(QImage *image) override;
    bool write(const QImage &image) override;

    static bool canRead(QIODevice *device);

    QVariant option(ImageOption option) const override;
    void setOption(ImageOption option, const QVariant &value) override;
    bool supportsOption(ImageOption option) const override;

    int imageCount() const override;
    int currentImageNumber() const override;
    bool jumpToNextImage() override;
    bool jumpToImage(int imageNumber) override;

    int nextImageDelay() const override;

    int loopCount() const override;
private:
    bool ensureParsed() const;
    bool ensureDecoder();
    bool decode_one_frame();

    enum ParseAvifState {
        ParseAvifError = -1,
        ParseAvifNotParsed = 0,
        ParseAvifSuccess = 1
    };

    ParseAvifState m_parseState;
    int m_quality;

    uint32_t m_container_width;
    uint32_t m_container_height;

    QByteArray m_rawData;
    avifROData m_rawAvifData;

    avifDecoder *m_decoder;
    QImage m_current_image;

    bool m_must_jump_to_next_image;
};

class QAVIFPlugin : public QImageIOPlugin
{
    Q_OBJECT
    Q_PLUGIN_METADATA(IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "avif.json")

public:
    Capabilities capabilities(QIODevice *device, const QByteArray &format) const override;
    QImageIOHandler *create(QIODevice *device, const QByteArray &format = QByteArray()) const override;
};

#endif // KIMG_AVIF_P_H
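The handler exposes image sequences through the standard QImageIOHandler animation hooks (imageCount, nextImageDelay, jumpToNextImage), so an "avifs" sequence is consumed like any other animated format. A minimal sketch, assuming the plugin is installed and an anim.avifs file exists (file name hypothetical):

#include <QGuiApplication>
#include <QImage>
#include <QImageReader>
#include <QDebug>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    QImageReader reader(QStringLiteral("anim.avifs")); // hypothetical animated input
    qDebug() << "animation:" << reader.supportsAnimation() << "frames:" << reader.imageCount();

    for (int i = 0; i < reader.imageCount(); ++i) {
        const QImage frame = reader.read(); // each read() advances to the next frame
        qDebug() << "frame" << i << frame.size() << "delay" << reader.nextImageDelay() << "ms";
    }
    return 0;
}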
@@ -87,7 +87,7 @@ static bool IsSupported(const PSDHeader &header)
    if (header.channel_count > 16) {
        return false;
    }
-    if (header.depth != 8) {
+    if (header.depth != 8 && header.depth != 16) {
        return false;
    }
    if (header.color_mode != CM_RGB) {

@@ -104,11 +104,13 @@ static void skip_section(QDataStream &s)
    s.skipRawData(section_length);
}

-static quint8 readPixel(QDataStream &stream) {
-    quint8 pixel;
+template <class Trait>
+static Trait readPixel(QDataStream &stream) {
+    Trait pixel;
    stream >> pixel;
    return pixel;
}

static QRgb updateRed(QRgb oldPixel, quint8 redPixel) {
    return qRgba(redPixel, qGreen(oldPixel), qBlue(oldPixel), qAlpha(oldPixel));
}

@@ -149,15 +151,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)

    quint32 channel_num = header.channel_count;

-    QImage::Format fmt = QImage::Format_RGB32;
+    QImage::Format fmt = header.depth == 8 ? QImage::Format_RGB32
+                                           : QImage::Format_RGBX64;
    // Clear the image.
    if (channel_num >= 4) {
        // Enable alpha.
-        fmt = QImage::Format_ARGB32;
+        fmt = header.depth == 8 ? QImage::Format_ARGB32
+                                : QImage::Format_RGBA64;

        // Ignore the other channels.
        channel_num = 4;
    }

    img = QImage(header.width, header.height, fmt);
    if (img.isNull()) {
        qWarning() << "Failed to allocate image, invalid dimensions?" << QSize(header.width, header.height);

@@ -166,9 +171,10 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
    img.fill(qRgb(0,0,0));

    const quint32 pixel_count = header.height * header.width;
+    const quint32 channel_size = pixel_count * header.depth / 8;

    // Verify this, as this is used to write into the memory of the QImage
-    if (pixel_count > img.sizeInBytes() / sizeof(QRgb)) {
+    if (pixel_count > img.sizeInBytes() / (header.depth == 8 ? sizeof(QRgb) : sizeof(QRgba64))) {
        qWarning() << "Invalid pixel count!" << pixel_count << "bytes available:" << img.sizeInBytes();
        return false;
    }

@@ -186,6 +192,14 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
        updateAlpha
    };

+    typedef QRgba64(*channelUpdater16)(QRgba64, quint16);
+    static const channelUpdater16 updaters64[4] = {
+        [](QRgba64 oldPixel, quint16 redPixel) {return qRgba64((oldPixel & ~(0xFFFFull << 0)) | (quint64( redPixel) << 0));},
+        [](QRgba64 oldPixel, quint16 greenPixel){return qRgba64((oldPixel & ~(0xFFFFull << 16)) | (quint64(greenPixel) << 16));},
+        [](QRgba64 oldPixel, quint16 bluePixel) {return qRgba64((oldPixel & ~(0xFFFFull << 32)) | (quint64( bluePixel) << 32));},
+        [](QRgba64 oldPixel, quint16 alphaPixel){return qRgba64((oldPixel & ~(0xFFFFull << 48)) | (quint64(alphaPixel) << 48));}
+    };
+
    if (compression) {
        // Skip row lengths.
        int skip_count = header.height * header.channel_count * sizeof(quint16);

@@ -194,9 +208,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
    }

    for (unsigned short channel = 0; channel < channel_num; channel++) {
-        bool success = decodeRLEData(RLEVariant::PackBits, stream,
-                                     image_data, pixel_count,
-                                     &readPixel, updaters[channel]);
+        bool success = false;
+        if (header.depth == 8) {
+            success = decodeRLEData(RLEVariant::PackBits, stream,
+                                    image_data, channel_size,
+                                    &readPixel<quint8>, updaters[channel]);
+        } else if (header.depth == 16) {
+            QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
+            success = decodeRLEData(RLEVariant::PackBits16, stream,
+                                    image_data, channel_size,
+                                    &readPixel<quint8>, updaters64[channel]);
+        }

        if (!success) {
            qDebug() << "decodeRLEData on channel" << channel << "failed";
            return false;

@@ -204,8 +227,15 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
        }
    } else {
        for (unsigned short channel = 0; channel < channel_num; channel++) {
-            for (unsigned i = 0; i < pixel_count; ++i) {
-                image_data[i] = updaters[channel](image_data[i], readPixel(stream));
+            if (header.depth == 8) {
+                for (unsigned i = 0; i < pixel_count; ++i) {
+                    image_data[i] = updaters[channel](image_data[i], readPixel<quint8>(stream));
+                }
+            } else if (header.depth == 16) {
+                QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
+                for (unsigned i = 0; i < pixel_count; ++i) {
+                    image_data[i] = updaters64[channel](image_data[i], readPixel<quint16>(stream));
+                }
+            }
            // make sure we didn't try to read past the end of the stream
            if (stream.status() != QDataStream::Ok) {
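A minimal standalone sketch (helper name hypothetical) of the mask-and-or trick the updaters64 table above relies on, assuming Qt's QRgba64 layout of 16-bit red, green, blue and alpha packed into one quint64: an update clears the channel's 16-bit slot and ORs in the new sample.

#include <QRgba64>
#include <QtGlobal>

// Replace the 16-bit channel stored at bit offset `shift`
// (0 = red, 16 = green, 32 = blue, 48 = alpha).
static QRgba64 setChannel(QRgba64 oldPixel, quint16 sample, unsigned shift)
{
    const quint64 cleared = quint64(oldPixel) & ~(0xFFFFull << shift);
    return qRgba64(cleared | (quint64(sample) << shift));
}

int main()
{
    QRgba64 px = qRgba64(0, 0, 0, 0xFFFF); // opaque black
    px = setChannel(px, 0x8000, 16);       // splice in a mid-grey green sample
    return px.green() == 0x8000 ? 0 : 1;
}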
@@ -24,6 +24,11 @@ enum class RLEVariant {
     * of size 2, 130 of size 3, up to 255 of size 128.
     */
    PackBits,
+    /**
+     * Same as PackBits, but treat unpacked data as
+     * 16-bit integers.
+     */
+    PackBits16,
    /**
     * PIC-style RLE
     *

@@ -67,6 +72,8 @@ static inline bool decodeRLEData(RLEVariant variant,
                                 Func2 updateItem)
{
    unsigned offset = 0; // in dest
+    bool is_msb = true; // only used for 16-bit PackBits, data is big-endian
+    quint16 temp_data = 0;
    while (offset < length) {
        unsigned remaining = length - offset;
        quint8 count1;

@@ -85,7 +92,7 @@ static inline bool decodeRLEData(RLEVariant variant,
                // 2 to 128 repetitions
                length = count1 - 127u;
            }
-        } else if (variant == RLEVariant::PackBits) {
+        } else if (variant == RLEVariant::PackBits || variant == RLEVariant::PackBits16) {
            if (count1 == 128u) {
                // Ignore value 128
                continue;

@@ -102,7 +109,18 @@ static inline bool decodeRLEData(RLEVariant variant,
            }
            auto datum = readData(stream);
            for (unsigned i = offset; i < offset + length; ++i) {
-                dest[i] = updateItem(dest[i], datum);
+                if (variant == RLEVariant::PackBits16) {
+                    if (is_msb) {
+                        temp_data = datum << 8;
+                        is_msb = false;
+                    } else {
+                        temp_data |= datum;
+                        dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
+                        is_msb = true;
+                    }
+                } else {
+                    dest[i] = updateItem(dest[i], datum);
+                }
            }
            offset += length;
        } else {

@@ -114,7 +132,18 @@ static inline bool decodeRLEData(RLEVariant variant,
            }
            for (unsigned i = offset; i < offset + length; ++i) {
                auto datum = readData(stream);
-                dest[i] = updateItem(dest[i], datum);
+                if (variant == RLEVariant::PackBits16) {
+                    if (is_msb) {
+                        temp_data = datum << 8;
+                        is_msb = false;
+                    } else {
+                        temp_data |= datum;
+                        dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
+                        is_msb = true;
+                    }
+                } else {
+                    dest[i] = updateItem(dest[i], datum);
+                }
            }
            offset += length;
        }
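A minimal standalone sketch (helper name hypothetical) of the MSB/LSB pairing that PackBits16 adds: the RLE layer still yields bytes, and every two consecutive decoded bytes are folded big-endian into one 16-bit destination sample, mirroring the is_msb/temp_data bookkeeping above.

#include <QVector>
#include <QtGlobal>

// Fold a big-endian byte stream into 16-bit samples.
static QVector<quint16> foldBigEndianBytes(const QVector<quint8> &bytes)
{
    QVector<quint16> out;
    quint16 temp = 0;
    bool isMsb = true; // PSD stores 16-bit channel data big-endian
    for (const quint8 b : bytes) {
        if (isMsb) {
            temp = quint16(b) << 8; // high byte first
            isMsb = false;
        } else {
            temp |= b;              // low byte completes the sample
            out.append(temp);
            isMsb = true;
        }
    }
    return out;
}

int main()
{
    const QVector<quint16> samples = foldBigEndianBytes({0x12, 0x34, 0xAB, 0xCD});
    return (samples.size() == 2 && samples[0] == 0x1234 && samples[1] == 0xABCD) ? 0 : 1;
}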
@@ -7,56 +7,24 @@
#ifndef FORMAT_ENUM_H
#define FORMAT_ENUM_H

+#include <QMetaEnum>
#include <QImage>

-// Generated from QImage::Format enum
-static const char * qimage_format_enum_names[] = {
-    "Invalid",
-    "Mono",
-    "MonoLSB",
-    "Indexed8",
-    "RGB32",
-    "ARGB32",
-    "ARGB32_Premultiplied",
-    "RGB16",
-    "ARGB8565_Premultiplied",
-    "RGB666",
-    "ARGB6666_Premultiplied",
-    "RGB555",
-    "ARGB8555_Premultiplied",
-    "RGB888",
-    "RGB444",
-    "ARGB4444_Premultiplied",
-    "RGBX8888",
-    "RGBA8888",
-    "RGBA8888_Premultiplied"
-};
-// Never claim there are more than QImage::NImageFormats supported formats.
-// This is future-proofing against the above list being extended.
-static const int qimage_format_enum_names_count =
-    (sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names) > int(QImage::NImageFormats))
-    ? int(QImage::NImageFormats)
-    : (sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names));
-
QImage::Format formatFromString(const QString &str)
{
-    for (int i = 0; i < qimage_format_enum_names_count; ++i) {
-        if (str.compare(QLatin1String(qimage_format_enum_names[i]), Qt::CaseInsensitive) == 0) {
-            return (QImage::Format)(i);
-        }
-    }
-    return QImage::Format_Invalid;
+    const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
+    const QString enumString = QStringLiteral("Format_") + str;
+
+    bool ok;
+    const int res = metaEnum.keyToValue(enumString.toLatin1().constData(), &ok);
+
+    return ok ? static_cast<QImage::Format>(res) : QImage::Format_Invalid;
}

QString formatToString(QImage::Format format)
{
-    int index = int(format);
-    if (index > 0 && index < qimage_format_enum_names_count) {
-        return QLatin1String(qimage_format_enum_names[index]);
-    }
-    return QLatin1String("<unknown:") +
-           QString::number(index) +
-           QLatin1String(">");
+    const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
+    return QString::fromLatin1(metaEnum.valueToKey(format)).remove(QStringLiteral("Format_"));
}

#endif
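A minimal sketch of the QMetaEnum round trip the rewritten helpers rely on: QImage::Format is a registered meta-enum, its keys carry a "Format_" prefix, and unknown names simply fail the lookup instead of mapping to a bogus value.

#include <QImage>
#include <QMetaEnum>
#include <QString>

int main()
{
    const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();

    // value -> string, e.g. QImage::Format_RGBA64 -> "RGBA64"
    QString name = QString::fromLatin1(metaEnum.valueToKey(QImage::Format_RGBA64));
    name.remove(QStringLiteral("Format_"));

    // string -> value, tolerating unknown names
    bool ok = false;
    const int value = metaEnum.keyToValue("Format_RGBA64", &ok);

    return (ok && value == QImage::Format_RGBA64 && name == QLatin1String("RGBA64")) ? 0 : 1;
}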
@@ -67,8 +67,8 @@ int main(int argc, char **argv)
    QTextStream out(stdout);
    out << "QImage formats:\n";
    // skip QImage::Format_Invalid
-    for (int i = 1; i < qimage_format_enum_names_count; ++i) {
-        out << " " << qimage_format_enum_names[i] << '\n';
+    for (int i = 1; i < QImage::NImageFormats; ++i) {
+        out << " " << formatToString(static_cast<QImage::Format>(i)) << '\n';
    }
    return 0;
}