Compare commits

..

24 Commits

Author SHA1 Message Date
8845dd9818 Simplify portion of NCLX color profile code 2021-02-02 09:28:32 +01:00
a4b8295625 [imagedump] Add "list MIME type" (-m) option
Allows listing the supported mime types
2021-01-31 20:49:16 +01:00
134c96fa61 GIT_SILENT Add auto generated .clang-format file to .gitignore 2021-01-30 18:39:11 +01:00
3673874a63 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.79.0 release. 2021-01-29 20:44:46 +00:00
8ad43638ad Fix crash with malformed files
oss-fuzz/29284
2021-01-08 14:02:45 +00:00
c72c9f577b ani: Make sure riffSizeData is of the correct size before doing the quint32_le cast dance
oss-fuzz/29290
2021-01-05 21:52:24 +01:00
bf3f99abf5 Add missing includes 2021-01-03 10:05:53 +01:00
b79d1f222d Add plugin for animated Windows cursors (ANI) 2021-01-03 08:49:10 +00:00
bf1de9f8f0 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.78.0 release. 2021-01-02 12:27:55 +00:00
82d5e0f8a4 Remove ifdefs, we require Qt 5.14 now 2020-12-18 19:34:07 +01:00
bbf945137a GIT_SILENT Upgrade Qt5 version requirement to 5.14.0. 2020-12-18 19:02:54 +01:00
54ed1dda27 Add AVIF to the list of supported formats 2020-12-16 08:36:39 +00:00
34a9ec1b06 Add plugin for AV1 Image File Format (AVIF) 2020-12-13 22:23:27 +00:00
a6ec69d276 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.77.0 release. 2020-12-05 10:09:13 +00:00
02cbf3889f GIT_SILENT Upgrade Qt5 version requirement to 5.13.0. 2020-11-27 00:45:24 +01:00
6cf05cf305 test: don't convert image format if possible 2020-11-10 13:03:37 +08:00
938b8126b5 No longer decrease color depth to 8 for 16 bit uncompressed PSD files 2020-11-10 13:03:37 +08:00
d36c191351 tests: Remove qimage_format_enum_names and just use QMetaEnum 2020-11-09 19:15:36 +00:00
1acb5a6177 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.76.0 release. 2020-11-07 11:39:10 +00:00
f2ccbf1724 Add test case for RLE compressed 16 bpc PSD files. 2020-11-06 15:04:04 +08:00
5825c83235 Add support for RLE-compressed, 16 bits per channel PSD files. 2020-11-06 15:02:29 +08:00
b742cb7cc7 Return unsupported when reading 16bit RLE compressed PSD files 2020-11-01 11:50:48 +08:00
2e6eeebdfc feat: add psd color depth == 16 format support 2020-10-30 21:47:12 +08:00
db0b5d571a GIT_SILENT increase KF_DISABLE_DEPRECATED_BEFORE_AND_AT 2020-10-11 11:27:17 +02:00
37 changed files with 1983 additions and 71 deletions

1
.gitignore vendored

@@ -20,3 +20,4 @@ random_seed
CMakeLists.txt.user*
*.unc-backup*
.cmake/
/.clang-format


@@ -5,7 +5,7 @@ project(KImageFormats)
set (CMAKE_CXX_STANDARD 14)
include(FeatureSummary)
find_package(ECM 5.75.0 NO_MODULE)
find_package(ECM 5.79.0 NO_MODULE)
set_package_properties(ECM PROPERTIES TYPE REQUIRED DESCRIPTION "Extra CMake Modules." URL "https://commits.kde.org/extra-cmake-modules")
feature_summary(WHAT REQUIRED_PACKAGES_NOT_FOUND FATAL_ON_MISSING_REQUIRED_PACKAGES)
@@ -19,7 +19,7 @@ include(KDECMakeSettings)
include(CheckIncludeFiles)
set(REQUIRED_QT_VERSION 5.12.0)
set(REQUIRED_QT_VERSION 5.14.0)
find_package(Qt5Gui ${REQUIRED_QT_VERSION} REQUIRED NO_MODULE)
find_package(KF5Archive)
@@ -47,11 +47,18 @@ set_package_properties(OpenEXR PROPERTIES
TYPE OPTIONAL
PURPOSE "Required for the QImage plugin for OpenEXR images"
)
find_package(libavif 0.8.2 CONFIG)
set_package_properties(libavif PROPERTIES
TYPE OPTIONAL
PURPOSE "Required for the QImage plugin for AVIF images"
)
add_definitions(-DQT_NO_FOREACH)
# 050d00 (5.13) triggers a BIC in qimageiohandler.h, in Qt 5.13, so do not enable that until we can require 5.14
# https://codereview.qt-project.org/c/qt/qtbase/+/279215
add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050c00)
add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054900)
add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050e00)
add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054B00)
add_subdirectory(src)
if (BUILD_TESTING)
add_subdirectory(autotests)


@@ -13,6 +13,7 @@ image formats.
The following image formats have read-only support:
- Animated Windows cursors (ani)
- Gimp (xcf)
- OpenEXR (exr)
- Photoshop documents (psd)
@@ -20,6 +21,7 @@ The following image formats have read-only support:
The following image formats have read and write support:
- AV1 Image File Format (AVIF)
- Encapsulated PostScript (eps)
- Personal Computer Exchange (pcx)
- SGI images (rgb, rgba, sgi, bw)
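
The new entries above can be exercised from any Qt application through the stock QImageReader API once the plugins are installed. A minimal sketch (not part of this changeset; "sample.avif" is a placeholder file name):

#include <QGuiApplication>
#include <QImage>
#include <QImageReader>
#include <QDebug>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    // The plugin keys added by this change ("ani", "avif", "avifs") show up here
    // once the plugins are installed in Qt's imageformats plugin directory.
    qDebug() << QImageReader::supportedImageFormats();

    // Reading an AVIF (or ANI) file then works like any other supported format.
    QImageReader reader(QStringLiteral("sample.avif"));
    QImage image;
    if (reader.read(&image))
        qDebug() << "decoded" << image.size() << image.format();
    else
        qDebug() << "failed:" << reader.errorString();
    return 0;
}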


@@ -70,6 +70,12 @@ if (KF5Archive_FOUND)
)
endif()
if (TARGET avif)
kimageformats_read_tests(
avif
)
endif()
# Allow some fuzziness when reading these formats, to allow for
# rounding errors (eg: in alpha blending).
kimageformats_read_tests(FUZZ 1
@@ -110,3 +116,8 @@ add_executable(pictest pictest.cpp)
target_link_libraries(pictest Qt5::Gui Qt5::Test)
ecm_mark_as_test(pictest)
add_test(NAME kimageformats-pic COMMAND pictest)
add_executable(anitest anitest.cpp)
target_link_libraries(anitest Qt5::Gui Qt5::Test)
ecm_mark_as_test(anitest)
add_test(NAME kimageformats-ani COMMAND anitest)

BIN autotests/ani/test.ani (new binary file)
BIN autotests/ani/test_1.png (new binary file, 813 B)
BIN autotests/ani/test_2.png (new binary file, 697 B)
BIN autotests/ani/test_3.png (new binary file, 810 B)

119
autotests/anitest.cpp Normal file

@@ -0,0 +1,119 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.1-only OR LGPL-3.0-only OR LicenseRef-KDE-Accepted-LGPL
*/
#include <QImage>
#include <QImageReader>
#include <QTest>
static bool imgEquals(const QImage &im1, const QImage &im2)
{
const int height = im1.height();
const int width = im1.width();
for (int i = 0; i < height; ++i) {
const auto *line1 = reinterpret_cast<const quint8 *>(im1.scanLine(i));
const auto *line2 = reinterpret_cast<const quint8 *>(im2.scanLine(i));
for (int j = 0; j < width; ++j) {
if (line1[j] - line2[j] != 0) {
return false;
}
}
}
return true;
}
class AniTests : public QObject
{
Q_OBJECT
private Q_SLOTS:
void initTestCase()
{
QCoreApplication::addLibraryPath(QStringLiteral(PLUGIN_DIR));
}
void testReadMetadata()
{
QImageReader reader(QFINDTESTDATA("ani/test.ani"));
QVERIFY(reader.canRead());
QCOMPARE(reader.imageCount(), 4);
QCOMPARE(reader.size(), QSize(32, 32));
QCOMPARE(reader.text(QStringLiteral("Title")), QStringLiteral("ANI Test"));
QCOMPARE(reader.text(QStringLiteral("Author")), QStringLiteral("KDE Community"));
}
void textRead()
{
QImageReader reader(QFINDTESTDATA("ani/test.ani"));
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 0);
QImage aniFrame;
QVERIFY(reader.read(&aniFrame));
QImage img1(QFINDTESTDATA("ani/test_1.png"));
img1.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img1));
QCOMPARE(reader.nextImageDelay(), 166); // 10 "jiffies"
QVERIFY(reader.canRead());
// that read() above should have advanced us to the next frame
QCOMPARE(reader.currentImageNumber(), 1);
QVERIFY(reader.read(&aniFrame));
QImage img2(QFINDTESTDATA("ani/test_2.png"));
img2.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img2));
// The "middle" frame has a longer delay than the others
QCOMPARE(reader.nextImageDelay(), 333); // 20 "jiffies"
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 2);
QVERIFY(reader.read(&aniFrame));
QImage img3(QFINDTESTDATA("ani/test_3.png"));
img3.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img3));
QCOMPARE(reader.nextImageDelay(), 166);
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 3);
QVERIFY(reader.read(&aniFrame));
// custom sequence in the ANI file should get us back to img2
QVERIFY(imgEquals(aniFrame, img2));
QCOMPARE(reader.nextImageDelay(), 166);
// We should have reached the end now
QVERIFY(!reader.canRead());
QVERIFY(!reader.read(&aniFrame));
// Jump back to the start
QVERIFY(reader.jumpToImage(0));
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 0);
QCOMPARE(reader.nextImageDelay(), 166);
QVERIFY(reader.read(&aniFrame));
QVERIFY(imgEquals(aniFrame, img1));
}
};
QTEST_MAIN(AniTests)
#include "anitest.moc"

BIN autotests/read/avif/bw.avif (new binary file, 629 B)
BIN autotests/read/avif/bw.png (new binary file, 743 B)
BIN autotests/read/avif/bwa.png (new binary file, 574 B)
BIN autotests/read/avif/rgb.png (new binary file, 1.0 KiB)
(further binary AVIF test files, ranging from 823 B to 78 KiB, whose names are not shown in this view)


@@ -36,14 +36,17 @@ static void writeImageData(const char *name, const QString &filename, const QIma
}
}
// allow each byte to be different by up to 1, to allow for rounding errors
template<class Trait>
static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
{
Q_ASSERT(im1.format() == im2.format());
Q_ASSERT(im1.depth() == 24 || im1.depth() == 32 || im1.depth() == 64);
const int height = im1.height();
const int width = im1.width();
for (int i = 0; i < height; ++i) {
const uchar *line1 = im1.scanLine(i);
const uchar *line2 = im2.scanLine(i);
const Trait *line1 = reinterpret_cast<const Trait*>(im1.scanLine(i));
const Trait *line2 = reinterpret_cast<const Trait*>(im2.scanLine(i));
for (int j = 0; j < width; ++j) {
if (line1[j] > line2[j]) {
if (line1[j] - line2[j] > fuzziness)
@@ -57,6 +60,30 @@ static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
return true;
}
// allow each byte to be different by up to 1, to allow for rounding errors
static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
{
return (im1.depth() == 64) ? fuzzyeq<quint16>(im1, im2, fuzziness)
: fuzzyeq<quint8>(im1, im2, fuzziness);
}
// Returns the original format if we support it, or the
// format we prefer to use for `fuzzyeq()`.
// We only support formats with 8 or 16 bits per channel.
// If that changes, don't forget to update `fuzzyeq()` too.
static QImage::Format preferredFormat(QImage::Format fmt)
{
switch (fmt) {
case QImage::Format_RGB32:
case QImage::Format_ARGB32:
case QImage::Format_RGBX64:
case QImage::Format_RGBA64:
return fmt;
default:
return QImage::Format_ARGB32;
}
}
int main(int argc, char ** argv)
{
QCoreApplication app(argc, argv);
@@ -168,19 +195,23 @@ int main(int argc, char ** argv)
<< expImage.height() << "\n";
++failed;
} else {
if (inputImage.format() != QImage::Format_ARGB32) {
QImage::Format inputFormat = preferredFormat(inputImage.format());
QImage::Format expFormat = preferredFormat(expImage.format());
QImage::Format cmpFormat = inputFormat == expFormat ? inputFormat : QImage::Format_ARGB32;
if (inputImage.format() != cmpFormat) {
QTextStream(stdout) << "INFO : " << fi.fileName()
<< ": converting " << fi.fileName()
<< " from " << formatToString(inputImage.format())
<< " to ARGB32\n";
inputImage = inputImage.convertToFormat(QImage::Format_ARGB32);
<< " to " << formatToString(cmpFormat) << '\n';
inputImage = inputImage.convertToFormat(cmpFormat);
}
if (expImage.format() != QImage::Format_ARGB32) {
if (expImage.format() != cmpFormat) {
QTextStream(stdout) << "INFO : " << fi.fileName()
<< ": converting " << expfilename
<< " from " << formatToString(expImage.format())
<< " to ARGB32\n";
expImage = expImage.convertToFormat(QImage::Format_ARGB32);
<< " to " << formatToString(cmpFormat) << '\n';
expImage = expImage.convertToFormat(cmpFormat);
}
if (fuzzyeq(inputImage, expImage, fuzziness)) {
QTextStream(stdout) << "PASS : " << fi.fileName() << "\n";


@@ -24,6 +24,19 @@ endfunction()
##################################
kimageformats_add_plugin(kimg_ani JSON "ani.json" SOURCES ani.cpp)
install(FILES ani.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
##################################
if (TARGET avif)
kimageformats_add_plugin(kimg_avif JSON "avif.json" SOURCES "avif.cpp")
target_link_libraries(kimg_avif "avif")
install(FILES avif.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
endif()
##################################
install(FILES dds-qt.desktop RENAME dds.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
##################################

571
src/imageformats/ani.cpp Normal file

@@ -0,0 +1,571 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#include "ani_p.h"
#include <QDebug>
#include <QImage>
#include <QScopeGuard>
#include <QtEndian>
#include <QVariant>
namespace
{
struct ChunkHeader {
char magic[4];
quint32_le size;
};
struct AniHeader {
quint32_le cbSize;
quint32_le nFrames; // number of actual frames in the file
quint32_le nSteps; // number of logical images
quint32_le iWidth;
quint32_le iHeight;
quint32_le iBitCount;
quint32_le nPlanes;
quint32_le iDispRate;
quint32_le bfAttributes; // attributes (0 = bitmap images, 1 = ico/cur, 3 = "seq" block available)
};
struct CurHeader {
quint16_le wReserved; // always 0
quint16_le wResID; // always 2
quint16_le wNumImages;
};
struct CursorDirEntry {
quint8 bWidth;
quint8 bHeight;
quint8 bColorCount;
quint8 bReserved; // always 0
quint16_le wHotspotX;
quint16_le wHotspotY;
quint32_le dwBytesInImage;
quint32_le dwImageOffset;
};
} // namespace
ANIHandler::ANIHandler() = default;
bool ANIHandler::canRead() const
{
if (canRead(device())) {
setFormat("ani");
return true;
}
// Check if there's another frame coming
const QByteArray nextFrame = device()->peek(sizeof(ChunkHeader));
if (nextFrame.size() == sizeof(ChunkHeader)) {
const auto *header = reinterpret_cast<const ChunkHeader *>(nextFrame.data());
if (qstrncmp(header->magic, "icon", sizeof(header->magic)) == 0
&& header->size > 0) {
setFormat("ani");
return true;
}
}
return false;
}
bool ANIHandler::read(QImage *outImage)
{
if (!ensureScanned()) {
return false;
}
if (device()->pos() < m_firstFrameOffset) {
device()->seek(m_firstFrameOffset);
}
const QByteArray frameType = device()->read(4);
if (frameType != "icon") {
return false;
}
const QByteArray frameSizeData = device()->read(sizeof(quint32_le));
if (frameSizeData.count() != sizeof(quint32_le)) {
return false;
}
const auto frameSize = *(reinterpret_cast<const quint32_le *>(frameSizeData.data()));
if (!frameSize) {
return false;
}
const QByteArray frameData = device()->read(frameSize);
const bool ok = outImage->loadFromData(frameData, "cur");
++m_currentImageNumber;
// When we have a custom image sequence, seek to before the frame that would follow
if (!m_imageSequence.isEmpty()) {
if (m_currentImageNumber < m_imageSequence.count()) {
const int nextFrame = m_imageSequence.at(m_currentImageNumber);
if (nextFrame < 0 || nextFrame >= m_frameOffsets.count()) {
return false;
}
const auto nextOffset = m_frameOffsets.at(nextFrame);
device()->seek(nextOffset);
} else if (m_currentImageNumber == m_imageSequence.count()) {
const auto endOffset = m_frameOffsets.last();
if (device()->pos() != endOffset) {
device()->seek(endOffset);
}
}
}
return ok;
}
int ANIHandler::currentImageNumber() const
{
if (!ensureScanned()) {
return 0;
}
return m_currentImageNumber;
}
int ANIHandler::imageCount() const
{
if (!ensureScanned()) {
return 0;
}
return m_imageCount;
}
bool ANIHandler::jumpToImage(int imageNumber)
{
if (!ensureScanned()) {
return false;
}
if (imageNumber < 0) {
return false;
}
if (imageNumber == m_currentImageNumber) {
return true;
}
// If we have a custom image sequence, we have an index of frames we can jump to
if (!m_imageSequence.isEmpty()) {
if (imageNumber >= m_imageSequence.count()) {
return false;
}
const int targetFrame = m_imageSequence.at(imageNumber);
const auto targetOffset = m_frameOffsets.value(targetFrame, -1);
if (device()->seek(targetOffset)) {
m_currentImageNumber = imageNumber;
return true;
}
return false;
}
if (imageNumber >= m_frameCount) {
return false;
}
// otherwise we need to jump from frame to frame
const auto oldPos = device()->pos();
if (imageNumber < m_currentImageNumber) {
// start from the beginning
if (!device()->seek(m_firstFrameOffset)) {
return false;
}
}
while (m_currentImageNumber < imageNumber) {
if (!jumpToNextImage()) {
device()->seek(oldPos);
return false;
}
}
m_currentImageNumber = imageNumber;
return true;
}
bool ANIHandler::jumpToNextImage()
{
if (!ensureScanned()) {
return false;
}
// If we have a custom image sequence, we have an index of frames we can jump to
// Delegate to jumpToImage
if (!m_imageSequence.isEmpty()) {
return jumpToImage(m_currentImageNumber + 1);
}
if (device()->pos() < m_firstFrameOffset) {
if (!device()->seek(m_firstFrameOffset)) {
return false;
}
}
const QByteArray nextFrame = device()->peek(sizeof(ChunkHeader));
if (nextFrame.size() != sizeof(ChunkHeader)) {
return false;
}
const auto *header = reinterpret_cast<const ChunkHeader *>(nextFrame.data());
if (qstrncmp(header->magic, "icon", sizeof(header->magic)) != 0) {
return false;
}
const qint64 seekBy = sizeof(ChunkHeader) + header->size;
if (!device()->seek(device()->pos() + seekBy)) {
return false;
}
++m_currentImageNumber;
return true;
}
int ANIHandler::loopCount() const
{
if (!ensureScanned()) {
return 0;
}
return -1;
}
int ANIHandler::nextImageDelay() const
{
if (!ensureScanned()) {
return 0;
}
int rate = m_displayRate;
if (!m_displayRates.isEmpty()) {
int previousImage = m_currentImageNumber - 1;
if (previousImage < 0) {
previousImage = m_displayRates.count() - 1;
}
rate = m_displayRates.at(previousImage);
}
return rate * 1000 / 60;
}
bool ANIHandler::supportsOption(ImageOption option) const
{
return option == Size || option == Name || option == Description || option == Animation;
}
QVariant ANIHandler::option(ImageOption option) const
{
if (!supportsOption(option) || !ensureScanned()) {
return QVariant();
}
switch (option) {
case QImageIOHandler::Size:
return m_size;
// TODO QImageIOHandler::Format
// but both iBitCount in AniHeader and bColorCount are just zero most of the time
// so one would probably need to traverse even further down into IcoHeader and IconDirEntry...
// but Qt's ICO/CUR handler always seems to give us a ARB
case QImageIOHandler::Name:
return m_name;
case QImageIOHandler::Description: {
QString description;
if (!m_name.isEmpty()) {
description += QStringLiteral("Title: %1\n\n").arg(m_name);
}
if (!m_artist.isEmpty()) {
description += QStringLiteral("Author: %1\n\n").arg(m_artist);
}
return description;
}
case QImageIOHandler::Animation:
return true;
default:
break;
}
return QVariant();
}
bool ANIHandler::ensureScanned() const
{
if (m_scanned) {
return true;
}
if (device()->isSequential()) {
return false;
}
auto *mutableThis = const_cast<ANIHandler *>(this);
const auto oldPos = device()->pos();
auto cleanup = qScopeGuard([this, oldPos] {
device()->seek(oldPos);
});
device()->seek(0);
const QByteArray riffIntro = device()->read(4);
if (riffIntro != "RIFF") {
return false;
}
const auto riffSizeData = device()->read(sizeof(quint32_le));
if (riffSizeData.size() != sizeof(quint32_le)) {
return false;
}
const auto riffSize = *(reinterpret_cast<const quint32_le *>(riffSizeData.data()));
// TODO do a basic sanity check if the size is enough to hold some metadata and a frame?
if (riffSize == 0) {
return false;
}
mutableThis->m_displayRates.clear();
mutableThis->m_imageSequence.clear();
while (device()->pos() < riffSize) {
const QByteArray chunkId = device()->read(4);
if (chunkId.length() != 4) {
return false;
}
if (chunkId == "ACON") {
continue;
}
const QByteArray chunkSizeData = device()->read(sizeof(quint32_le));
if (chunkSizeData.length() != sizeof(quint32_le)) {
return false;
}
auto chunkSize = *(reinterpret_cast<const quint32_le *>(chunkSizeData.data()));
if (chunkId == "anih") {
if (chunkSize != sizeof(AniHeader)) {
qWarning() << "anih chunk size does not match ANIHEADER size";
return false;
}
const QByteArray anihData = device()->read(sizeof(AniHeader));
if (anihData.size() != sizeof(AniHeader)) {
return false;
}
auto *aniHeader = reinterpret_cast<const AniHeader *>(anihData.data());
// The size in the ani header is usually 0 unfortunately,
// so we'll also check the first frame for its size further below
mutableThis->m_size = QSize(aniHeader->iWidth, aniHeader->iHeight);
mutableThis->m_frameCount = aniHeader->nFrames;
mutableThis->m_imageCount = aniHeader->nSteps;
mutableThis->m_displayRate = aniHeader->iDispRate;
} else if (chunkId == "rate" || chunkId == "seq ") {
const QByteArray data = device()->read(chunkSize);
if (static_cast<quint32_le>(data.size()) != chunkSize
|| data.size() % sizeof(quint32_le) != 0) {
return false;
}
// TODO should we check that the number of rate entries matches nSteps?
auto *dataPtr = data.data();
QVector<int> list;
for (int i = 0; i < data.count(); i += sizeof(quint32_le)) {
const auto entry = *(reinterpret_cast<const quint32_le *>(dataPtr + i));
list.append(entry);
}
if (chunkId == "rate") {
// should we check that the number of rate entries matches nSteps?
mutableThis->m_displayRates = list;
} else if (chunkId == "seq ") {
// Check if it's just an ascending sequence, don't bother with it then
bool isAscending = true;
for (int i = 0; i < list.count(); ++i) {
if (list.at(i) != i) {
isAscending = false;
break;
}
}
if (!isAscending) {
mutableThis->m_imageSequence = list;
}
}
// IART and INAM are technically inside LIST->INFO but "INFO" is supposedly optional
// so just handle those two attributes wherever we encounter them
} else if (chunkId == "INAM" || chunkId == "IART") {
const QByteArray value = device()->read(chunkSize);
if (static_cast<quint32_le>(value.size()) != chunkSize) {
return false;
}
// DWORDs are aligned to even sizes
if (chunkSize % 2 != 0) {
device()->read(1);
}
// FIXME encoding
const QString stringValue = QString::fromLocal8Bit(value);
if (chunkId == "INAM") {
mutableThis->m_name = stringValue;
} else if (chunkId == "IART") {
mutableThis->m_artist = stringValue;
}
} else if (chunkId == "LIST") {
const QByteArray listType = device()->read(4);
if (listType == "INFO") {
// Technically would contain INAM and IART but we handle them anywhere above
} else if (listType == "fram") {
quint64 read = 0;
while (read < chunkSize) {
const QByteArray chunkType = device()->read(4);
read += 4;
if (chunkType != "icon") {
break;
}
if (!m_firstFrameOffset) {
mutableThis->m_firstFrameOffset = device()->pos() - 4;
mutableThis->m_currentImageNumber = 0;
// If size in header isn't valid, use the first frame's size instead
if (!m_size.isValid() || m_size.isEmpty()) {
const auto oldPos = device()->pos();
device()->read(sizeof(quint32_le));
const QByteArray curHeaderData = device()->read(sizeof(CurHeader));
const QByteArray cursorDirEntryData = device()->read(sizeof(CursorDirEntry));
if (curHeaderData.length() == sizeof(CurHeader)
&& cursorDirEntryData.length() == sizeof(CursorDirEntry)) {
auto *cursorDirEntry = reinterpret_cast<const CursorDirEntry *>(cursorDirEntryData.data());
mutableThis->m_size = QSize(cursorDirEntry->bWidth, cursorDirEntry->bHeight);
}
device()->seek(oldPos);
}
// If we don't have a custom image sequence we can stop scanning right here
if (m_imageSequence.isEmpty()) {
break;
}
}
mutableThis->m_frameOffsets.append(device()->pos() - 4);
const QByteArray frameSizeData = device()->read(sizeof(quint32_le));
if (frameSizeData.size() != sizeof(quint32_le)) {
return false;
}
const auto frameSize = *(reinterpret_cast<const quint32_le *>(frameSizeData.data()));
device()->seek(device()->pos() + frameSize);
read += frameSize;
if (m_frameOffsets.count() == m_frameCount) {
// Also record the end of frame data
mutableThis->m_frameOffsets.append(device()->pos() - 4);
break;
}
}
break;
}
}
}
if (m_imageCount != m_frameCount && m_imageSequence.isEmpty()) {
qWarning("ANIHandler: 'nSteps' is not equal to 'nFrames' but no 'seq' entries were provided");
return false;
}
if (!m_imageSequence.isEmpty() && m_imageSequence.count() != m_imageCount) {
qWarning("ANIHandler: count of entries in 'seq' does not match 'nSteps' in anih");
return false;
}
if (!m_displayRates.isEmpty() && m_displayRates.count() != m_imageCount) {
qWarning("ANIHandler: count of entries in 'rate' does not match 'nSteps' in anih");
return false;
}
if (!m_frameOffsets.isEmpty() && m_frameOffsets.count() != m_frameCount + 1) {
qWarning("ANIHandler: number of actual frames does not match 'nFrames' in anih");
return false;
}
mutableThis->m_scanned = true;
return true;
}
bool ANIHandler::canRead(QIODevice *device)
{
if (!device) {
qWarning("ANIHandler::canRead() called with no device");
return false;
}
const QByteArray riffIntro = device->peek(12);
if (riffIntro.length() != 12) {
return false;
}
if (!riffIntro.startsWith("RIFF")) {
return false;
}
// TODO sanity check chunk size?
if (riffIntro.mid(4 + 4, 4) != "ACON") {
return false;
}
return true;
}
QImageIOPlugin::Capabilities ANIPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
if (format == "ani") {
return Capabilities(CanRead);
}
if (!format.isEmpty()) {
return {};
}
if (!device->isOpen()) {
return {};
}
Capabilities cap;
if (device->isReadable() && ANIHandler::canRead(device)) {
cap |= CanRead;
}
return cap;
}
QImageIOHandler *ANIPlugin::create(QIODevice *device, const QByteArray &format) const
{
QImageIOHandler *handler = new ANIHandler;
handler->setDevice(device);
handler->setFormat(format);
return handler;
}
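
With scanning and frame offsets handled above, playing an animated cursor from application code only needs the generic QImageReader animation API. A minimal sketch (not part of the plugin; "busy.ani" is a placeholder file name):

#include <QGuiApplication>
#include <QImageReader>
#include <QImage>
#include <QDebug>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    QImageReader reader(QStringLiteral("busy.ani"));
    if (!reader.canRead())
        return 1;

    // imageCount() reports the logical steps (nSteps), size() the cursor size,
    // and text() exposes the INAM/IART metadata via the Description option.
    qDebug() << "steps:" << reader.imageCount() << "size:" << reader.size()
             << "title:" << reader.text(QStringLiteral("Title"));

    QImage frame;
    while (reader.canRead()) {
        const int step = reader.currentImageNumber();
        if (!reader.read(&frame))
            break;
        // nextImageDelay() is the display time of the step just read,
        // derived from the "rate" chunk (jiffies * 1000 / 60).
        qDebug() << "step" << step << "shown for" << reader.nextImageDelay() << "ms";
    }
    return 0;
}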


@@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=ani
X-KDE-MimeType=application/x-navi-animation
X-KDE-Read=true
X-KDE-Write=false


@@ -0,0 +1,4 @@
{
"Keys": [ "ani" ],
"MimeTypes": [ "application/x-navi-animation" ]
}

69
src/imageformats/ani_p.h Normal file

@@ -0,0 +1,69 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#ifndef KIMG_ANI_P_H
#define KIMG_ANI_P_H
#include <QImageIOPlugin>
#include <QSize>
class ANIHandler : public QImageIOHandler
{
public:
ANIHandler();
bool canRead() const override;
bool read(QImage *image) override;
int currentImageNumber() const override;
int imageCount() const override;
bool jumpToImage(int imageNumber) override;
bool jumpToNextImage() override;
int loopCount() const override;
int nextImageDelay() const override;
bool supportsOption(ImageOption option) const override;
QVariant option(ImageOption option) const override;
static bool canRead(QIODevice *device);
private:
bool ensureScanned() const;
bool m_scanned = false;
int m_currentImageNumber = 0;
int m_frameCount = 0; // "physical" frames
int m_imageCount = 0; // logical images
// Stores a custom sequence of images
QVector<int> m_imageSequence;
// and the corresponding offsets of where they are,
// since we can't read the image data sequentially in this case
QVector<qint64> m_frameOffsets;
qint64 m_firstFrameOffset = 0;
int m_displayRate = 0;
QVector<int> m_displayRates;
QString m_name;
QString m_artist;
QSize m_size;
};
class ANIPlugin : public QImageIOPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA(IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "ani.json")
public:
Capabilities capabilities(QIODevice *device, const QByteArray &format) const override;
QImageIOHandler *create(QIODevice *device, const QByteArray &format = QByteArray()) const override;
};
#endif // KIMG_ANI_P_H

946
src/imageformats/avif.cpp Normal file

@@ -0,0 +1,946 @@
/*
AV1 Image File Format (AVIF) support for QImage.
SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: BSD-2-Clause
*/
#include <QtGlobal>
#include <QThread>
#include <QColorSpace>
#include "avif_p.h"
QAVIFHandler::QAVIFHandler() :
m_parseState(ParseAvifNotParsed),
m_quality(52),
m_container_width(0),
m_container_height(0),
m_rawAvifData(AVIF_DATA_EMPTY),
m_decoder(nullptr),
m_must_jump_to_next_image(false)
{
}
QAVIFHandler::~QAVIFHandler()
{
if (m_decoder) {
avifDecoderDestroy(m_decoder);
}
}
bool QAVIFHandler::canRead() const
{
if (m_parseState == ParseAvifNotParsed && !canRead(device())) {
return false;
}
if (m_parseState != ParseAvifError) {
setFormat("avif");
return true;
}
return false;
}
bool QAVIFHandler::canRead(QIODevice *device)
{
if (!device) {
return false;
}
QByteArray header = device->peek(144);
if (header.size() < 12) {
return false;
}
avifROData input;
input.data = (const uint8_t *) header.constData();
input.size = header.size();
if (avifPeekCompatibleFileType(&input)) {
return true;
}
return false;
}
bool QAVIFHandler::ensureParsed() const
{
if (m_parseState == ParseAvifSuccess) {
return true;
}
if (m_parseState == ParseAvifError) {
return false;
}
QAVIFHandler *that = const_cast<QAVIFHandler *>(this);
return that->ensureDecoder();
}
bool QAVIFHandler::ensureDecoder()
{
if (m_decoder) {
return true;
}
m_rawData = device()->readAll();
m_rawAvifData.data = (const uint8_t *) m_rawData.constData();
m_rawAvifData.size = m_rawData.size();
if (avifPeekCompatibleFileType(&m_rawAvifData) == AVIF_FALSE) {
m_parseState = ParseAvifError;
return false;
}
m_decoder = avifDecoderCreate();
avifResult decodeResult;
decodeResult = avifDecoderSetIOMemory(m_decoder, m_rawAvifData.data, m_rawAvifData.size);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: avifDecoderSetIOMemory failed: %s\n", avifResultToString(decodeResult));
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
decodeResult = avifDecoderParse(m_decoder);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to parse input: %s\n", avifResultToString(decodeResult));
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
decodeResult = avifDecoderNextImage(m_decoder);
if (decodeResult == AVIF_RESULT_OK) {
m_container_width = m_decoder->image->width;
m_container_height = m_decoder->image->height;
if ((m_container_width > 32768) || (m_container_height > 32768)) {
qWarning("AVIF image (%dx%d) is too large!", m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width == 0) || (m_container_height == 0)) {
qWarning("Empty image, nothing to decode");
m_parseState = ParseAvifError;
return false;
}
m_parseState = ParseAvifSuccess;
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
} else {
qWarning("ERROR: Failed to decode image: %s\n", avifResultToString(decodeResult));
}
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
bool QAVIFHandler::decode_one_frame()
{
if (!ensureParsed()) {
return false;
}
bool loadalpha;
if (m_decoder->image->alphaPlane) {
loadalpha = true;
} else {
loadalpha = false;
}
QImage::Format resultformat;
if (m_decoder->image->depth > 8) {
if (loadalpha) {
resultformat = QImage::Format_RGBA64;
} else {
resultformat = QImage::Format_RGBX64;
}
} else {
if (loadalpha) {
resultformat = QImage::Format_RGBA8888;
} else {
resultformat = QImage::Format_RGB888;
}
}
QImage result(m_decoder->image->width, m_decoder->image->height, resultformat);
if (result.isNull()) {
qWarning("Memory cannot be allocated");
return false;
}
if (m_decoder->image->icc.data && (m_decoder->image->icc.size > 0)) {
result.setColorSpace(QColorSpace::fromIccProfile(QByteArray::fromRawData((const char *) m_decoder->image->icc.data, (int) m_decoder->image->icc.size)));
if (! result.colorSpace().isValid()) {
qWarning("Invalid QColorSpace created from ICC!\n");
}
} else {
float prim[8]; // outPrimaries: rX, rY, gX, gY, bX, bY, wX, wY
avifColorPrimariesGetValues(m_decoder->image->colorPrimaries, prim);
QPointF redPoint(prim[0], prim[1]);
QPointF greenPoint(prim[2], prim[3]);
QPointF bluePoint(prim[4], prim[5]);
QPointF whitePoint(prim[6], prim[7]);
QColorSpace::TransferFunction q_trc = QColorSpace::TransferFunction::Custom;
float q_trc_gamma = 0.0f;
switch (m_decoder->image->transferCharacteristics) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
case 4:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.2f;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
case 5:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.8f;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
case 8:
q_trc = QColorSpace::TransferFunction::Linear;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
case 0:
case 2: /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
case 13:
q_trc = QColorSpace::TransferFunction::SRgb;
break;
default:
qWarning("CICP colorPrimaries: %d, transferCharacteristics: %d\nThe colorspace is unsupported by this plug-in yet.",
m_decoder->image->colorPrimaries, m_decoder->image->transferCharacteristics);
q_trc = QColorSpace::TransferFunction::SRgb;
break;
}
if (q_trc != QColorSpace::TransferFunction::Custom) { //we create new colorspace using Qt
switch (m_decoder->image->colorPrimaries) {
/* AVIF_COLOR_PRIMARIES_BT709 */
case 0:
case 1:
case 2: /* AVIF_COLOR_PRIMARIES_UNSPECIFIED */
result.setColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, q_trc, q_trc_gamma));
break;
/* AVIF_COLOR_PRIMARIES_SMPTE432 */
case 12:
result.setColorSpace(QColorSpace(QColorSpace::Primaries::DciP3D65, q_trc, q_trc_gamma));
break;
default:
result.setColorSpace(QColorSpace(whitePoint, redPoint, greenPoint, bluePoint, q_trc, q_trc_gamma));
break;
}
}
if (! result.colorSpace().isValid()) {
qWarning("Invalid QColorSpace created from NCLX/CICP!\n");
}
}
avifRGBImage rgb;
avifRGBImageSetDefaults(&rgb, m_decoder->image);
if (m_decoder->image->depth > 8) {
rgb.depth = 16;
rgb.format = AVIF_RGB_FORMAT_RGBA;
if (!loadalpha) {
rgb.ignoreAlpha = AVIF_TRUE;
result.fill(Qt::black);
if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
resultformat = QImage::Format_Grayscale16;
}
}
} else {
rgb.depth = 8;
if (loadalpha) {
rgb.format = AVIF_RGB_FORMAT_RGBA;
resultformat = QImage::Format_ARGB32;
} else {
rgb.format = AVIF_RGB_FORMAT_RGB;
if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
resultformat = QImage::Format_Grayscale8;
} else {
resultformat = QImage::Format_RGB32;
}
}
}
rgb.rowBytes = result.bytesPerLine();
rgb.pixels = result.bits();
avifResult res = avifImageYUVToRGB(m_decoder->image, &rgb);
if (res != AVIF_RESULT_OK) {
qWarning("ERROR in avifImageYUVToRGB: %s\n", avifResultToString(res));
return false;
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_CLAP) {
if ((m_decoder->image->clap.widthD > 0) && (m_decoder->image->clap.heightD > 0) &&
(m_decoder->image->clap.horizOffD > 0) && (m_decoder->image->clap.vertOffD > 0)) {
int new_width, new_height, offx, offy;
new_width = (int)((double)(m_decoder->image->clap.widthN) / (m_decoder->image->clap.widthD) + 0.5);
if (new_width > result.width()) {
new_width = result.width();
}
new_height = (int)((double)(m_decoder->image->clap.heightN) / (m_decoder->image->clap.heightD) + 0.5);
if (new_height > result.height()) {
new_height = result.height();
}
if (new_width > 0 && new_height > 0) {
offx = ((double)((int32_t) m_decoder->image->clap.horizOffN)) / (m_decoder->image->clap.horizOffD) +
(result.width() - new_width) / 2.0 + 0.5;
if (offx < 0) {
offx = 0;
} else if (offx > (result.width() - new_width)) {
offx = result.width() - new_width;
}
offy = ((double)((int32_t) m_decoder->image->clap.vertOffN)) / (m_decoder->image->clap.vertOffD) +
(result.height() - new_height) / 2.0 + 0.5;
if (offy < 0) {
offy = 0;
} else if (offy > (result.height() - new_height)) {
offy = result.height() - new_height;
}
result = result.copy(offx, offy, new_width, new_height);
}
}
else { //Zero values, we need to avoid 0 divide.
qWarning("ERROR: Wrong values in avifCleanApertureBox\n");
}
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IROT) {
QTransform transform;
switch (m_decoder->image->irot.angle) {
case 1:
transform.rotate(-90);
result = result.transformed(transform);
break;
case 2:
transform.rotate(180);
result = result.transformed(transform);
break;
case 3:
transform.rotate(90);
result = result.transformed(transform);
break;
}
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IMIR) {
switch (m_decoder->image->imir.axis) {
case 0: //vertical
result = result.mirrored(false, true);
break;
case 1: //horizontal
result = result.mirrored(true, false);
break;
}
}
if (resultformat == result.format()) {
m_current_image = result;
} else {
m_current_image = result.convertToFormat(resultformat);
}
m_must_jump_to_next_image = false;
return true;
}
bool QAVIFHandler::read(QImage *image)
{
if (!ensureParsed()) {
return false;
}
if (m_must_jump_to_next_image) {
jumpToNextImage();
}
*image = m_current_image;
if (imageCount() >= 2) {
m_must_jump_to_next_image = true;
}
return true;
}
bool QAVIFHandler::write(const QImage &image)
{
if (image.format() == QImage::Format_Invalid) {
qWarning("No image data to save");
return false;
}
if ((image.width() > 32768) || (image.height() > 32768)) {
qWarning("Image is too large");
return false;
}
int maxQuantizer = AVIF_QUANTIZER_WORST_QUALITY * (100 - qBound(0, m_quality, 100)) / 100;
int minQuantizer = 0;
int maxQuantizerAlpha = 0;
avifResult res;
bool save_grayscale; //true - monochrome, false - colors
int save_depth; //8 or 10bit per channel
QImage::Format tmpformat; //format for temporary image
avifImage *avif = nullptr;
//grayscale detection
switch (image.format()) {
case QImage::Format_Mono:
case QImage::Format_MonoLSB:
case QImage::Format_Grayscale8:
case QImage::Format_Grayscale16:
save_grayscale = true;
break;
case QImage::Format_Indexed8:
save_grayscale = image.isGrayscale();
break;
default:
save_grayscale = false;
break;
}
//depth detection
switch (image.format()) {
case QImage::Format_BGR30:
case QImage::Format_A2BGR30_Premultiplied:
case QImage::Format_RGB30:
case QImage::Format_A2RGB30_Premultiplied:
case QImage::Format_Grayscale16:
case QImage::Format_RGBX64:
case QImage::Format_RGBA64:
case QImage::Format_RGBA64_Premultiplied:
save_depth = 10;
break;
default:
if (image.depth() > 32) {
save_depth = 10;
} else {
save_depth = 8;
}
break;
}
//quality settings
if (maxQuantizer > 20) {
minQuantizer = maxQuantizer - 20;
if (maxQuantizer > 40) { //we decrease quality of alpha channel here
maxQuantizerAlpha = maxQuantizer - 40;
}
}
if (save_grayscale && !image.hasAlphaChannel()) { //we are going to save grayscale image without alpha channel
if (save_depth > 8) {
tmpformat = QImage::Format_Grayscale16;
} else {
tmpformat = QImage::Format_Grayscale8;
}
QImage tmpgrayimage = image.convertToFormat(tmpformat);
avif = avifImageCreate(tmpgrayimage.width(), tmpgrayimage.height(), save_depth, AVIF_PIXEL_FORMAT_YUV400);
avifImageAllocatePlanes(avif, AVIF_PLANES_YUV);
if (tmpgrayimage.colorSpace().isValid()) {
avif->colorPrimaries = (avifColorPrimaries)1;
avif->matrixCoefficients = (avifMatrixCoefficients)1;
switch (tmpgrayimage.colorSpace().transferFunction()) {
case QColorSpace::TransferFunction::Linear:
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
avif->transferCharacteristics = (avifTransferCharacteristics)8;
break;
case QColorSpace::TransferFunction::SRgb:
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
avif->transferCharacteristics = (avifTransferCharacteristics)13;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
break;
}
}
if (save_depth > 8) { // QImage::Format_Grayscale16
for (int y = 0; y < tmpgrayimage.height(); y++) {
const uint16_t *src16bit = reinterpret_cast<const uint16_t *>(tmpgrayimage.constScanLine(y));
uint16_t *dest16bit = reinterpret_cast<uint16_t *>(avif->yuvPlanes[0] + y * avif->yuvRowBytes[0]);
for (int x = 0; x < tmpgrayimage.width(); x++) {
int tmp_pixelval = (int)(((float)(*src16bit) / 65535.0f) * 1023.0f + 0.5f); //downgrade to 10 bits
*dest16bit = qBound(0, tmp_pixelval, 1023);
dest16bit++;
src16bit++;
}
}
} else { // QImage::Format_Grayscale8
for (int y = 0; y < tmpgrayimage.height(); y++) {
const uchar *src8bit = tmpgrayimage.constScanLine(y);
uint8_t *dest8bit = avif->yuvPlanes[0] + y * avif->yuvRowBytes[0];
for (int x = 0; x < tmpgrayimage.width(); x++) {
*dest8bit = *src8bit;
dest8bit++;
src8bit++;
}
}
}
} else { //we are going to save color image
if (save_depth > 8) {
if (image.hasAlphaChannel()) {
tmpformat = QImage::Format_RGBA64;
} else {
tmpformat = QImage::Format_RGBX64;
}
} else { //8bit depth
if (image.hasAlphaChannel()) {
tmpformat = QImage::Format_RGBA8888;
} else {
tmpformat = QImage::Format_RGB888;
}
}
QImage tmpcolorimage = image.convertToFormat(tmpformat);
avifPixelFormat pixel_format = AVIF_PIXEL_FORMAT_YUV420;
if (maxQuantizer < 20) {
if (maxQuantizer < 10) {
pixel_format = AVIF_PIXEL_FORMAT_YUV444; //best quality
} else {
pixel_format = AVIF_PIXEL_FORMAT_YUV422; //high quality
}
}
avifMatrixCoefficients matrix_to_save = (avifMatrixCoefficients)1; //default for Qt 5.12 and 5.13;
avifColorPrimaries primaries_to_save = (avifColorPrimaries)2;
avifTransferCharacteristics transfer_to_save = (avifTransferCharacteristics)2;
if (tmpcolorimage.colorSpace().isValid()) {
switch (tmpcolorimage.colorSpace().primaries()) {
case QColorSpace::Primaries::SRgb:
/* AVIF_COLOR_PRIMARIES_BT709 */
primaries_to_save = (avifColorPrimaries)1;
/* AVIF_MATRIX_COEFFICIENTS_BT709 */
matrix_to_save = (avifMatrixCoefficients)1;
break;
case QColorSpace::Primaries::DciP3D65:
/* AVIF_NCLX_COLOUR_PRIMARIES_P3, AVIF_NCLX_COLOUR_PRIMARIES_SMPTE432 */
primaries_to_save = (avifColorPrimaries)12;
/* AVIF_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL */
matrix_to_save = (avifMatrixCoefficients)12;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
primaries_to_save = (avifColorPrimaries)2;
/* AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED */
matrix_to_save = (avifMatrixCoefficients)2;
break;
}
switch (tmpcolorimage.colorSpace().transferFunction()) {
case QColorSpace::TransferFunction::Linear:
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
transfer_to_save = (avifTransferCharacteristics)8;
break;
case QColorSpace::TransferFunction::Gamma:
if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.2f) < 0.1f) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
transfer_to_save = (avifTransferCharacteristics)4;
} else if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.8f) < 0.1f) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
transfer_to_save = (avifTransferCharacteristics)5;
} else {
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
transfer_to_save = (avifTransferCharacteristics)2;
}
break;
case QColorSpace::TransferFunction::SRgb:
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
transfer_to_save = (avifTransferCharacteristics)13;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
transfer_to_save = (avifTransferCharacteristics)2;
break;
}
//in case primaries or trc were not identified
if ((primaries_to_save == 2) ||
(transfer_to_save == 2)) {
//upgrade image to higher bit depth
if (save_depth == 8) {
save_depth = 10;
if (tmpcolorimage.hasAlphaChannel()) {
tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBA64);
} else {
tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBX64);
}
}
if ((primaries_to_save == 2) &&
(transfer_to_save != 2)) { //other primaries but known trc
primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709
switch (transfer_to_save) {
case 8: // AVIF_TRANSFER_CHARACTERISTICS_LINEAR
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::Linear));
break;
case 4: // AVIF_TRANSFER_CHARACTERISTICS_BT470M
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.2f));
break;
case 5: // AVIF_TRANSFER_CHARACTERISTICS_BT470BG
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.8f));
break;
default: // AVIF_TRANSFER_CHARACTERISTICS_SRGB + any other
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
transfer_to_save = (avifTransferCharacteristics)13;
break;
}
} else if ((primaries_to_save != 2) &&
(transfer_to_save == 2)) { //recognized primaries but other trc
transfer_to_save = (avifTransferCharacteristics)13;
tmpcolorimage.convertToColorSpace(tmpcolorimage.colorSpace().withTransferFunction(QColorSpace::TransferFunction::SRgb));
} else { //unrecognized profile
primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
transfer_to_save = (avifTransferCharacteristics)13;
matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
}
}
}
avif = avifImageCreate(tmpcolorimage.width(), tmpcolorimage.height(), save_depth, pixel_format);
avif->matrixCoefficients = matrix_to_save;
avif->colorPrimaries = primaries_to_save;
avif->transferCharacteristics = transfer_to_save;
avifRGBImage rgb;
avifRGBImageSetDefaults(&rgb, avif);
rgb.rowBytes = tmpcolorimage.bytesPerLine();
rgb.pixels = const_cast<uint8_t *>(tmpcolorimage.constBits());
if (save_depth > 8) { //10bit depth
rgb.depth = 16;
if (tmpcolorimage.hasAlphaChannel()) {
avif->alphaRange = AVIF_RANGE_FULL;
} else {
rgb.ignoreAlpha = AVIF_TRUE;
}
rgb.format = AVIF_RGB_FORMAT_RGBA;
} else { //8bit depth
rgb.depth = 8;
if (tmpcolorimage.hasAlphaChannel()) {
rgb.format = AVIF_RGB_FORMAT_RGBA;
avif->alphaRange = AVIF_RANGE_FULL;
} else {
rgb.format = AVIF_RGB_FORMAT_RGB;
}
}
res = avifImageRGBToYUV(avif, &rgb);
if (res != AVIF_RESULT_OK) {
qWarning("ERROR in avifImageRGBToYUV: %s\n", avifResultToString(res));
return false;
}
}
avifRWData raw = AVIF_DATA_EMPTY;
avifEncoder *encoder = avifEncoderCreate();
encoder->maxThreads = qBound(1, QThread::idealThreadCount(), 64);
encoder->minQuantizer = minQuantizer;
encoder->maxQuantizer = maxQuantizer;
if (image.hasAlphaChannel()) {
encoder->minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS;
encoder->maxQuantizerAlpha = maxQuantizerAlpha;
}
encoder->speed = 8;
res = avifEncoderWrite(encoder, avif, &raw);
avifEncoderDestroy(encoder);
avifImageDestroy(avif);
if (res == AVIF_RESULT_OK) {
qint64 status = device()->write((const char *)raw.data, raw.size);
avifRWDataFree(&raw);
if (status > 0) {
return true;
} else if (status == -1) {
qWarning("Write error: %s\n", qUtf8Printable(device()->errorString()));
return false;
}
} else {
qWarning("ERROR: Failed to encode: %s\n", avifResultToString(res));
}
return false;
}
QVariant QAVIFHandler::option(ImageOption option) const
{
if (!supportsOption(option) || !ensureParsed()) {
return QVariant();
}
switch (option) {
case Quality:
return m_quality;
case Size:
return m_current_image.size();
case Animation:
if (imageCount() >= 2) {
return true;
} else {
return false;
}
default:
return QVariant();
}
}
void QAVIFHandler::setOption(ImageOption option, const QVariant &value)
{
switch (option) {
case Quality:
m_quality = value.toInt();
if (m_quality > 100) {
m_quality = 100;
} else if (m_quality < 0) {
m_quality = 52;
}
return;
default:
break;
}
QImageIOHandler::setOption(option, value);
}
bool QAVIFHandler::supportsOption(ImageOption option) const
{
return option == Quality
|| option == Size
|| option == Animation;
}
int QAVIFHandler::imageCount() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount >= 1) {
return m_decoder->imageCount;
}
return 0;
}
int QAVIFHandler::currentImageNumber() const
{
if (m_parseState == ParseAvifNotParsed) {
return -1;
}
if (m_parseState == ParseAvifError || !m_decoder) {
return 0;
}
return m_decoder->imageIndex;
}
bool QAVIFHandler::jumpToNextImage()
{
if (!ensureParsed()) {
return false;
}
if (m_decoder->imageCount < 2) {
return true;
}
if (m_decoder->imageIndex >= m_decoder->imageCount - 1) { // start from the beginning
avifDecoderReset(m_decoder);
}
avifResult decodeResult = avifDecoderNextImage(m_decoder);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to decode Next image in sequence: %s\n", avifResultToString(decodeResult));
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width != m_decoder->image->width) ||
(m_container_height != m_decoder->image->height)) {
qWarning("Decoded image sequence size (%dx%d) do not match first image size (%dx%d)!\n",
m_decoder->image->width, m_decoder->image->height,
m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
}
bool QAVIFHandler::jumpToImage(int imageNumber)
{
if (!ensureParsed()) {
return false;
}
if (m_decoder->imageCount < 2) { //not an animation
if (imageNumber == 0) {
return true;
} else {
return false;
}
}
if (imageNumber < 0 || imageNumber >= m_decoder->imageCount) { //wrong index
return false;
}
if (imageNumber == m_decoder->imageCount) { // we are here already
return true;
}
avifResult decodeResult = avifDecoderNthImage(m_decoder, imageNumber);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to decode %d th Image in sequence: %s\n", imageNumber, avifResultToString(decodeResult));
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width != m_decoder->image->width) ||
(m_container_height != m_decoder->image->height)) {
qWarning("Decoded image sequence size (%dx%d) do not match declared container size (%dx%d)!\n",
m_decoder->image->width, m_decoder->image->height,
m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
}
int QAVIFHandler::nextImageDelay() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount < 2) {
return 0;
}
int delay_ms = 1000.0 * m_decoder->imageTiming.duration;
if (delay_ms < 1) {
delay_ms = 1;
}
return delay_ms;
}
int QAVIFHandler::loopCount() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount < 2) {
return 0;
}
return 1;
}
QImageIOPlugin::Capabilities QAVIFPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
if (format == "avif") {
return Capabilities(CanRead | CanWrite);
}
if (format == "avifs") {
return Capabilities(CanRead);
}
if (!format.isEmpty()) {
return {};
}
if (!device->isOpen()) {
return {};
}
Capabilities cap;
if (device->isReadable() && QAVIFHandler::canRead(device)) {
cap |= CanRead;
}
if (device->isWritable()) {
cap |= CanWrite;
}
return cap;
}
QImageIOHandler *QAVIFPlugin::create(QIODevice *device, const QByteArray &format) const
{
QImageIOHandler *handler = new QAVIFHandler;
handler->setDevice(device);
handler->setFormat(format);
return handler;
}
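
The quality handling above maps the 0-100 QImageIOHandler quality onto libavif quantizers: maxQuantizer = AVIF_QUANTIZER_WORST_QUALITY * (100 - quality) / 100, with minQuantizer trailing by 20 and the alpha quantizer by 40, and chroma subsampling picked from the result. A worked example for the default quality of 52 (arithmetic sketch only; AVIF_QUANTIZER_WORST_QUALITY is 63 in libavif):

#include <QDebug>

int main()
{
    const int quality = 52;                                  // handler default
    const int worst = 63;                                    // AVIF_QUANTIZER_WORST_QUALITY
    const int maxQuantizer = worst * (100 - quality) / 100;  // 63 * 48 / 100 = 30
    int minQuantizer = 0;
    int maxQuantizerAlpha = 0;
    if (maxQuantizer > 20) {
        minQuantizer = maxQuantizer - 20;                    // 10
        if (maxQuantizer > 40)
            maxQuantizerAlpha = maxQuantizer - 40;           // not reached at quality 52
    }
    // Colour planes get quantizers 10..30, alpha gets 0..0 (near lossless),
    // and because maxQuantizer >= 20 the encoder uses 4:2:0 chroma subsampling.
    qDebug() << minQuantizer << maxQuantizer << maxQuantizerAlpha; // 10 30 0
    return 0;
}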


@@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=avif
X-KDE-MimeType=image/avif
X-KDE-Read=true
X-KDE-Write=true


@@ -0,0 +1,4 @@
{
"Keys": [ "avif", "avifs" ],
"MimeTypes": [ "image/avif", "image/avif-sequence" ]
}

80
src/imageformats/avif_p.h Normal file

@@ -0,0 +1,80 @@
/*
AV1 Image File Format (AVIF) support for QImage.
SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: BSD-2-Clause
*/
#ifndef KIMG_AVIF_P_H
#define KIMG_AVIF_P_H
#include <QImage>
#include <QVariant>
#include <qimageiohandler.h>
#include <QImageIOPlugin>
#include <QByteArray>
#include <avif/avif.h>
class QAVIFHandler : public QImageIOHandler
{
public:
QAVIFHandler();
~QAVIFHandler();
bool canRead() const override;
bool read (QImage *image) override;
bool write (const QImage &image) override;
static bool canRead (QIODevice *device);
QVariant option (ImageOption option) const override;
void setOption (ImageOption option, const QVariant &value) override;
bool supportsOption (ImageOption option) const override;
int imageCount() const override;
int currentImageNumber() const override;
bool jumpToNextImage() override;
bool jumpToImage (int imageNumber) override;
int nextImageDelay() const override;
int loopCount() const override;
private:
bool ensureParsed() const;
bool ensureDecoder();
bool decode_one_frame();
enum ParseAvifState
{
ParseAvifError = -1,
ParseAvifNotParsed = 0,
ParseAvifSuccess = 1
};
ParseAvifState m_parseState;
int m_quality;
uint32_t m_container_width;
uint32_t m_container_height;
QByteArray m_rawData;
avifROData m_rawAvifData;
avifDecoder *m_decoder;
QImage m_current_image;
bool m_must_jump_to_next_image;
};
class QAVIFPlugin : public QImageIOPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA (IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "avif.json")
public:
Capabilities capabilities (QIODevice *device, const QByteArray &format) const override;
QImageIOHandler *create (QIODevice *device, const QByteArray &format = QByteArray()) const override;
};
#endif // KIMG_AVIF_P_H
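
Since the handler advertises CanWrite and a Quality option, saving an AVIF needs nothing beyond the stock QImageWriter API. A minimal sketch ("out.avif" and the quality value are placeholders):

#include <QGuiApplication>
#include <QImage>
#include <QImageWriter>
#include <QDebug>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    QImage image(256, 256, QImage::Format_ARGB32);
    image.fill(Qt::darkCyan);

    QImageWriter writer(QStringLiteral("out.avif"), QByteArrayLiteral("avif"));
    writer.setQuality(85); // 0..100; negative values fall back to the handler default of 52
    if (!writer.write(image)) {
        qDebug() << "write failed:" << writer.errorString();
        return 1;
    }
    return 0;
}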


@@ -87,7 +87,7 @@ static bool IsSupported(const PSDHeader &header)
if (header.channel_count > 16) {
return false;
}
if (header.depth != 8) {
if (header.depth != 8 && header.depth != 16) {
return false;
}
if (header.color_mode != CM_RGB) {
@@ -104,11 +104,13 @@ static void skip_section(QDataStream &s)
s.skipRawData(section_length);
}
static quint8 readPixel(QDataStream &stream) {
quint8 pixel;
template <class Trait>
static Trait readPixel(QDataStream &stream) {
Trait pixel;
stream >> pixel;
return pixel;
}
static QRgb updateRed(QRgb oldPixel, quint8 redPixel) {
return qRgba(redPixel, qGreen(oldPixel), qBlue(oldPixel), qAlpha(oldPixel));
}
@@ -149,15 +151,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
quint32 channel_num = header.channel_count;
QImage::Format fmt = QImage::Format_RGB32;
QImage::Format fmt = header.depth == 8 ? QImage::Format_RGB32
: QImage::Format_RGBX64;
// Clear the image.
if (channel_num >= 4) {
// Enable alpha.
fmt = QImage::Format_ARGB32;
fmt = header.depth == 8 ? QImage::Format_ARGB32
: QImage::Format_RGBA64;
// Ignore the other channels.
channel_num = 4;
}
img = QImage(header.width, header.height, fmt);
if (img.isNull()) {
qWarning() << "Failed to allocate image, invalid dimensions?" << QSize(header.width, header.height);
@@ -166,9 +171,10 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
img.fill(qRgb(0,0,0));
const quint32 pixel_count = header.height * header.width;
const quint32 channel_size = pixel_count * header.depth / 8;
// Verify this, as this is used to write into the memory of the QImage
if (pixel_count > img.sizeInBytes() / sizeof(QRgb)) {
if (pixel_count > img.sizeInBytes() / (header.depth == 8 ? sizeof(QRgb) : sizeof(QRgba64))) {
qWarning() << "Invalid pixel count!" << pixel_count << "bytes available:" << img.sizeInBytes();
return false;
}
@@ -186,6 +192,14 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
updateAlpha
};
typedef QRgba64(*channelUpdater16)(QRgba64, quint16);
static const channelUpdater16 updaters64[4] = {
[](QRgba64 oldPixel, quint16 redPixel) {return qRgba64((oldPixel & ~(0xFFFFull << 0)) | (quint64( redPixel) << 0));},
[](QRgba64 oldPixel, quint16 greenPixel){return qRgba64((oldPixel & ~(0xFFFFull << 16)) | (quint64(greenPixel) << 16));},
[](QRgba64 oldPixel, quint16 bluePixel) {return qRgba64((oldPixel & ~(0xFFFFull << 32)) | (quint64( bluePixel) << 32));},
[](QRgba64 oldPixel, quint16 alphaPixel){return qRgba64((oldPixel & ~(0xFFFFull << 48)) | (quint64(alphaPixel) << 48));}
};
if (compression) {
// Skip row lengths.
int skip_count = header.height * header.channel_count * sizeof(quint16);
@@ -194,9 +208,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
}
for (unsigned short channel = 0; channel < channel_num; channel++) {
bool success = decodeRLEData(RLEVariant::PackBits, stream,
image_data, pixel_count,
&readPixel, updaters[channel]);
bool success = false;
if (header.depth == 8) {
success = decodeRLEData(RLEVariant::PackBits, stream,
image_data, channel_size,
&readPixel<quint8>, updaters[channel]);
} else if (header.depth == 16) {
QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
success = decodeRLEData(RLEVariant::PackBits16, stream,
image_data, channel_size,
&readPixel<quint8>, updaters64[channel]);
}
if (!success) {
qDebug() << "decodeRLEData on channel" << channel << "failed";
return false;
@@ -204,8 +227,15 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
}
} else {
for (unsigned short channel = 0; channel < channel_num; channel++) {
for (unsigned i = 0; i < pixel_count; ++i) {
image_data[i] = updaters[channel](image_data[i], readPixel(stream));
if (header.depth == 8) {
for (unsigned i = 0; i < pixel_count; ++i) {
image_data[i] = updaters[channel](image_data[i], readPixel<quint8>(stream));
}
} else if (header.depth == 16) {
QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
for (unsigned i = 0; i < pixel_count; ++i) {
image_data[i] = updaters64[channel](image_data[i], readPixel<quint16>(stream));
}
}
// make sure we didn't try to read past the end of the stream
if (stream.status() != QDataStream::Ok) {
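
The updaters64 lambdas above splice one decoded 16-bit sample into the matching field of a packed QRgba64 value (red in bits 0-15, green in 16-31, blue in 32-47, alpha in 48-63 on little-endian hosts, which is what the plugin assumes). A standalone sketch of that masking:

#include <QRgba64>
#include <QDebug>

int main()
{
    // Start from an opaque black 16-bit pixel, then write the green channel,
    // just like updaters64[1] does for channel index 1.
    QRgba64 pixel = qRgba64(0, 0, 0, 0xFFFF);
    const quint16 greenSample = 0x8000;

    pixel = qRgba64((quint64(pixel) & ~(0xFFFFull << 16)) | (quint64(greenSample) << 16));

    qDebug() << pixel.red() << pixel.green() << pixel.blue() << pixel.alpha(); // 0 32768 0 65535
    return 0;
}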


@@ -24,6 +24,11 @@ enum class RLEVariant {
* of size 2, 130 of size 3, up to 255 of size 128.
*/
PackBits,
/**
* Same as PackBits, but treat unpacked data as
* 16-bit integers.
*/
PackBits16,
/**
* PIC-style RLE
*
@@ -67,6 +72,8 @@ static inline bool decodeRLEData(RLEVariant variant,
Func2 updateItem)
{
unsigned offset = 0; // in dest
bool is_msb = true; // only used for 16-bit PackBits, data is big-endian
quint16 temp_data = 0;
while (offset < length) {
unsigned remaining = length - offset;
quint8 count1;
@@ -85,7 +92,7 @@ static inline bool decodeRLEData(RLEVariant variant,
// 2 to 128 repetitions
length = count1 - 127u;
}
} else if (variant == RLEVariant::PackBits) {
} else if (variant == RLEVariant::PackBits || variant == RLEVariant::PackBits16) {
if (count1 == 128u) {
// Ignore value 128
continue;
@@ -102,7 +109,18 @@ }
}
auto datum = readData(stream);
for (unsigned i = offset; i < offset + length; ++i) {
dest[i] = updateItem(dest[i], datum);
if (variant == RLEVariant::PackBits16) {
if (is_msb) {
temp_data = datum << 8;
is_msb = false;
} else {
temp_data |= datum;
dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
is_msb = true;
}
} else {
dest[i] = updateItem(dest[i], datum);
}
}
offset += length;
} else {
@@ -114,7 +132,18 @@ }
}
for (unsigned i = offset; i < offset + length; ++i) {
auto datum = readData(stream);
dest[i] = updateItem(dest[i], datum);
if (variant == RLEVariant::PackBits16) {
if (is_msb) {
temp_data = datum << 8;
is_msb = false;
} else {
temp_data |= datum;
dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
is_msb = true;
}
} else {
dest[i] = updateItem(dest[i], datum);
}
}
offset += length;
}
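
For reference, PackBits control bytes behave as the enum comment above describes: a value n below 128 means n + 1 literal bytes follow, a value above 128 means the next byte is repeated 257 - n times, and 128 itself is skipped; the new PackBits16 variant then pairs the decoded bytes MSB-first into 16-bit values before calling updateItem(). A small worked example with made-up bytes (a simplified decoder, not the plugin's templated one):

#include <QByteArray>
#include <QDebug>

// Minimal PackBits decoder for illustration only.
static QByteArray unpackBits(const QByteArray &src)
{
    QByteArray out;
    int i = 0;
    while (i < src.size()) {
        const quint8 n = quint8(src.at(i++));
        if (n < 128) {                              // n + 1 literal bytes follow
            for (int k = 0; k <= int(n) && i < src.size(); ++k)
                out.append(src.at(i++));
        } else if (n > 128 && i < src.size()) {
            out.append(257 - int(n), src.at(i++));  // repeated run
        }                                           // n == 128 is ignored
    }
    return out;
}

int main()
{
    // 0xFD -> 4 copies of 0xAB; 0x01 -> 2 literal bytes (0x12, 0x34)
    const QByteArray encoded = QByteArray::fromHex("fdab011234");
    qDebug() << unpackBits(encoded).toHex(' '); // "ab ab ab ab 12 34"
    // PackBits16 would pair these bytes big-endian into 0xABAB, 0xABAB, 0x1234
    // before handing each value to updateItem().
    return 0;
}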


@@ -7,56 +7,24 @@
#ifndef FORMAT_ENUM_H
#define FORMAT_ENUM_H
#include <QMetaEnum>
#include <QImage>
// Generated from QImage::Format enum
static const char * qimage_format_enum_names[] = {
"Invalid",
"Mono",
"MonoLSB",
"Indexed8",
"RGB32",
"ARGB32",
"ARGB32_Premultiplied",
"RGB16",
"ARGB8565_Premultiplied",
"RGB666",
"ARGB6666_Premultiplied",
"RGB555",
"ARGB8555_Premultiplied",
"RGB888",
"RGB444",
"ARGB4444_Premultiplied",
"RGBX8888",
"RGBA8888",
"RGBA8888_Premultiplied"
};
// Never claim there are more than QImage::NImageFormats supported formats.
// This is future-proofing against the above list being extended.
static const int qimage_format_enum_names_count =
(sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names) > int(QImage::NImageFormats))
? int(QImage::NImageFormats)
: (sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names));
QImage::Format formatFromString(const QString &str)
{
for (int i = 0; i < qimage_format_enum_names_count; ++i) {
if (str.compare(QLatin1String(qimage_format_enum_names[i]), Qt::CaseInsensitive) == 0) {
return (QImage::Format)(i);
}
}
return QImage::Format_Invalid;
const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
const QString enumString = QStringLiteral("Format_") + str;
bool ok;
const int res = metaEnum.keyToValue(enumString.toLatin1().constData(), &ok);
return ok ? static_cast<QImage::Format>(res) : QImage::Format_Invalid;
}
QString formatToString(QImage::Format format)
{
int index = int(format);
if (index > 0 && index < qimage_format_enum_names_count) {
return QLatin1String(qimage_format_enum_names[index]);
}
return QLatin1String("<unknown:") +
QString::number(index) +
QLatin1String(">");
const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
return QString::fromLatin1(metaEnum.valueToKey(format)).remove(QStringLiteral("Format_"));
}
#endif
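
With the switch to QMetaEnum the helpers automatically cover every QImage::Format value, including the 16-bit formats the new tests rely on; note that the lookup is now case-sensitive. Typical round-trips (a usage sketch; the header name is inferred from its include guard):

#include <QImage>
#include <QDebug>
#include "format_enum.h" // the helpers shown above

int main()
{
    qDebug() << formatToString(QImage::Format_RGBA64);       // "RGBA64"
    qDebug() << formatFromString(QStringLiteral("ARGB32"));   // -> QImage::Format_ARGB32
    qDebug() << formatFromString(QStringLiteral("bogus"));    // -> QImage::Format_Invalid
    return 0;
}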


@@ -45,6 +45,10 @@ int main(int argc, char **argv)
QStringList() << QStringLiteral("l") << QStringLiteral("list-file-formats"),
QStringLiteral("List supported image file formats"));
parser.addOption(listformats);
QCommandLineOption listmimetypes(
QStringList() << QStringLiteral("m") << QStringLiteral("list-mime-types"),
QStringLiteral("List supported image mime types"));
parser.addOption(listmimetypes);
QCommandLineOption listqformats(
QStringList() << QStringLiteral("p") << QStringLiteral("list-qimage-formats"),
QStringLiteral("List supported QImage data formats"));
@@ -63,12 +67,21 @@ }
}
return 0;
}
if (parser.isSet(listmimetypes)) {
QTextStream out(stdout);
out << "MIME types:\n";
const auto lstSupportedMimeTypes = QImageReader::supportedMimeTypes();
for (const auto &fmt : lstSupportedMimeTypes) {
out << " " << fmt << '\n';
}
return 0;
}
if (parser.isSet(listqformats)) {
QTextStream out(stdout);
out << "QImage formats:\n";
// skip QImage::Format_Invalid
for (int i = 1; i < qimage_format_enum_names_count; ++i) {
out << " " << qimage_format_enum_names[i] << '\n';
for (int i = 1; i < QImage::NImageFormats; ++i) {
out << " " << formatToString(static_cast<QImage::Format>(i)) << '\n';
}
return 0;
}