Compare commits


32 Commits

Author SHA1 Message Date
503b3eee2b Fix Non-square Radiance/RGBE/.hdr images failing to load
The HDR QImageIOHandler plugin only supports the default image orientation (-Y +X) in .hdr files. However, it mixed up the width and height, so non-square images failed to load.

This fix adds a check for the standard image orientation in the file and returns false (with an error message) if that check fails.
If the check succeeds, the height is taken from the -Y component and the width from the +X component, so the image loads correctly (the check itself appears in the hdr.cpp hunk near the end of this diff).

Also add landscape and portrait autotest images for the HDR (Radiance RGBE) loader.

BUGS: 433877
2021-03-04 22:57:23 +01:00
511a22f0b4 Check the input buffer before passing it to libheif 2021-03-02 11:46:13 +00:00
c532227d43 GIT_SILENT Upgrade ECM and KF version requirements for 5.80.0 release. 2021-02-28 18:59:05 +00:00
1462c3abd6 Check primaries returned from libavif
Due to double vs. float arithmetic differences,
some primaries could be rejected by Qt.
If necessary, we adjust the values so they
will be accepted by Qt (a minimal sketch of this adjustment follows the commit list).

Remove newline from the ends of error strings.
2021-02-27 19:11:56 +01:00
ca52d4ddf5 Add plugin for High Efficiency Image File Format (HEIF)
Code partially by Sirius Bakke
2021-02-25 11:52:00 +01:00
7ba4d6adda GIT_SILENT increase KF_DISABLE_DEPRECATED_BEFORE_AND_AT 2021-02-13 14:41:28 +01:00
0aaab103b1 Add compile_commands.json to .gitignore
See https://invent.kde.org/sdk/kdesrc-build/-/merge_requests/82

GIT_SILENT
2021-02-11 14:27:34 +02:00
8b9125c913 Quality option can be returned without parsing input file. 2021-02-08 10:00:53 +01:00
8845dd9818 Simplify portion of NCLX color profile code 2021-02-02 09:28:32 +01:00
a4b8295625 [imagedump] Add "list MIME type" (-m) option
Allows listing the supported MIME types.
2021-01-31 20:49:16 +01:00
134c96fa61 GIT_SILENT Add auto generated .clang-format file to .gitignore 2021-01-30 18:39:11 +01:00
3673874a63 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.79.0 release. 2021-01-29 20:44:46 +00:00
8ad43638ad Fix crash with malformed files
oss-fuzz/29284
2021-01-08 14:02:45 +00:00
c72c9f577b ani: Make sure riffSizeData is of the correct size before doing the quint32_le cast dance
oss-fuzz/29290
2021-01-05 21:52:24 +01:00
bf3f99abf5 Add missing includes 2021-01-03 10:05:53 +01:00
b79d1f222d Add plugin for animated Windows cursors (ANI) 2021-01-03 08:49:10 +00:00
bf1de9f8f0 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.78.0 release. 2021-01-02 12:27:55 +00:00
82d5e0f8a4 Remove ifdefs, we require Qt 5.14 now 2020-12-18 19:34:07 +01:00
bbf945137a GIT_SILENT Upgrade Qt5 version requirement to 5.14.0. 2020-12-18 19:02:54 +01:00
54ed1dda27 Add AVIF to the list of supported formats 2020-12-16 08:36:39 +00:00
34a9ec1b06 Add plugin for AV1 Image File Format (AVIF) 2020-12-13 22:23:27 +00:00
a6ec69d276 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.77.0 release. 2020-12-05 10:09:13 +00:00
02cbf3889f GIT_SILENT Upgrade Qt5 version requirement to 5.13.0. 2020-11-27 00:45:24 +01:00
6cf05cf305 test: don't convert image format if possible 2020-11-10 13:03:37 +08:00
938b8126b5 No longer decrease color depth to 8 for 16-bit uncompressed PSD files 2020-11-10 13:03:37 +08:00
d36c191351 tests: Remove qimage_format_enum_names and just use QMetaEnum 2020-11-09 19:15:36 +00:00
1acb5a6177 GIT_SILENT Upgrade ECM and KF5 version requirements for 5.76.0 release. 2020-11-07 11:39:10 +00:00
f2ccbf1724 Add test case for RLE compressed 16 bpc PSD files. 2020-11-06 15:04:04 +08:00
5825c83235 Add support for RLE-compressed, 16 bits per channel PSD files. 2020-11-06 15:02:29 +08:00
b742cb7cc7 Return unsupported when reading 16bit RLE compressed PSD files 2020-11-01 11:50:48 +08:00
2e6eeebdfc feat: add psd color depth == 16 format support 2020-10-30 21:47:12 +08:00
db0b5d571a GIT_SILENT increase KF_DISABLE_DEPRECATED_BEFORE_AND_AT 2020-10-11 11:27:17 +02:00
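
The following is a minimal sketch of the primaries adjustment described in commit 1462c3abd6 above; it mirrors the QAVIFHandler::CompatibleChromacity() helper added in src/imageformats/avif.cpp further down in this diff, and the demo value in main() is purely illustrative.

#include <QPointF>
#include <QDebug>
#include <QtGlobal>
#include <cfloat>

// Clamp a chromaticity coordinate so that x stays in [0, 1], y stays in (0, 1] and
// x + y never exceeds 1, presumably the range QColorSpace accepts without complaint.
static QPointF compatibleChromaticity(qreal x, qreal y)
{
    x = qBound(qreal(0.0), x, qreal(1.0));
    y = qBound(qreal(DBL_MIN), y, qreal(1.0));
    if (x + y > qreal(1.0)) {
        x = qreal(1.0) - y; // pull the point back inside the valid triangle
    }
    return QPointF(x, y);
}

int main()
{
    // A pair that overshoots x + y = 1 after float/double round-trips gets nudged
    // back so that Qt does not reject the resulting color space.
    qDebug() << compatibleChromaticity(0.7080001, 0.2920001);
    return 0;
}
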
50 changed files with 2841 additions and 74 deletions

.gitignore

@@ -20,3 +20,5 @@ random_seed
CMakeLists.txt.user*
*.unc-backup*
.cmake/
/.clang-format
/compile_commands.json

CMakeLists.txt

@@ -1,11 +1,11 @@
cmake_minimum_required(VERSION 3.5)
cmake_minimum_required(VERSION 3.6)
project(KImageFormats)
set (CMAKE_CXX_STANDARD 14)
include(FeatureSummary)
find_package(ECM 5.75.0 NO_MODULE)
find_package(ECM 5.80.0 NO_MODULE)
set_package_properties(ECM PROPERTIES TYPE REQUIRED DESCRIPTION "Extra CMake Modules." URL "https://commits.kde.org/extra-cmake-modules")
feature_summary(WHAT REQUIRED_PACKAGES_NOT_FOUND FATAL_ON_MISSING_REQUIRED_PACKAGES)
@@ -19,7 +19,7 @@ include(KDECMakeSettings)
include(CheckIncludeFiles)
set(REQUIRED_QT_VERSION 5.12.0)
set(REQUIRED_QT_VERSION 5.14.0)
find_package(Qt5Gui ${REQUIRED_QT_VERSION} REQUIRED NO_MODULE)
find_package(KF5Archive)
@@ -47,11 +47,25 @@ set_package_properties(OpenEXR PROPERTIES
TYPE OPTIONAL
PURPOSE "Required for the QImage plugin for OpenEXR images"
)
find_package(libavif 0.8.2 CONFIG)
set_package_properties(libavif PROPERTIES
TYPE OPTIONAL
PURPOSE "Required for the QImage plugin for AVIF images"
)
option(KIMAGEFORMATS_HEIF "Enable plugin for HEIF format" OFF)
if(KIMAGEFORMATS_HEIF)
include(FindPkgConfig)
pkg_check_modules(LibHeif IMPORTED_TARGET libheif>=1.10.0)
endif()
add_feature_info(LibHeif LibHeif_FOUND "required for the QImage plugin for HEIF/HEIC images")
add_definitions(-DQT_NO_FOREACH)
# 050d00 (5.13) triggers a BIC in qimageiohandler.h, in Qt 5.13, so do not enable that until we can require 5.14
# https://codereview.qt-project.org/c/qt/qtbase/+/279215
add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050c00)
add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054900)
add_definitions(-DQT_DISABLE_DEPRECATED_BEFORE=0x050e00)
add_definitions(-DKF_DISABLE_DEPRECATED_BEFORE_AND_AT=0x054F00)
add_subdirectory(src)
if (BUILD_TESTING)
add_subdirectory(autotests)

README.md

@@ -13,6 +13,7 @@ image formats.
The following image formats have read-only support:
- Animated Windows cursors (ani)
- Gimp (xcf)
- OpenEXR (exr)
- Photoshop documents (psd)
@@ -20,6 +21,7 @@ The following image formats have read and write support:
The following image formats have read and write support:
- AV1 Image File Format (AVIF)
- Encapsulated PostScript (eps)
- Personal Computer Exchange (pcx)
- SGI images (rgb, rgba, sgi, bw)

autotests/CMakeLists.txt

@@ -70,6 +70,18 @@ if (KF5Archive_FOUND)
)
endif()
if (TARGET avif)
kimageformats_read_tests(
avif
)
endif()
if (LibHeif_FOUND)
kimageformats_read_tests(
heif
)
endif()
# Allow some fuzziness when reading these formats, to allow for
# rounding errors (eg: in alpha blending).
kimageformats_read_tests(FUZZ 1
@@ -110,3 +122,8 @@ add_executable(pictest pictest.cpp)
target_link_libraries(pictest Qt5::Gui Qt5::Test)
ecm_mark_as_test(pictest)
add_test(NAME kimageformats-pic COMMAND pictest)
add_executable(anitest anitest.cpp)
target_link_libraries(anitest Qt5::Gui Qt5::Test)
ecm_mark_as_test(anitest)
add_test(NAME kimageformats-ani COMMAND anitest)

BIN autotests/ani/test.ani (new file)
BIN autotests/ani/test_1.png (new file, 813 B)
BIN autotests/ani/test_2.png (new file, 697 B)
BIN autotests/ani/test_3.png (new file, 810 B)

autotests/anitest.cpp (new file, 119 lines)

@@ -0,0 +1,119 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.1-only OR LGPL-3.0-only OR LicenseRef-KDE-Accepted-LGPL
*/
#include <QImage>
#include <QImageReader>
#include <QTest>
static bool imgEquals(const QImage &im1, const QImage &im2)
{
const int height = im1.height();
const int width = im1.width();
for (int i = 0; i < height; ++i) {
const auto *line1 = reinterpret_cast<const quint8 *>(im1.scanLine(i));
const auto *line2 = reinterpret_cast<const quint8 *>(im2.scanLine(i));
for (int j = 0; j < width; ++j) {
if (line1[j] - line2[j] != 0) {
return false;
}
}
}
return true;
}
class AniTests : public QObject
{
Q_OBJECT
private Q_SLOTS:
void initTestCase()
{
QCoreApplication::addLibraryPath(QStringLiteral(PLUGIN_DIR));
}
void testReadMetadata()
{
QImageReader reader(QFINDTESTDATA("ani/test.ani"));
QVERIFY(reader.canRead());
QCOMPARE(reader.imageCount(), 4);
QCOMPARE(reader.size(), QSize(32, 32));
QCOMPARE(reader.text(QStringLiteral("Title")), QStringLiteral("ANI Test"));
QCOMPARE(reader.text(QStringLiteral("Author")), QStringLiteral("KDE Community"));
}
void textRead()
{
QImageReader reader(QFINDTESTDATA("ani/test.ani"));
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 0);
QImage aniFrame;
QVERIFY(reader.read(&aniFrame));
QImage img1(QFINDTESTDATA("ani/test_1.png"));
img1.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img1));
QCOMPARE(reader.nextImageDelay(), 166); // 10 "jiffies"
QVERIFY(reader.canRead());
// that read() above should have advanced us to the next frame
QCOMPARE(reader.currentImageNumber(), 1);
QVERIFY(reader.read(&aniFrame));
QImage img2(QFINDTESTDATA("ani/test_2.png"));
img2.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img2));
// The "middle" frame has a longer delay than the others
QCOMPARE(reader.nextImageDelay(), 333); // 20 "jiffies"
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 2);
QVERIFY(reader.read(&aniFrame));
QImage img3(QFINDTESTDATA("ani/test_3.png"));
img3.convertTo(aniFrame.format());
QVERIFY(imgEquals(aniFrame, img3));
QCOMPARE(reader.nextImageDelay(), 166);
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 3);
QVERIFY(reader.read(&aniFrame));
// custom sequence in the ANI file should get us back to img2
QVERIFY(imgEquals(aniFrame, img2));
QCOMPARE(reader.nextImageDelay(), 166);
// We should have reached the end now
QVERIFY(!reader.canRead());
QVERIFY(!reader.read(&aniFrame));
// Jump back to the start
QVERIFY(reader.jumpToImage(0));
QVERIFY(reader.canRead());
QCOMPARE(reader.currentImageNumber(), 0);
QCOMPARE(reader.nextImageDelay(), 166);
QVERIFY(reader.read(&aniFrame));
QVERIFY(imgEquals(aniFrame, img1));
}
};
QTEST_MAIN(AniTests)
#include "anitest.moc"

BIN autotests/read/avif/bw.avif (new file, 629 B)
BIN autotests/read/avif/bw.png (new file, 743 B)
BIN autotests/read/avif/bwa.png (new file, 574 B)
BIN autotests/read/avif/rgb.png (new file, 1.0 KiB)
BIN autotests/read/heif/rgb.png (new file, 3.6 KiB)
(plus further binary test images for the avif and heif read tests whose filenames are not shown)

autotests/readtest.cpp

@@ -36,14 +36,17 @@ static void writeImageData(const char *name, const QString &filename, const QIma
}
}
// allow each byte to be different by up to 1, to allow for rounding errors
template<class Trait>
static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
{
Q_ASSERT(im1.format() == im2.format());
Q_ASSERT(im1.depth() == 24 || im1.depth() == 32 || im1.depth() == 64);
const int height = im1.height();
const int width = im1.width();
for (int i = 0; i < height; ++i) {
const uchar *line1 = im1.scanLine(i);
const uchar *line2 = im2.scanLine(i);
const Trait *line1 = reinterpret_cast<const Trait*>(im1.scanLine(i));
const Trait *line2 = reinterpret_cast<const Trait*>(im2.scanLine(i));
for (int j = 0; j < width; ++j) {
if (line1[j] > line2[j]) {
if (line1[j] - line2[j] > fuzziness)
@@ -57,6 +60,30 @@ static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
return true;
}
// allow each byte to be different by up to 1, to allow for rounding errors
static bool fuzzyeq(const QImage &im1, const QImage &im2, uchar fuzziness)
{
return (im1.depth() == 64) ? fuzzyeq<quint16>(im1, im2, fuzziness)
: fuzzyeq<quint8>(im1, im2, fuzziness);
}
// Returns the original format if we support it, or the
// format we prefer to use for `fuzzyeq()`.
// We only support formats with 8-bit or 16-bit channels.
// If that changes, don't forget to update `fuzzyeq()` too.
static QImage::Format preferredFormat(QImage::Format fmt)
{
switch (fmt) {
case QImage::Format_RGB32:
case QImage::Format_ARGB32:
case QImage::Format_RGBX64:
case QImage::Format_RGBA64:
return fmt;
default:
return QImage::Format_ARGB32;
}
}
int main(int argc, char ** argv)
{
QCoreApplication app(argc, argv);
@@ -168,19 +195,23 @@ int main(int argc, char ** argv)
<< expImage.height() << "\n";
++failed;
} else {
if (inputImage.format() != QImage::Format_ARGB32) {
QImage::Format inputFormat = preferredFormat(inputImage.format());
QImage::Format expFormat = preferredFormat(expImage.format());
QImage::Format cmpFormat = inputFormat == expFormat ? inputFormat : QImage::Format_ARGB32;
if (inputImage.format() != cmpFormat) {
QTextStream(stdout) << "INFO : " << fi.fileName()
<< ": converting " << fi.fileName()
<< " from " << formatToString(inputImage.format())
<< " to ARGB32\n";
inputImage = inputImage.convertToFormat(QImage::Format_ARGB32);
<< " to " << formatToString(cmpFormat) << '\n';
inputImage = inputImage.convertToFormat(cmpFormat);
}
if (expImage.format() != QImage::Format_ARGB32) {
if (expImage.format() != cmpFormat) {
QTextStream(stdout) << "INFO : " << fi.fileName()
<< ": converting " << expfilename
<< " from " << formatToString(expImage.format())
<< " to ARGB32\n";
expImage = expImage.convertToFormat(QImage::Format_ARGB32);
<< " to " << formatToString(cmpFormat) << '\n';
expImage = expImage.convertToFormat(cmpFormat);
}
if (fuzzyeq(inputImage, expImage, fuzziness)) {
QTextStream(stdout) << "PASS : " << fi.fileName() << "\n";

src/imageformats/CMakeLists.txt

@@ -24,6 +24,19 @@ endfunction()
##################################
kimageformats_add_plugin(kimg_ani JSON "ani.json" SOURCES ani.cpp)
install(FILES ani.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
##################################
if (TARGET avif)
kimageformats_add_plugin(kimg_avif JSON "avif.json" SOURCES "avif.cpp")
target_link_libraries(kimg_avif "avif")
install(FILES avif.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
endif()
##################################
install(FILES dds-qt.desktop RENAME dds.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
##################################
@@ -58,6 +71,15 @@ install(FILES hdr.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugi
##################################
if (LibHeif_FOUND)
kimageformats_add_plugin(kimg_heif JSON "heif.json" SOURCES heif.cpp)
target_link_libraries(kimg_heif PkgConfig::LibHeif)
kde_target_enable_exceptions(kimg_heif PRIVATE)
install(FILES heif.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)
endif()
##################################
kimageformats_add_plugin(kimg_pcx JSON "pcx.json" SOURCES pcx.cpp)
install(FILES pcx.desktop DESTINATION ${KDE_INSTALL_KSERVICES5DIR}/qimageioplugins/)

src/imageformats/ani.cpp (new file, 571 lines)

@@ -0,0 +1,571 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#include "ani_p.h"
#include <QDebug>
#include <QImage>
#include <QScopeGuard>
#include <QtEndian>
#include <QVariant>
namespace
{
struct ChunkHeader {
char magic[4];
quint32_le size;
};
struct AniHeader {
quint32_le cbSize;
quint32_le nFrames; // number of actual frames in the file
quint32_le nSteps; // number of logical images
quint32_le iWidth;
quint32_le iHeight;
quint32_le iBitCount;
quint32_le nPlanes;
quint32_le iDispRate;
quint32_le bfAttributes; // attributes (0 = bitmap images, 1 = ico/cur, 3 = "seq" block available)
};
struct CurHeader {
quint16_le wReserved; // always 0
quint16_le wResID; // always 2
quint16_le wNumImages;
};
struct CursorDirEntry {
quint8 bWidth;
quint8 bHeight;
quint8 bColorCount;
quint8 bReserved; // always 0
quint16_le wHotspotX;
quint16_le wHotspotY;
quint32_le dwBytesInImage;
quint32_le dwImageOffset;
};
} // namespace
ANIHandler::ANIHandler() = default;
bool ANIHandler::canRead() const
{
if (canRead(device())) {
setFormat("ani");
return true;
}
// Check if there's another frame coming
const QByteArray nextFrame = device()->peek(sizeof(ChunkHeader));
if (nextFrame.size() == sizeof(ChunkHeader)) {
const auto *header = reinterpret_cast<const ChunkHeader *>(nextFrame.data());
if (qstrncmp(header->magic, "icon", sizeof(header->magic)) == 0
&& header->size > 0) {
setFormat("ani");
return true;
}
}
return false;
}
bool ANIHandler::read(QImage *outImage)
{
if (!ensureScanned()) {
return false;
}
if (device()->pos() < m_firstFrameOffset) {
device()->seek(m_firstFrameOffset);
}
const QByteArray frameType = device()->read(4);
if (frameType != "icon") {
return false;
}
const QByteArray frameSizeData = device()->read(sizeof(quint32_le));
if (frameSizeData.count() != sizeof(quint32_le)) {
return false;
}
const auto frameSize = *(reinterpret_cast<const quint32_le *>(frameSizeData.data()));
if (!frameSize) {
return false;
}
const QByteArray frameData = device()->read(frameSize);
const bool ok = outImage->loadFromData(frameData, "cur");
++m_currentImageNumber;
// When we have a custom image sequence, seek to before the frame that would follow
if (!m_imageSequence.isEmpty()) {
if (m_currentImageNumber < m_imageSequence.count()) {
const int nextFrame = m_imageSequence.at(m_currentImageNumber);
if (nextFrame < 0 || nextFrame >= m_frameOffsets.count()) {
return false;
}
const auto nextOffset = m_frameOffsets.at(nextFrame);
device()->seek(nextOffset);
} else if (m_currentImageNumber == m_imageSequence.count()) {
const auto endOffset = m_frameOffsets.last();
if (device()->pos() != endOffset) {
device()->seek(endOffset);
}
}
}
return ok;
}
int ANIHandler::currentImageNumber() const
{
if (!ensureScanned()) {
return 0;
}
return m_currentImageNumber;
}
int ANIHandler::imageCount() const
{
if (!ensureScanned()) {
return 0;
}
return m_imageCount;
}
bool ANIHandler::jumpToImage(int imageNumber)
{
if (!ensureScanned()) {
return false;
}
if (imageNumber < 0) {
return false;
}
if (imageNumber == m_currentImageNumber) {
return true;
}
// If we have a custom image sequence, we have an index of frames we can jump to
if (!m_imageSequence.isEmpty()) {
if (imageNumber >= m_imageSequence.count()) {
return false;
}
const int targetFrame = m_imageSequence.at(imageNumber);
const auto targetOffset = m_frameOffsets.value(targetFrame, -1);
if (device()->seek(targetOffset)) {
m_currentImageNumber = imageNumber;
return true;
}
return false;
}
if (imageNumber >= m_frameCount) {
return false;
}
// otherwise we need to jump from frame to frame
const auto oldPos = device()->pos();
if (imageNumber < m_currentImageNumber) {
// start from the beginning
if (!device()->seek(m_firstFrameOffset)) {
return false;
}
}
while (m_currentImageNumber < imageNumber) {
if (!jumpToNextImage()) {
device()->seek(oldPos);
return false;
}
}
m_currentImageNumber = imageNumber;
return true;
}
bool ANIHandler::jumpToNextImage()
{
if (!ensureScanned()) {
return false;
}
// If we have a custom image sequence, we have an index of frames we can jump to
// Delegate to jumpToImage
if (!m_imageSequence.isEmpty()) {
return jumpToImage(m_currentImageNumber + 1);
}
if (device()->pos() < m_firstFrameOffset) {
if (!device()->seek(m_firstFrameOffset)) {
return false;
}
}
const QByteArray nextFrame = device()->peek(sizeof(ChunkHeader));
if (nextFrame.size() != sizeof(ChunkHeader)) {
return false;
}
const auto *header = reinterpret_cast<const ChunkHeader *>(nextFrame.data());
if (qstrncmp(header->magic, "icon", sizeof(header->magic)) != 0) {
return false;
}
const qint64 seekBy = sizeof(ChunkHeader) + header->size;
if (!device()->seek(device()->pos() + seekBy)) {
return false;
}
++m_currentImageNumber;
return true;
}
int ANIHandler::loopCount() const
{
if (!ensureScanned()) {
return 0;
}
return -1;
}
int ANIHandler::nextImageDelay() const
{
if (!ensureScanned()) {
return 0;
}
int rate = m_displayRate;
if (!m_displayRates.isEmpty()) {
int previousImage = m_currentImageNumber - 1;
if (previousImage < 0) {
previousImage = m_displayRates.count() - 1;
}
rate = m_displayRates.at(previousImage);
}
return rate * 1000 / 60;
}
bool ANIHandler::supportsOption(ImageOption option) const
{
return option == Size || option == Name || option == Description || option == Animation;
}
QVariant ANIHandler::option(ImageOption option) const
{
if (!supportsOption(option) || !ensureScanned()) {
return QVariant();
}
switch (option) {
case QImageIOHandler::Size:
return m_size;
// TODO QImageIOHandler::Format
// but both iBitCount in AniHeader and bColorCount are just zero most of the time
// so one would probably need to traverse even further down into IcoHeader and IconDirEntry...
// but Qt's ICO/CUR handler always seems to give us a ARB
case QImageIOHandler::Name:
return m_name;
case QImageIOHandler::Description: {
QString description;
if (!m_name.isEmpty()) {
description += QStringLiteral("Title: %1\n\n").arg(m_name);
}
if (!m_artist.isEmpty()) {
description += QStringLiteral("Author: %1\n\n").arg(m_artist);
}
return description;
}
case QImageIOHandler::Animation:
return true;
default:
break;
}
return QVariant();
}
bool ANIHandler::ensureScanned() const
{
if (m_scanned) {
return true;
}
if (device()->isSequential()) {
return false;
}
auto *mutableThis = const_cast<ANIHandler *>(this);
const auto oldPos = device()->pos();
auto cleanup = qScopeGuard([this, oldPos] {
device()->seek(oldPos);
});
device()->seek(0);
const QByteArray riffIntro = device()->read(4);
if (riffIntro != "RIFF") {
return false;
}
const auto riffSizeData = device()->read(sizeof(quint32_le));
if (riffSizeData.size() != sizeof(quint32_le)) {
return false;
}
const auto riffSize = *(reinterpret_cast<const quint32_le *>(riffSizeData.data()));
// TODO do a basic sanity check if the size is enough to hold some metadata and a frame?
if (riffSize == 0) {
return false;
}
mutableThis->m_displayRates.clear();
mutableThis->m_imageSequence.clear();
while (device()->pos() < riffSize) {
const QByteArray chunkId = device()->read(4);
if (chunkId.length() != 4) {
return false;
}
if (chunkId == "ACON") {
continue;
}
const QByteArray chunkSizeData = device()->read(sizeof(quint32_le));
if (chunkSizeData.length() != sizeof(quint32_le)) {
return false;
}
auto chunkSize = *(reinterpret_cast<const quint32_le *>(chunkSizeData.data()));
if (chunkId == "anih") {
if (chunkSize != sizeof(AniHeader)) {
qWarning() << "anih chunk size does not match ANIHEADER size";
return false;
}
const QByteArray anihData = device()->read(sizeof(AniHeader));
if (anihData.size() != sizeof(AniHeader)) {
return false;
}
auto *aniHeader = reinterpret_cast<const AniHeader *>(anihData.data());
// The size in the ani header is usually 0 unfortunately,
// so we'll also check the first frame for its size further below
mutableThis->m_size = QSize(aniHeader->iWidth, aniHeader->iHeight);
mutableThis->m_frameCount = aniHeader->nFrames;
mutableThis->m_imageCount = aniHeader->nSteps;
mutableThis->m_displayRate = aniHeader->iDispRate;
} else if (chunkId == "rate" || chunkId == "seq ") {
const QByteArray data = device()->read(chunkSize);
if (static_cast<quint32_le>(data.size()) != chunkSize
|| data.size() % sizeof(quint32_le) != 0) {
return false;
}
// TODO should we check that the number of rate entries matches nSteps?
auto *dataPtr = data.data();
QVector<int> list;
for (int i = 0; i < data.count(); i += sizeof(quint32_le)) {
const auto entry = *(reinterpret_cast<const quint32_le *>(dataPtr + i));
list.append(entry);
}
if (chunkId == "rate") {
// should we check that the number of rate entries matches nSteps?
mutableThis->m_displayRates = list;
} else if (chunkId == "seq ") {
// Check if it's just an ascending sequence, don't bother with it then
bool isAscending = true;
for (int i = 0; i < list.count(); ++i) {
if (list.at(i) != i) {
isAscending = false;
break;
}
}
if (!isAscending) {
mutableThis->m_imageSequence = list;
}
}
// IART and INAM are technically inside LIST->INFO but "INFO" is supposedly optional
// so just handle those two attributes wherever we encounter them
} else if (chunkId == "INAM" || chunkId == "IART") {
const QByteArray value = device()->read(chunkSize);
if (static_cast<quint32_le>(value.size()) != chunkSize) {
return false;
}
// DWORDs are aligned to even sizes
if (chunkSize % 2 != 0) {
device()->read(1);
}
// FIXME encoding
const QString stringValue = QString::fromLocal8Bit(value);
if (chunkId == "INAM") {
mutableThis->m_name = stringValue;
} else if (chunkId == "IART") {
mutableThis->m_artist = stringValue;
}
} else if (chunkId == "LIST") {
const QByteArray listType = device()->read(4);
if (listType == "INFO") {
// Technically this would contain INAM and IART, but we handle them wherever they occur (see above)
} else if (listType == "fram") {
quint64 read = 0;
while (read < chunkSize) {
const QByteArray chunkType = device()->read(4);
read += 4;
if (chunkType != "icon") {
break;
}
if (!m_firstFrameOffset) {
mutableThis->m_firstFrameOffset = device()->pos() - 4;
mutableThis->m_currentImageNumber = 0;
// If size in header isn't valid, use the first frame's size instead
if (!m_size.isValid() || m_size.isEmpty()) {
const auto oldPos = device()->pos();
device()->read(sizeof(quint32_le));
const QByteArray curHeaderData = device()->read(sizeof(CurHeader));
const QByteArray cursorDirEntryData = device()->read(sizeof(CursorDirEntry));
if (curHeaderData.length() == sizeof(CurHeader)
&& cursorDirEntryData.length() == sizeof(CursorDirEntry)) {
auto *cursorDirEntry = reinterpret_cast<const CursorDirEntry *>(cursorDirEntryData.data());
mutableThis->m_size = QSize(cursorDirEntry->bWidth, cursorDirEntry->bHeight);
}
device()->seek(oldPos);
}
// If we don't have a custom image sequence we can stop scanning right here
if (m_imageSequence.isEmpty()) {
break;
}
}
mutableThis->m_frameOffsets.append(device()->pos() - 4);
const QByteArray frameSizeData = device()->read(sizeof(quint32_le));
if (frameSizeData.size() != sizeof(quint32_le)) {
return false;
}
const auto frameSize = *(reinterpret_cast<const quint32_le *>(frameSizeData.data()));
device()->seek(device()->pos() + frameSize);
read += frameSize;
if (m_frameOffsets.count() == m_frameCount) {
// Also record the end of frame data
mutableThis->m_frameOffsets.append(device()->pos() - 4);
break;
}
}
break;
}
}
}
if (m_imageCount != m_frameCount && m_imageSequence.isEmpty()) {
qWarning("ANIHandler: 'nSteps' is not equal to 'nFrames' but no 'seq' entries were provided");
return false;
}
if (!m_imageSequence.isEmpty() && m_imageSequence.count() != m_imageCount) {
qWarning("ANIHandler: count of entries in 'seq' does not match 'nSteps' in anih");
return false;
}
if (!m_displayRates.isEmpty() && m_displayRates.count() != m_imageCount) {
qWarning("ANIHandler: count of entries in 'rate' does not match 'nSteps' in anih");
return false;
}
if (!m_frameOffsets.isEmpty() && m_frameOffsets.count() != m_frameCount + 1) {
qWarning("ANIHandler: number of actual frames does not match 'nFrames' in anih");
return false;
}
mutableThis->m_scanned = true;
return true;
}
bool ANIHandler::canRead(QIODevice *device)
{
if (!device) {
qWarning("ANIHandler::canRead() called with no device");
return false;
}
const QByteArray riffIntro = device->peek(12);
if (riffIntro.length() != 12) {
return false;
}
if (!riffIntro.startsWith("RIFF")) {
return false;
}
// TODO sanity check chunk size?
if (riffIntro.mid(4 + 4, 4) != "ACON") {
return false;
}
return true;
}
QImageIOPlugin::Capabilities ANIPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
if (format == "ani") {
return Capabilities(CanRead);
}
if (!format.isEmpty()) {
return {};
}
if (!device->isOpen()) {
return {};
}
Capabilities cap;
if (device->isReadable() && ANIHandler::canRead(device)) {
cap |= CanRead;
}
return cap;
}
QImageIOHandler *ANIPlugin::create(QIODevice *device, const QByteArray &format) const
{
QImageIOHandler *handler = new ANIHandler;
handler->setDevice(device);
handler->setFormat(format);
return handler;
}
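
A minimal, illustrative sketch (not the plugin's exact code) of the RIFF chunk walking that ensureScanned() above performs: read a four-byte chunk id plus a little-endian 32-bit size, validate the buffer length before the quint32_le cast, and honour the even-byte padding of chunk payloads.

#include <QByteArray>
#include <QIODevice>
#include <QtEndian>

// Read one RIFF chunk header; returns false if the device ran out of data.
static bool readChunkHeader(QIODevice *dev, QByteArray &id, quint32 &size)
{
    id = dev->read(4);
    const QByteArray sizeData = dev->read(sizeof(quint32_le));
    // Validate the buffer before reinterpreting it, as the oss-fuzz fixes above do.
    if (id.size() != 4 || sizeData.size() != int(sizeof(quint32_le))) {
        return false;
    }
    size = *reinterpret_cast<const quint32_le *>(sizeData.constData());
    return true;
}

// Skip a chunk payload; RIFF pads payloads to even sizes.
static bool skipChunkPayload(QIODevice *dev, quint32 size)
{
    const qint64 padded = qint64(size) + (size % 2);
    return dev->seek(dev->pos() + padded);
}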

src/imageformats/ani.desktop (new file)

@@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=ani
X-KDE-MimeType=application/x-navi-animation
X-KDE-Read=true
X-KDE-Write=false

src/imageformats/ani.json (new file)

@@ -0,0 +1,4 @@
{
"Keys": [ "ani" ],
"MimeTypes": [ "application/x-navi-animation" ]
}

src/imageformats/ani_p.h (new file, 69 lines)

@@ -0,0 +1,69 @@
/*
SPDX-FileCopyrightText: 2020 Kai Uwe Broulik <kde@broulik.de>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#ifndef KIMG_ANI_P_H
#define KIMG_ANI_P_H
#include <QImageIOPlugin>
#include <QSize>
class ANIHandler : public QImageIOHandler
{
public:
ANIHandler();
bool canRead() const override;
bool read(QImage *image) override;
int currentImageNumber() const override;
int imageCount() const override;
bool jumpToImage(int imageNumber) override;
bool jumpToNextImage() override;
int loopCount() const override;
int nextImageDelay() const override;
bool supportsOption(ImageOption option) const override;
QVariant option(ImageOption option) const override;
static bool canRead(QIODevice *device);
private:
bool ensureScanned() const;
bool m_scanned = false;
int m_currentImageNumber = 0;
int m_frameCount = 0; // "physical" frames
int m_imageCount = 0; // logical images
// Stores a custom sequence of images
QVector<int> m_imageSequence;
// and the corresponding offsets where they are
// since we can't read the image data sequentially in this case
QVector<qint64> m_frameOffsets;
qint64 m_firstFrameOffset = 0;
int m_displayRate = 0;
QVector<int> m_displayRates;
QString m_name;
QString m_artist;
QSize m_size;
};
class ANIPlugin : public QImageIOPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA(IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "ani.json")
public:
Capabilities capabilities(QIODevice *device, const QByteArray &format) const override;
QImageIOHandler *create(QIODevice *device, const QByteArray &format = QByteArray()) const override;
};
#endif // KIMG_ANI_P_H

src/imageformats/avif.cpp (new file, 962 lines)

@@ -0,0 +1,962 @@
/*
AV1 Image File Format (AVIF) support for QImage.
SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: BSD-2-Clause
*/
#include <QtGlobal>
#include <QThread>
#include <QColorSpace>
#include "avif_p.h"
#include <cfloat>
QAVIFHandler::QAVIFHandler() :
m_parseState(ParseAvifNotParsed),
m_quality(52),
m_container_width(0),
m_container_height(0),
m_rawAvifData(AVIF_DATA_EMPTY),
m_decoder(nullptr),
m_must_jump_to_next_image(false)
{
}
QAVIFHandler::~QAVIFHandler()
{
if (m_decoder) {
avifDecoderDestroy(m_decoder);
}
}
bool QAVIFHandler::canRead() const
{
if (m_parseState == ParseAvifNotParsed && !canRead(device())) {
return false;
}
if (m_parseState != ParseAvifError) {
setFormat("avif");
return true;
}
return false;
}
bool QAVIFHandler::canRead(QIODevice *device)
{
if (!device) {
return false;
}
QByteArray header = device->peek(144);
if (header.size() < 12) {
return false;
}
avifROData input;
input.data = (const uint8_t *) header.constData();
input.size = header.size();
if (avifPeekCompatibleFileType(&input)) {
return true;
}
return false;
}
bool QAVIFHandler::ensureParsed() const
{
if (m_parseState == ParseAvifSuccess) {
return true;
}
if (m_parseState == ParseAvifError) {
return false;
}
QAVIFHandler *that = const_cast<QAVIFHandler *>(this);
return that->ensureDecoder();
}
bool QAVIFHandler::ensureDecoder()
{
if (m_decoder) {
return true;
}
m_rawData = device()->readAll();
m_rawAvifData.data = (const uint8_t *) m_rawData.constData();
m_rawAvifData.size = m_rawData.size();
if (avifPeekCompatibleFileType(&m_rawAvifData) == AVIF_FALSE) {
m_parseState = ParseAvifError;
return false;
}
m_decoder = avifDecoderCreate();
avifResult decodeResult;
decodeResult = avifDecoderSetIOMemory(m_decoder, m_rawAvifData.data, m_rawAvifData.size);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: avifDecoderSetIOMemory failed: %s", avifResultToString(decodeResult));
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
decodeResult = avifDecoderParse(m_decoder);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to parse input: %s", avifResultToString(decodeResult));
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
decodeResult = avifDecoderNextImage(m_decoder);
if (decodeResult == AVIF_RESULT_OK) {
m_container_width = m_decoder->image->width;
m_container_height = m_decoder->image->height;
if ((m_container_width > 32768) || (m_container_height > 32768)) {
qWarning("AVIF image (%dx%d) is too large!", m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width == 0) || (m_container_height == 0)) {
qWarning("Empty image, nothing to decode");
m_parseState = ParseAvifError;
return false;
}
m_parseState = ParseAvifSuccess;
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
} else {
qWarning("ERROR: Failed to decode image: %s", avifResultToString(decodeResult));
}
avifDecoderDestroy(m_decoder);
m_decoder = nullptr;
m_parseState = ParseAvifError;
return false;
}
bool QAVIFHandler::decode_one_frame()
{
if (!ensureParsed()) {
return false;
}
bool loadalpha;
if (m_decoder->image->alphaPlane) {
loadalpha = true;
} else {
loadalpha = false;
}
QImage::Format resultformat;
if (m_decoder->image->depth > 8) {
if (loadalpha) {
resultformat = QImage::Format_RGBA64;
} else {
resultformat = QImage::Format_RGBX64;
}
} else {
if (loadalpha) {
resultformat = QImage::Format_RGBA8888;
} else {
resultformat = QImage::Format_RGB888;
}
}
QImage result(m_decoder->image->width, m_decoder->image->height, resultformat);
if (result.isNull()) {
qWarning("Memory cannot be allocated");
return false;
}
if (m_decoder->image->icc.data && (m_decoder->image->icc.size > 0)) {
result.setColorSpace(QColorSpace::fromIccProfile(QByteArray::fromRawData((const char *) m_decoder->image->icc.data, (int) m_decoder->image->icc.size)));
if (! result.colorSpace().isValid()) {
qWarning("Invalid QColorSpace created from ICC!");
}
} else {
float prim[8] = { 0.64f, 0.33f, 0.3f, 0.6f, 0.15f, 0.06f, 0.3127f, 0.329f };
// outPrimaries: rX, rY, gX, gY, bX, bY, wX, wY
avifColorPrimariesGetValues(m_decoder->image->colorPrimaries, prim);
const QPointF redPoint(QAVIFHandler::CompatibleChromacity(prim[0], prim[1]));
const QPointF greenPoint(QAVIFHandler::CompatibleChromacity(prim[2], prim[3]));
const QPointF bluePoint(QAVIFHandler::CompatibleChromacity(prim[4], prim[5]));
const QPointF whitePoint(QAVIFHandler::CompatibleChromacity(prim[6], prim[7]));
QColorSpace::TransferFunction q_trc = QColorSpace::TransferFunction::Custom;
float q_trc_gamma = 0.0f;
switch (m_decoder->image->transferCharacteristics) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
case 4:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.2f;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
case 5:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.8f;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
case 8:
q_trc = QColorSpace::TransferFunction::Linear;
break;
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
case 0:
case 2: /* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
case 13:
q_trc = QColorSpace::TransferFunction::SRgb;
break;
default:
qWarning("CICP colorPrimaries: %d, transferCharacteristics: %d\nThe colorspace is unsupported by this plug-in yet.",
m_decoder->image->colorPrimaries, m_decoder->image->transferCharacteristics);
q_trc = QColorSpace::TransferFunction::SRgb;
break;
}
if (q_trc != QColorSpace::TransferFunction::Custom) { //we create new colorspace using Qt
switch (m_decoder->image->colorPrimaries) {
/* AVIF_COLOR_PRIMARIES_BT709 */
case 0:
case 1:
case 2: /* AVIF_COLOR_PRIMARIES_UNSPECIFIED */
result.setColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, q_trc, q_trc_gamma));
break;
/* AVIF_COLOR_PRIMARIES_SMPTE432 */
case 12:
result.setColorSpace(QColorSpace(QColorSpace::Primaries::DciP3D65, q_trc, q_trc_gamma));
break;
default:
result.setColorSpace(QColorSpace(whitePoint, redPoint, greenPoint, bluePoint, q_trc, q_trc_gamma));
break;
}
}
if (! result.colorSpace().isValid()) {
qWarning("Invalid QColorSpace created from NCLX/CICP!");
}
}
avifRGBImage rgb;
avifRGBImageSetDefaults(&rgb, m_decoder->image);
if (m_decoder->image->depth > 8) {
rgb.depth = 16;
rgb.format = AVIF_RGB_FORMAT_RGBA;
if (!loadalpha) {
rgb.ignoreAlpha = AVIF_TRUE;
result.fill(Qt::black);
if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
resultformat = QImage::Format_Grayscale16;
}
}
} else {
rgb.depth = 8;
if (loadalpha) {
rgb.format = AVIF_RGB_FORMAT_RGBA;
resultformat = QImage::Format_ARGB32;
} else {
rgb.format = AVIF_RGB_FORMAT_RGB;
if (m_decoder->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
resultformat = QImage::Format_Grayscale8;
} else {
resultformat = QImage::Format_RGB32;
}
}
}
rgb.rowBytes = result.bytesPerLine();
rgb.pixels = result.bits();
avifResult res = avifImageYUVToRGB(m_decoder->image, &rgb);
if (res != AVIF_RESULT_OK) {
qWarning("ERROR in avifImageYUVToRGB: %s", avifResultToString(res));
return false;
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_CLAP) {
if ((m_decoder->image->clap.widthD > 0) && (m_decoder->image->clap.heightD > 0) &&
(m_decoder->image->clap.horizOffD > 0) && (m_decoder->image->clap.vertOffD > 0)) {
int new_width, new_height, offx, offy;
new_width = (int)((double)(m_decoder->image->clap.widthN) / (m_decoder->image->clap.widthD) + 0.5);
if (new_width > result.width()) {
new_width = result.width();
}
new_height = (int)((double)(m_decoder->image->clap.heightN) / (m_decoder->image->clap.heightD) + 0.5);
if (new_height > result.height()) {
new_height = result.height();
}
if (new_width > 0 && new_height > 0) {
offx = ((double)((int32_t) m_decoder->image->clap.horizOffN)) / (m_decoder->image->clap.horizOffD) +
(result.width() - new_width) / 2.0 + 0.5;
if (offx < 0) {
offx = 0;
} else if (offx > (result.width() - new_width)) {
offx = result.width() - new_width;
}
offy = ((double)((int32_t) m_decoder->image->clap.vertOffN)) / (m_decoder->image->clap.vertOffD) +
(result.height() - new_height) / 2.0 + 0.5;
if (offy < 0) {
offy = 0;
} else if (offy > (result.height() - new_height)) {
offy = result.height() - new_height;
}
result = result.copy(offx, offy, new_width, new_height);
}
}
else { //Zero values, we need to avoid 0 divide.
qWarning("ERROR: Wrong values in avifCleanApertureBox");
}
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IROT) {
QTransform transform;
switch (m_decoder->image->irot.angle) {
case 1:
transform.rotate(-90);
result = result.transformed(transform);
break;
case 2:
transform.rotate(180);
result = result.transformed(transform);
break;
case 3:
transform.rotate(90);
result = result.transformed(transform);
break;
}
}
if (m_decoder->image->transformFlags & AVIF_TRANSFORM_IMIR) {
switch (m_decoder->image->imir.axis) {
case 0: //vertical
result = result.mirrored(false, true);
break;
case 1: //horizontal
result = result.mirrored(true, false);
break;
}
}
if (resultformat == result.format()) {
m_current_image = result;
} else {
m_current_image = result.convertToFormat(resultformat);
}
m_must_jump_to_next_image = false;
return true;
}
bool QAVIFHandler::read(QImage *image)
{
if (!ensureParsed()) {
return false;
}
if (m_must_jump_to_next_image) {
jumpToNextImage();
}
*image = m_current_image;
if (imageCount() >= 2) {
m_must_jump_to_next_image = true;
}
return true;
}
bool QAVIFHandler::write(const QImage &image)
{
if (image.format() == QImage::Format_Invalid) {
qWarning("No image data to save");
return false;
}
if ((image.width() > 32768) || (image.height() > 32768)) {
qWarning("Image is too large");
return false;
}
int maxQuantizer = AVIF_QUANTIZER_WORST_QUALITY * (100 - qBound(0, m_quality, 100)) / 100;
int minQuantizer = 0;
int maxQuantizerAlpha = 0;
avifResult res;
bool save_grayscale; //true - monochrome, false - colors
int save_depth; //8 or 10bit per channel
QImage::Format tmpformat; //format for temporary image
avifImage *avif = nullptr;
//grayscale detection
switch (image.format()) {
case QImage::Format_Mono:
case QImage::Format_MonoLSB:
case QImage::Format_Grayscale8:
case QImage::Format_Grayscale16:
save_grayscale = true;
break;
case QImage::Format_Indexed8:
save_grayscale = image.isGrayscale();
break;
default:
save_grayscale = false;
break;
}
//depth detection
switch (image.format()) {
case QImage::Format_BGR30:
case QImage::Format_A2BGR30_Premultiplied:
case QImage::Format_RGB30:
case QImage::Format_A2RGB30_Premultiplied:
case QImage::Format_Grayscale16:
case QImage::Format_RGBX64:
case QImage::Format_RGBA64:
case QImage::Format_RGBA64_Premultiplied:
save_depth = 10;
break;
default:
if (image.depth() > 32) {
save_depth = 10;
} else {
save_depth = 8;
}
break;
}
//quality settings
if (maxQuantizer > 20) {
minQuantizer = maxQuantizer - 20;
if (maxQuantizer > 40) { //we decrease quality of alpha channel here
maxQuantizerAlpha = maxQuantizer - 40;
}
}
if (save_grayscale && !image.hasAlphaChannel()) { //we are going to save grayscale image without alpha channel
if (save_depth > 8) {
tmpformat = QImage::Format_Grayscale16;
} else {
tmpformat = QImage::Format_Grayscale8;
}
QImage tmpgrayimage = image.convertToFormat(tmpformat);
avif = avifImageCreate(tmpgrayimage.width(), tmpgrayimage.height(), save_depth, AVIF_PIXEL_FORMAT_YUV400);
avifImageAllocatePlanes(avif, AVIF_PLANES_YUV);
if (tmpgrayimage.colorSpace().isValid()) {
avif->colorPrimaries = (avifColorPrimaries)1;
avif->matrixCoefficients = (avifMatrixCoefficients)1;
switch (tmpgrayimage.colorSpace().transferFunction()) {
case QColorSpace::TransferFunction::Linear:
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
avif->transferCharacteristics = (avifTransferCharacteristics)8;
break;
case QColorSpace::TransferFunction::SRgb:
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
avif->transferCharacteristics = (avifTransferCharacteristics)13;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
break;
}
}
if (save_depth > 8) { // QImage::Format_Grayscale16
for (int y = 0; y < tmpgrayimage.height(); y++) {
const uint16_t *src16bit = reinterpret_cast<const uint16_t *>(tmpgrayimage.constScanLine(y));
uint16_t *dest16bit = reinterpret_cast<uint16_t *>(avif->yuvPlanes[0] + y * avif->yuvRowBytes[0]);
for (int x = 0; x < tmpgrayimage.width(); x++) {
int tmp_pixelval = (int)(((float)(*src16bit) / 65535.0f) * 1023.0f + 0.5f); //downgrade to 10 bits
*dest16bit = qBound(0, tmp_pixelval, 1023);
dest16bit++;
src16bit++;
}
}
} else { // QImage::Format_Grayscale8
for (int y = 0; y < tmpgrayimage.height(); y++) {
const uchar *src8bit = tmpgrayimage.constScanLine(y);
uint8_t *dest8bit = avif->yuvPlanes[0] + y * avif->yuvRowBytes[0];
for (int x = 0; x < tmpgrayimage.width(); x++) {
*dest8bit = *src8bit;
dest8bit++;
src8bit++;
}
}
}
} else { //we are going to save color image
if (save_depth > 8) {
if (image.hasAlphaChannel()) {
tmpformat = QImage::Format_RGBA64;
} else {
tmpformat = QImage::Format_RGBX64;
}
} else { //8bit depth
if (image.hasAlphaChannel()) {
tmpformat = QImage::Format_RGBA8888;
} else {
tmpformat = QImage::Format_RGB888;
}
}
QImage tmpcolorimage = image.convertToFormat(tmpformat);
avifPixelFormat pixel_format = AVIF_PIXEL_FORMAT_YUV420;
if (maxQuantizer < 20) {
if (maxQuantizer < 10) {
pixel_format = AVIF_PIXEL_FORMAT_YUV444; //best quality
} else {
pixel_format = AVIF_PIXEL_FORMAT_YUV422; //high quality
}
}
avifMatrixCoefficients matrix_to_save = (avifMatrixCoefficients)1; //default for Qt 5.12 and 5.13;
avifColorPrimaries primaries_to_save = (avifColorPrimaries)2;
avifTransferCharacteristics transfer_to_save = (avifTransferCharacteristics)2;
if (tmpcolorimage.colorSpace().isValid()) {
switch (tmpcolorimage.colorSpace().primaries()) {
case QColorSpace::Primaries::SRgb:
/* AVIF_COLOR_PRIMARIES_BT709 */
primaries_to_save = (avifColorPrimaries)1;
/* AVIF_MATRIX_COEFFICIENTS_BT709 */
matrix_to_save = (avifMatrixCoefficients)1;
break;
case QColorSpace::Primaries::DciP3D65:
/* AVIF_NCLX_COLOUR_PRIMARIES_P3, AVIF_NCLX_COLOUR_PRIMARIES_SMPTE432 */
primaries_to_save = (avifColorPrimaries)12;
/* AVIF_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL */
matrix_to_save = (avifMatrixCoefficients)12;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
primaries_to_save = (avifColorPrimaries)2;
/* AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED */
matrix_to_save = (avifMatrixCoefficients)2;
break;
}
switch (tmpcolorimage.colorSpace().transferFunction()) {
case QColorSpace::TransferFunction::Linear:
/* AVIF_TRANSFER_CHARACTERISTICS_LINEAR */
transfer_to_save = (avifTransferCharacteristics)8;
break;
case QColorSpace::TransferFunction::Gamma:
if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.2f) < 0.1f) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470M */
transfer_to_save = (avifTransferCharacteristics)4;
} else if (qAbs(tmpcolorimage.colorSpace().gamma() - 2.8f) < 0.1f) {
/* AVIF_TRANSFER_CHARACTERISTICS_BT470BG */
transfer_to_save = (avifTransferCharacteristics)5;
} else {
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
transfer_to_save = (avifTransferCharacteristics)2;
}
break;
case QColorSpace::TransferFunction::SRgb:
/* AVIF_TRANSFER_CHARACTERISTICS_SRGB */
transfer_to_save = (avifTransferCharacteristics)13;
break;
default:
/* AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED */
transfer_to_save = (avifTransferCharacteristics)2;
break;
}
//in case primaries or trc were not identified
if ((primaries_to_save == 2) ||
(transfer_to_save == 2)) {
//upgrade image to higher bit depth
if (save_depth == 8) {
save_depth = 10;
if (tmpcolorimage.hasAlphaChannel()) {
tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBA64);
} else {
tmpcolorimage = tmpcolorimage.convertToFormat(QImage::Format_RGBX64);
}
}
if ((primaries_to_save == 2) &&
(transfer_to_save != 2)) { //other primaries but known trc
primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709
switch (transfer_to_save) {
case 8: // AVIF_TRANSFER_CHARACTERISTICS_LINEAR
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::Linear));
break;
case 4: // AVIF_TRANSFER_CHARACTERISTICS_BT470M
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.2f));
break;
case 5: // AVIF_TRANSFER_CHARACTERISTICS_BT470BG
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, 2.8f));
break;
default: // AVIF_TRANSFER_CHARACTERISTICS_SRGB + any other
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
transfer_to_save = (avifTransferCharacteristics)13;
break;
}
} else if ((primaries_to_save != 2) &&
(transfer_to_save == 2)) { //recognized primaries but other trc
transfer_to_save = (avifTransferCharacteristics)13;
tmpcolorimage.convertToColorSpace(tmpcolorimage.colorSpace().withTransferFunction(QColorSpace::TransferFunction::SRgb));
} else { //unrecognized profile
primaries_to_save = (avifColorPrimaries)1; // AVIF_COLOR_PRIMARIES_BT709
transfer_to_save = (avifTransferCharacteristics)13;
matrix_to_save = (avifMatrixCoefficients)1; // AVIF_MATRIX_COEFFICIENTS_BT709
tmpcolorimage.convertToColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, QColorSpace::TransferFunction::SRgb));
}
}
}
avif = avifImageCreate(tmpcolorimage.width(), tmpcolorimage.height(), save_depth, pixel_format);
avif->matrixCoefficients = matrix_to_save;
avif->colorPrimaries = primaries_to_save;
avif->transferCharacteristics = transfer_to_save;
avifRGBImage rgb;
avifRGBImageSetDefaults(&rgb, avif);
rgb.rowBytes = tmpcolorimage.bytesPerLine();
rgb.pixels = const_cast<uint8_t *>(tmpcolorimage.constBits());
if (save_depth > 8) { //10bit depth
rgb.depth = 16;
if (tmpcolorimage.hasAlphaChannel()) {
avif->alphaRange = AVIF_RANGE_FULL;
} else {
rgb.ignoreAlpha = AVIF_TRUE;
}
rgb.format = AVIF_RGB_FORMAT_RGBA;
} else { //8bit depth
rgb.depth = 8;
if (tmpcolorimage.hasAlphaChannel()) {
rgb.format = AVIF_RGB_FORMAT_RGBA;
avif->alphaRange = AVIF_RANGE_FULL;
} else {
rgb.format = AVIF_RGB_FORMAT_RGB;
}
}
res = avifImageRGBToYUV(avif, &rgb);
if (res != AVIF_RESULT_OK) {
qWarning("ERROR in avifImageRGBToYUV: %s", avifResultToString(res));
return false;
}
}
avifRWData raw = AVIF_DATA_EMPTY;
avifEncoder *encoder = avifEncoderCreate();
encoder->maxThreads = qBound(1, QThread::idealThreadCount(), 64);
encoder->minQuantizer = minQuantizer;
encoder->maxQuantizer = maxQuantizer;
if (image.hasAlphaChannel()) {
encoder->minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS;
encoder->maxQuantizerAlpha = maxQuantizerAlpha;
}
encoder->speed = 8;
res = avifEncoderWrite(encoder, avif, &raw);
avifEncoderDestroy(encoder);
avifImageDestroy(avif);
if (res == AVIF_RESULT_OK) {
qint64 status = device()->write((const char *)raw.data, raw.size);
avifRWDataFree(&raw);
if (status > 0) {
return true;
} else if (status == -1) {
qWarning("Write error: %s", qUtf8Printable(device()->errorString()));
return false;
}
} else {
qWarning("ERROR: Failed to encode: %s", avifResultToString(res));
}
return false;
}
QVariant QAVIFHandler::option(ImageOption option) const
{
if (option == Quality) {
return m_quality;
}
if (!supportsOption(option) || !ensureParsed()) {
return QVariant();
}
switch (option) {
case Size:
return m_current_image.size();
case Animation:
if (imageCount() >= 2) {
return true;
} else {
return false;
}
default:
return QVariant();
}
}
void QAVIFHandler::setOption(ImageOption option, const QVariant &value)
{
switch (option) {
case Quality:
m_quality = value.toInt();
if (m_quality > 100) {
m_quality = 100;
} else if (m_quality < 0) {
m_quality = 52;
}
return;
default:
break;
}
QImageIOHandler::setOption(option, value);
}
bool QAVIFHandler::supportsOption(ImageOption option) const
{
return option == Quality
|| option == Size
|| option == Animation;
}
int QAVIFHandler::imageCount() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount >= 1) {
return m_decoder->imageCount;
}
return 0;
}
int QAVIFHandler::currentImageNumber() const
{
if (m_parseState == ParseAvifNotParsed) {
return -1;
}
if (m_parseState == ParseAvifError || !m_decoder) {
return 0;
}
return m_decoder->imageIndex;
}
bool QAVIFHandler::jumpToNextImage()
{
if (!ensureParsed()) {
return false;
}
if (m_decoder->imageCount < 2) {
return true;
}
if (m_decoder->imageIndex >= m_decoder->imageCount - 1) { //start from begining
avifDecoderReset(m_decoder);
}
avifResult decodeResult = avifDecoderNextImage(m_decoder);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to decode Next image in sequence: %s", avifResultToString(decodeResult));
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width != m_decoder->image->width) ||
(m_container_height != m_decoder->image->height)) {
qWarning("Decoded image sequence size (%dx%d) do not match first image size (%dx%d)!",
m_decoder->image->width, m_decoder->image->height,
m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
}
bool QAVIFHandler::jumpToImage(int imageNumber)
{
if (!ensureParsed()) {
return false;
}
if (m_decoder->imageCount < 2) { //not an animation
if (imageNumber == 0) {
return true;
} else {
return false;
}
}
if (imageNumber < 0 || imageNumber >= m_decoder->imageCount) { //wrong index
return false;
}
if (imageNumber == m_decoder->imageCount) { // we are here already
return true;
}
avifResult decodeResult = avifDecoderNthImage(m_decoder, imageNumber);
if (decodeResult != AVIF_RESULT_OK) {
qWarning("ERROR: Failed to decode %d th Image in sequence: %s", imageNumber, avifResultToString(decodeResult));
m_parseState = ParseAvifError;
return false;
}
if ((m_container_width != m_decoder->image->width) ||
(m_container_height != m_decoder->image->height)) {
qWarning("Decoded image sequence size (%dx%d) do not match declared container size (%dx%d)!",
m_decoder->image->width, m_decoder->image->height,
m_container_width, m_container_height);
m_parseState = ParseAvifError;
return false;
}
if (decode_one_frame()) {
return true;
} else {
m_parseState = ParseAvifError;
return false;
}
}
int QAVIFHandler::nextImageDelay() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount < 2) {
return 0;
}
int delay_ms = 1000.0 * m_decoder->imageTiming.duration;
if (delay_ms < 1) {
delay_ms = 1;
}
return delay_ms;
}
int QAVIFHandler::loopCount() const
{
if (!ensureParsed()) {
return 0;
}
if (m_decoder->imageCount < 2) {
return 0;
}
return 1;
}
QPointF QAVIFHandler::CompatibleChromacity(qreal chrX, qreal chrY)
{
chrX = qBound(qreal(0.0), chrX, qreal(1.0));
chrY = qBound(qreal(DBL_MIN), chrY, qreal(1.0));
if ((chrX + chrY) > qreal(1.0)) {
chrX = qreal(1.0) - chrY;
}
return QPointF(chrX, chrY);
}
QImageIOPlugin::Capabilities QAVIFPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
if (format == "avif") {
return Capabilities(CanRead | CanWrite);
}
if (format == "avifs") {
return Capabilities(CanRead);
}
if (!format.isEmpty()) {
return {};
}
if (!device->isOpen()) {
return {};
}
Capabilities cap;
if (device->isReadable() && QAVIFHandler::canRead(device)) {
cap |= CanRead;
}
if (device->isWritable()) {
cap |= CanWrite;
}
return cap;
}
QImageIOHandler *QAVIFPlugin::create(QIODevice *device, const QByteArray &format) const
{
QImageIOHandler *handler = new QAVIFHandler;
handler->setDevice(device);
handler->setFormat(format);
return handler;
}
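
As a worked example of the quality-to-quantizer mapping used in QAVIFHandler::write() above (assuming AVIF_QUANTIZER_WORST_QUALITY is 63, as in current libavif): the default quality of 52 yields maxQuantizer 30, minQuantizer 10 and an effectively lossless alpha channel.

#include <QtGlobal>
#include <cstdio>

int main()
{
    const int worstQuality = 63;   // assumption: AVIF_QUANTIZER_WORST_QUALITY in libavif
    const int quality = 52;        // the handler's default m_quality
    const int maxQuantizer = worstQuality * (100 - qBound(0, quality, 100)) / 100; // 30
    const int minQuantizer = maxQuantizer > 20 ? maxQuantizer - 20 : 0;            // 10
    const int maxQuantizerAlpha = maxQuantizer > 40 ? maxQuantizer - 40 : 0;       // 0
    std::printf("max=%d min=%d alpha=%d\n", maxQuantizer, minQuantizer, maxQuantizerAlpha);
    return 0;
}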

src/imageformats/avif.desktop (new file)

@@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=avif
X-KDE-MimeType=image/avif
X-KDE-Read=true
X-KDE-Write=true

src/imageformats/avif.json (new file)

@@ -0,0 +1,4 @@
{
"Keys": [ "avif", "avifs" ],
"MimeTypes": [ "image/avif", "image/avif-sequence" ]
}

src/imageformats/avif_p.h (new file, 82 lines)

@@ -0,0 +1,82 @@
/*
AV1 Image File Format (AVIF) support for QImage.
SPDX-FileCopyrightText: 2020 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: BSD-2-Clause
*/
#ifndef KIMG_AVIF_P_H
#define KIMG_AVIF_P_H
#include <QImage>
#include <QVariant>
#include <qimageiohandler.h>
#include <QImageIOPlugin>
#include <QByteArray>
#include <QPointF>
#include <avif/avif.h>
class QAVIFHandler : public QImageIOHandler
{
public:
QAVIFHandler();
~QAVIFHandler();
bool canRead() const override;
bool read (QImage *image) override;
bool write (const QImage &image) override;
static bool canRead (QIODevice *device);
QVariant option (ImageOption option) const override;
void setOption (ImageOption option, const QVariant &value) override;
bool supportsOption (ImageOption option) const override;
int imageCount() const override;
int currentImageNumber() const override;
bool jumpToNextImage() override;
bool jumpToImage (int imageNumber) override;
int nextImageDelay() const override;
int loopCount() const override;
private:
static QPointF CompatibleChromacity(qreal chrX, qreal chrY);
bool ensureParsed() const;
bool ensureDecoder();
bool decode_one_frame();
enum ParseAvifState
{
ParseAvifError = -1,
ParseAvifNotParsed = 0,
ParseAvifSuccess = 1
};
ParseAvifState m_parseState;
int m_quality;
uint32_t m_container_width;
uint32_t m_container_height;
QByteArray m_rawData;
avifROData m_rawAvifData;
avifDecoder *m_decoder;
QImage m_current_image;
bool m_must_jump_to_next_image;
};
class QAVIFPlugin : public QImageIOPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA (IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "avif.json")
public:
Capabilities capabilities (QIODevice *device, const QByteArray &format) const override;
QImageIOHandler *create (QIODevice *device, const QByteArray &format = QByteArray()) const override;
};
#endif // KIMG_AVIF_P_H

src/imageformats/hdr.cpp

@@ -227,8 +227,15 @@ bool HDRHandler::read(QImage *outImage)
qCDebug(HDRPLUGIN) << "Invalid HDR file, the first line after the header didn't have the expected format:" << line;
return false;
}
-const int width = match.captured(2).toInt();
-const int height = match.captured(4).toInt();
+if ( (match.captured(1).at(1) != u'Y') ||
+(match.captured(3).at(1) != u'X') ) {
+qCDebug(HDRPLUGIN) << "Unsupported image orientation in HDR file.";
+return false;
+}
+const int width = match.captured(4).toInt();
+const int height = match.captured(2).toInt();
QDataStream s(device());
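
For reference, a standard-orientation Radiance resolution line reads "-Y <height> +X <width>", which is why the fix takes the height from capture 2 and the width from capture 4. A small stand-alone sketch with a made-up pattern that mirrors the capture layout assumed above (the plugin's actual regular expression is not shown in this hunk):

#include <QRegularExpression>
#include <QString>

int main()
{
    const QString line = QStringLiteral("-Y 480 +X 640"); // typical resolution line
    // Hypothetical pattern; capture order matches the code above:
    // 1 = "-Y", 2 = height, 3 = "+X", 4 = width
    const QRegularExpression re(QStringLiteral("^([+-][XY]) ([0-9]+) ([+-][XY]) ([0-9]+)"));
    const QRegularExpressionMatch match = re.match(line);
    if (!match.hasMatch()
        || match.captured(1).at(1) != QLatin1Char('Y')
        || match.captured(3).at(1) != QLatin1Char('X')) {
        return 1; // rotated/flipped orientations are rejected, as in the patch
    }
    const int height = match.captured(2).toInt(); // 480
    const int width = match.captured(4).toInt();  // 640
    return (width == 640 && height == 480) ? 0 : 1;
}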

src/imageformats/heif.cpp (new file, 739 lines)

@ -0,0 +1,739 @@
/*
High Efficiency Image File Format (HEIF) support for QImage.
SPDX-FileCopyrightText: 2020 Sirius Bakke <sirius@bakke.co>
SPDX-FileCopyrightText: 2021 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#include "heif_p.h"
#include "libheif/heif_cxx.h"
#include <QPointF>
#include <QColorSpace>
#include <QDebug>
#include <QSysInfo>
#include <string.h>
namespace // Private.
{
struct HeifQIODeviceWriter : public heif::Context::Writer {
HeifQIODeviceWriter(QIODevice *device) : m_ioDevice(device) {}
heif_error write(const void *data, size_t size) override
{
heif_error error;
error.code = heif_error_Ok;
error.subcode = heif_suberror_Unspecified;
error.message = errorOkMessage;
qint64 bytesWritten = m_ioDevice->write(static_cast<const char *>(data), size);
if (bytesWritten < static_cast<qint64>(size)) {
error.code = heif_error_Encoding_error;
error.message = QIODeviceWriteErrorMessage;
error.subcode = heif_suberror_Cannot_write_output_data;
}
return error;
}
static constexpr const char *errorOkMessage = "Success";
static constexpr const char *QIODeviceWriteErrorMessage = "Bytes written to QIODevice are smaller than input data size";
private:
QIODevice *m_ioDevice;
};
} // namespace
HEIFHandler::HEIFHandler() :
m_parseState(ParseHeicNotParsed),
m_quality(100)
{
}
bool HEIFHandler::canRead() const
{
if (m_parseState == ParseHeicNotParsed && !canRead(device())) {
return false;
}
if (m_parseState != ParseHeicError) {
setFormat("heif");
return true;
}
return false;
}
bool HEIFHandler::read(QImage *outImage)
{
if (!ensureParsed()) {
return false;
}
*outImage = m_current_image;
return true;
}
bool HEIFHandler::write(const QImage &image)
{
if (image.format() == QImage::Format_Invalid || image.isNull()) {
qWarning("No image data to save");
return false;
}
int save_depth; //8 or 10bit per channel
QImage::Format tmpformat; //format for temporary image
const bool save_alpha = image.hasAlphaChannel();
switch (image.format()) {
case QImage::Format_BGR30:
case QImage::Format_A2BGR30_Premultiplied:
case QImage::Format_RGB30:
case QImage::Format_A2RGB30_Premultiplied:
case QImage::Format_Grayscale16:
case QImage::Format_RGBX64:
case QImage::Format_RGBA64:
case QImage::Format_RGBA64_Premultiplied:
save_depth = 10;
break;
default:
if (image.depth() > 32) {
save_depth = 10;
} else {
save_depth = 8;
}
break;
}
heif_chroma chroma;
if (save_depth > 8) {
if (save_alpha) {
tmpformat = QImage::Format_RGBA64;
chroma = (QSysInfo::ByteOrder == QSysInfo::LittleEndian) ? heif_chroma_interleaved_RRGGBBAA_LE : heif_chroma_interleaved_RRGGBBAA_BE;
} else {
tmpformat = QImage::Format_RGBX64;
chroma = (QSysInfo::ByteOrder == QSysInfo::LittleEndian) ? heif_chroma_interleaved_RRGGBB_LE : heif_chroma_interleaved_RRGGBB_BE;
}
} else {
if (save_alpha) {
tmpformat = QImage::Format_RGBA8888;
chroma = heif_chroma_interleaved_RGBA;
} else {
tmpformat = QImage::Format_RGB888;
chroma = heif_chroma_interleaved_RGB;
}
}
const QImage tmpimage = image.convertToFormat(tmpformat);
try {
heif::Context ctx;
heif::Image heifImage;
heifImage.create(tmpimage.width(), tmpimage.height(), heif_colorspace_RGB, chroma);
if (tmpimage.colorSpace().isValid()) {
QByteArray iccprofile = tmpimage.colorSpace().iccProfile();
if (iccprofile.size() > 0) {
std::vector<uint8_t> rawProfile(iccprofile.begin(), iccprofile.end());
heifImage.set_raw_color_profile(heif_color_profile_type_prof, rawProfile);
}
}
heifImage.add_plane(heif_channel_interleaved, image.width(), image.height(), save_depth);
int stride = 0;
uint8_t *const dst = heifImage.get_plane(heif_channel_interleaved, &stride);
size_t rowbytes;
switch (save_depth) {
case 10:
if (save_alpha) {
for (int y = 0; y < tmpimage.height(); y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(tmpimage.constScanLine(y));
uint16_t *dest_word = reinterpret_cast<uint16_t *>(dst + (y * stride));
for (int x = 0; x < tmpimage.width(); x++) {
int tmp_pixelval;
//R
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//G
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//B
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//A
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
}
}
} else { //no alpha channel
for (int y = 0; y < tmpimage.height(); y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(tmpimage.constScanLine(y));
uint16_t *dest_word = reinterpret_cast<uint16_t *>(dst + (y * stride));
for (int x = 0; x < tmpimage.width(); x++) {
int tmp_pixelval;
//R
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//G
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//B
tmp_pixelval = (int)(((float)(*src_word) / 65535.0f) * 1023.0f + 0.5f);
*dest_word = qBound(0, tmp_pixelval, 1023);
src_word++;
dest_word++;
//X
src_word++;
}
}
}
break;
case 8:
rowbytes = save_alpha ? (tmpimage.width() * 4) : (tmpimage.width() * 3);
for (int y = 0; y < tmpimage.height(); y++) {
memcpy(dst + (y * stride), tmpimage.constScanLine(y), rowbytes);
}
break;
default:
qWarning() << "Unsupported depth:" << save_depth;
return false;
break;
}
heif::Encoder encoder(heif_compression_HEVC);
encoder.set_lossy_quality(m_quality);
if (m_quality > 90) {
if (m_quality == 100) {
encoder.set_lossless(true);
}
encoder.set_string_parameter("chroma", "444");
}
heif::Context::EncodingOptions encodingOptions;
encodingOptions.save_alpha_channel = save_alpha;
if ((tmpimage.width() % 2 == 1) || (tmpimage.height() % 2 == 1)) {
qWarning() << "Image has odd dimension!\nUse even-numbered dimension(s) for better compatibility with other HEIF implementations.";
if (save_alpha) {
// This helps to save alpha channel when image has odd dimension
encodingOptions.macOS_compatibility_workaround = 0;
}
}
ctx.encode_image(heifImage, encoder, encodingOptions);
HeifQIODeviceWriter writer(device());
ctx.write(writer);
} catch (const heif::Error &err) {
qWarning() << "libheif error:" << err.get_message().c_str();
return false;
}
return true;
}
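
From the application side the encoder settings above are reached through the usual QImageWriter quality knob; a minimal sketch (the output path is a placeholder):

#include <QImage>
#include <QImageWriter>

int main()
{
    QImage image(640, 480, QImage::Format_RGB32);
    image.fill(Qt::darkCyan);
    QImageWriter writer(QStringLiteral("out.heic"), QByteArrayLiteral("heif"));
    // Quality > 90 switches to 4:4:4 chroma, 100 makes the encoder lossless (see write() above)
    writer.setQuality(90);
    return writer.write(image) ? 0 : 1;
}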
bool HEIFHandler::canRead(QIODevice *device)
{
if (!device) {
qWarning("HEIFHandler::canRead() called with no device");
return false;
}
const QByteArray header = device->peek(28);
return HEIFHandler::isSupportedBMFFType(header);
}
bool HEIFHandler::isSupportedBMFFType(const QByteArray &header)
{
if (header.size() < 28) {
return false;
}
const char *buffer = header.constData();
if (qstrncmp(buffer + 4, "ftyp", 4) == 0) {
if (qstrncmp(buffer + 8, "heic", 4) == 0) {
return true;
}
if (qstrncmp(buffer + 8, "heis", 4) == 0) {
return true;
}
if (qstrncmp(buffer + 8, "heix", 4) == 0) {
return true;
}
/* we want to avoid loading AVIF files via this plugin */
if (qstrncmp(buffer + 8, "mif1", 4) == 0) {
for (int offset = 16; offset <= 24; offset += 4) {
if (qstrncmp(buffer + offset, "avif", 4) == 0) {
return false;
}
}
return true;
}
if (qstrncmp(buffer + 8, "mif2", 4) == 0) {
return true;
}
if (qstrncmp(buffer + 8, "msf1", 4) == 0) {
return true;
}
}
return false;
}
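
The 28-byte peek covers the ISO BMFF 'ftyp' box: a 4-byte size, the literal "ftyp", the major brand, a minor version, then compatible brands. A hand-rolled sketch that feeds such a prefix to canRead() (heif_p.h is a private header, so this is illustrative only):

#include <QBuffer>
#include <QByteArray>
#include <cstring>
#include "heif_p.h" // private header, included here only for illustration

int main()
{
    QByteArray data(28, '\0');
    data[3] = 24;                               // big-endian box size 0x00000018
    std::memcpy(data.data() + 4, "ftyp", 4);
    std::memcpy(data.data() + 8, "heic", 4);    // one of the brands accepted above
    QBuffer buffer(&data);
    buffer.open(QIODevice::ReadOnly);
    return HEIFHandler::canRead(&buffer) ? 0 : 1; // peek()s the header, leaves it unread
}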
QVariant HEIFHandler::option(ImageOption option) const
{
if (option == Quality) {
return m_quality;
}
if (!supportsOption(option) || !ensureParsed()) {
return QVariant();
}
switch (option) {
case Size:
return m_current_image.size();
break;
default:
return QVariant();
break;
}
}
void HEIFHandler::setOption(ImageOption option, const QVariant &value)
{
switch (option) {
case Quality:
m_quality = value.toInt();
if (m_quality > 100) {
m_quality = 100;
} else if (m_quality < 0) {
m_quality = 100;
}
break;
default:
QImageIOHandler::setOption(option, value);
break;
}
}
bool HEIFHandler::supportsOption(ImageOption option) const
{
return option == Quality
|| option == Size;
}
bool HEIFHandler::ensureParsed() const
{
if (m_parseState == ParseHeicSuccess) {
return true;
}
if (m_parseState == ParseHeicError) {
return false;
}
HEIFHandler *that = const_cast<HEIFHandler *>(this);
return that->ensureDecoder();
}
bool HEIFHandler::ensureDecoder()
{
if (m_parseState != ParseHeicNotParsed) {
if (m_parseState == ParseHeicSuccess) {
return true;
}
return false;
}
const QByteArray buffer = device()->readAll();
if (!HEIFHandler::isSupportedBMFFType(buffer)) {
m_parseState = ParseHeicError;
return false;
}
try {
heif::Context ctx;
ctx.read_from_memory_without_copy((const void *)(buffer.constData()),
buffer.size());
heif::ImageHandle handle = ctx.get_primary_image_handle();
const bool hasAlphaChannel = handle.has_alpha_channel();
const int bit_depth = handle.get_luma_bits_per_pixel();
heif_chroma chroma;
QImage::Format target_image_format;
if (bit_depth == 10 || bit_depth == 12) {
if (hasAlphaChannel) {
chroma = (QSysInfo::ByteOrder == QSysInfo::LittleEndian) ? heif_chroma_interleaved_RRGGBBAA_LE : heif_chroma_interleaved_RRGGBBAA_BE;
target_image_format = QImage::Format_RGBA64;
} else {
chroma = (QSysInfo::ByteOrder == QSysInfo::LittleEndian) ? heif_chroma_interleaved_RRGGBB_LE : heif_chroma_interleaved_RRGGBB_BE;
target_image_format = QImage::Format_RGBX64;
}
} else if (bit_depth == 8) {
if (hasAlphaChannel) {
chroma = heif_chroma_interleaved_RGBA;
target_image_format = QImage::Format_ARGB32;
} else {
chroma = heif_chroma_interleaved_RGB;
target_image_format = QImage::Format_RGB32;
}
} else {
m_parseState = ParseHeicError;
if (bit_depth > 0) {
qWarning() << "Unsupported bit depth:" << bit_depth;
} else {
qWarning() << "Undefined bit depth.";
}
return false;
}
heif::Image img = handle.decode_image(heif_colorspace_RGB, chroma);
const int imageWidth = img.get_width(heif_channel_interleaved);
const int imageHeight = img.get_height(heif_channel_interleaved);
QSize imageSize(imageWidth, imageHeight);
if (!imageSize.isValid()) {
m_parseState = ParseHeicError;
qWarning() << "HEIC image size invalid:" << imageSize;
return false;
}
int stride = 0;
const uint8_t *const src = img.get_plane(heif_channel_interleaved, &stride);
if (!src || stride <= 0) {
m_parseState = ParseHeicError;
qWarning() << "HEIC data pixels information not valid!";
return false;
}
m_current_image = QImage(imageSize, target_image_format);
if (m_current_image.isNull()) {
m_parseState = ParseHeicError;
qWarning() << "Unable to allocate memory!";
return false;
}
switch (bit_depth) {
case 12:
if (hasAlphaChannel) {
for (int y = 0; y < imageHeight; y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(src + (y * stride));
uint16_t *dest_data = reinterpret_cast<uint16_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int tmpvalue;
//R
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//G
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//B
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//A
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
}
}
} else { //no alpha channel
for (int y = 0; y < imageHeight; y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(src + (y * stride));
uint16_t *dest_data = reinterpret_cast<uint16_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int tmpvalue;
//R
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//G
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//B
tmpvalue = (int)(((float)(0x0fff & (*src_word)) / 4095.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//X = 0xffff
*dest_data = 0xffff;
dest_data++;
}
}
}
break;
case 10:
if (hasAlphaChannel) {
for (int y = 0; y < imageHeight; y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(src + (y * stride));
uint16_t *dest_data = reinterpret_cast<uint16_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int tmpvalue;
//R
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//G
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//B
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//A
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
}
}
} else { //no alpha channel
for (int y = 0; y < imageHeight; y++) {
const uint16_t *src_word = reinterpret_cast<const uint16_t *>(src + (y * stride));
uint16_t *dest_data = reinterpret_cast<uint16_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int tmpvalue;
//R
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//G
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//B
tmpvalue = (int)(((float)(0x03ff & (*src_word)) / 1023.0f) * 65535.0f + 0.5f);
tmpvalue = qBound(0, tmpvalue, 65535);
*dest_data = (uint16_t) tmpvalue;
src_word++;
dest_data++;
//X = 0xffff
*dest_data = 0xffff;
dest_data++;
}
}
}
break;
case 8:
if (hasAlphaChannel) {
for (int y = 0; y < imageHeight; y++) {
const uint8_t *src_byte = src + (y * stride);
uint32_t *dest_pixel = reinterpret_cast<uint32_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int red = *src_byte++;
int green = *src_byte++;
int blue = *src_byte++;
int alpha = *src_byte++;
*dest_pixel = qRgba(red, green, blue, alpha);
dest_pixel++;
}
}
} else { //no alpha channel
for (int y = 0; y < imageHeight; y++) {
const uint8_t *src_byte = src + (y * stride);
uint32_t *dest_pixel = reinterpret_cast<uint32_t *>(m_current_image.scanLine(y));
for (int x = 0; x < imageWidth; x++) {
int red = *src_byte++;
int green = *src_byte++;
int blue = *src_byte++;
*dest_pixel = qRgb(red, green, blue);
dest_pixel++;
}
}
}
break;
default:
m_parseState = ParseHeicError;
qWarning() << "Unsupported bit depth:" << bit_depth;
return false;
break;
}
heif_color_profile_type profileType = heif_image_handle_get_color_profile_type(handle.get_raw_image_handle());
struct heif_error err;
if (profileType == heif_color_profile_type_prof || profileType == heif_color_profile_type_rICC) {
int rawProfileSize = (int) heif_image_handle_get_raw_color_profile_size(handle.get_raw_image_handle());
if (rawProfileSize > 0) {
QByteArray ba(rawProfileSize, 0);
err = heif_image_handle_get_raw_color_profile(handle.get_raw_image_handle(), ba.data());
if (err.code) {
qWarning() << "icc profile loading failed";
} else {
m_current_image.setColorSpace(QColorSpace::fromIccProfile(ba));
if (!m_current_image.colorSpace().isValid()) {
qWarning() << "icc profile is invalid";
}
}
} else {
qWarning() << "icc profile is empty";
}
} else if (profileType == heif_color_profile_type_nclx) {
struct heif_color_profile_nclx *nclx = nullptr;
err = heif_image_handle_get_nclx_color_profile(handle.get_raw_image_handle(), &nclx);
if (err.code || !nclx) {
qWarning() << "nclx profile loading failed";
} else {
const QPointF redPoint(nclx->color_primary_red_x, nclx->color_primary_red_y);
const QPointF greenPoint(nclx->color_primary_green_x, nclx->color_primary_green_y);
const QPointF bluePoint(nclx->color_primary_blue_x, nclx->color_primary_blue_y);
const QPointF whitePoint(nclx->color_primary_white_x, nclx->color_primary_white_y);
QColorSpace::TransferFunction q_trc = QColorSpace::TransferFunction::Custom;
float q_trc_gamma = 0.0f;
switch (nclx->transfer_characteristics) {
case 4:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.2f;
break;
case 5:
q_trc = QColorSpace::TransferFunction::Gamma;
q_trc_gamma = 2.8f;
break;
case 8:
q_trc = QColorSpace::TransferFunction::Linear;
break;
case 2:
case 13:
q_trc = QColorSpace::TransferFunction::SRgb;
break;
default:
qWarning("CICP color_primaries: %d, transfer_characteristics: %d\nThe colorspace is unsupported by this plug-in yet.",
nclx->color_primaries, nclx->transfer_characteristics);
q_trc = QColorSpace::TransferFunction::SRgb;
break;
}
if (q_trc != QColorSpace::TransferFunction::Custom) { //we create new colorspace using Qt
switch (nclx->color_primaries) {
case 1:
case 2:
m_current_image.setColorSpace(QColorSpace(QColorSpace::Primaries::SRgb, q_trc, q_trc_gamma));
break;
case 12:
m_current_image.setColorSpace(QColorSpace(QColorSpace::Primaries::DciP3D65, q_trc, q_trc_gamma));
break;
default:
m_current_image.setColorSpace(QColorSpace(whitePoint, redPoint, greenPoint, bluePoint, q_trc, q_trc_gamma));
break;
}
}
heif_nclx_color_profile_free(nclx);
if (!m_current_image.colorSpace().isValid()) {
qWarning() << "invalid color profile created from NCLX";
}
}
} else {
m_current_image.setColorSpace(QColorSpace(QColorSpace::SRgb));
}
} catch (const heif::Error &err) {
m_parseState = ParseHeicError;
qWarning() << "libheif error:" << err.get_message().c_str();
return false;
}
m_parseState = ParseHeicSuccess;
return true;
}
QImageIOPlugin::Capabilities HEIFPlugin::capabilities(QIODevice *device, const QByteArray &format) const
{
if (format == "heif" || format == "heic") {
Capabilities format_cap;
if (heif_have_decoder_for_format(heif_compression_HEVC)) {
format_cap |= CanRead;
}
if (heif_have_encoder_for_format(heif_compression_HEVC)) {
format_cap |= CanWrite;
}
return format_cap;
}
if (!format.isEmpty()) {
return {};
}
if (!device->isOpen()) {
return {};
}
Capabilities cap;
if (device->isReadable() && HEIFHandler::canRead(device) && heif_have_decoder_for_format(heif_compression_HEVC)) {
cap |= CanRead;
}
if (device->isWritable() && heif_have_encoder_for_format(heif_compression_HEVC)) {
cap |= CanWrite;
}
return cap;
}
QImageIOHandler *HEIFPlugin::create(QIODevice *device, const QByteArray &format) const
{
QImageIOHandler *handler = new HEIFHandler;
handler->setDevice(device);
handler->setFormat(format);
return handler;
}
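
Because capabilities() above gates on heif_have_decoder_for_format()/heif_have_encoder_for_format(), whether "heif"/"heic" actually show up at runtime depends on how libheif was built. A quick way to check, sketched below:

#include <QImageReader>
#include <QTextStream>
#include <cstdio>

int main()
{
    QTextStream out(stdout);
    const QList<QByteArray> formats = QImageReader::supportedImageFormats();
    for (const QByteArray &fmt : formats) {
        out << fmt << '\n'; // lists "heif"/"heic" only if an HEVC decoder is available
    }
    return 0;
}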


@ -0,0 +1,7 @@
[Desktop Entry]
Type=Service
X-KDE-ServiceTypes=QImageIOPlugins
X-KDE-ImageFormat=heif
X-KDE-MimeType=image/heif
X-KDE-Read=true
X-KDE-Write=true


@ -0,0 +1,4 @@
{
"Keys": [ "heif", "heic" ],
"MimeTypes": [ "image/heif", "image/heif" ]
}

src/imageformats/heif_p.h (new file, 57 lines)

@ -0,0 +1,57 @@
/*
High Efficiency Image File Format (HEIF) support for QImage.
SPDX-FileCopyrightText: 2020 Sirius Bakke <sirius@bakke.co>
SPDX-FileCopyrightText: 2021 Daniel Novomesky <dnovomesky@gmail.com>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#ifndef KIMG_HEIF_P_H
#define KIMG_HEIF_P_H
#include <QByteArray>
#include <QImage>
#include <QImageIOPlugin>
class HEIFHandler : public QImageIOHandler
{
public:
HEIFHandler();
bool canRead() const override;
bool read(QImage *image) override;
bool write(const QImage &image) override;
static bool canRead(QIODevice *device);
QVariant option(ImageOption option) const override;
void setOption(ImageOption option, const QVariant &value) override;
bool supportsOption(ImageOption option) const override;
private:
static bool isSupportedBMFFType(const QByteArray &header);
bool ensureParsed() const;
bool ensureDecoder();
enum ParseHeicState {
ParseHeicError = -1,
ParseHeicNotParsed = 0,
ParseHeicSuccess = 1
};
ParseHeicState m_parseState;
int m_quality;
QImage m_current_image;
};
class HEIFPlugin : public QImageIOPlugin
{
Q_OBJECT
Q_PLUGIN_METADATA(IID "org.qt-project.Qt.QImageIOHandlerFactoryInterface" FILE "heif.json")
public:
Capabilities capabilities(QIODevice *device, const QByteArray &format) const override;
QImageIOHandler *create(QIODevice *device, const QByteArray &format = QByteArray()) const override;
};
#endif // KIMG_HEIF_P_H


@ -87,7 +87,7 @@ static bool IsSupported(const PSDHeader &header)
if (header.channel_count > 16) {
return false;
}
-if (header.depth != 8) {
+if (header.depth != 8 && header.depth != 16) {
return false;
}
if (header.color_mode != CM_RGB) {
@ -104,11 +104,13 @@ static void skip_section(QDataStream &s)
s.skipRawData(section_length);
}
-static quint8 readPixel(QDataStream &stream) {
-quint8 pixel;
+template <class Trait>
+static Trait readPixel(QDataStream &stream) {
+Trait pixel;
stream >> pixel;
return pixel;
}
static QRgb updateRed(QRgb oldPixel, quint8 redPixel) {
return qRgba(redPixel, qGreen(oldPixel), qBlue(oldPixel), qAlpha(oldPixel));
}
@ -149,15 +151,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
quint32 channel_num = header.channel_count;
-QImage::Format fmt = QImage::Format_RGB32;
+QImage::Format fmt = header.depth == 8 ? QImage::Format_RGB32
+: QImage::Format_RGBX64;
// Clear the image.
if (channel_num >= 4) {
// Enable alpha.
-fmt = QImage::Format_ARGB32;
+fmt = header.depth == 8 ? QImage::Format_ARGB32
+: QImage::Format_RGBA64;
// Ignore the other channels.
channel_num = 4;
}
img = QImage(header.width, header.height, fmt);
if (img.isNull()) {
qWarning() << "Failed to allocate image, invalid dimensions?" << QSize(header.width, header.height);
@ -166,9 +171,10 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
img.fill(qRgb(0,0,0));
const quint32 pixel_count = header.height * header.width;
const quint32 channel_size = pixel_count * header.depth / 8;
// Verify this, as this is used to write into the memory of the QImage
-if (pixel_count > img.sizeInBytes() / sizeof(QRgb)) {
+if (pixel_count > img.sizeInBytes() / (header.depth == 8 ? sizeof(QRgb) : sizeof(QRgba64))) {
qWarning() << "Invalid pixel count!" << pixel_count << "bytes available:" << img.sizeInBytes();
return false;
}
@ -186,6 +192,14 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
updateAlpha
};
+typedef QRgba64(*channelUpdater16)(QRgba64, quint16);
+static const channelUpdater16 updaters64[4] = {
+[](QRgba64 oldPixel, quint16 redPixel) {return qRgba64((oldPixel & ~(0xFFFFull << 0)) | (quint64( redPixel) << 0));},
+[](QRgba64 oldPixel, quint16 greenPixel){return qRgba64((oldPixel & ~(0xFFFFull << 16)) | (quint64(greenPixel) << 16));},
+[](QRgba64 oldPixel, quint16 bluePixel) {return qRgba64((oldPixel & ~(0xFFFFull << 32)) | (quint64( bluePixel) << 32));},
+[](QRgba64 oldPixel, quint16 alphaPixel){return qRgba64((oldPixel & ~(0xFFFFull << 48)) | (quint64(alphaPixel) << 48));}
+};
if (compression) {
// Skip row lengths.
int skip_count = header.height * header.channel_count * sizeof(quint16);
@ -194,9 +208,18 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
}
for (unsigned short channel = 0; channel < channel_num; channel++) {
-bool success = decodeRLEData(RLEVariant::PackBits, stream,
-image_data, pixel_count,
-&readPixel, updaters[channel]);
+bool success = false;
+if (header.depth == 8) {
+success = decodeRLEData(RLEVariant::PackBits, stream,
+image_data, channel_size,
+&readPixel<quint8>, updaters[channel]);
+} else if (header.depth == 16) {
+QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
+success = decodeRLEData(RLEVariant::PackBits16, stream,
+image_data, channel_size,
+&readPixel<quint8>, updaters64[channel]);
+}
if (!success) {
qDebug() << "decodeRLEData on channel" << channel << "failed";
return false;
@ -204,8 +227,15 @@ static bool LoadPSD(QDataStream &stream, const PSDHeader &header, QImage &img)
}
} else {
for (unsigned short channel = 0; channel < channel_num; channel++) {
-for (unsigned i = 0; i < pixel_count; ++i) {
-image_data[i] = updaters[channel](image_data[i], readPixel(stream));
+if (header.depth == 8) {
+for (unsigned i = 0; i < pixel_count; ++i) {
+image_data[i] = updaters[channel](image_data[i], readPixel<quint8>(stream));
+}
+} else if (header.depth == 16) {
+QRgba64 *image_data = reinterpret_cast<QRgba64*>(img.bits());
+for (unsigned i = 0; i < pixel_count; ++i) {
+image_data[i] = updaters64[channel](image_data[i], readPixel<quint16>(stream));
+}
}
// make sure we didn't try to read past the end of the stream
if (stream.status() != QDataStream::Ok) {


@ -24,6 +24,11 @@ enum class RLEVariant {
* of size 2, 130 of size 3, up to 255 of size 128.
*/
PackBits,
+/**
+* Same as PackBits, but treat unpacked data as
+* 16-bit integers.
+*/
+PackBits16,
/**
* PIC-style RLE
*
@ -67,6 +72,8 @@ static inline bool decodeRLEData(RLEVariant variant,
Func2 updateItem)
{
unsigned offset = 0; // in dest
+bool is_msb = true; // only used for 16-bit PackBits, data is big-endian
+quint16 temp_data = 0;
while (offset < length) {
unsigned remaining = length - offset;
quint8 count1;
@ -85,7 +92,7 @@ static inline bool decodeRLEData(RLEVariant variant,
// 2 to 128 repetitions
length = count1 - 127u;
}
-} else if (variant == RLEVariant::PackBits) {
+} else if (variant == RLEVariant::PackBits || variant == RLEVariant::PackBits16) {
if (count1 == 128u) {
// Ignore value 128
continue;
@ -102,7 +109,18 @@ static inline bool decodeRLEData(RLEVariant variant,
}
auto datum = readData(stream);
for (unsigned i = offset; i < offset + length; ++i) {
-dest[i] = updateItem(dest[i], datum);
+if (variant == RLEVariant::PackBits16) {
+if (is_msb) {
+temp_data = datum << 8;
+is_msb = false;
+} else {
+temp_data |= datum;
+dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
+is_msb = true;
+}
+} else {
+dest[i] = updateItem(dest[i], datum);
+}
}
offset += length;
} else {
@ -114,7 +132,18 @@ static inline bool decodeRLEData(RLEVariant variant,
}
for (unsigned i = offset; i < offset + length; ++i) {
auto datum = readData(stream);
-dest[i] = updateItem(dest[i], datum);
+if (variant == RLEVariant::PackBits16) {
+if (is_msb) {
+temp_data = datum << 8;
+is_msb = false;
+} else {
+temp_data |= datum;
+dest[i >> 1] = updateItem(dest[i >> 1], temp_data);
+is_msb = true;
+}
+} else {
+dest[i] = updateItem(dest[i], datum);
+}
}
offset += length;
}
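
To make the MSB/LSB pairing concrete, here is a small stand-alone PackBits decoder that assembles the unpacked bytes into big-endian 16-bit words, the same convention the PackBits16 branches above rely on (illustrative only, no bounds checking):

#include <cstdint>
#include <vector>

static std::vector<uint16_t> unpackBits16(const std::vector<uint8_t> &src)
{
    std::vector<uint8_t> bytes;
    for (size_t i = 0; i < src.size();) {
        const uint8_t n = src[i++];
        if (n == 128) {
            continue;                                     // 128 is ignored, as in PackBits
        } else if (n > 128) {
            bytes.insert(bytes.end(), 257 - n, src[i++]); // run: repeat next byte 257 - n times
        } else {
            for (int k = 0; k <= n; ++k) {                // literal: copy n + 1 bytes
                bytes.push_back(src[i++]);
            }
        }
    }
    std::vector<uint16_t> words;
    for (size_t i = 0; i + 1 < bytes.size(); i += 2) {
        words.push_back(uint16_t(bytes[i]) << 8 | bytes[i + 1]); // MSB first
    }
    return words;
}

int main()
{
    // 0x03: copy 4 literal bytes; 0xFD: repeat 0x00 four times
    const std::vector<uint8_t> encoded = { 0x03, 0x12, 0x34, 0xAB, 0xCD, 0xFD, 0x00 };
    const std::vector<uint16_t> words = unpackBits16(encoded);
    // words == { 0x1234, 0xABCD, 0x0000, 0x0000 }
    return (words.size() == 4 && words[0] == 0x1234 && words[3] == 0x0000) ? 0 : 1;
}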


@ -7,56 +7,24 @@
#ifndef FORMAT_ENUM_H
#define FORMAT_ENUM_H
+#include <QMetaEnum>
#include <QImage>
-// Generated from QImage::Format enum
-static const char * qimage_format_enum_names[] = {
-"Invalid",
-"Mono",
-"MonoLSB",
-"Indexed8",
-"RGB32",
-"ARGB32",
-"ARGB32_Premultiplied",
-"RGB16",
-"ARGB8565_Premultiplied",
-"RGB666",
-"ARGB6666_Premultiplied",
-"RGB555",
-"ARGB8555_Premultiplied",
-"RGB888",
-"RGB444",
-"ARGB4444_Premultiplied",
-"RGBX8888",
-"RGBA8888",
-"RGBA8888_Premultiplied"
-};
-// Never claim there are more than QImage::NImageFormats supported formats.
-// This is future-proofing against the above list being extended.
-static const int qimage_format_enum_names_count =
-(sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names) > int(QImage::NImageFormats))
-? int(QImage::NImageFormats)
-: (sizeof(qimage_format_enum_names) / sizeof(*qimage_format_enum_names));
QImage::Format formatFromString(const QString &str)
{
-for (int i = 0; i < qimage_format_enum_names_count; ++i) {
-if (str.compare(QLatin1String(qimage_format_enum_names[i]), Qt::CaseInsensitive) == 0) {
-return (QImage::Format)(i);
-}
-}
-return QImage::Format_Invalid;
+const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
+const QString enumString = QStringLiteral("Format_") + str;
+bool ok;
+const int res = metaEnum.keyToValue(enumString.toLatin1().constData(), &ok);
+return ok ? static_cast<QImage::Format>(res) : QImage::Format_Invalid;
}
QString formatToString(QImage::Format format)
{
-int index = int(format);
-if (index > 0 && index < qimage_format_enum_names_count) {
-return QLatin1String(qimage_format_enum_names[index]);
-}
-return QLatin1String("<unknown:") +
-QString::number(index) +
-QLatin1String(">");
+const QMetaEnum metaEnum = QMetaEnum::fromType<QImage::Format>();
+return QString::fromLatin1(metaEnum.valueToKey(format)).remove(QStringLiteral("Format_"));
}
#endif
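
A quick round-trip through the new QMetaEnum-based helpers, assuming the header patched above is on the include path (file name and path assumed here):

#include <QImage>
#include "format-enum.h" // the helper header patched above; name/path assumed

int main()
{
    const QImage::Format fmt = formatFromString(QStringLiteral("RGBA64"));
    // QMetaEnum looks up "Format_RGBA64"; unknown names come back as Format_Invalid
    const bool ok = (fmt == QImage::Format_RGBA64)
        && (formatToString(fmt) == QLatin1String("RGBA64"))
        && (formatFromString(QStringLiteral("NoSuchFormat")) == QImage::Format_Invalid);
    return ok ? 0 : 1;
}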


@ -45,6 +45,10 @@ int main(int argc, char **argv)
QStringList() << QStringLiteral("l") << QStringLiteral("list-file-formats"),
QStringLiteral("List supported image file formats"));
parser.addOption(listformats);
+QCommandLineOption listmimetypes(
+QStringList() << QStringLiteral("m") << QStringLiteral("list-mime-types"),
+QStringLiteral("List supported image mime types"));
+parser.addOption(listmimetypes);
QCommandLineOption listqformats(
QStringList() << QStringLiteral("p") << QStringLiteral("list-qimage-formats"),
QStringLiteral("List supported QImage data formats"));
@ -63,12 +67,21 @@ int main(int argc, char **argv)
}
return 0;
}
+if (parser.isSet(listmimetypes)) {
+QTextStream out(stdout);
+out << "MIME types:\n";
+const auto lstSupportedMimeTypes = QImageReader::supportedMimeTypes();
+for (const auto &fmt : lstSupportedMimeTypes) {
+out << " " << fmt << '\n';
+}
+return 0;
+}
if (parser.isSet(listqformats)) {
QTextStream out(stdout);
out << "QImage formats:\n";
// skip QImage::Format_Invalid
-for (int i = 1; i < qimage_format_enum_names_count; ++i) {
-out << " " << qimage_format_enum_names[i] << '\n';
+for (int i = 1; i < QImage::NImageFormats; ++i) {
+out << " " << formatToString(static_cast<QImage::Format>(i)) << '\n';
}
return 0;
}