Full range HDR support

The EXR, HDR, JXR and PFM formats now support High Dynamic Range images (floating point values greater than 1).

In summary, here is the list of changes:

    EXR, HDR, JXR and PFM: When working with FP formats, values are no longer clamped to the 0-1 range.
    EXR: Removed old SDR code and conversions. Due to the lack of a QImage Gray FP format, Gray images are output as RGB FP (the recently added Qt 6.8 grayscale code has been removed).
    PFM: Due to the lack of a QImage Gray FP format, Gray images are output as RGB FP.
    HDR: Added rotation and exposure support.

With this patch, the EXR, JXR, HDR and PFM plugins behave like Qt's TIFF plugin when working with FP images.
Mirco Miranda 2024-06-20 15:45:08 +02:00 committed by Albert Astals Cid
parent bb17f7bf84
commit f728b87ae8
25 changed files with 351 additions and 309 deletions
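For illustration only, a minimal sketch of the new behavior from an application's point of view; the file name is a placeholder and Qt 6.2 or later is assumed for the floating point QImage formats:

    #include <algorithm>
    #include <QGuiApplication>
    #include <QDebug>
    #include <QImage>
    #include <QImageReader>

    int main(int argc, char *argv[])
    {
        QGuiApplication app(argc, argv); // needed so the image format plugins are loaded
        // Any EXR/HDR/JXR/PFM file containing highlights will do here.
        QImage img = QImageReader(QStringLiteral("memorial.exr")).read();
        if (img.isNull())
            return 1;
        // Bring the data into a known float layout; the plugins no longer clamp FP values.
        img = img.convertToFormat(QImage::Format_RGBA32FPx4);
        float maxComponent = 0.0f;
        for (int y = 0; y < img.height(); ++y) {
            auto line = reinterpret_cast<const float *>(img.constScanLine(y));
            for (int x = 0, n = img.width() * 4; x < n; ++x)
                maxComponent = std::max(maxComponent, line[x]);
        }
        qDebug() << "max component:" << maxComponent; // can now exceed 1.0
        return 0;
    }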

.gitattributes (12 lines changed)

@@ -1 +1,13 @@
autotests/read/raw/RAW_KODAK_C330_FORMAT_NONE_YRGB.raw binary
autotests/read/hdr/orientation1.hdr binary
autotests/read/hdr/orientation2.hdr binary
autotests/read/hdr/orientation3.hdr binary
autotests/read/hdr/orientation4.hdr binary
autotests/read/hdr/orientation5.hdr binary
autotests/read/hdr/orientation6.hdr binary
autotests/read/hdr/orientation7.hdr binary
autotests/read/hdr/orientation8.hdr binary
autotests/read/hdr/fake_earth.hdr binary
autotests/read/hdr/rgb.hdr binary
autotests/read/hdr/rgb-landscape.hdr binary
autotests/read/hdr/rgb-portrait.hdr binary

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


@@ -0,0 +1,5 @@
[
{
"fileName" : "orientation_all.png"
}
]

Binary file not shown.


Binary file not shown.



@@ -177,7 +177,8 @@ public:
{
bool ok = true;
if (!m_size.isEmpty()) {
ok = ok && (m_size == image.size());
// The Size option returns the size without transformation (tested with the Qt TIFF plugin).
ok = ok && (m_size == image.size() || m_size == image.size().transposed());
}
if (m_format != QImage::Format_Invalid) {
ok = ok && (m_format == image.format());
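The transposed-size tolerance above matters when auto-transform applies a 90-degree rotation: the Size option reports the stored size, while the returned image is rotated. A small sketch, assuming one of the new HDR test files carries such an orientation:

    #include <QImage>
    #include <QImageReader>
    #include <QSize>

    // Compares the reported size with the size of the auto-transformed image.
    // That "orientation5.hdr" is stored in column order is an assumption here.
    bool sizeIsTransposedAfterRotation()
    {
        QImageReader reader(QStringLiteral("autotests/read/hdr/orientation5.hdr"));
        reader.setAutoTransform(true);
        const QSize stored = reader.size();        // size as stored, without the transformation
        const QSize loaded = reader.read().size(); // size after the 90-degree rotation
        return loaded == stored.transposed();
    }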


@@ -7,20 +7,11 @@
SPDX-License-Identifier: LGPL-2.0-or-later
*/
/* *** EXR_USE_LEGACY_CONVERSIONS ***
* If defined, the resulting image is an 8-bit RGB(A) converted
* without ICC profiles. Otherwise, a 16-bit image is generated.
* NOTE: The use of legacy conversions is discouraged due to the
* imprecise image result.
*/
//#define EXR_USE_LEGACY_CONVERSIONS // default commented -> you should define it in your cmake file
/* *** EXR_CONVERT_TO_SRGB ***
* If defined, the linear data is converted to sRGB on read to accommodate
* programs that do not support color profiles.
* Otherwise the data are kept as is and it is the display program that
* must convert to the monitor profile.
* NOTE: If EXR_USE_LEGACY_CONVERSIONS is active, this is ignored.
*/
//#define EXR_CONVERT_TO_SRGB // default: commented -> you should define it in your cmake file
@@ -92,18 +83,6 @@
#include <QThread>
#include <QTimeZone>
// Allow the code to work on all Qt versions supported by the KDE
// project (Qt 5.15 and Qt 6.x) to ease backporting fixes.
#if !defined(EXR_USE_LEGACY_CONVERSIONS)
// If uncommented, the image is rendered in a float16 format, the result is very precise
#define EXR_USE_QT6_FLOAT_IMAGE // default uncommented
#endif
// Qt 6.8 allows creating and using a Gray profile, so we can load a Gray image in a Grayscale format instead of an RGB one.
#if QT_VERSION >= QT_VERSION_CHECK(6, 8, 0)
#define EXR_GRAY_SUPPORT_ENABLED
#endif
class K_IStream : public Imf::IStream
{
public:
@@ -214,22 +193,6 @@ void K_OStream::seekg(Imf::Int64 pos)
m_dev->seek(pos);
}
#ifdef EXR_USE_LEGACY_CONVERSIONS
// source: https://openexr.com/en/latest/ReadingAndWritingImageFiles.html
inline unsigned char gamma(float x)
{
x = std::pow(5.5555f * std::max(0.f, x), 0.4545f) * 84.66f;
return (unsigned char)qBound(0.f, x, 255.f);
}
inline QRgb RgbaToQrgba(struct Imf::Rgba &imagePixel)
{
return qRgba(gamma(float(imagePixel.r)),
gamma(float(imagePixel.g)),
gamma(float(imagePixel.b)),
(unsigned char)(qBound(0.f, imagePixel.a * 255.f, 255.f) + 0.5f));
}
#endif
EXRHandler::EXRHandler()
: m_compressionRatio(-1)
, m_quality(-1)
@@ -253,18 +216,7 @@ bool EXRHandler::canRead() const
static QImage::Format imageFormat(const Imf::RgbaInputFile &file)
{
auto isRgba = file.channels() & Imf::RgbaChannels::WRITE_A;
#ifdef EXR_GRAY_SUPPORT_ENABLED
auto isGray = file.channels() & Imf::RgbaChannels::WRITE_Y;
#else
auto isGray = false;
#endif
#if defined(EXR_USE_LEGACY_CONVERSIONS)
return (isRgba ? QImage::Format_ARGB32 : QImage::Format_RGB32);
#elif defined(EXR_USE_QT6_FLOAT_IMAGE)
return (isRgba ? QImage::Format_RGBA16FPx4 : isGray ? QImage::Format_Grayscale16 : QImage::Format_RGBX16FPx4);
#else
return (isRgba ? QImage::Format_RGBA64 : QImage::Format_RGBX64);
#endif
return (isRgba ? QImage::Format_RGBA16FPx4 : QImage::Format_RGBX16FPx4);
}
/*!
@@ -371,8 +323,6 @@ static void readMetadata(const Imf::Header &header, QImage &image)
static void readColorSpace(const Imf::Header &header, QImage &image)
{
// final color operations
#ifndef EXR_USE_LEGACY_CONVERSIONS
QColorSpace cs;
if (auto chroma = header.findTypedAttribute<Imf::ChromaticitiesAttribute>("chromaticities")) {
auto &&v = chroma->value();
@@ -383,24 +333,13 @@ static void readColorSpace(const Imf::Header &header, QImage &image)
QColorSpace::TransferFunction::Linear);
}
if (!cs.isValid()) {
#ifdef EXR_GRAY_SUPPORT_ENABLED
if (image.format() == QImage::Format_Grayscale16 || image.format() == QImage::Format_Grayscale8) {
cs = QColorSpace(QPointF(0.31271, 0.32902), QColorSpace::TransferFunction::Linear);
cs.setDescription(QStringLiteral("Gray Linear build-in"));
} else {
cs = QColorSpace(QColorSpace::SRgbLinear);
}
#else
cs = QColorSpace(QColorSpace::SRgbLinear);
#endif
}
image.setColorSpace(cs);
#ifdef EXR_CONVERT_TO_SRGB
image.convertToColorSpace(QColorSpace(QColorSpace::SRgb));
#endif
#endif // !EXR_USE_LEGACY_CONVERSIONS
}
bool EXRHandler::read(QImage *outImage)
@@ -451,7 +390,6 @@ bool EXRHandler::read(QImage *outImage)
pixels.resizeErase(EXR_LINES_PER_BLOCK, width);
bool isRgba = image.hasAlphaChannel();
// somehow copy pixels into image
for (int y = 0, n = 0; y < height; y += n) {
auto my = dw.min.y + y;
if (my > dw.max.y) { // paranoia check
@@ -462,37 +400,14 @@ bool EXRHandler::read(QImage *outImage)
file.readPixels(my, std::min(my + EXR_LINES_PER_BLOCK - 1, dw.max.y));
for (n = 0; n < std::min(EXR_LINES_PER_BLOCK, height - y); ++n) {
if (image.format() == QImage::Format_Grayscale16) { // grayscale image
auto scanLine = reinterpret_cast<quint16 *>(image.scanLine(y + n));
for (int x = 0; x < width; ++x) {
*(scanLine + x) = quint16(qBound(0.f, float(pixels[n][x].r) * 65535.f + 0.5f, 65535.f));
}
continue;
}
#if defined(EXR_USE_LEGACY_CONVERSIONS)
Q_UNUSED(isRgba)
auto scanLine = reinterpret_cast<QRgb *>(image.scanLine(y + n));
for (int x = 0; x < width; ++x) {
*(scanLine + x) = RgbaToQrgba(pixels[n][x]);
}
#elif defined(EXR_USE_QT6_FLOAT_IMAGE)
auto scanLine = reinterpret_cast<qfloat16 *>(image.scanLine(y + n));
for (int x = 0; x < width; ++x) {
auto xcs = x * 4;
*(scanLine + xcs) = qfloat16(qBound(0.f, float(pixels[n][x].r), 1.f));
*(scanLine + xcs + 1) = qfloat16(qBound(0.f, float(pixels[n][x].g), 1.f));
*(scanLine + xcs + 2) = qfloat16(qBound(0.f, float(pixels[n][x].b), 1.f));
*(scanLine + xcs + 3) = qfloat16(isRgba ? qBound(0.f, float(pixels[n][x].a), 1.f) : 1.f);
*(scanLine + xcs) = qfloat16(float(pixels[n][x].r));
*(scanLine + xcs + 1) = qfloat16(float(pixels[n][x].g));
*(scanLine + xcs + 2) = qfloat16(float(pixels[n][x].b));
*(scanLine + xcs + 3) = qfloat16(isRgba ? std::clamp(float(pixels[n][x].a), 0.f, 1.f) : 1.f);
}
#else
auto scanLine = reinterpret_cast<QRgba64 *>(image.scanLine(y + n));
for (int x = 0; x < width; ++x) {
*(scanLine + x) = QRgba64::fromRgba64(quint16(qBound(0.f, float(pixels[n][x].r) * 65535.f + 0.5f, 65535.f)),
quint16(qBound(0.f, float(pixels[n][x].g) * 65535.f + 0.5f, 65535.f)),
quint16(qBound(0.f, float(pixels[n][x].b) * 65535.f + 0.5f, 65535.f)),
isRgba ? quint16(qBound(0.f, float(pixels[n][x].a) * 65535.f + 0.5f, 65535.f)) : quint16(65535));
}
#endif
}
}
@@ -659,26 +574,12 @@ bool EXRHandler::write(const QImage &image)
pixels.resizeErase(EXR_LINES_PER_BLOCK, width);
// convert the image and write into the stream
#if defined(EXR_USE_QT6_FLOAT_IMAGE)
auto convFormat = image.hasAlphaChannel() ? QImage::Format_RGBA16FPx4 : QImage::Format_RGBX16FPx4;
#else
auto convFormat = image.hasAlphaChannel() ? QImage::Format_RGBA64 : QImage::Format_RGBX64;
#endif
ScanLineConverter slc(convFormat);
#ifdef EXR_GRAY_SUPPORT_ENABLED
if (channelsType == Imf::RgbaChannels::WRITE_Y) {
slc.setDefaultSourceColorSpace(QColorSpace(QColorSpace(QColorSpace::SRgb).whitePoint(), QColorSpace::TransferFunction::SRgb)); // Creates a custom grayscale color space
} else {
slc.setDefaultSourceColorSpace(QColorSpace(QColorSpace::SRgb));
}
#else
slc.setDefaultSourceColorSpace(QColorSpace(QColorSpace::SRgb));
#endif
slc.setTargetColorSpace(QColorSpace(QColorSpace::SRgbLinear));
for (int y = 0, n = 0; y < height; y += n) {
for (n = 0; n < std::min(EXR_LINES_PER_BLOCK, height - y); ++n) {
#if defined(EXR_USE_QT6_FLOAT_IMAGE)
auto scanLine = reinterpret_cast<const qfloat16 *>(slc.convertedScanLine(image, y + n));
if (scanLine == nullptr) {
return false;
@@ -690,18 +591,6 @@ bool EXRHandler::write(const QImage &image)
pixels[n][x].b = float(*(scanLine + xcs + 2));
pixels[n][x].a = float(*(scanLine + xcs + 3));
}
#else
auto scanLine = reinterpret_cast<const QRgba64 *>(slc.convertedScanLine(image, y + n));
if (scanLine == nullptr) {
return false;
}
for (int x = 0; x < width; ++x) {
pixels[n][x].r = float((scanLine + x)->red() / 65535.f);
pixels[n][x].g = float((scanLine + x)->green() / 65535.f);
pixels[n][x].b = float((scanLine + x)->blue() / 65535.f);
pixels[n][x].a = float((scanLine + x)->alpha() / 65535.f);
}
#endif
}
file.setFrameBuffer(&pixels[0][0] - qint64(y) * width, 1, width);
file.writePixels(n);
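For reference, a short sketch of what the EXR reader now produces (the file name is an assumption):

    #include <QImage>
    #include <QImageReader>

    // With this patch every EXR, including luminance-only ones, is decoded as an
    // RGB(A) half-float image: Format_RGBA16FPx4 when an alpha channel is present,
    // Format_RGBX16FPx4 otherwise, unclamped and tagged with a linear color space
    // (taken from the chromaticities attribute when available).
    QImage loadExr()
    {
        return QImageReader(QStringLiteral("scene.exr")).read();
    }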


@@ -27,12 +27,194 @@ typedef unsigned char uchar;
Q_LOGGING_CATEGORY(HDRPLUGIN, "kf.imageformats.plugins.hdr", QtWarningMsg)
namespace // Private.
{
#define MAXLINE 1024
#define MINELEN 8 // minimum scanline length for encoding
#define MAXELEN 0x7fff // maximum scanline length for encoding
class Header
{
public:
Header()
{
m_colorSpace = QColorSpace(QColorSpace::SRgbLinear);
m_transformation = QImageIOHandler::TransformationNone;
}
Header(const Header&) = default;
Header& operator=(const Header&) = default;
bool isValid() const { return width() > 0 && height() > 0; }
qint32 width() const { return(m_size.width()); }
qint32 height() const { return(m_size.height()); }
QString software() const { return(m_software); }
QImageIOHandler::Transformations transformation() const { return(m_transformation); }
/*!
* \brief colorSpace
*
* The color space for the image.
*
* The CIE (x,y) chromaticity coordinates of the three (RGB)
* primaries and the white point used to standardize the picture's
* color system. This is used mainly by the ra_xyze program to
* convert between color systems. If no PRIMARIES line
* appears, we assume the standard primaries defined in
* src/common/color.h, namely "0.640 0.330 0.290
* 0.600 0.150 0.060 0.333 0.333" for red, green, blue
* and white, respectively.
*/
QColorSpace colorSpace() const { return(m_colorSpace); }
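// Worked example using the defaults quoted above: a header line
// "PRIMARIES=0.640 0.330 0.290 0.600 0.150 0.060 0.333 0.333" is parsed in
// readHeader() below as red, green, blue and white (x, y) pairs and mapped
// to a linear QColorSpace.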
/*!
* \brief exposure
*
* A single floating point number indicating a multiplier that has
* been applied to all the pixels in the file. EXPOSURE values are
* cumulative, so the original pixel values (i.e., radiances in
* watts/steradian/m^2) must be derived by taking the values in the
* file and dividing by all the EXPOSURE settings multiplied
* together. No EXPOSURE setting implies that no exposure
* changes have taken place.
*/
float exposure() const {
float mul = 1;
for (auto&& v : m_exposure)
mul *= v;
return mul;
}
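// Worked example (hypothetical header): two lines "EXPOSURE=2.0" and
// "EXPOSURE=0.5" give exposure() == 1.0, so the pixel values stay as stored;
// a single "EXPOSURE=4" gives 4.0 and the decoded components are divided by 4
// in RGBE_To_QRgbLine() to recover the original radiance.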
QImageIOHandler::Transformations m_transformation;
QColorSpace m_colorSpace;
QString m_software;
QSize m_size;
QList<float> m_exposure;
};
class HDRHandlerPrivate
{
public:
HDRHandlerPrivate()
{
}
~HDRHandlerPrivate()
{
}
const Header& header(QIODevice *device)
{
auto&& h = m_header;
if (h.isValid()) {
return h;
}
h = readHeader(device);
return h;
}
static Header readHeader(QIODevice *device)
{
Header h;
int len;
QByteArray line(MAXLINE + 1, Qt::Uninitialized);
QByteArray format;
// Parse header
do {
len = device->readLine(line.data(), MAXLINE);
if (line.startsWith("FORMAT=")) {
format = line.mid(7, len - 7).trimmed();
}
if (line.startsWith("SOFTWARE=")) {
h.m_software = QString::fromUtf8(line.mid(9, len - 9)).trimmed();
}
if (line.startsWith("EXPOSURE=")) {
auto ok = false;
auto ex = QLocale::c().toFloat(QString::fromLatin1(line.mid(9, len - 9)).trimmed(), &ok);
if (ok)
h.m_exposure << ex;
}
if (line.startsWith("PRIMARIES=")) {
auto list = line.mid(10, len - 10).trimmed().split(' ');
QList<double> primaries;
for (auto&& v : list) {
auto ok = false;
auto d = QLocale::c().toDouble(QString::fromLatin1(v), &ok);
if (ok)
primaries << d;
}
if (primaries.size() == 8) {
auto cs = QColorSpace(QPointF(primaries.at(6), primaries.at(7)),
QPointF(primaries.at(0), primaries.at(1)),
QPointF(primaries.at(2), primaries.at(3)),
QPointF(primaries.at(4), primaries.at(5)),
QColorSpace::TransferFunction::Linear);
cs.setDescription(QStringLiteral("Embedded RGB"));
if (cs.isValid())
h.m_colorSpace = cs;
}
}
} while ((len > 0) && (line[0] != '\n'));
if (format != "32-bit_rle_rgbe") {
qCDebug(HDRPLUGIN) << "Unknown HDR format:" << format;
return h;
}
len = device->readLine(line.data(), MAXLINE);
line.resize(len);
/*
* Handle flipping and rotation, as per the spec below.
* The single resolution line consists of 4 values, a X and Y label each followed by a numerical
* integer value. The X and Y are immediately preceded by a sign which can be used to indicate
* flipping, the order of the X and Y indicate rotation. The standard coordinate system for
* Radiance images would have the following resolution string -Y N +X N. This indicates that the
* vertical axis runs down the file and the X axis is to the right (imagining the image as a
* rectangular block of data). A -X would indicate a horizontal flip of the image. A +Y would
* indicate a vertical flip. If the X value appears before the Y value then that indicates that
* the image is stored in column order rather than row order, that is, it is rotated by 90 degrees.
* The reader can convince themselves that the 8 combinations cover all the possible image orientations
* and rotations.
*/
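// Worked examples (hypothetical resolution lines): "-Y 480 +X 640" is a
// 640x480 image in the standard orientation (TransformationNone) and
// "+Y 480 +X 640" is the same image flipped vertically (TransformationFlip);
// when the X label comes first, the image is stored in column order and one
// of the rotate-by-90 transformations below is reported.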
QRegularExpression resolutionRegExp(QStringLiteral("([+\\-][XY])\\s+([0-9]+)\\s+([+\\-][XY])\\s+([0-9]+)\n"));
QRegularExpressionMatch match = resolutionRegExp.match(QString::fromLatin1(line));
if (!match.hasMatch()) {
qCDebug(HDRPLUGIN) << "Invalid HDR file, the first line after the header didn't have the expected format:" << line;
return h;
}
auto c0 = match.captured(1);
auto c1 = match.captured(3);
if (c0.at(1) == u'Y') {
if (c0.at(0) == u'-' && c1.at(0) == u'+')
h.m_transformation = QImageIOHandler::TransformationNone;
if (c0.at(0) == u'-' && c1.at(0) == u'-')
h.m_transformation = QImageIOHandler::TransformationMirror;
if (c0.at(0) == u'+' && c1.at(0) == u'+')
h.m_transformation = QImageIOHandler::TransformationFlip;
if (c0.at(0) == u'+' && c1.at(0) == u'-')
h.m_transformation = QImageIOHandler::TransformationRotate180;
}
else {
if (c0.at(0) == u'-' && c1.at(0) == u'+')
h.m_transformation = QImageIOHandler::TransformationRotate90;
if (c0.at(0) == u'-' && c1.at(0) == u'-')
h.m_transformation = QImageIOHandler::TransformationMirrorAndRotate90;
if (c0.at(0) == u'+' && c1.at(0) == u'+')
h.m_transformation = QImageIOHandler::TransformationFlipAndRotate90;
if (c0.at(0) == u'+' && c1.at(0) == u'-')
h.m_transformation = QImageIOHandler::TransformationRotate270;
}
h.m_size = QSize(match.captured(4).toInt(), match.captured(2).toInt());
return h;
}
private:
Header m_header;
};
// read an old style line from the hdr image file
// if 'first' is true the first byte is already read
static bool Read_Old_Line(uchar *image, int width, QDataStream &s)
@@ -76,9 +258,10 @@ static bool Read_Old_Line(uchar *image, int width, QDataStream &s)
}
template<class float_T>
void RGBE_To_QRgbLine(uchar *image, float_T *scanline, int width)
void RGBE_To_QRgbLine(uchar *image, float_T *scanline, const Header& h)
{
for (int j = 0; j < width; j++) {
auto exposure = h.exposure();
for (int j = 0, width = h.width(); j < width; j++) {
// v = ldexp(1.0, int(image[3]) - 128);
float v;
int e = qBound(-31, int(image[3]) - 128, 31);
@@ -90,9 +273,13 @@ void RGBE_To_QRgbLine(uchar *image, float_T *scanline, int width)
auto j4 = j * 4;
auto vn = v / 255.0f;
scanline[j4] = float_T(std::min(float(image[0]) * vn, 1.0f));
scanline[j4 + 1] = float_T(std::min(float(image[1]) * vn, 1.0f));
scanline[j4 + 2] = float_T(std::min(float(image[2]) * vn, 1.0f));
if (exposure > 0) {
vn /= exposure;
}
scanline[j4] = float_T(float(image[0]) * vn);
scanline[j4 + 1] = float_T(float(image[1]) * vn);
scanline[j4 + 2] = float_T(float(image[2]) * vn);
scanline[j4 + 3] = float_T(1.0f);
image += 4;
}
@@ -108,11 +295,14 @@ QImage::Format imageFormat()
}
// Load the HDR image.
static bool LoadHDR(QDataStream &s, const int width, const int height, QImage &img)
static bool LoadHDR(QDataStream &s, const Header& h, QImage &img)
{
uchar val;
uchar code;
const int width = h.width();
const int height = h.height();
// Create dst image.
img = imageAlloc(width, height, imageFormat());
if (img.isNull()) {
@@ -134,7 +324,7 @@ static bool LoadHDR(QDataStream &s, const int width, const int height, QImage &i
// determine scanline type
if ((width < MINELEN) || (MAXELEN < width)) {
Read_Old_Line(image, width, s);
RGBE_To_QRgbLine(image, scanline, width);
RGBE_To_QRgbLine(image, scanline, h);
continue;
}
@@ -147,7 +337,7 @@ static bool LoadHDR(QDataStream &s, const int width, const int height, QImage &i
if (val != 2) {
s.device()->ungetChar(val);
Read_Old_Line(image, width, s);
RGBE_To_QRgbLine(image, scanline, width);
RGBE_To_QRgbLine(image, scanline, h);
continue;
}
@@ -162,7 +352,7 @@ static bool LoadHDR(QDataStream &s, const int width, const int height, QImage &i
if ((image[1] != 2) || (image[2] & 128)) {
image[0] = 2;
Read_Old_Line(image + 4, width - 1, s);
RGBE_To_QRgbLine(image, scanline, width);
RGBE_To_QRgbLine(image, scanline, h);
continue;
}
@@ -204,84 +394,34 @@ static bool LoadHDR(QDataStream &s, const int width, const int height, QImage &i
}
}
}
RGBE_To_QRgbLine(image, scanline, width);
RGBE_To_QRgbLine(image, scanline, h);
}
return true;
}
static QSize readHeaderSize(QIODevice *device)
{
int len;
QByteArray line(MAXLINE + 1, Qt::Uninitialized);
QByteArray format;
// Parse header
do {
len = device->readLine(line.data(), MAXLINE);
if (line.startsWith("FORMAT=")) {
format = line.mid(7, len - 7 - 1 /*\n*/);
}
} while ((len > 0) && (line[0] != '\n'));
if (format != "32-bit_rle_rgbe") {
qCDebug(HDRPLUGIN) << "Unknown HDR format:" << format;
return QSize();
}
len = device->readLine(line.data(), MAXLINE);
line.resize(len);
/*
TODO: handle flipping and rotation, as per the spec below
The single resolution line consists of 4 values, a X and Y label each followed by a numerical
integer value. The X and Y are immediately preceded by a sign which can be used to indicate
flipping, the order of the X and Y indicate rotation. The standard coordinate system for
Radiance images would have the following resolution string -Y N +X N. This indicates that the
vertical axis runs down the file and the X axis is to the right (imagining the image as a
rectangular block of data). A -X would indicate a horizontal flip of the image. A +Y would
indicate a vertical flip. If the X value appears before the Y value then that indicates that
the image is stored in column order rather than row order, that is, it is rotated by 90 degrees.
The reader can convince themselves that the 8 combinations cover all the possible image orientations
and rotations.
*/
QRegularExpression resolutionRegExp(QStringLiteral("([+\\-][XY]) ([0-9]+) ([+\\-][XY]) ([0-9]+)\n"));
QRegularExpressionMatch match = resolutionRegExp.match(QString::fromLatin1(line));
if (!match.hasMatch()) {
qCDebug(HDRPLUGIN) << "Invalid HDR file, the first line after the header didn't have the expected format:" << line;
return QSize();
}
if ((match.captured(1).at(1) != u'Y') || (match.captured(3).at(1) != u'X')) {
qCDebug(HDRPLUGIN) << "Unsupported image orientation in HDR file.";
return QSize();
}
return QSize(match.captured(4).toInt(), match.captured(2).toInt());
}
} // namespace
bool HDRHandler::read(QImage *outImage)
{
QDataStream s(device());
m_imageSize = readHeaderSize(s.device());
if (!m_imageSize.isValid()) {
const Header& h = d->header(s.device());
if (!h.isValid()) {
return false;
}
QImage img;
if (!LoadHDR(s, m_imageSize.width(), m_imageSize.height(), img)) {
if (!LoadHDR(s, h, img)) {
// qDebug() << "Error loading HDR file.";
return false;
}
// The images read by GIMP and Photoshop (including those of the tests) are interpreted with a linear color space.
// By setting a linear color space, programs that support profiles display HDR files as GIMP and Photoshop do.
img.setColorSpace(QColorSpace(QColorSpace::SRgbLinear));
img.setColorSpace(h.colorSpace());
// Metadata
if (!h.software().isEmpty()) {
img.setText(QStringLiteral(META_KEY_SOFTWARE), h.software());
}
*outImage = img;
return true;
@@ -295,6 +435,9 @@ bool HDRHandler::supportsOption(ImageOption option) const
if (option == QImageIOHandler::ImageFormat) {
return true;
}
if (option == QImageIOHandler::ImageTransformation) {
return true;
}
return false;
}
@@ -303,15 +446,10 @@ QVariant HDRHandler::option(ImageOption option) const
QVariant v;
if (option == QImageIOHandler::Size) {
if (!m_imageSize.isEmpty()) {
v = QVariant::fromValue(m_imageSize);
} else if (auto d = device()) {
// transactions works on both random and sequential devices
d->startTransaction();
auto size = readHeaderSize(d);
d->rollbackTransaction();
if (size.isValid()) {
v = QVariant::fromValue(size);
if (auto dev = device()) {
auto&& h = d->header(dev);
if (h.isValid()) {
v = QVariant::fromValue(h.m_size);
}
}
}
@@ -320,10 +458,21 @@ QVariant HDRHandler::option(ImageOption option) const
v = QVariant::fromValue(imageFormat());
}
if (option == QImageIOHandler::ImageTransformation) {
if (auto dev = device()) {
auto&& h = d->header(dev);
if (h.isValid()) {
v = QVariant::fromValue(h.transformation());
}
}
}
return v;
}
HDRHandler::HDRHandler()
: QImageIOHandler()
, d(new HDRHandlerPrivate)
{
}
@@ -350,9 +499,9 @@ bool HDRHandler::canRead(QIODevice *device)
// allow loading the official test cases: https://radsite.lbl.gov/radiance/framed.html
device->startTransaction();
QSize size = readHeaderSize(device);
auto h = HDRHandlerPrivate::readHeader(device);
device->rollbackTransaction();
if (size.isValid()) {
if (h.isValid()) {
return true;
}
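A usage sketch for the new rotation and exposure support; the path points at one of the test images added by this commit:

    #include <QImage>
    #include <QImageReader>

    // Reads a Radiance HDR image honoring the orientation from the resolution line.
    // The pixel data is floating point, with the cumulative EXPOSURE already divided out.
    QImage loadHdrOriented()
    {
        QImageReader reader(QStringLiteral("autotests/read/hdr/rgb-portrait.hdr"));
        reader.setAutoTransform(true); // apply the transformation reported by the plugin
        return reader.read();
    }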


@@ -9,7 +9,9 @@
#define KIMG_HDR_P_H
#include <QImageIOPlugin>
#include <QScopedPointer>
class HDRHandlerPrivate;
class HDRHandler : public QImageIOHandler
{
public:
@@ -24,11 +26,7 @@ public:
static bool canRead(QIODevice *device);
private:
/*!
* \brief m_imageSize
* Image size cache used by option()
*/
QSize m_imageSize;
const QScopedPointer<HDRHandlerPrivate> d;
};
class HDRPlugin : public QImageIOPlugin


@@ -436,6 +436,13 @@ public:
#if QT_VERSION >= QT_VERSION_CHECK(6, 8, 0)
<< QImage::Format_CMYK8888
#endif
#ifndef JXR_DENY_FLOAT_IMAGE
<< QImage::Format_RGBA16FPx4
<< QImage::Format_RGBX16FPx4
<< QImage::Format_RGBA32FPx4
<< QImage::Format_RGBA32FPx4_Premultiplied
<< QImage::Format_RGBX32FPx4
#endif // JXR_DENY_FLOAT_IMAGE
<< QImage::Format_RGBA64
<< QImage::Format_RGBA64_Premultiplied
<< QImage::Format_RGBA8888
@@ -476,7 +483,18 @@ public:
} else {
qi = qi.convertToFormat(alpha ? QImage::Format_RGBA8888 : QImage::Format_RGB888);
}
#ifndef JXR_DENY_FLOAT_IMAGE
} else if(qi.format() == QImage::Format_RGBA16FPx4 ||
qi.format() == QImage::Format_RGBX16FPx4 ||
qi.format() == QImage::Format_RGBA32FPx4 ||
qi.format() == QImage::Format_RGBA32FPx4_Premultiplied ||
qi.format() == QImage::Format_RGBX32FPx4) {
auto cs = qi.colorSpace();
if (cs.isValid() && cs.transferFunction() != QColorSpace::TransferFunction::Linear) {
qi = qi.convertedToColorSpace(QColorSpace(QColorSpace::SRgbLinear));
}
}
#endif // JXR_DENY_FLOAT_IMAGE
return qi;
}
@@ -759,35 +777,6 @@ private:
}
};
template<class T>
inline T scRGBTosRGB(T f)
{
// convert from linear scRGB to non-linear sRGB
if (f <= T(0)) {
return T(0);
}
if (f <= T(0.0031308f)) {
return qBound(T(0), f * T(12.92f), T(1));
}
if (f < T(1)) {
return qBound(T(0), T(1.055f) * T(pow(f, T(1.0) / T(2.4))) - T(0.055), T(1));
}
return T(1);
}
template<class T>
inline T alpha_scRGBTosRGB(T f)
{
// alpha is converted differently than RGB in scRGB
if (f <= T(0)) {
return T(0);
}
if (f < T(1.0)) {
return T(f);
}
return T(1);
}
bool JXRHandler::read(QImage *outImage)
{
if (!d->initForReading(device())) {
@@ -841,13 +830,11 @@ bool JXRHandler::read(QImage *outImage)
} else { // additional buffer needed
qint64 convStrideSize = (img.width() * d->pDecoder->WMP.wmiI.cBitsPerUnit + 7) / 8;
qint64 buffSize = convStrideSize * img.height();
#if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
qint64 limit = QImageReader::allocationLimit();
if (limit && (buffSize + img.sizeInBytes()) > limit * 1024 * 1024) {
qCWarning(LOG_JXRPLUGIN) << "JXRHandler::read() unable to covert due to allocation limit set:" << limit << "MiB";
return false;
}
#endif
QVector<quint8> ba(buffSize);
if (auto err = pConverter->Copy(pConverter, &rect, ba.data(), convStrideSize)) {
PKFormatConverter_Release(&pConverter);
@@ -866,34 +853,24 @@ bool JXRHandler::read(QImage *outImage)
d->setTextMetadata(img);
#ifndef JXR_DENY_FLOAT_IMAGE
// JXR floats are stored in scRGB -> range -0.5 to 7.5 (source: Wikipedia)
if (img.format() == QImage::Format_RGBX16FPx4 || img.format() == QImage::Format_RGBA16FPx4 || img.format() == QImage::Format_RGBA16FPx4_Premultiplied) {
// JXR floats are stored in scRGB.
if (img.format() == QImage::Format_RGBX16FPx4 || img.format() == QImage::Format_RGBA16FPx4 || img.format() == QImage::Format_RGBA16FPx4_Premultiplied ||
img.format() == QImage::Format_RGBX32FPx4 || img.format() == QImage::Format_RGBA32FPx4 || img.format() == QImage::Format_RGBA32FPx4_Premultiplied) {
auto hasAlpha = img.hasAlphaChannel();
for (qint32 y = 0, h = img.height(); y < h; ++y) {
qfloat16 *line = reinterpret_cast<qfloat16 *>(img.scanLine(y));
for (int x = 0, w = img.width(); x < w; ++x) {
const auto x4 = x * 4;
line[x4 + 0] = scRGBTosRGB(line[x4 + 0]);
line[x4 + 1] = scRGBTosRGB(line[x4 + 1]);
line[x4 + 2] = scRGBTosRGB(line[x4 + 2]);
line[x4 + 3] = hasAlpha ? alpha_scRGBTosRGB(line[x4 + 3]) : qfloat16(1);
if (img.depth() == 64) {
auto line = reinterpret_cast<qfloat16 *>(img.scanLine(y));
for (int x = 0, w = img.width() * 4; x < w; x += 4)
line[x + 3] = hasAlpha ? std::clamp(line[x + 3], qfloat16(0), qfloat16(1)) : qfloat16(1);
} else {
auto line = reinterpret_cast<float *>(img.scanLine(y));
for (int x = 0, w = img.width() * 4; x < w; x += 4)
line[x + 3] = hasAlpha ? std::clamp(line[x + 3], float(0), float(1)) : float(1);
}
}
img.setColorSpace(QColorSpace(QColorSpace::SRgb));
} else if (img.format() == QImage::Format_RGBX32FPx4 || img.format() == QImage::Format_RGBA32FPx4
|| img.format() == QImage::Format_RGBA32FPx4_Premultiplied) {
auto hasAlpha = img.hasAlphaChannel();
for (qint32 y = 0, h = img.height(); y < h; ++y) {
float *line = reinterpret_cast<float *>(img.scanLine(y));
for (int x = 0, w = img.width(); x < w; ++x) {
const auto x4 = x * 4;
line[x4 + 0] = scRGBTosRGB(line[x4 + 0]);
line[x4 + 1] = scRGBTosRGB(line[x4 + 1]);
line[x4 + 2] = scRGBTosRGB(line[x4 + 2]);
line[x4 + 3] = hasAlpha ? alpha_scRGBTosRGB(line[x4 + 3]) : float(1);
}
if(!img.colorSpace().isValid()) {
img.setColorSpace(QColorSpace(QColorSpace::SRgbLinear));
}
img.setColorSpace(QColorSpace(QColorSpace::SRgb));
}
#endif
@@ -922,19 +899,11 @@ bool JXRHandler::write(const QImage &image)
#ifndef JXR_DISABLE_BGRA_HACK
if (IsEqualGUID(jxlfmt, GUID_PKPixelFormat32bppRGBA)) {
jxlfmt = GUID_PKPixelFormat32bppBGRA;
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
qi = qi.rgbSwapped();
#else
qi.rgbSwap();
#endif
}
if (IsEqualGUID(jxlfmt, GUID_PKPixelFormat32bppPRGBA)) {
jxlfmt = GUID_PKPixelFormat32bppPBGRA;
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
qi = qi.rgbSwapped();
#else
qi.rgbSwap();
#endif
}
#endif
@@ -968,7 +937,7 @@ bool JXRHandler::write(const QImage &image)
}
// setting metadata (a failure of setting metadata doesn't stop the encoding)
auto cs = image.colorSpace().iccProfile();
auto cs = qi.colorSpace().iccProfile();
if (!cs.isEmpty()) {
if (auto err = d->pEncoder->SetColorContext(d->pEncoder, reinterpret_cast<quint8 *>(cs.data()), cs.size())) {
qCWarning(LOG_JXRPLUGIN) << "JXRHandler::write() error while setting ICC profile:" << err;
@@ -1043,28 +1012,28 @@ QVariant JXRHandler::option(ImageOption option) const
if (d->initForReading(device())) {
switch (d->pDecoder->WMP.oOrientationFromContainer) {
case O_FLIPV:
v = QImageIOHandler::TransformationFlip;
v = int(QImageIOHandler::TransformationFlip);
break;
case O_FLIPH:
v = QImageIOHandler::TransformationMirror;
v = int(QImageIOHandler::TransformationMirror);
break;
case O_FLIPVH:
v = QImageIOHandler::TransformationRotate180;
v = int(QImageIOHandler::TransformationRotate180);
break;
case O_RCW:
v = QImageIOHandler::TransformationRotate90;
v = int(QImageIOHandler::TransformationRotate90);
break;
case O_RCW_FLIPV:
v = QImageIOHandler::TransformationFlipAndRotate90;
v = int(QImageIOHandler::TransformationFlipAndRotate90);
break;
case O_RCW_FLIPH:
v = QImageIOHandler::TransformationMirrorAndRotate90;
v = int(QImageIOHandler::TransformationMirrorAndRotate90);
break;
case O_RCW_FLIPVH:
v = QImageIOHandler::TransformationRotate270;
v = int(QImageIOHandler::TransformationRotate270);
break;
default:
v = QImageIOHandler::TransformationNone;
v = int(QImageIOHandler::TransformationNone);
break;
}
}
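A sketch of how an application might handle the linear scRGB data that float JXR images now keep (the file name is an assumption):

    #include <QColorSpace>
    #include <QImage>
    #include <QImageReader>

    // The plugin no longer bakes float JXR data into non-linear sRGB; the image
    // carries a linear color space instead, so a viewer that ignores color
    // profiles may want to convert it before display.
    QImage loadJxrForDisplay()
    {
        QImage img = QImageReader(QStringLiteral("photo-float.jxr")).read();
        if (img.colorSpace().transferFunction() == QColorSpace::TransferFunction::Linear)
            img.convertToColorSpace(QColorSpace(QColorSpace::SRgb));
        return img;
    }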


@@ -109,7 +109,7 @@ public:
QImage::Format format() const
{
if (isValid()) {
return isBlackAndWhite() ? QImage::Format_Grayscale16 : QImage::Format_RGBX32FPx4;
return QImage::Format_RGBX32FPx4;
}
return QImage::Format_Invalid;
}
@@ -155,7 +155,7 @@ public:
d->rollbackTransaction();
return ok;
}
} ;
};
class PFMHandlerPrivate
{
@ -215,42 +215,26 @@ bool PFMHandler::read(QImage *image)
}
for (auto y = 0, h = img.height(); y < h; ++y) {
float f;
if (header.isBlackAndWhite()) {
auto line = reinterpret_cast<quint16*>(img.scanLine(header.isPhotoshop() ? y : h - y - 1));
for (auto x = 0, w = img.width(); x < w; ++x) {
s >> f;
// QColorSpace does not handle gray linear profile, so I have to convert to non-linear
f = f < 0.0031308f ? (f * 12.92f) : (1.055 * std::pow(f, 1.0 / 2.4) - 0.055);
line[x] = quint16(std::clamp(f, float(0), float(1)) * std::numeric_limits<quint16>::max() + float(0.5));
if (s.status() != QDataStream::Ok) {
qCWarning(LOG_PFMPLUGIN) << "PFMHandler::read() detected corrupted data";
return false;
}
auto bw = header.isBlackAndWhite();
auto line = reinterpret_cast<float *>(img.scanLine(header.isPhotoshop() ? y : h - y - 1));
for (auto x = 0, n = img.width() * 4; x < n; x += 4) {
line[x + 3] = float(1);
s >> line[x];
if (bw) {
line[x + 1] = line[x];
line[x + 2] = line[x];
} else {
s >> line[x + 1];
s >> line[x + 2];
}
} else {
auto line = reinterpret_cast<float*>(img.scanLine(header.isPhotoshop() ? y : h - y - 1));
for (auto x = 0, n = img.width() * 4; x < n; x += 4) {
s >> f;
line[x] = std::clamp(f, float(0), float(1));
s >> f;
line[x + 1] = std::clamp(f, float(0), float(1));
s >> f;
line[x + 2] = std::clamp(f, float(0), float(1));
line[x + 3] = float(1);
if (s.status() != QDataStream::Ok) {
qCWarning(LOG_PFMPLUGIN) << "PFMHandler::read() detected corrupted data";
return false;
}
if (s.status() != QDataStream::Ok) {
qCWarning(LOG_PFMPLUGIN) << "PFMHandler::read() detected corrupted data";
return false;
}
}
}
if (!header.isBlackAndWhite()) {
img.setColorSpace(QColorSpace(QColorSpace::SRgbLinear));
}
img.setColorSpace(QColorSpace(QColorSpace::SRgbLinear));
*image = img;
return true;
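Finally, a sketch of reading a grayscale PFM, which now arrives as an RGB float image (the file name is an assumption):

    #include <QImage>
    #include <QImageReader>

    // A grayscale PFM is decoded to Format_RGBX32FPx4 with R == G == B for every
    // pixel (QImage has no gray floating point format) and a linear color space.
    float firstGraySample()
    {
        const QImage img = QImageReader(QStringLiteral("depth.pfm")).read();
        if (img.isNull())
            return 0.0f;
        auto line = reinterpret_cast<const float *>(img.constScanLine(0));
        return line[0]; // identical to line[1] and line[2]; line[3] is 1.0f
    }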