From 2432775755914d1449aa1488ef87fec63304eac4 Mon Sep 17 00:00:00 2001
From: Felix Kauselmann <2039670+selmf@users.noreply.github.com>
Date: Wed, 29 Apr 2015 14:54:57 +0200
Subject: [PATCH] Fix a parsing bug in the unarr backend that could lead to
 page skips if non-image files are present in the archive.

---
 compressed_archive/unarr/compressed_archive.cpp | 13 ++-----------
 1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/compressed_archive/unarr/compressed_archive.cpp b/compressed_archive/unarr/compressed_archive.cpp
index 49b0a563..b7594986 100644
--- a/compressed_archive/unarr/compressed_archive.cpp
+++ b/compressed_archive/unarr/compressed_archive.cpp
@@ -92,17 +92,8 @@ void CompressedArchive::getAllData(const QVector & indexes, ExtractDele
     int i=0;
     while (i < indexes.count())
     {
-        if (i==0)
-        {
-            ar_parse_entry_at(ar, offsets.at(indexes.at(0))); //set ar_entry to start of indexes
-        }
-        else
-        {
-            //TODO:
-            //since we already have offset lists, we want to use ar_parse_entry_at here as well
-            //speed impact?
-            ar_parse_entry(ar);
-        }
+        //use the offset list we generated so we're not getting any non-page files
+        ar_parse_entry_at(ar, offsets.at(indexes.at(i))); //set ar_entry to start of indexes
         buffer.resize(ar_entry_get_size(ar));
         if (ar_entry_uncompress(ar, buffer.data(), buffer.size())) //did we extract it?
         {
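
For context, the underlying issue is that ar_parse_entry() simply advances to whatever entry comes next in the archive, while the offsets list only contains entries the backend treats as pages; once a non-image file sits between two pages, the requested index and the archive position drift apart. Below is a minimal two-pass sketch of that idea against unarr's public API (ar_parse_entry, ar_parse_entry_at, ar_entry_get_offset, ar_entry_get_size, ar_entry_get_name, ar_entry_uncompress). It is not code from this patch, and isImageEntry() is a hypothetical stand-in for the backend's real page detection.

// Sketch only (not part of the patch): shows why seeking with precomputed
// offsets keeps the page index stable even when non-image files are
// interleaved in the archive. isImageEntry() is a hypothetical stand-in
// for the backend's real page filter.
#include <QByteArray>
#include <QString>
#include <QVector>
#include "unarr.h" // unarr public API header

static bool isImageEntry(const QString &name)
{
    // hypothetical filter; the real backend decides what counts as a page
    return name.endsWith(".jpg", Qt::CaseInsensitive) ||
           name.endsWith(".png", Qt::CaseInsensitive);
}

// First pass: remember the archive offset of every page entry.
static QVector<off64_t> collectPageOffsets(ar_archive *ar)
{
    QVector<off64_t> offsets;
    while (ar_parse_entry(ar)) {
        if (isImageEntry(QString::fromUtf8(ar_entry_get_name(ar))))
            offsets.append(ar_entry_get_offset(ar));
    }
    return offsets;
}

// Second pass: seek straight to the requested page, so entries that are
// not in the offsets list can never shift the index-to-page mapping.
static QByteArray extractPage(ar_archive *ar, const QVector<off64_t> &offsets, int index)
{
    QByteArray buffer;
    if (!ar_parse_entry_at(ar, offsets.at(index)))
        return buffer;
    buffer.resize(ar_entry_get_size(ar));
    if (!ar_entry_uncompress(ar, buffer.data(), buffer.size()))
        buffer.clear();
    return buffer;
}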