author     Elliott Hughes <enh@google.com>    2019-01-24 16:07:29 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>    2019-01-24 16:07:29 +0000
commit     c786a5099f48c69e4f7ae9993e65d22d4a0346c2 (patch)
tree       f2834c84e8da8433a3d2af57ed55e81f2fbbe493
parent     d59e72bab504fb8da3fd28cb11480ce2842132d5 (diff)
parent     f8305e869eed1026e62f9204a1be685a3894f4f6 (diff)
download   lzma-oreo-mr1-iot-release.tar.gz
-rw-r--r--  C/7zArcIn.c | 4
-rw-r--r--  C/7zDec.c | 8
-rw-r--r--  C/7zTypes.h | 5
-rw-r--r--  C/7zVersion.h | 6
-rw-r--r--  C/Android.bp | 3
-rw-r--r--  C/Bcj2Enc.c | 6
-rw-r--r--  C/CpuArch.c | 10
-rw-r--r--  C/CpuArch.h | 8
-rw-r--r--  C/LzFind.c | 164
-rw-r--r--  C/LzFindMt.c | 117
-rw-r--r--  C/LzFindMt.h | 14
-rw-r--r--  C/Lzma2Dec.c | 10
-rw-r--r--  C/Lzma2DecMt.c | 30
-rw-r--r--  C/Lzma2Enc.c | 8
-rw-r--r--  C/Lzma86Enc.c | 8
-rw-r--r--  C/LzmaDec.c | 32
-rw-r--r--  C/LzmaEnc.c | 733
-rw-r--r--  C/MtCoder.c | 8
-rw-r--r--  C/MtCoder.h | 6
-rw-r--r--  C/MtDec.c | 20
-rw-r--r--  C/MtDec.h | 24
-rw-r--r--  C/Ppmd7.c | 6
-rw-r--r--  C/Ppmd7.h | 6
-rw-r--r--  C/Ppmd7Dec.c | 4
-rw-r--r--  C/Util/7z/7zMain.c | 4
-rw-r--r--  C/Util/Lzma/LzmaUtil.c | 4
-rw-r--r--  C/Util/SfxSetup/SfxSetup.c | 10
-rw-r--r--  C/Xz.h | 20
-rw-r--r--  C/XzDec.c | 85
-rw-r--r--  C/XzEnc.c | 6
-rw-r--r--  C/XzIn.c | 4
-rw-r--r--  CPP/7zip/Aes.mak | 2
-rw-r--r--  CPP/7zip/Archive/7z/7zHandler.cpp | 7
-rw-r--r--  CPP/7zip/Archive/7z/7zHandlerOut.cpp | 3
-rw-r--r--  CPP/7zip/Archive/7z/7zIn.cpp | 17
-rw-r--r--  CPP/7zip/Archive/7z/7zIn.h | 10
-rw-r--r--  CPP/7zip/Archive/7z/7zUpdate.cpp | 19
-rw-r--r--  CPP/7zip/Archive/Common/CoderMixer2.cpp | 1
-rw-r--r--  CPP/7zip/Archive/LzmaHandler.cpp | 52
-rw-r--r--  CPP/7zip/Archive/XzHandler.cpp | 4
-rw-r--r--  CPP/7zip/Bundles/LzmaCon/makefile | 1
-rw-r--r--  CPP/7zip/Crc.mak | 2
-rw-r--r--  CPP/7zip/Crc64.mak | 2
-rw-r--r--  CPP/7zip/LzmaDec.mak | 2
-rw-r--r--  CPP/7zip/UI/Common/ArchiveCommandLine.cpp | 16
-rw-r--r--  CPP/7zip/UI/Common/ArchiveExtractCallback.cpp | 26
-rw-r--r--  CPP/7zip/UI/Common/ArchiveName.cpp | 76
-rw-r--r--  CPP/7zip/UI/Common/ArchiveName.h | 5
-rw-r--r--  CPP/7zip/UI/Common/Bench.cpp | 96
-rw-r--r--  CPP/7zip/UI/Common/Bench.h | 5
-rw-r--r--  CPP/7zip/UI/Common/EnumDirItems.cpp | 10
-rw-r--r--  CPP/7zip/UI/Common/EnumDirItems.h | 1
-rw-r--r--  CPP/7zip/UI/Common/HashCalc.cpp | 2
-rw-r--r--  CPP/7zip/UI/Common/HashCalc.h | 8
-rw-r--r--  CPP/7zip/UI/Common/OpenArchive.cpp | 5
-rw-r--r--  CPP/7zip/UI/Common/Update.cpp | 95
-rw-r--r--  CPP/7zip/UI/Console/HashCon.cpp | 2
-rw-r--r--  CPP/7zip/UI/Console/List.cpp | 50
-rw-r--r--  CPP/7zip/UI/Console/Main.cpp | 16
-rw-r--r--  CPP/7zip/UI/FileManager/ExtractCallback.cpp | 4
-rw-r--r--  CPP/7zip/UI/GUI/ExtractGUI.cpp | 4
-rw-r--r--  CPP/7zip/UI/GUI/HashGUI.h | 6
-rw-r--r--  CPP/Build.mak | 49
-rw-r--r--  CPP/Common/ListFileUtils.cpp | 15
-rw-r--r--  CPP/Common/ListFileUtils.h | 6
-rw-r--r--  CPP/Common/MyString.h | 1
-rw-r--r--  DOC/7zFormat.txt | 6
-rw-r--r--  DOC/Methods.txt | 10
-rw-r--r--  DOC/lzma-history.txt | 14
-rw-r--r--  DOC/lzma-sdk.txt | 2
-rw-r--r--  Java/Tukaani/Android.bp | 23
-rw-r--r--  Java/Tukaani/COPYING | 10
-rw-r--r--  Java/Tukaani/NEWS | 56
-rw-r--r--  Java/Tukaani/README | 50
-rw-r--r--  Java/Tukaani/THANKS | 16
-rw-r--r--  Java/Tukaani/build.properties | 29
-rw-r--r--  Java/Tukaani/build.xml | 143
-rw-r--r--  Java/Tukaani/fileset-misc.txt | 11
-rw-r--r--  Java/Tukaani/fileset-src.txt | 100
-rw-r--r--  Java/Tukaani/maven/README | 2
-rw-r--r--  Java/Tukaani/maven/pom_template.xml | 58
-rw-r--r--  Java/Tukaani/src/LZMADecDemo.java | 80
-rw-r--r--  Java/Tukaani/src/XZDecDemo.java | 71
-rw-r--r--  Java/Tukaani/src/XZEncDemo.java | 41
-rw-r--r--  Java/Tukaani/src/XZSeekDecDemo.java | 75
-rw-r--r--  Java/Tukaani/src/XZSeekEncDemo.java | 68
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/ARMOptions.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/ARMThumbOptions.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BCJCoder.java | 35
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BCJDecoder.java | 62
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BCJEncoder.java | 48
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BCJOptions.java | 57
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BlockInputStream.java | 284
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/BlockOutputStream.java | 134
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/CorruptedInputException.java | 37
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/CountingInputStream.java | 45
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/CountingOutputStream.java | 54
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaCoder.java | 26
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaDecoder.java | 32
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaEncoder.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaInputStream.java | 146
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaOptions.java | 102
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/DeltaOutputStream.java | 113
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FilterCoder.java | 16
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FilterDecoder.java | 17
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FilterEncoder.java | 17
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FilterOptions.java | 80
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FinishableOutputStream.java | 31
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/FinishableWrapperOutputStream.java | 70
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/IA64Options.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/IndexIndicatorException.java | 14
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2Coder.java | 26
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2Decoder.java | 35
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2Encoder.java | 50
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2InputStream.java | 358
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2Options.java | 581
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMA2OutputStream.java | 261
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/LZMAInputStream.java | 569
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/MemoryLimitException.java | 60
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/PowerPCOptions.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/RawCoder.java | 33
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SPARCOptions.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SeekableFileInputStream.java | 102
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SeekableInputStream.java | 81
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SeekableXZInputStream.java | 964
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SimpleInputStream.java | 138
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SimpleOutputStream.java | 151
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/SingleXZInputStream.java | 375
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/UncompressedLZMA2OutputStream.java | 153
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/UnsupportedOptionsException.java | 34
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/X86Options.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/XZ.java | 53
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/XZFormatException.java | 24
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/XZIOException.java | 27
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/XZInputStream.java | 371
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/XZOutputStream.java | 488
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/check/CRC32.java | 33
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/check/CRC64.java | 54
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/check/Check.java | 57
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/check/None.java | 24
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/check/SHA256.java | 30
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/common/DecoderUtil.java | 121
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/common/EncoderUtil.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/common/StreamFlags.java | 15
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/common/Util.java | 28
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/delta/DeltaCoder.java | 27
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/delta/DeltaDecoder.java | 24
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/delta/DeltaEncoder.java | 24
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/BlockInfo.java | 38
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/IndexBase.java | 56
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/IndexDecoder.java | 223
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/IndexEncoder.java | 59
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/IndexHash.java | 94
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/index/IndexRecord.java | 20
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/BT4.java | 255
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/CRC32Hash.java | 35
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/HC4.java | 200
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/Hash234.java | 89
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/LZDecoder.java | 126
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/LZEncoder.java | 419
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lz/Matches.java | 22
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/LZMACoder.java | 140
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/LZMADecoder.java | 199
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoder.java | 711
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderFast.java | 151
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderNormal.java | 566
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/Optimum.java | 73
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/lzma/State.java | 75
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/package-info.java | 36
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeCoder.java | 26
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoder.java | 83
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromBuffer.java | 64
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromStream.java | 41
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeEncoder.java | 203
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/ARM.java | 50
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/ARMThumb.java | 53
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/IA64.java | 81
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/PowerPC.java | 50
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/SPARC.java | 56
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/SimpleFilter.java | 14
-rw-r--r--  Java/Tukaani/src/org/tukaani/xz/simple/X86.java | 98
181 files changed, 1308 insertions, 13035 deletions
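
A recurring change across the C sources below is the rename of the Bool typedef to BoolInt in C/7zTypes.h (the old name survives only as a comment; the True/False macros are unchanged). As a minimal sketch of what affected code looks like after the rename -- the helper function here is hypothetical and not taken from this diff:

    #include "7zTypes.h"   /* 18.06: typedef int BoolInt;  True == 1, False == 0 */

    /* Hypothetical example only: any former `Bool` return type or local becomes `BoolInt`. */
    static BoolInt IsPowerOfTwo(UInt32 v)
    {
      return (v != 0 && (v & (v - 1)) == 0) ? True : False;
    }
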
diff --git a/C/7zArcIn.c b/C/7zArcIn.c
index 70d7175..2202d08 100644
--- a/C/7zArcIn.c
+++ b/C/7zArcIn.c
@@ -1,5 +1,5 @@
/* 7zArcIn.c -- 7z Input functions
-2017-04-03 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -1744,7 +1744,7 @@ size_t SzArEx_GetFullNameLen(const CSzArEx *p, size_t fileIndex)
UInt16 *SzArEx_GetFullNameUtf16_Back(const CSzArEx *p, size_t fileIndex, UInt16 *dest)
{
- Bool needSlash;
+ BoolInt needSlash;
if (!p->FileNameOffsets)
{
*(--dest) = 0;
diff --git a/C/7zDec.c b/C/7zDec.c
index 9c98695..088f540 100644
--- a/C/7zDec.c
+++ b/C/7zDec.c
@@ -1,5 +1,5 @@
/* 7zDec.c -- Decoding from 7z folder
-2017-04-03 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -44,7 +44,7 @@ typedef struct
const Byte *end;
const Byte *begin;
UInt64 processed;
- Bool extra;
+ BoolInt extra;
SRes res;
const ILookInStream *inStream;
} CByteInToLook;
@@ -269,7 +269,7 @@ static SRes SzDecodeCopy(UInt64 inSize, ILookInStream *inStream, Byte *outBuffer
return SZ_OK;
}
-static Bool IS_MAIN_METHOD(UInt32 m)
+static BoolInt IS_MAIN_METHOD(UInt32 m)
{
switch (m)
{
@@ -286,7 +286,7 @@ static Bool IS_MAIN_METHOD(UInt32 m)
return False;
}
-static Bool IS_SUPPORTED_CODER(const CSzCoderInfo *c)
+static BoolInt IS_SUPPORTED_CODER(const CSzCoderInfo *c)
{
return
c->NumStreams == 1
diff --git a/C/7zTypes.h b/C/7zTypes.h
index 4977cda..593f5aa 100644
--- a/C/7zTypes.h
+++ b/C/7zTypes.h
@@ -1,5 +1,5 @@
/* 7zTypes.h -- Basic types
-2017-07-17 : Igor Pavlov : Public domain */
+2018-08-04 : Igor Pavlov : Public domain */
#ifndef __7Z_TYPES_H
#define __7Z_TYPES_H
@@ -103,7 +103,8 @@ typedef UInt32 SizeT;
typedef size_t SizeT;
#endif
-typedef int Bool;
+typedef int BoolInt;
+/* typedef BoolInt Bool; */
#define True 1
#define False 0
diff --git a/C/7zVersion.h b/C/7zVersion.h
index ed3aa94..c26d9d2 100644
--- a/C/7zVersion.h
+++ b/C/7zVersion.h
@@ -1,7 +1,7 @@
#define MY_VER_MAJOR 18
-#define MY_VER_MINOR 05
+#define MY_VER_MINOR 06
#define MY_VER_BUILD 0
-#define MY_VERSION_NUMBERS "18.05"
+#define MY_VERSION_NUMBERS "18.06"
#define MY_VERSION MY_VERSION_NUMBERS
#ifdef MY_CPU_NAME
@@ -10,7 +10,7 @@
#define MY_VERSION_CPU MY_VERSION
#endif
-#define MY_DATE "2018-04-30"
+#define MY_DATE "2018-12-30"
#undef MY_COPYRIGHT
#undef MY_VERSION_COPYRIGHT_DATE
#define MY_AUTHOR_NAME "Igor Pavlov"
diff --git a/C/Android.bp b/C/Android.bp
index f65553f..296f737 100644
--- a/C/Android.bp
+++ b/C/Android.bp
@@ -18,8 +18,9 @@ cc_library {
"-Werror",
"-Wno-empty-body",
"-Wno-enum-conversion",
+ "-Wno-logical-op-parentheses",
+ "-Wno-self-assign",
],
- clang_cflags: ["-Wno-self-assign", "-Werror"],
export_include_dirs: ["."],
diff --git a/C/Bcj2Enc.c b/C/Bcj2Enc.c
index b0bc759..a54ea08 100644
--- a/C/Bcj2Enc.c
+++ b/C/Bcj2Enc.c
@@ -1,5 +1,5 @@
/* Bcj2Enc.c -- BCJ2 Encoder (Converter for x86 code)
-2017-04-28 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -52,7 +52,7 @@ void Bcj2Enc_Init(CBcj2Enc *p)
p->probs[i] = kBitModelTotal >> 1;
}
-static Bool MY_FAST_CALL RangeEnc_ShiftLow(CBcj2Enc *p)
+static BoolInt MY_FAST_CALL RangeEnc_ShiftLow(CBcj2Enc *p)
{
if ((UInt32)p->low < (UInt32)0xFF000000 || (UInt32)(p->low >> 32) != 0)
{
@@ -165,7 +165,7 @@ static void Bcj2Enc_Encode_2(CBcj2Enc *p)
{
Byte context = (Byte)(num == 0 ? p->prevByte : src[-1]);
- Bool needConvert;
+ BoolInt needConvert;
p->bufs[BCJ2_STREAM_MAIN] = dest + 1;
p->ip += (UInt32)num + 1;
diff --git a/C/CpuArch.c b/C/CpuArch.c
index f835c2b..b37bdca 100644
--- a/C/CpuArch.c
+++ b/C/CpuArch.c
@@ -1,5 +1,5 @@
/* CpuArch.c -- CPU specific code
-2016-02-25: Igor Pavlov : Public domain */
+2018-07-04: Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -115,7 +115,7 @@ void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d)
#endif
}
-Bool x86cpuid_CheckAndRead(Cx86cpuid *p)
+BoolInt x86cpuid_CheckAndRead(Cx86cpuid *p)
{
CHECK_CPUID_IS_SUPPORTED
MyCPUID(0, &p->maxFunc, &p->vendor[0], &p->vendor[2], &p->vendor[1]);
@@ -144,7 +144,7 @@ int x86cpuid_GetFirm(const Cx86cpuid *p)
return -1;
}
-Bool CPU_Is_InOrder()
+BoolInt CPU_Is_InOrder()
{
Cx86cpuid p;
int firm;
@@ -175,7 +175,7 @@ Bool CPU_Is_InOrder()
#if !defined(MY_CPU_AMD64) && defined(_WIN32)
#include <windows.h>
-static Bool CPU_Sys_Is_SSE_Supported()
+static BoolInt CPU_Sys_Is_SSE_Supported()
{
OSVERSIONINFO vi;
vi.dwOSVersionInfoSize = sizeof(vi);
@@ -188,7 +188,7 @@ static Bool CPU_Sys_Is_SSE_Supported()
#define CHECK_SYS_SSE_SUPPORT
#endif
-Bool CPU_Is_Aes_Supported()
+BoolInt CPU_Is_Aes_Supported()
{
Cx86cpuid p;
CHECK_SYS_SSE_SUPPORT
diff --git a/C/CpuArch.h b/C/CpuArch.h
index 7fb2728..51574f1 100644
--- a/C/CpuArch.h
+++ b/C/CpuArch.h
@@ -1,5 +1,5 @@
/* CpuArch.h -- CPU specific code
-2017-09-04 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#ifndef __CPU_ARCH_H
#define __CPU_ARCH_H
@@ -318,15 +318,15 @@ enum
void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);
-Bool x86cpuid_CheckAndRead(Cx86cpuid *p);
+BoolInt x86cpuid_CheckAndRead(Cx86cpuid *p);
int x86cpuid_GetFirm(const Cx86cpuid *p);
#define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))
#define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))
#define x86cpuid_GetStepping(ver) (ver & 0xF)
-Bool CPU_Is_InOrder();
-Bool CPU_Is_Aes_Supported();
+BoolInt CPU_Is_InOrder();
+BoolInt CPU_Is_Aes_Supported();
#endif
diff --git a/C/LzFind.c b/C/LzFind.c
index 6ea82a9..4eefc17 100644
--- a/C/LzFind.c
+++ b/C/LzFind.c
@@ -1,5 +1,5 @@
/* LzFind.c -- Match finder for LZ algorithms
-2017-06-10 : Igor Pavlov : Public domain */
+2018-07-08 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -138,7 +138,7 @@ static void MatchFinder_SetDefaultSettings(CMatchFinder *p)
void MatchFinder_Construct(CMatchFinder *p)
{
- UInt32 i;
+ unsigned i;
p->bufferBase = NULL;
p->directInput = 0;
p->hash = NULL;
@@ -147,7 +147,7 @@ void MatchFinder_Construct(CMatchFinder *p)
for (i = 0; i < 256; i++)
{
- UInt32 r = i;
+ UInt32 r = (UInt32)i;
unsigned j;
for (j = 0; j < 8; j++)
r = (r >> 1) ^ (kCrcPoly & ((UInt32)0 - (r & 1)));
@@ -368,6 +368,8 @@ static void MatchFinder_Normalize(CMatchFinder *p)
MatchFinder_ReduceOffsets(p, subValue);
}
+
+MY_NO_INLINE
static void MatchFinder_CheckLimits(CMatchFinder *p)
{
if (p->pos == kMaxValForNormalize)
@@ -379,10 +381,16 @@ static void MatchFinder_CheckLimits(CMatchFinder *p)
MatchFinder_SetLimits(p);
}
-static UInt32 * Hc_GetMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
+
+/*
+ (lenLimit > maxLen)
+*/
+MY_FORCE_INLINE
+static UInt32 * Hc_GetMatchesSpec(unsigned lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
- UInt32 *distances, UInt32 maxLen)
+ UInt32 *distances, unsigned maxLen)
{
+ /*
son[_cyclicBufferPos] = curMatch;
for (;;)
{
@@ -400,7 +408,8 @@ static UInt32 * Hc_GetMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos,
break;
if (maxLen < len)
{
- *distances++ = maxLen = len;
+ maxLen = len;
+ *distances++ = len;
*distances++ = delta - 1;
if (len == lenLimit)
return distances;
@@ -408,15 +417,58 @@ static UInt32 * Hc_GetMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos,
}
}
}
+ */
+
+ const Byte *lim = cur + lenLimit;
+ son[_cyclicBufferPos] = curMatch;
+ do
+ {
+ UInt32 delta = pos - curMatch;
+ if (delta >= _cyclicBufferSize)
+ break;
+ {
+ ptrdiff_t diff;
+ curMatch = son[_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)];
+ diff = (ptrdiff_t)0 - delta;
+ if (cur[maxLen] == cur[maxLen + diff])
+ {
+ const Byte *c = cur;
+ while (*c == c[diff])
+ {
+ if (++c == lim)
+ {
+ distances[0] = (UInt32)(lim - cur);
+ distances[1] = delta - 1;
+ return distances + 2;
+ }
+ }
+ {
+ unsigned len = (unsigned)(c - cur);
+ if (maxLen < len)
+ {
+ maxLen = len;
+ distances[0] = (UInt32)len;
+ distances[1] = delta - 1;
+ distances += 2;
+ }
+ }
+ }
+ }
+ }
+ while (--cutValue);
+
+ return distances;
}
+
+MY_FORCE_INLINE
UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
UInt32 *distances, UInt32 maxLen)
{
- CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
- CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
- UInt32 len0 = 0, len1 = 0;
+ CLzRef *ptr0 = son + ((size_t)_cyclicBufferPos << 1) + 1;
+ CLzRef *ptr1 = son + ((size_t)_cyclicBufferPos << 1);
+ unsigned len0 = 0, len1 = 0;
for (;;)
{
UInt32 delta = pos - curMatch;
@@ -426,9 +478,10 @@ UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byt
return distances;
}
{
- CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
+ CLzRef *pair = son + ((size_t)(_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
const Byte *pb = cur - delta;
- UInt32 len = (len0 < len1 ? len0 : len1);
+ unsigned len = (len0 < len1 ? len0 : len1);
+ UInt32 pair0 = pair[0];
if (pb[len] == cur[len])
{
if (++len != lenLimit && pb[len] == cur[len])
@@ -437,11 +490,12 @@ UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byt
break;
if (maxLen < len)
{
- *distances++ = maxLen = len;
+ maxLen = (UInt32)len;
+ *distances++ = (UInt32)len;
*distances++ = delta - 1;
if (len == lenLimit)
{
- *ptr1 = pair[0];
+ *ptr1 = pair0;
*ptr0 = pair[1];
return distances;
}
@@ -468,9 +522,9 @@ UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byt
static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue)
{
- CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
- CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
- UInt32 len0 = 0, len1 = 0;
+ CLzRef *ptr0 = son + ((size_t)_cyclicBufferPos << 1) + 1;
+ CLzRef *ptr1 = son + ((size_t)_cyclicBufferPos << 1);
+ unsigned len0 = 0, len1 = 0;
for (;;)
{
UInt32 delta = pos - curMatch;
@@ -480,9 +534,9 @@ static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const
return;
}
{
- CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
+ CLzRef *pair = son + ((size_t)(_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
const Byte *pb = cur - delta;
- UInt32 len = (len0 < len1 ? len0 : len1);
+ unsigned len = (len0 < len1 ? len0 : len1);
if (pb[len] == cur[len])
{
while (++len != lenLimit)
@@ -520,13 +574,13 @@ static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const
p->buffer++; \
if (++p->pos == p->posLimit) MatchFinder_CheckLimits(p);
-#define MOVE_POS_RET MOVE_POS return offset;
+#define MOVE_POS_RET MOVE_POS return (UInt32)offset;
static void MatchFinder_MovePos(CMatchFinder *p) { MOVE_POS; }
#define GET_MATCHES_HEADER2(minLen, ret_op) \
- UInt32 lenLimit; UInt32 hv; const Byte *cur; UInt32 curMatch; \
- lenLimit = p->lenLimit; { if (lenLimit < minLen) { MatchFinder_MovePos(p); ret_op; }} \
+ unsigned lenLimit; UInt32 hv; const Byte *cur; UInt32 curMatch; \
+ lenLimit = (unsigned)p->lenLimit; { if (lenLimit < minLen) { MatchFinder_MovePos(p); ret_op; }} \
cur = p->buffer;
#define GET_MATCHES_HEADER(minLen) GET_MATCHES_HEADER2(minLen, return 0)
@@ -535,22 +589,22 @@ static void MatchFinder_MovePos(CMatchFinder *p) { MOVE_POS; }
#define MF_PARAMS(p) p->pos, p->buffer, p->son, p->cyclicBufferPos, p->cyclicBufferSize, p->cutValue
#define GET_MATCHES_FOOTER(offset, maxLen) \
- offset = (UInt32)(GetMatchesSpec1(lenLimit, curMatch, MF_PARAMS(p), \
- distances + offset, maxLen) - distances); MOVE_POS_RET;
+ offset = (unsigned)(GetMatchesSpec1((UInt32)lenLimit, curMatch, MF_PARAMS(p), \
+ distances + offset, (UInt32)maxLen) - distances); MOVE_POS_RET;
#define SKIP_FOOTER \
- SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p)); MOVE_POS;
+ SkipMatchesSpec((UInt32)lenLimit, curMatch, MF_PARAMS(p)); MOVE_POS;
#define UPDATE_maxLen { \
ptrdiff_t diff = (ptrdiff_t)0 - d2; \
const Byte *c = cur + maxLen; \
const Byte *lim = cur + lenLimit; \
for (; c != lim; c++) if (*(c + diff) != *c) break; \
- maxLen = (UInt32)(c - cur); }
+ maxLen = (unsigned)(c - cur); }
static UInt32 Bt2_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 offset;
+ unsigned offset;
GET_MATCHES_HEADER(2)
HASH2_CALC;
curMatch = p->hash[hv];
@@ -561,7 +615,7 @@ static UInt32 Bt2_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
UInt32 Bt3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 offset;
+ unsigned offset;
GET_MATCHES_HEADER(3)
HASH_ZIP_CALC;
curMatch = p->hash[hv];
@@ -572,7 +626,8 @@ UInt32 Bt3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
static UInt32 Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 h2, d2, maxLen, offset, pos;
+ UInt32 h2, d2, pos;
+ unsigned maxLen, offset;
UInt32 *hash;
GET_MATCHES_HEADER(3)
@@ -594,12 +649,12 @@ static UInt32 Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (d2 < p->cyclicBufferSize && *(cur - d2) == *cur)
{
UPDATE_maxLen
- distances[0] = maxLen;
+ distances[0] = (UInt32)maxLen;
distances[1] = d2 - 1;
offset = 2;
if (maxLen == lenLimit)
{
- SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p));
+ SkipMatchesSpec((UInt32)lenLimit, curMatch, MF_PARAMS(p));
MOVE_POS_RET;
}
}
@@ -609,7 +664,8 @@ static UInt32 Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
static UInt32 Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 h2, h3, d2, d3, maxLen, offset, pos;
+ UInt32 h2, h3, d2, d3, pos;
+ unsigned maxLen, offset;
UInt32 *hash;
GET_MATCHES_HEADER(4)
@@ -618,12 +674,12 @@ static UInt32 Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
hash = p->hash;
pos = p->pos;
- d2 = pos - hash[ h2];
+ d2 = pos - hash [h2];
d3 = pos - (hash + kFix3HashSize)[h3];
curMatch = (hash + kFix4HashSize)[hv];
- hash[ h2] = pos;
+ hash [h2] = pos;
(hash + kFix3HashSize)[h3] = pos;
(hash + kFix4HashSize)[hv] = pos;
@@ -632,7 +688,8 @@ static UInt32 Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (d2 < p->cyclicBufferSize && *(cur - d2) == *cur)
{
- distances[0] = maxLen = 2;
+ maxLen = 2;
+ distances[0] = 2;
distances[1] = d2 - 1;
offset = 2;
}
@@ -648,10 +705,10 @@ static UInt32 Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (offset != 0)
{
UPDATE_maxLen
- distances[(size_t)offset - 2] = maxLen;
+ distances[(size_t)offset - 2] = (UInt32)maxLen;
if (maxLen == lenLimit)
{
- SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p));
+ SkipMatchesSpec((UInt32)lenLimit, curMatch, MF_PARAMS(p));
MOVE_POS_RET;
}
}
@@ -674,13 +731,13 @@ static UInt32 Bt5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
hash = p->hash;
pos = p->pos;
- d2 = pos - hash[ h2];
+ d2 = pos - hash [h2];
d3 = pos - (hash + kFix3HashSize)[h3];
d4 = pos - (hash + kFix4HashSize)[h4];
curMatch = (hash + kFix5HashSize)[hv];
- hash[ h2] = pos;
+ hash [h2] = pos;
(hash + kFix3HashSize)[h3] = pos;
(hash + kFix4HashSize)[h4] = pos;
(hash + kFix5HashSize)[hv] = pos;
@@ -741,7 +798,8 @@ static UInt32 Bt5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 h2, h3, d2, d3, maxLen, offset, pos;
+ UInt32 h2, h3, d2, d3, pos;
+ unsigned maxLen, offset;
UInt32 *hash;
GET_MATCHES_HEADER(4)
@@ -750,12 +808,11 @@ static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
hash = p->hash;
pos = p->pos;
- d2 = pos - hash[ h2];
+ d2 = pos - hash [h2];
d3 = pos - (hash + kFix3HashSize)[h3];
-
curMatch = (hash + kFix4HashSize)[hv];
- hash[ h2] = pos;
+ hash [h2] = pos;
(hash + kFix3HashSize)[h3] = pos;
(hash + kFix4HashSize)[hv] = pos;
@@ -764,7 +821,8 @@ static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (d2 < p->cyclicBufferSize && *(cur - d2) == *cur)
{
- distances[0] = maxLen = 2;
+ maxLen = 2;
+ distances[0] = 2;
distances[1] = d2 - 1;
offset = 2;
}
@@ -780,7 +838,7 @@ static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (offset != 0)
{
UPDATE_maxLen
- distances[(size_t)offset - 2] = maxLen;
+ distances[(size_t)offset - 2] = (UInt32)maxLen;
if (maxLen == lenLimit)
{
p->son[p->cyclicBufferPos] = curMatch;
@@ -791,7 +849,7 @@ static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
if (maxLen < 3)
maxLen = 3;
- offset = (UInt32)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
+ offset = (unsigned)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
distances + offset, maxLen) - (distances));
MOVE_POS_RET
}
@@ -808,13 +866,13 @@ static UInt32 Hc5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
hash = p->hash;
pos = p->pos;
- d2 = pos - hash[ h2];
+ d2 = pos - hash [h2];
d3 = pos - (hash + kFix3HashSize)[h3];
d4 = pos - (hash + kFix4HashSize)[h4];
curMatch = (hash + kFix5HashSize)[hv];
- hash[ h2] = pos;
+ hash [h2] = pos;
(hash + kFix3HashSize)[h3] = pos;
(hash + kFix4HashSize)[h4] = pos;
(hash + kFix5HashSize)[hv] = pos;
@@ -877,12 +935,12 @@ static UInt32 Hc5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
UInt32 Hc3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
- UInt32 offset;
+ unsigned offset;
GET_MATCHES_HEADER(3)
HASH_ZIP_CALC;
curMatch = p->hash[hv];
p->hash[hv] = p->pos;
- offset = (UInt32)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
+ offset = (unsigned)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
distances, 2) - (distances));
MOVE_POS_RET
}
@@ -940,7 +998,7 @@ static void Bt4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
HASH4_CALC;
hash = p->hash;
curMatch = (hash + kFix4HashSize)[hv];
- hash[ h2] =
+ hash [h2] =
(hash + kFix3HashSize)[h3] =
(hash + kFix4HashSize)[hv] = p->pos;
SKIP_FOOTER
@@ -959,7 +1017,7 @@ static void Bt5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
HASH5_CALC;
hash = p->hash;
curMatch = (hash + kFix5HashSize)[hv];
- hash[ h2] =
+ hash [h2] =
(hash + kFix3HashSize)[h3] =
(hash + kFix4HashSize)[h4] =
(hash + kFix5HashSize)[hv] = p->pos;
@@ -979,7 +1037,7 @@ static void Hc4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
HASH4_CALC;
hash = p->hash;
curMatch = (hash + kFix4HashSize)[hv];
- hash[ h2] =
+ hash [h2] =
(hash + kFix3HashSize)[h3] =
(hash + kFix4HashSize)[hv] = p->pos;
p->son[p->cyclicBufferPos] = curMatch;
@@ -999,7 +1057,7 @@ static void Hc5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
HASH5_CALC;
hash = p->hash;
curMatch = (hash + kFix5HashSize)[hv];
- hash[ h2] =
+ hash [h2] =
(hash + kFix3HashSize)[h3] =
(hash + kFix4HashSize)[h4] =
(hash + kFix5HashSize)[hv] = p->pos;
diff --git a/C/LzFindMt.c b/C/LzFindMt.c
index 2563824..df32146 100644
--- a/C/LzFindMt.c
+++ b/C/LzFindMt.c
@@ -1,5 +1,5 @@
/* LzFindMt.c -- multithreaded Match finder for LZ algorithms
-2017-06-10 : Igor Pavlov : Public domain */
+2018-12-29 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -232,38 +232,57 @@ static void MatchFinderMt_GetNextBlock_Hash(CMatchFinderMt *p)
#define kEmptyHashValue 0
-/* #define MFMT_GM_INLINE */
+#define MFMT_GM_INLINE
#ifdef MFMT_GM_INLINE
-#define NO_INLINE MY_FAST_CALL
+/*
+ we use size_t for _cyclicBufferPos instead of UInt32
+ to eliminate "movsx" BUG in old MSVC x64 compiler.
+*/
-static Int32 NO_INLINE GetMatchesSpecN(UInt32 lenLimit, UInt32 pos, const Byte *cur, CLzRef *son,
- UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 _cutValue,
- UInt32 *_distances, UInt32 _maxLen, const UInt32 *hash, Int32 limit, UInt32 size, UInt32 *posRes)
+MY_NO_INLINE
+static UInt32 *GetMatchesSpecN(UInt32 lenLimit, UInt32 pos, const Byte *cur, CLzRef *son,
+ size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 _cutValue,
+ UInt32 *distances, UInt32 _maxLen, const UInt32 *hash, const UInt32 *limit, UInt32 size, UInt32 *posRes)
{
do
{
- UInt32 *distances = _distances + 1;
- UInt32 curMatch = pos - *hash++;
+ UInt32 *_distances = ++distances;
+ UInt32 delta = *hash++;
- CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
- CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
- UInt32 len0 = 0, len1 = 0;
+ CLzRef *ptr0 = son + ((size_t)_cyclicBufferPos << 1) + 1;
+ CLzRef *ptr1 = son + ((size_t)_cyclicBufferPos << 1);
+ unsigned len0 = 0, len1 = 0;
UInt32 cutValue = _cutValue;
- UInt32 maxLen = _maxLen;
- for (;;)
+ unsigned maxLen = (unsigned)_maxLen;
+
+ /*
+ if (size > 1)
{
- UInt32 delta = pos - curMatch;
- if (cutValue-- == 0 || delta >= _cyclicBufferSize)
+ UInt32 delta = *hash;
+ if (delta < _cyclicBufferSize)
{
- *ptr0 = *ptr1 = kEmptyHashValue;
- break;
+ UInt32 cyc1 = _cyclicBufferPos + 1;
+ CLzRef *pair = son + ((size_t)(cyc1 - delta + ((delta > cyc1) ? _cyclicBufferSize : 0)) << 1);
+ Byte b = *(cur + 1 - delta);
+ _distances[0] = pair[0];
+ _distances[1] = b;
}
+ }
+ */
+ if (cutValue == 0 || delta >= _cyclicBufferSize)
+ {
+ *ptr0 = *ptr1 = kEmptyHashValue;
+ }
+ else
+ for(;;)
+ {
{
- CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
+ CLzRef *pair = son + ((size_t)(_cyclicBufferPos - delta + ((_cyclicBufferPos < delta) ? _cyclicBufferSize : 0)) << 1);
const Byte *pb = cur - delta;
- UInt32 len = (len0 < len1 ? len0 : len1);
+ unsigned len = (len0 < len1 ? len0 : len1);
+ UInt32 pair0 = *pair;
if (pb[len] == cur[len])
{
if (++len != lenLimit && pb[len] == cur[len])
@@ -272,54 +291,66 @@ static Int32 NO_INLINE GetMatchesSpecN(UInt32 lenLimit, UInt32 pos, const Byte *
break;
if (maxLen < len)
{
- *distances++ = maxLen = len;
+ maxLen = len;
+ *distances++ = (UInt32)len;
*distances++ = delta - 1;
if (len == lenLimit)
{
- *ptr1 = pair[0];
- *ptr0 = pair[1];
+ UInt32 pair1 = pair[1];
+ *ptr1 = pair0;
+ *ptr0 = pair1;
break;
}
}
}
- if (pb[len] < cur[len])
- {
- *ptr1 = curMatch;
- ptr1 = pair + 1;
- curMatch = *ptr1;
- len1 = len;
- }
- else
{
- *ptr0 = curMatch;
- ptr0 = pair;
- curMatch = *ptr0;
- len0 = len;
+ UInt32 curMatch = pos - delta;
+ // delta = pos - *pair;
+ // delta = pos - pair[((UInt32)pb[len] - (UInt32)cur[len]) >> 31];
+ if (pb[len] < cur[len])
+ {
+ delta = pos - pair[1];
+ *ptr1 = curMatch;
+ ptr1 = pair + 1;
+ len1 = len;
+ }
+ else
+ {
+ delta = pos - *pair;
+ *ptr0 = curMatch;
+ ptr0 = pair;
+ len0 = len;
+ }
}
}
+ if (--cutValue == 0 || delta >= _cyclicBufferSize)
+ {
+ *ptr0 = *ptr1 = kEmptyHashValue;
+ break;
+ }
}
pos++;
_cyclicBufferPos++;
cur++;
{
UInt32 num = (UInt32)(distances - _distances);
- *_distances = num - 1;
- _distances += num;
- limit -= num;
+ _distances[-1] = num;
}
}
- while (limit > 0 && --size != 0);
+ while (distances < limit && --size != 0);
*posRes = pos;
- return limit;
+ return distances;
}
#endif
+
+
static void BtGetMatches(CMatchFinderMt *p, UInt32 *distances)
{
UInt32 numProcessed = 0;
UInt32 curPos = 2;
- UInt32 limit = kMtBtBlockSize - (p->matchMaxLen * 2);
+ UInt32 limit = kMtBtBlockSize - (p->matchMaxLen * 2); // * 2
distances[1] = p->hashNumAvail;
@@ -369,8 +400,10 @@ static void BtGetMatches(CMatchFinderMt *p, UInt32 *distances)
#else
{
UInt32 posRes;
- curPos = limit - GetMatchesSpecN(lenLimit, pos, p->buffer, p->son, cyclicBufferPos, p->cyclicBufferSize, p->cutValue,
- distances + curPos, p->numHashBytes - 1, p->hashBuf + p->hashBufPos, (Int32)(limit - curPos), size, &posRes);
+ curPos = (UInt32)(GetMatchesSpecN(lenLimit, pos, p->buffer, p->son, cyclicBufferPos, p->cyclicBufferSize, p->cutValue,
+ distances + curPos, p->numHashBytes - 1, p->hashBuf + p->hashBufPos,
+ distances + limit,
+ size, &posRes) - distances);
p->hashBufPos += posRes - pos;
cyclicBufferPos += posRes - pos;
p->buffer += posRes - pos;
diff --git a/C/LzFindMt.h b/C/LzFindMt.h
index 3d86c78..fdd1700 100644
--- a/C/LzFindMt.h
+++ b/C/LzFindMt.h
@@ -1,5 +1,5 @@
/* LzFindMt.h -- multithreaded Match finder for LZ algorithms
-2017-04-03 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#ifndef __LZ_FIND_MT_H
#define __LZ_FIND_MT_H
@@ -19,10 +19,10 @@ EXTERN_C_BEGIN
typedef struct _CMtSync
{
- Bool wasCreated;
- Bool needStart;
- Bool exit;
- Bool stopWriting;
+ BoolInt wasCreated;
+ BoolInt needStart;
+ BoolInt exit;
+ BoolInt stopWriting;
CThread thread;
CAutoResetEvent canStart;
@@ -30,8 +30,8 @@ typedef struct _CMtSync
CAutoResetEvent wasStopped;
CSemaphore freeSemaphore;
CSemaphore filledSemaphore;
- Bool csWasInitialized;
- Bool csWasEntered;
+ BoolInt csWasInitialized;
+ BoolInt csWasEntered;
CCriticalSection cs;
UInt32 numProcessedBlocks;
} CMtSync;
diff --git a/C/Lzma2Dec.c b/C/Lzma2Dec.c
index 57e7f34..69648ea 100644
--- a/C/Lzma2Dec.c
+++ b/C/Lzma2Dec.c
@@ -1,5 +1,5 @@
/* Lzma2Dec.c -- LZMA2 Decoder
-2018-02-19 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
/* #define SHOW_DEBUG_INFO */
@@ -169,7 +169,7 @@ static void LzmaDec_UpdateWithUncompressed(CLzmaDec *p, const Byte *src, SizeT s
p->processedPos += (UInt32)size;
}
-void LzmaDec_InitDicAndState(CLzmaDec *p, Bool initDic, Bool initState);
+void LzmaDec_InitDicAndState(CLzmaDec *p, BoolInt initDic, BoolInt initState);
SRes Lzma2Dec_DecodeToDic(CLzma2Dec *p, SizeT dicLimit,
@@ -232,7 +232,7 @@ SRes Lzma2Dec_DecodeToDic(CLzma2Dec *p, SizeT dicLimit,
if (p->state == LZMA2_STATE_DATA)
{
- Bool initDic = (p->control == LZMA2_CONTROL_COPY_RESET_DIC);
+ BoolInt initDic = (p->control == LZMA2_CONTROL_COPY_RESET_DIC);
LzmaDec_InitDicAndState(&p->decoder, initDic, False);
}
@@ -254,8 +254,8 @@ SRes Lzma2Dec_DecodeToDic(CLzma2Dec *p, SizeT dicLimit,
if (p->state == LZMA2_STATE_DATA)
{
- Bool initDic = (p->control >= 0xE0);
- Bool initState = (p->control >= 0xA0);
+ BoolInt initDic = (p->control >= 0xE0);
+ BoolInt initState = (p->control >= 0xA0);
LzmaDec_InitDicAndState(&p->decoder, initDic, initState);
p->state = LZMA2_STATE_DATA_CONT;
}
diff --git a/C/Lzma2DecMt.c b/C/Lzma2DecMt.c
index be698cb..2ca316f 100644
--- a/C/Lzma2DecMt.c
+++ b/C/Lzma2DecMt.c
@@ -1,5 +1,5 @@
/* Lzma2DecMt.c -- LZMA2 Decoder Multi-thread
-2018-03-02 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -94,13 +94,13 @@ typedef struct
ISeqOutStream *outStream;
ICompressProgress *progress;
- Bool finishMode;
- Bool outSize_Defined;
+ BoolInt finishMode;
+ BoolInt outSize_Defined;
UInt64 outSize;
UInt64 outProcessed;
UInt64 inProcessed;
- Bool readWasFinished;
+ BoolInt readWasFinished;
SRes readRes;
Byte *inBuf;
@@ -113,7 +113,7 @@ typedef struct
#ifndef _7ZIP_ST
UInt64 outProcessed_Parse;
- Bool mtc_WasConstructed;
+ BoolInt mtc_WasConstructed;
CMtDec mtc;
CLzma2DecMtThread coders[MTDEC__THREADS_MAX];
#endif
@@ -277,7 +277,7 @@ static void Lzma2DecMt_MtCallback_Parse(void *obj, unsigned coderIndex, CMtDecCa
{
ELzma2ParseStatus status;
- Bool overflow;
+ BoolInt overflow;
UInt32 unpackRem = 0;
int checkFinishBlock = True;
@@ -477,7 +477,7 @@ static SRes Lzma2DecMt_MtCallback_Code(void *pp, unsigned coderIndex,
{
ELzmaStatus status;
size_t srcProcessed = srcSize;
- Bool blockWasFinished =
+ BoolInt blockWasFinished =
((int)t->parseStatus == LZMA_STATUS_FINISHED_WITH_MARK
|| t->parseStatus == LZMA2_PARSE_STATUS_NEW_BLOCK);
@@ -526,15 +526,15 @@ static SRes Lzma2DecMt_MtCallback_Code(void *pp, unsigned coderIndex,
#define LZMA2DECMT_STREAM_WRITE_STEP (1 << 24)
static SRes Lzma2DecMt_MtCallback_Write(void *pp, unsigned coderIndex,
- Bool needWriteToStream,
+ BoolInt needWriteToStream,
const Byte *src, size_t srcSize,
- Bool *needContinue, Bool *canRecode)
+ BoolInt *needContinue, BoolInt *canRecode)
{
CLzma2DecMt *me = (CLzma2DecMt *)pp;
const CLzma2DecMtThread *t = &me->coders[coderIndex];
size_t size = t->outCodeSize;
const Byte *data = t->outBuf;
- Bool needContinue2 = True;
+ BoolInt needContinue2 = True;
PRF_STR_INT_2("Write", coderIndex, srcSize);
@@ -633,7 +633,7 @@ static SRes Lzma2Dec_Prepare_ST(CLzma2DecMt *p)
static SRes Lzma2Dec_Decode_ST(CLzma2DecMt *p
#ifndef _7ZIP_ST
- , Bool tMode
+ , BoolInt tMode
#endif
)
{
@@ -674,8 +674,8 @@ static SRes Lzma2Dec_Decode_ST(CLzma2DecMt *p
SRes res;
SizeT outProcessed;
- Bool outFinished;
- Bool needStop;
+ BoolInt outFinished;
+ BoolInt needStop;
if (inPos == inLim)
{
@@ -810,7 +810,7 @@ SRes Lzma2DecMt_Decode(CLzma2DecMtHandle pp,
{
CLzma2DecMt *p = (CLzma2DecMt *)pp;
#ifndef _7ZIP_ST
- Bool tMode;
+ BoolInt tMode;
#endif
*inProcessed = 0;
@@ -903,7 +903,7 @@ SRes Lzma2DecMt_Decode(CLzma2DecMtHandle pp,
vt.Write = Lzma2DecMt_MtCallback_Write;
{
- Bool needContinue = False;
+ BoolInt needContinue = False;
SRes res = MtDec_Code(&p->mtc);
diff --git a/C/Lzma2Enc.c b/C/Lzma2Enc.c
index 5098195..d541477 100644
--- a/C/Lzma2Enc.c
+++ b/C/Lzma2Enc.c
@@ -1,5 +1,5 @@
/* Lzma2Enc.c -- LZMA2 Encoder
-2018-04-27 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -115,7 +115,7 @@ SRes LzmaEnc_PrepareForLzma2(CLzmaEncHandle pp, ISeqInStream *inStream, UInt32 k
ISzAllocPtr alloc, ISzAllocPtr allocBig);
SRes LzmaEnc_MemPrepare(CLzmaEncHandle pp, const Byte *src, SizeT srcLen,
UInt32 keepWindowSize, ISzAllocPtr alloc, ISzAllocPtr allocBig);
-SRes LzmaEnc_CodeOneMemBlock(CLzmaEncHandle pp, Bool reInit,
+SRes LzmaEnc_CodeOneMemBlock(CLzmaEncHandle pp, BoolInt reInit,
Byte *dest, size_t *destLen, UInt32 desiredPackSize, UInt32 *unpackSize);
const Byte *LzmaEnc_GetCurBuf(CLzmaEncHandle pp);
void LzmaEnc_Finish(CLzmaEncHandle pp);
@@ -133,7 +133,7 @@ static SRes Lzma2EncInt_EncodeSubblock(CLzma2EncInt *p, Byte *outBuf,
size_t packSize = packSizeLimit;
UInt32 unpackSize = LZMA2_UNPACK_SIZE_MAX;
unsigned lzHeaderSize = 5 + (p->needInitProp ? 1 : 0);
- Bool useCopyBlock;
+ BoolInt useCopyBlock;
SRes res;
*packSizeRes = 0;
@@ -373,7 +373,7 @@ typedef struct
size_t outBufSize; /* size of allocated outBufs[i] */
size_t outBufsDataSizes[MTCODER__BLOCKS_MAX];
- Bool mtCoder_WasConstructed;
+ BoolInt mtCoder_WasConstructed;
CMtCoder mtCoder;
Byte *outBufs[MTCODER__BLOCKS_MAX];
diff --git a/C/Lzma86Enc.c b/C/Lzma86Enc.c
index ee59fb7..8d35e6d 100644
--- a/C/Lzma86Enc.c
+++ b/C/Lzma86Enc.c
@@ -1,5 +1,5 @@
/* Lzma86Enc.c -- LZMA + x86 (BCJ) Filter Encoder
-2016-05-16 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -18,7 +18,7 @@ int Lzma86_Encode(Byte *dest, size_t *destLen, const Byte *src, size_t srcLen,
{
size_t outSize2 = *destLen;
Byte *filteredStream;
- Bool useFilter;
+ BoolInt useFilter;
int mainResult = SZ_ERROR_OUTPUT_EOF;
CLzmaEncProps props;
LzmaEncProps_Init(&props);
@@ -56,7 +56,7 @@ int Lzma86_Encode(Byte *dest, size_t *destLen, const Byte *src, size_t srcLen,
{
size_t minSize = 0;
- Bool bestIsFiltered = False;
+ BoolInt bestIsFiltered = False;
/* passes for SZ_FILTER_AUTO:
0 - BCJ + LZMA
@@ -71,7 +71,7 @@ int Lzma86_Encode(Byte *dest, size_t *destLen, const Byte *src, size_t srcLen,
size_t outSizeProcessed = outSize2 - LZMA86_HEADER_SIZE;
size_t outPropsSize = 5;
SRes curRes;
- Bool curModeIsFiltered = (numPasses > 1 && i == numPasses - 1);
+ BoolInt curModeIsFiltered = (numPasses > 1 && i == numPasses - 1);
if (curModeIsFiltered && !bestIsFiltered)
break;
if (useFilter && i == 0)
diff --git a/C/LzmaDec.c b/C/LzmaDec.c
index 962b94b..4d15764 100644
--- a/C/LzmaDec.c
+++ b/C/LzmaDec.c
@@ -1,13 +1,13 @@
/* LzmaDec.c -- LZMA Decoder
-2018-02-28 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
+#include <string.h>
+
/* #include "CpuArch.h" */
#include "LzmaDec.h"
-#include <string.h>
-
#define kNumTopBits 24
#define kTopValue ((UInt32)1 << kNumTopBits)
@@ -19,7 +19,7 @@
#define NORMALIZE if (range < kTopValue) { range <<= 8; code = (code << 8) | (*buf++); }
-#define IF_BIT_0(p) ttt = *(p); NORMALIZE; bound = (range >> kNumBitModelTotalBits) * ttt; if (code < bound)
+#define IF_BIT_0(p) ttt = *(p); NORMALIZE; bound = (range >> kNumBitModelTotalBits) * (UInt32)ttt; if (code < bound)
#define UPDATE_0(p) range = bound; *(p) = (CLzmaProb)(ttt + ((kBitModelTotal - ttt) >> kNumMoveBits));
#define UPDATE_1(p) range -= bound; code -= bound; *(p) = (CLzmaProb)(ttt - (ttt >> kNumMoveBits));
#define GET_BIT2(p, i, A0, A1) IF_BIT_0(p) \
@@ -66,7 +66,7 @@
#define NORMALIZE_CHECK if (range < kTopValue) { if (buf >= bufLimit) return DUMMY_ERROR; range <<= 8; code = (code << 8) | (*buf++); }
-#define IF_BIT_0_CHECK(p) ttt = *(p); NORMALIZE_CHECK; bound = (range >> kNumBitModelTotalBits) * ttt; if (code < bound)
+#define IF_BIT_0_CHECK(p) ttt = *(p); NORMALIZE_CHECK; bound = (range >> kNumBitModelTotalBits) * (UInt32)ttt; if (code < bound)
#define UPDATE_0_CHECK range = bound;
#define UPDATE_1_CHECK range -= bound; code -= bound;
#define GET_BIT2_CHECK(p, i, A0, A1) IF_BIT_0_CHECK(p) \
@@ -539,7 +539,7 @@ int MY_FAST_CALL LZMA_DECODE_REAL(CLzmaDec *p, SizeT limit, const Byte *bufLimit
curLen = ((rem < len) ? (unsigned)rem : len);
pos = dicPos - rep0 + (dicPos < rep0 ? dicBufSize : 0);
- processedPos += curLen;
+ processedPos += (UInt32)curLen;
len -= curLen;
if (curLen <= dicBufSize - pos)
@@ -547,7 +547,7 @@ int MY_FAST_CALL LZMA_DECODE_REAL(CLzmaDec *p, SizeT limit, const Byte *bufLimit
Byte *dest = dic + dicPos;
ptrdiff_t src = (ptrdiff_t)pos - (ptrdiff_t)dicPos;
const Byte *lim = dest + curLen;
- dicPos += curLen;
+ dicPos += (SizeT)curLen;
do
*(dest) = (Byte)*(dest + src);
while (++dest != lim);
@@ -572,14 +572,14 @@ int MY_FAST_CALL LZMA_DECODE_REAL(CLzmaDec *p, SizeT limit, const Byte *bufLimit
p->buf = buf;
p->range = range;
p->code = code;
- p->remainLen = len;
+ p->remainLen = (UInt32)len;
p->dicPos = dicPos;
p->processedPos = processedPos;
p->reps[0] = rep0;
p->reps[1] = rep1;
p->reps[2] = rep2;
p->reps[3] = rep3;
- p->state = state;
+ p->state = (UInt32)state;
return SZ_OK;
}
@@ -601,8 +601,8 @@ static void MY_FAST_CALL LzmaDec_WriteRem(CLzmaDec *p, SizeT limit)
if (p->checkDicSize == 0 && p->prop.dicSize - p->processedPos <= len)
p->checkDicSize = p->prop.dicSize;
- p->processedPos += len;
- p->remainLen -= len;
+ p->processedPos += (UInt32)len;
+ p->remainLen -= (UInt32)len;
while (len != 0)
{
len--;
@@ -850,7 +850,7 @@ static ELzmaDummy LzmaDec_TryDummy(const CLzmaDec *p, const Byte *buf, SizeT inS
}
-void LzmaDec_InitDicAndState(CLzmaDec *p, Bool initDic, Bool initState)
+void LzmaDec_InitDicAndState(CLzmaDec *p, BoolInt initDic, BoolInt initState)
{
p->remainLen = kMatchSpecLenStart + 1;
p->tempBufSize = 0;
@@ -979,10 +979,10 @@ SRes LzmaDec_DecodeToDic(CLzmaDec *p, SizeT dicLimit, const Byte *src, SizeT *sr
p->tempBufSize = rem;
if (rem < LZMA_REQUIRED_INPUT_MAX || checkEndMarkNow)
{
- int dummyRes = LzmaDec_TryDummy(p, p->tempBuf, rem);
+ int dummyRes = LzmaDec_TryDummy(p, p->tempBuf, (SizeT)rem);
if (dummyRes == DUMMY_ERROR)
{
- (*srcLen) += lookAhead;
+ (*srcLen) += (SizeT)lookAhead;
*status = LZMA_STATUS_NEEDS_MORE_INPUT;
return SZ_OK;
}
@@ -1005,9 +1005,9 @@ SRes LzmaDec_DecodeToDic(CLzmaDec *p, SizeT dicLimit, const Byte *src, SizeT *sr
return SZ_ERROR_FAIL; /* some internal error */
lookAhead -= rem;
}
- (*srcLen) += lookAhead;
+ (*srcLen) += (SizeT)lookAhead;
src += lookAhead;
- inSize -= lookAhead;
+ inSize -= (SizeT)lookAhead;
p->tempBufSize = 0;
}
}
diff --git a/C/LzmaEnc.c b/C/LzmaEnc.c
index bebe664..6954c2d 100644
--- a/C/LzmaEnc.c
+++ b/C/LzmaEnc.c
@@ -1,5 +1,5 @@
/* LzmaEnc.c -- LZMA Encoder
-2018-04-29 : Igor Pavlov : Public domain */
+2018-12-29: Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -38,6 +38,8 @@ static unsigned g_STAT_OFFSET = 0;
#define kNumBitPriceShiftBits 4
#define kBitPrice (1 << kNumBitPriceShiftBits)
+#define REP_LEN_COUNT 64
+
void LzmaEncProps_Init(CLzmaEncProps *p)
{
p->level = 5;
@@ -183,8 +185,11 @@ typedef struct
} COptimal;
-#define kNumOpts (1 << 12)
-#define kPackReserve (1 + kNumOpts * 2)
+// 18.06
+#define kNumOpts (1 << 11)
+#define kPackReserve (kNumOpts * 8)
+// #define kNumOpts (1 << 12)
+// #define kPackReserve (1 + kNumOpts * 2)
#define kNumLenToPosStates 4
#define kNumPosSlotBits 6
@@ -236,10 +241,18 @@ typedef struct
typedef struct
{
unsigned tableSize;
- unsigned counters[LZMA_NUM_PB_STATES_MAX];
UInt32 prices[LZMA_NUM_PB_STATES_MAX][kLenNumSymbolsTotal];
+ // UInt32 prices1[LZMA_NUM_PB_STATES_MAX][kLenNumLowSymbols * 2];
+ // UInt32 prices2[kLenNumSymbolsTotal];
} CLenPriceEnc;
+#define GET_PRICE_LEN(p, posState, len) \
+ ((p)->prices[posState][(size_t)(len) - LZMA_MATCH_LEN_MIN])
+
+/*
+#define GET_PRICE_LEN(p, posState, len) \
+ ((p)->prices2[(size_t)(len) - 2] + ((p)->prices1[posState][((len) - 2) & (kLenNumLowSymbols * 2 - 1)] & (((len) - 2 - kLenNumLowSymbols * 2) >> 9)))
+*/
typedef struct
{
@@ -308,16 +321,18 @@ typedef struct
unsigned lc, lp, pb;
unsigned lclp;
- Bool fastMode;
- Bool writeEndMark;
- Bool finished;
- Bool multiThread;
- Bool needInit;
+ BoolInt fastMode;
+ BoolInt writeEndMark;
+ BoolInt finished;
+ BoolInt multiThread;
+ BoolInt needInit;
+ // BoolInt _maxMode;
UInt64 nowPos64;
unsigned matchPriceCount;
- unsigned alignPriceCount;
+ // unsigned alignPriceCount;
+ int repLenEncCounter;
unsigned distTableSize;
@@ -325,7 +340,7 @@ typedef struct
SRes result;
#ifndef _7ZIP_ST
- Bool mtMode;
+ BoolInt mtMode;
// begin of CMatchFinderMt is used in LZ thread
CMatchFinderMt matchFinderMt;
// end of CMatchFinderMt is used in BT and HASH threads
@@ -458,6 +473,7 @@ SRes LzmaEnc_SetProps(CLzmaEncHandle pp, const CLzmaEncProps *props2)
p->lp = props.lp;
p->pb = props.pb;
p->fastMode = (props.algo == 0);
+ // p->_maxMode = True;
p->matchFinderBase.btMode = (Byte)(props.btMode ? 1 : 0);
{
unsigned numHashBytes = 4;
@@ -618,9 +634,9 @@ static void RangeEnc_FlushData(CRangeEnc *p)
#ifdef _LZMA_ENC_USE_BRANCH
-#define RC_BIT(p, prob, symbol) { \
+#define RC_BIT(p, prob, bit) { \
RC_BIT_PRE(p, prob) \
- if (symbol == 0) { range = newBound; ttt += (kBitModelTotal - ttt) >> kNumMoveBits; } \
+ if (bit == 0) { range = newBound; ttt += (kBitModelTotal - ttt) >> kNumMoveBits; } \
else { (p)->low += newBound; range -= newBound; ttt -= ttt >> kNumMoveBits; } \
*(prob) = (CLzmaProb)ttt; \
RC_NORM(p) \
@@ -628,15 +644,15 @@ static void RangeEnc_FlushData(CRangeEnc *p)
#else
-#define RC_BIT(p, prob, symbol) { \
+#define RC_BIT(p, prob, bit) { \
UInt32 mask; \
RC_BIT_PRE(p, prob) \
- mask = 0 - (UInt32)symbol; \
+ mask = 0 - (UInt32)bit; \
range &= mask; \
mask &= newBound; \
range -= mask; \
(p)->low += mask; \
- mask = (UInt32)symbol - 1; \
+ mask = (UInt32)bit - 1; \
range += newBound & mask; \
mask &= (kBitModelTotal - ((1 << kNumMoveBits) - 1)); \
mask += ((1 << kNumMoveBits) - 1); \
@@ -673,42 +689,42 @@ static void RangeEnc_EncodeBit_0(CRangeEnc *p, CLzmaProb *prob)
p->range = range;
}
-static void LitEnc_Encode(CRangeEnc *p, CLzmaProb *probs, UInt32 symbol)
+static void LitEnc_Encode(CRangeEnc *p, CLzmaProb *probs, UInt32 sym)
{
UInt32 range = p->range;
- symbol |= 0x100;
+ sym |= 0x100;
do
{
UInt32 ttt, newBound;
- // RangeEnc_EncodeBit(p, probs + (symbol >> 8), (symbol >> 7) & 1);
- CLzmaProb *prob = probs + (symbol >> 8);
- UInt32 bit = (symbol >> 7) & 1;
- symbol <<= 1;
+ // RangeEnc_EncodeBit(p, probs + (sym >> 8), (sym >> 7) & 1);
+ CLzmaProb *prob = probs + (sym >> 8);
+ UInt32 bit = (sym >> 7) & 1;
+ sym <<= 1;
RC_BIT(p, prob, bit);
}
- while (symbol < 0x10000);
+ while (sym < 0x10000);
p->range = range;
}
-static void LitEnc_EncodeMatched(CRangeEnc *p, CLzmaProb *probs, UInt32 symbol, UInt32 matchByte)
+static void LitEnc_EncodeMatched(CRangeEnc *p, CLzmaProb *probs, UInt32 sym, UInt32 matchByte)
{
UInt32 range = p->range;
UInt32 offs = 0x100;
- symbol |= 0x100;
+ sym |= 0x100;
do
{
UInt32 ttt, newBound;
CLzmaProb *prob;
UInt32 bit;
matchByte <<= 1;
- // RangeEnc_EncodeBit(p, probs + (offs + (matchByte & offs) + (symbol >> 8)), (symbol >> 7) & 1);
- prob = probs + (offs + (matchByte & offs) + (symbol >> 8));
- bit = (symbol >> 7) & 1;
- symbol <<= 1;
- offs &= ~(matchByte ^ symbol);
+ // RangeEnc_EncodeBit(p, probs + (offs + (matchByte & offs) + (sym >> 8)), (sym >> 7) & 1);
+ prob = probs + (offs + (matchByte & offs) + (sym >> 8));
+ bit = (sym >> 7) & 1;
+ sym <<= 1;
+ offs &= ~(matchByte ^ sym);
RC_BIT(p, prob, bit);
}
- while (symbol < 0x10000);
+ while (sym < 0x10000);
p->range = range;
}
@@ -739,11 +755,11 @@ static void LzmaEnc_InitPriceTables(CProbPrice *ProbPrices)
}
-#define GET_PRICE(prob, symbol) \
- p->ProbPrices[((prob) ^ (unsigned)(((-(int)(symbol))) & (kBitModelTotal - 1))) >> kNumMoveReducingBits];
+#define GET_PRICE(prob, bit) \
+ p->ProbPrices[((prob) ^ (unsigned)(((-(int)(bit))) & (kBitModelTotal - 1))) >> kNumMoveReducingBits];
-#define GET_PRICEa(prob, symbol) \
- ProbPrices[((prob) ^ (unsigned)((-((int)(symbol))) & (kBitModelTotal - 1))) >> kNumMoveReducingBits];
+#define GET_PRICEa(prob, bit) \
+ ProbPrices[((prob) ^ (unsigned)((-((int)(bit))) & (kBitModelTotal - 1))) >> kNumMoveReducingBits];
#define GET_PRICE_0(prob) p->ProbPrices[(prob) >> kNumMoveReducingBits]
#define GET_PRICE_1(prob) p->ProbPrices[((prob) ^ (kBitModelTotal - 1)) >> kNumMoveReducingBits]
@@ -752,48 +768,48 @@ static void LzmaEnc_InitPriceTables(CProbPrice *ProbPrices)
#define GET_PRICEa_1(prob) ProbPrices[((prob) ^ (kBitModelTotal - 1)) >> kNumMoveReducingBits]
-static UInt32 LitEnc_GetPrice(const CLzmaProb *probs, UInt32 symbol, const CProbPrice *ProbPrices)
+static UInt32 LitEnc_GetPrice(const CLzmaProb *probs, UInt32 sym, const CProbPrice *ProbPrices)
{
UInt32 price = 0;
- symbol |= 0x100;
+ sym |= 0x100;
do
{
- unsigned bit = symbol & 1;
- symbol >>= 1;
- price += GET_PRICEa(probs[symbol], bit);
+ unsigned bit = sym & 1;
+ sym >>= 1;
+ price += GET_PRICEa(probs[sym], bit);
}
- while (symbol >= 2);
+ while (sym >= 2);
return price;
}
-static UInt32 LitEnc_Matched_GetPrice(const CLzmaProb *probs, UInt32 symbol, UInt32 matchByte, const CProbPrice *ProbPrices)
+static UInt32 LitEnc_Matched_GetPrice(const CLzmaProb *probs, UInt32 sym, UInt32 matchByte, const CProbPrice *ProbPrices)
{
UInt32 price = 0;
UInt32 offs = 0x100;
- symbol |= 0x100;
+ sym |= 0x100;
do
{
matchByte <<= 1;
- price += GET_PRICEa(probs[offs + (matchByte & offs) + (symbol >> 8)], (symbol >> 7) & 1);
- symbol <<= 1;
- offs &= ~(matchByte ^ symbol);
+ price += GET_PRICEa(probs[offs + (matchByte & offs) + (sym >> 8)], (sym >> 7) & 1);
+ sym <<= 1;
+ offs &= ~(matchByte ^ sym);
}
- while (symbol < 0x10000);
+ while (sym < 0x10000);
return price;
}
-static void RcTree_ReverseEncode(CRangeEnc *rc, CLzmaProb *probs, unsigned numBits, UInt32 symbol)
+static void RcTree_ReverseEncode(CRangeEnc *rc, CLzmaProb *probs, unsigned numBits, unsigned sym)
{
UInt32 range = rc->range;
unsigned m = 1;
do
{
UInt32 ttt, newBound;
- unsigned bit = symbol & 1;
+ unsigned bit = sym & 1;
// RangeEnc_EncodeBit(rc, probs + m, bit);
- symbol >>= 1;
+ sym >>= 1;
RC_BIT(rc, probs + m, bit);
m = (m << 1) | bit;
}
@@ -812,37 +828,37 @@ static void LenEnc_Init(CLenEnc *p)
p->high[i] = kProbInitValue;
}
-static void LenEnc_Encode(CLenEnc *p, CRangeEnc *rc, unsigned symbol, unsigned posState)
+static void LenEnc_Encode(CLenEnc *p, CRangeEnc *rc, unsigned sym, unsigned posState)
{
UInt32 range, ttt, newBound;
CLzmaProb *probs = p->low;
range = rc->range;
RC_BIT_PRE(rc, probs);
- if (symbol >= kLenNumLowSymbols)
+ if (sym >= kLenNumLowSymbols)
{
RC_BIT_1(rc, probs);
probs += kLenNumLowSymbols;
RC_BIT_PRE(rc, probs);
- if (symbol >= kLenNumLowSymbols * 2)
+ if (sym >= kLenNumLowSymbols * 2)
{
RC_BIT_1(rc, probs);
rc->range = range;
- // RcTree_Encode(rc, p->high, kLenNumHighBits, symbol - kLenNumLowSymbols * 2);
- LitEnc_Encode(rc, p->high, symbol - kLenNumLowSymbols * 2);
+ // RcTree_Encode(rc, p->high, kLenNumHighBits, sym - kLenNumLowSymbols * 2);
+ LitEnc_Encode(rc, p->high, sym - kLenNumLowSymbols * 2);
return;
}
- symbol -= kLenNumLowSymbols;
+ sym -= kLenNumLowSymbols;
}
- // RcTree_Encode(rc, probs + (posState << kLenNumLowBits), kLenNumLowBits, symbol);
+ // RcTree_Encode(rc, probs + (posState << kLenNumLowBits), kLenNumLowBits, sym);
{
unsigned m;
unsigned bit;
RC_BIT_0(rc, probs);
probs += (posState << (1 + kLenNumLowBits));
- bit = (symbol >> 2) ; RC_BIT(rc, probs + 1, bit); m = (1 << 1) + bit;
- bit = (symbol >> 1) & 1; RC_BIT(rc, probs + m, bit); m = (m << 1) + bit;
- bit = symbol & 1; RC_BIT(rc, probs + m, bit);
+ bit = (sym >> 2) ; RC_BIT(rc, probs + 1, bit); m = (1 << 1) + bit;
+ bit = (sym >> 1) & 1; RC_BIT(rc, probs + m, bit); m = (m << 1) + bit;
+ bit = sym & 1; RC_BIT(rc, probs + m, bit);
rc->range = range;
}
}
@@ -863,50 +879,93 @@ static void SetPrices_3(const CLzmaProb *probs, UInt32 startPrice, UInt32 *price
}
-MY_NO_INLINE static void MY_FAST_CALL LenPriceEnc_UpdateTable(
- CLenPriceEnc *p, unsigned posState,
+MY_NO_INLINE static void MY_FAST_CALL LenPriceEnc_UpdateTables(
+ CLenPriceEnc *p,
+ unsigned numPosStates,
const CLenEnc *enc,
const CProbPrice *ProbPrices)
{
- // int y; for (y = 0; y < 100; y++) {
- UInt32 a;
- unsigned i, numSymbols;
+ UInt32 b;
+
+ {
+ unsigned prob = enc->low[0];
+ UInt32 a, c;
+ unsigned posState;
+ b = GET_PRICEa_1(prob);
+ a = GET_PRICEa_0(prob);
+ c = b + GET_PRICEa_0(enc->low[kLenNumLowSymbols]);
+ for (posState = 0; posState < numPosStates; posState++)
+ {
+ UInt32 *prices = p->prices[posState];
+ const CLzmaProb *probs = enc->low + (posState << (1 + kLenNumLowBits));
+ SetPrices_3(probs, a, prices, ProbPrices);
+ SetPrices_3(probs + kLenNumLowSymbols, c, prices + kLenNumLowSymbols, ProbPrices);
+ }
+ }
- UInt32 *prices = p->prices[posState];
+ /*
{
- const CLzmaProb *probs = enc->low + (posState << (1 + kLenNumLowBits));
- SetPrices_3(probs, GET_PRICEa_0(enc->low[0]), prices, ProbPrices);
+ unsigned i;
+ UInt32 b;
+ a = GET_PRICEa_0(enc->low[0]);
+ for (i = 0; i < kLenNumLowSymbols; i++)
+ p->prices2[i] = a;
a = GET_PRICEa_1(enc->low[0]);
- SetPrices_3(probs + kLenNumLowSymbols, a + GET_PRICEa_0(enc->low[kLenNumLowSymbols]), prices + kLenNumLowSymbols, ProbPrices);
+ b = a + GET_PRICEa_0(enc->low[kLenNumLowSymbols]);
+ for (i = kLenNumLowSymbols; i < kLenNumLowSymbols * 2; i++)
+ p->prices2[i] = b;
a += GET_PRICEa_1(enc->low[kLenNumLowSymbols]);
}
- numSymbols = p->tableSize;
- p->counters[posState] = numSymbols;
- for (i = kLenNumLowSymbols * 2; i < numSymbols; i += 1)
+ */
+
+ // p->counter = numSymbols;
+ // p->counter = 64;
+
{
- prices[i] = a +
- // RcTree_GetPrice(enc->high, kLenNumHighBits, i - kLenNumLowSymbols * 2, ProbPrices);
- LitEnc_GetPrice(enc->high, i - kLenNumLowSymbols * 2, ProbPrices);
- /*
- unsigned sym = (i - kLenNumLowSymbols * 2) >> 1;
- UInt32 price = a + RcTree_GetPrice(enc->high, kLenNumHighBits - 1, sym, ProbPrices);
- UInt32 prob = enc->high[(1 << 7) + sym];
- prices[i ] = price + GET_PRICEa_0(prob);
- prices[i + 1] = price + GET_PRICEa_1(prob);
- */
- }
- // }
-}
+ unsigned i = p->tableSize;
+
+ if (i > kLenNumLowSymbols * 2)
+ {
+ const CLzmaProb *probs = enc->high;
+ UInt32 *prices = p->prices[0] + kLenNumLowSymbols * 2;
+ i -= kLenNumLowSymbols * 2 - 1;
+ i >>= 1;
+ b += GET_PRICEa_1(enc->low[kLenNumLowSymbols]);
+ do
+ {
+ /*
+ p->prices2[i] = a +
+ // RcTree_GetPrice(enc->high, kLenNumHighBits, i - kLenNumLowSymbols * 2, ProbPrices);
+ LitEnc_GetPrice(probs, i - kLenNumLowSymbols * 2, ProbPrices);
+ */
+ // UInt32 price = a + RcTree_GetPrice(probs, kLenNumHighBits - 1, sym, ProbPrices);
+ unsigned sym = --i + (1 << (kLenNumHighBits - 1));
+ UInt32 price = b;
+ do
+ {
+ unsigned bit = sym & 1;
+ sym >>= 1;
+ price += GET_PRICEa(probs[sym], bit);
+ }
+ while (sym >= 2);
-static void LenPriceEnc_UpdateTables(CLenPriceEnc *p, unsigned numPosStates,
- const CLenEnc *enc,
- const CProbPrice *ProbPrices)
-{
- unsigned posState;
- for (posState = 0; posState < numPosStates; posState++)
- LenPriceEnc_UpdateTable(p, posState, enc, ProbPrices);
-}
+ {
+ unsigned prob = probs[(size_t)i + (1 << (kLenNumHighBits - 1))];
+ prices[(size_t)i * 2 ] = price + GET_PRICEa_0(prob);
+ prices[(size_t)i * 2 + 1] = price + GET_PRICEa_1(prob);
+ }
+ }
+ while (i);
+ {
+ unsigned posState;
+ size_t num = (p->tableSize - kLenNumLowSymbols * 2) * sizeof(p->prices[0][0]);
+ for (posState = 1; posState < numPosStates; posState++)
+ memcpy(p->prices[posState] + kLenNumLowSymbols * 2, p->prices[0] + kLenNumLowSymbols * 2, num);
+ }
+ }
+ }
+}
/*
#ifdef SHOW_STAT
@@ -917,7 +976,7 @@ static void LenPriceEnc_UpdateTables(CLenPriceEnc *p, unsigned numPosStates,
#define MOVE_POS(p, num) { \
p->additionalOffset += (num); \
- p->matchFinder.Skip(p->matchFinderObj, (num)); }
+ p->matchFinder.Skip(p->matchFinderObj, (UInt32)(num)); }
static unsigned ReadMatchDistances(CLzmaEnc *p, unsigned *numPairsRes)
@@ -954,7 +1013,8 @@ static unsigned ReadMatchDistances(CLzmaEnc *p, unsigned *numPairsRes)
const Byte *p2 = p1 + len;
ptrdiff_t dif = (ptrdiff_t)-1 - p->matches[(size_t)numPairs - 1];
const Byte *lim = p1 + numAvail;
- for (; p2 != lim && *p2 == p2[dif]; p2++);
+ for (; p2 != lim && *p2 == p2[dif]; p2++)
+ {}
return (unsigned)(p2 - p1);
}
}
@@ -977,7 +1037,7 @@ static unsigned ReadMatchDistances(CLzmaEnc *p, unsigned *numPairsRes)
+ GET_PRICE_1(p->isRep[state]) \
+ GET_PRICE_0(p->isRepG0[state])
-
+MY_FORCE_INLINE
static UInt32 GetPrice_PureRep(const CLzmaEnc *p, unsigned repIndex, size_t state, size_t posState)
{
UInt32 price;
@@ -1011,14 +1071,14 @@ static unsigned Backward(CLzmaEnc *p, unsigned cur)
for (;;)
{
UInt32 dist = p->opt[cur].dist;
- UInt32 len = p->opt[cur].len;
- UInt32 extra = p->opt[cur].extra;
+ unsigned len = (unsigned)p->opt[cur].len;
+ unsigned extra = (unsigned)p->opt[cur].extra;
cur -= len;
if (extra)
{
wr--;
- p->opt[wr].len = len;
+ p->opt[wr].len = (UInt32)len;
cur -= extra;
len = extra;
if (extra == 1)
@@ -1045,7 +1105,7 @@ static unsigned Backward(CLzmaEnc *p, unsigned cur)
wr--;
p->opt[wr].dist = dist;
- p->opt[wr].len = len;
+ p->opt[wr].len = (UInt32)len;
}
}
@@ -1102,7 +1162,8 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
repLens[i] = 0;
continue;
}
- for (len = 2; len < numAvail && data[len] == data2[len]; len++);
+ for (len = 2; len < numAvail && data[len] == data2[len]; len++)
+ {}
repLens[i] = len;
if (len > repLens[repMaxIndex])
repMaxIndex = i;
@@ -1111,7 +1172,7 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
if (repLens[repMaxIndex] >= p->numFastBytes)
{
unsigned len;
- p->backRes = repMaxIndex;
+ p->backRes = (UInt32)repMaxIndex;
len = repLens[repMaxIndex];
MOVE_POS(p, len - 1)
return len;
@@ -1128,8 +1189,12 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
curByte = *data;
matchByte = *(data - reps[0]);
+
+ last = repLens[repMaxIndex];
+ if (last <= mainLen)
+ last = mainLen;
- if (mainLen < 2 && curByte != matchByte && repLens[repMaxIndex] < 2)
+ if (last < 2 && curByte != matchByte)
{
p->backRes = MARK_LIT;
return 1;
@@ -1146,13 +1211,14 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
LitEnc_Matched_GetPrice(probs, curByte, matchByte, p->ProbPrices) :
LitEnc_GetPrice(probs, curByte, p->ProbPrices));
}
-
+
MakeAs_Lit(&p->opt[1]);
matchPrice = GET_PRICE_1(p->isMatch[p->state][posState]);
repMatchPrice = matchPrice + GET_PRICE_1(p->isRep[p->state]);
- if (matchByte == curByte)
+ // 18.06
+ if (matchByte == curByte && repLens[0] == 0)
{
UInt32 shortRepPrice = repMatchPrice + GetPrice_ShortRep(p, p->state, posState);
if (shortRepPrice < p->opt[1].price)
@@ -1160,16 +1226,13 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
p->opt[1].price = shortRepPrice;
MakeAs_ShortRep(&p->opt[1]);
}
+ if (last < 2)
+ {
+ p->backRes = p->opt[1].dist;
+ return 1;
+ }
}
-
- last = (mainLen >= repLens[repMaxIndex] ? mainLen : repLens[repMaxIndex]);
-
- if (last < 2)
- {
- p->backRes = p->opt[1].dist;
- return 1;
- }
-
+
p->opt[1].len = 1;
p->opt[0].reps[0] = reps[0];
@@ -1177,13 +1240,6 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
p->opt[0].reps[2] = reps[2];
p->opt[0].reps[3] = reps[3];
- {
- unsigned len = last;
- do
- p->opt[len--].price = kInfinityPrice;
- while (len >= 2);
- }
-
// ---------- REP ----------
for (i = 0; i < LZMA_NUM_REPS; i++)
@@ -1195,13 +1251,13 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
price = repMatchPrice + GetPrice_PureRep(p, i, p->state, posState);
do
{
- UInt32 price2 = price + p->repLenEnc.prices[posState][(size_t)repLen - 2];
+ UInt32 price2 = price + GET_PRICE_LEN(&p->repLenEnc, posState, repLen);
COptimal *opt = &p->opt[repLen];
if (price2 < opt->price)
{
opt->price = price2;
- opt->len = repLen;
- opt->dist = i;
+ opt->len = (UInt32)repLen;
+ opt->dist = (UInt32)i;
opt->extra = 0;
}
}
@@ -1211,38 +1267,41 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
// ---------- MATCH ----------
{
- unsigned len = ((repLens[0] >= 2) ? repLens[0] + 1 : 2);
+ unsigned len = repLens[0] + 1;
if (len <= mainLen)
{
unsigned offs = 0;
UInt32 normalMatchPrice = matchPrice + GET_PRICE_0(p->isRep[p->state]);
- while (len > matches[offs])
- offs += 2;
+ if (len < 2)
+ len = 2;
+ else
+ while (len > matches[offs])
+ offs += 2;
for (; ; len++)
{
COptimal *opt;
UInt32 dist = matches[(size_t)offs + 1];
- UInt32 price2 = normalMatchPrice + p->lenEnc.prices[posState][(size_t)len - LZMA_MATCH_LEN_MIN];
+ UInt32 price = normalMatchPrice + GET_PRICE_LEN(&p->lenEnc, posState, len);
unsigned lenToPosState = GetLenToPosState(len);
if (dist < kNumFullDistances)
- price2 += p->distancesPrices[lenToPosState][dist & (kNumFullDistances - 1)];
+ price += p->distancesPrices[lenToPosState][dist & (kNumFullDistances - 1)];
else
{
unsigned slot;
GetPosSlot2(dist, slot);
- price2 += p->alignPrices[dist & kAlignMask];
- price2 += p->posSlotPrices[lenToPosState][slot];
+ price += p->alignPrices[dist & kAlignMask];
+ price += p->posSlotPrices[lenToPosState][slot];
}
opt = &p->opt[len];
- if (price2 < opt->price)
+ if (price < opt->price)
{
- opt->price = price2;
- opt->len = len;
+ opt->price = price;
+ opt->len = (UInt32)len;
opt->dist = dist + LZMA_NUM_REPS;
opt->extra = 0;
}
@@ -1277,16 +1336,43 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
for (;;)
{
- UInt32 numAvail, numAvailFull;
+ unsigned numAvail;
+ UInt32 numAvailFull;
unsigned newLen, numPairs, prev, state, posState, startLen;
- UInt32 curPrice, litPrice, matchPrice, repMatchPrice;
- Bool nextIsLit;
+ UInt32 litPrice, matchPrice, repMatchPrice;
+ BoolInt nextIsLit;
Byte curByte, matchByte;
const Byte *data;
COptimal *curOpt, *nextOpt;
if (++cur == last)
- return Backward(p, cur);
+ break;
+
+ // 18.06
+ if (cur >= kNumOpts - 64)
+ {
+ unsigned j, best;
+ UInt32 price = p->opt[cur].price;
+ best = cur;
+ for (j = cur + 1; j <= last; j++)
+ {
+ UInt32 price2 = p->opt[j].price;
+ if (price >= price2)
+ {
+ price = price2;
+ best = j;
+ }
+ }
+ {
+ unsigned delta = best - cur;
+ if (delta != 0)
+ {
+ MOVE_POS(p, delta);
+ }
+ }
+ cur = best;
+ break;
+ }
newLen = ReadMatchDistances(p, &numPairs);
@@ -1294,15 +1380,24 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
{
p->numPairs = numPairs;
p->longestMatchLen = newLen;
- return Backward(p, cur);
+ break;
}
curOpt = &p->opt[cur];
+
+ position++;
+
+ // we need that check here, if skip_items in p->opt are possible
+ /*
+ if (curOpt->price >= kInfinityPrice)
+ continue;
+ */
+
prev = cur - curOpt->len;
-
+
if (curOpt->len == 1)
{
- state = p->opt[prev].state;
+ state = (unsigned)p->opt[prev].state;
if (IsShortRep(curOpt))
state = kShortRepNextStates[state];
else
@@ -1316,14 +1411,14 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
if (curOpt->extra)
{
- prev -= curOpt->extra;
+ prev -= (unsigned)curOpt->extra;
state = kState_RepAfterLit;
if (curOpt->extra == 1)
- state = (dist < LZMA_NUM_REPS) ? kState_RepAfterLit : kState_MatchAfterLit;
+ state = (dist < LZMA_NUM_REPS ? kState_RepAfterLit : kState_MatchAfterLit);
}
else
{
- state = p->opt[prev].state;
+ state = (unsigned)p->opt[prev].state;
if (dist < LZMA_NUM_REPS)
state = kRepNextStates[state];
else
@@ -1379,7 +1474,6 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
curByte = *data;
matchByte = *(data - reps[0]);
- position++;
posState = (position & p->pbMask);
/*
@@ -1391,13 +1485,25 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
< MATCH [ : LIT : REP_0 ]
*/
- curPrice = curOpt->price;
- litPrice = curPrice + GET_PRICE_0(p->isMatch[state][posState]);
+ {
+ UInt32 curPrice = curOpt->price;
+ unsigned prob = p->isMatch[state][posState];
+ matchPrice = curPrice + GET_PRICE_1(prob);
+ litPrice = curPrice + GET_PRICE_0(prob);
+ }
nextOpt = &p->opt[(size_t)cur + 1];
nextIsLit = False;
- // if (litPrice >= nextOpt->price) litPrice = 0; else // 18.new
+ // here we can allow skip_items in p->opt, if we don't check (nextOpt->price < kInfinityPrice)
+ // 18.new.06
+ if (nextOpt->price < kInfinityPrice
+ // && !IsLitState(state)
+ && matchByte == curByte
+ || litPrice > nextOpt->price
+ )
+ litPrice = 0;
+ else
{
const CLzmaProb *probs = LIT_PROBS(position, *(data - 1));
litPrice += (!IsLitState(state) ?
@@ -1413,21 +1519,32 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
}
}
- matchPrice = curPrice + GET_PRICE_1(p->isMatch[state][posState]);
repMatchPrice = matchPrice + GET_PRICE_1(p->isRep[state]);
+ numAvailFull = p->numAvail;
+ {
+ unsigned temp = kNumOpts - 1 - cur;
+ if (numAvailFull > temp)
+ numAvailFull = (UInt32)temp;
+ }
+
+ // 18.06
// ---------- SHORT_REP ----------
- // if (IsLitState(state)) // 18.new
+ if (IsLitState(state)) // 18.new
if (matchByte == curByte)
- // if (repMatchPrice < nextOpt->price) // 18.new
- if (nextOpt->len < 2
+ if (repMatchPrice < nextOpt->price) // 18.new
+ // if (numAvailFull < 2 || data[1] != *(data - reps[0] + 1))
+ if (
+ // nextOpt->price >= kInfinityPrice ||
+ nextOpt->len < 2 // we can check nextOpt->len, if skip items are not allowed in p->opt
|| (nextOpt->dist != 0
- && nextOpt->extra <= 1 // 17.old
- ))
+ // && nextOpt->extra <= 1 // 17.old
+ )
+ )
{
UInt32 shortRepPrice = repMatchPrice + GetPrice_ShortRep(p, state, posState);
- if (shortRepPrice <= nextOpt->price) // 17.old
- // if (shortRepPrice < nextOpt->price) // 18.new
+ // if (shortRepPrice <= nextOpt->price) // 17.old
+ if (shortRepPrice < nextOpt->price) // 18.new
{
nextOpt->price = shortRepPrice;
nextOpt->len = 1;
@@ -1436,13 +1553,6 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
}
}
- numAvailFull = p->numAvail;
- {
- UInt32 temp = kNumOpts - 1 - cur;
- if (numAvailFull > temp)
- numAvailFull = temp;
- }
-
if (numAvailFull < 2)
continue;
numAvail = (numAvailFull <= p->numFastBytes ? numAvailFull : p->numFastBytes);
@@ -1451,9 +1561,8 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
// ---------- LIT : REP_0 ----------
- if (
- // litPrice != 0 && // 18.new
- !nextIsLit
+ if (!nextIsLit
+ && litPrice != 0 // 18.new
&& matchByte != curByte
&& numAvailFull > 2)
{
@@ -1464,7 +1573,8 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
unsigned limit = p->numFastBytes + 1;
if (limit > numAvailFull)
limit = numAvailFull;
- for (len = 3; len < limit && data[len] == data2[len]; len++);
+ for (len = 3; len < limit && data[len] == data2[len]; len++)
+ {}
{
unsigned state2 = kLiteralNextStates[state];
@@ -1472,8 +1582,9 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
UInt32 price = litPrice + GetPrice_Rep_0(p, state2, posState2);
{
unsigned offset = cur + len;
- while (last < offset)
- p->opt[++last].price = kInfinityPrice;
+
+ if (last < offset)
+ last = offset;
// do
{
@@ -1481,14 +1592,14 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
COptimal *opt;
len--;
// price2 = price + GetPrice_Len_Rep_0(p, len, state2, posState2);
- price2 = price + p->repLenEnc.prices[posState2][len - LZMA_MATCH_LEN_MIN];
+ price2 = price + GET_PRICE_LEN(&p->repLenEnc, posState2, len);
opt = &p->opt[offset];
// offset--;
if (price2 < opt->price)
{
opt->price = price2;
- opt->len = len;
+ opt->len = (UInt32)len;
opt->dist = 0;
opt->extra = 1;
}
@@ -1500,6 +1611,7 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
}
startLen = 2; /* speed optimization */
+
{
// ---------- REP ----------
unsigned repIndex = 0; // 17.old
@@ -1512,24 +1624,28 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
if (data[0] != data2[0] || data[1] != data2[1])
continue;
- for (len = 2; len < numAvail && data[len] == data2[len]; len++);
+ for (len = 2; len < numAvail && data[len] == data2[len]; len++)
+ {}
// if (len < startLen) continue; // 18.new: speed optimization
- while (last < cur + len)
- p->opt[++last].price = kInfinityPrice;
+ {
+ unsigned offset = cur + len;
+ if (last < offset)
+ last = offset;
+ }
{
unsigned len2 = len;
price = repMatchPrice + GetPrice_PureRep(p, repIndex, state, posState);
do
{
- UInt32 price2 = price + p->repLenEnc.prices[posState][(size_t)len2 - 2];
+ UInt32 price2 = price + GET_PRICE_LEN(&p->repLenEnc, posState, len2);
COptimal *opt = &p->opt[cur + len2];
if (price2 < opt->price)
{
opt->price = price2;
- opt->len = len2;
- opt->dist = repIndex;
+ opt->len = (UInt32)len2;
+ opt->dist = (UInt32)repIndex;
opt->extra = 0;
}
}
@@ -1549,15 +1665,14 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
if (limit > numAvailFull)
limit = numAvailFull;
- for (; len2 < limit && data[len2] == data2[len2]; len2++);
-
- len2 -= len;
- if (len2 >= 3)
+ len2 += 2;
+ if (len2 <= limit)
+ if (data[len2 - 2] == data2[len2 - 2])
+ if (data[len2 - 1] == data2[len2 - 1])
{
unsigned state2 = kRepNextStates[state];
unsigned posState2 = (position + len) & p->pbMask;
- price +=
- p->repLenEnc.prices[posState][(size_t)len - 2]
+ price += GET_PRICE_LEN(&p->repLenEnc, posState, len)
+ GET_PRICE_0(p->isMatch[state2][posState2])
+ LitEnc_Matched_GetPrice(LIT_PROBS(position + len, data[(size_t)len - 1]),
data[len], data2[len], p->ProbPrices);
@@ -1568,31 +1683,40 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
price += GetPrice_Rep_0(p, state2, posState2);
+
+ for (; len2 < limit && data[len2] == data2[len2]; len2++)
+ {}
+
+ len2 -= len;
+ // if (len2 >= 3)
+ {
{
unsigned offset = cur + len + len2;
- while (last < offset)
- p->opt[++last].price = kInfinityPrice;
+
+ if (last < offset)
+ last = offset;
// do
{
- unsigned price2;
+ UInt32 price2;
COptimal *opt;
len2--;
// price2 = price + GetPrice_Len_Rep_0(p, len2, state2, posState2);
- price2 = price + p->repLenEnc.prices[posState2][len2 - LZMA_MATCH_LEN_MIN];
+ price2 = price + GET_PRICE_LEN(&p->repLenEnc, posState2, len2);
opt = &p->opt[offset];
// offset--;
if (price2 < opt->price)
{
opt->price = price2;
- opt->len = len2;
+ opt->len = (UInt32)len2;
opt->extra = (CExtra)(len + 1);
- opt->dist = repIndex;
+ opt->dist = (UInt32)repIndex;
}
}
// while (len2 >= 3);
}
}
+ }
}
}
}
@@ -1604,17 +1728,23 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
{
newLen = numAvail;
for (numPairs = 0; newLen > matches[numPairs]; numPairs += 2);
- matches[numPairs] = newLen;
+ matches[numPairs] = (UInt32)newLen;
numPairs += 2;
}
+ // startLen = 2; /* speed optimization */
+
if (newLen >= startLen)
{
UInt32 normalMatchPrice = matchPrice + GET_PRICE_0(p->isRep[state]);
UInt32 dist;
unsigned offs, posSlot, len;
- while (last < cur + newLen)
- p->opt[++last].price = kInfinityPrice;
+
+ {
+ unsigned offset = cur + newLen;
+ if (last < offset)
+ last = offset;
+ }
offs = 0;
while (startLen > matches[offs])
@@ -1626,27 +1756,29 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
for (len = /*2*/ startLen; ; len++)
{
- UInt32 price = normalMatchPrice + p->lenEnc.prices[posState][(size_t)len - LZMA_MATCH_LEN_MIN];
+ UInt32 price = normalMatchPrice + GET_PRICE_LEN(&p->lenEnc, posState, len);
{
COptimal *opt;
- unsigned lenToPosState = len - 2; lenToPosState = GetLenToPosState2(lenToPosState);
+ unsigned lenNorm = len - 2;
+ lenNorm = GetLenToPosState2(lenNorm);
if (dist < kNumFullDistances)
- price += p->distancesPrices[lenToPosState][dist & (kNumFullDistances - 1)];
+ price += p->distancesPrices[lenNorm][dist & (kNumFullDistances - 1)];
else
- price += p->posSlotPrices[lenToPosState][posSlot] + p->alignPrices[dist & kAlignMask];
+ price += p->posSlotPrices[lenNorm][posSlot] + p->alignPrices[dist & kAlignMask];
opt = &p->opt[cur + len];
if (price < opt->price)
{
opt->price = price;
- opt->len = len;
+ opt->len = (UInt32)len;
opt->dist = dist + LZMA_NUM_REPS;
opt->extra = 0;
}
}
- if (/*_maxMode && */ len == matches[offs])
+ if (len == matches[offs])
{
+ // if (p->_maxMode) {
// MATCH : LIT : REP_0
const Byte *data2 = data - dist - 1;
@@ -1655,11 +1787,17 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
if (limit > numAvailFull)
limit = numAvailFull;
- for (; len2 < limit && data[len2] == data2[len2]; len2++);
+ len2 += 2;
+ if (len2 <= limit)
+ if (data[len2 - 2] == data2[len2 - 2])
+ if (data[len2 - 1] == data2[len2 - 1])
+ {
+ for (; len2 < limit && data[len2] == data2[len2]; len2++)
+ {}
len2 -= len;
- if (len2 >= 3)
+ // if (len2 >= 3)
{
unsigned state2 = kMatchNextStates[state];
unsigned posState2 = (position + len) & p->pbMask;
@@ -1675,27 +1813,30 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
price += GetPrice_Rep_0(p, state2, posState2);
offset = cur + len + len2;
- while (last < offset)
- p->opt[++last].price = kInfinityPrice;
+
+ if (last < offset)
+ last = offset;
// do
{
UInt32 price2;
COptimal *opt;
len2--;
// price2 = price + GetPrice_Len_Rep_0(p, len2, state2, posState2);
- price2 = price + p->repLenEnc.prices[posState2][len2 - LZMA_MATCH_LEN_MIN];
+ price2 = price + GET_PRICE_LEN(&p->repLenEnc, posState2, len2);
opt = &p->opt[offset];
// offset--;
if (price2 < opt->price)
{
opt->price = price2;
- opt->len = len2;
+ opt->len = (UInt32)len2;
opt->extra = (CExtra)(len + 1);
opt->dist = dist + LZMA_NUM_REPS;
}
}
// while (len2 >= 3);
}
+
+ }
offs += 2;
if (offs == numPairs)
@@ -1707,6 +1848,12 @@ static unsigned GetOptimum(CLzmaEnc *p, UInt32 position)
}
}
}
+
+ do
+ p->opt[last].price = kInfinityPrice;
+ while (--last);
+
+ return Backward(p, cur);
}
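
Note: the GetOptimum hunks above stop seeding p->opt[] with kInfinityPrice on entry (and stop extending `last` by writing kInfinityPrice slot by slot); the table is filled once in LzmaEnc_Init (see the hunk near the end of this file's diff), and the closing `do ... while (--last);` loop restores the sentinel only for the slots this call actually used. A minimal sketch of that restore-what-you-touched pattern, with hypothetical names:

/* Sentinel-priced table that is reset lazily: only the entries dirtied by the
   last pass are restored, mirroring the kInfinityPrice handling above.
   Assumes last >= 1; slot 0 stays live, as in the real code. */
#define SENTINEL_PRICE 0xFFFFFFFFu

static void FinishPass(unsigned *prices, unsigned last)
{
  do
    prices[last] = SENTINEL_PRICE;   /* ready for the next pass */
  while (--last);
}
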
@@ -1733,6 +1880,7 @@ static unsigned GetOptimumFast(CLzmaEnc *p)
p->backRes = MARK_LIT;
if (numAvail < 2)
return 1;
+ // if (mainLen < 2 && p->state == 0) return 1; // 18.06.notused
if (numAvail > LZMA_MATCH_LEN_MAX)
numAvail = LZMA_MATCH_LEN_MAX;
data = p->matchFinder.GetPointerToCurrentPos(p->matchFinderObj) - 1;
@@ -1744,10 +1892,11 @@ static unsigned GetOptimumFast(CLzmaEnc *p)
const Byte *data2 = data - p->reps[i];
if (data[0] != data2[0] || data[1] != data2[1])
continue;
- for (len = 2; len < numAvail && data[len] == data2[len]; len++);
+ for (len = 2; len < numAvail && data[len] == data2[len]; len++)
+ {}
if (len >= p->numFastBytes)
{
- p->backRes = i;
+ p->backRes = (UInt32)i;
MOVE_POS(p, len - 1)
return len;
}
@@ -1791,7 +1940,7 @@ static unsigned GetOptimumFast(CLzmaEnc *p)
|| (repLen + 2 >= mainLen && mainDist >= (1 << 9))
|| (repLen + 3 >= mainLen && mainDist >= (1 << 15)))
{
- p->backRes = repIndex;
+ p->backRes = (UInt32)repIndex;
MOVE_POS(p, repLen - 1)
return repLen;
}
@@ -1930,23 +2079,22 @@ MY_NO_INLINE static SRes Flush(CLzmaEnc *p, UInt32 nowPos)
}
-
-static void FillAlignPrices(CLzmaEnc *p)
+MY_NO_INLINE static void FillAlignPrices(CLzmaEnc *p)
{
unsigned i;
const CProbPrice *ProbPrices = p->ProbPrices;
const CLzmaProb *probs = p->posAlignEncoder;
- p->alignPriceCount = 0;
+ // p->alignPriceCount = 0;
for (i = 0; i < kAlignTableSize / 2; i++)
{
UInt32 price = 0;
- unsigned symbol = i;
+ unsigned sym = i;
unsigned m = 1;
unsigned bit;
UInt32 prob;
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[m], bit); m = (m << 1) + bit;
prob = probs[m];
p->alignPrices[i ] = price + GET_PRICEa_0(prob);
p->alignPrices[i + 8] = price + GET_PRICEa_1(prob);
@@ -1955,78 +2103,97 @@ static void FillAlignPrices(CLzmaEnc *p)
}
-static void FillDistancesPrices(CLzmaEnc *p)
+MY_NO_INLINE static void FillDistancesPrices(CLzmaEnc *p)
{
+ // int y; for (y = 0; y < 100; y++) {
+
UInt32 tempPrices[kNumFullDistances];
- unsigned i, lenToPosState;
+ unsigned i, lps;
const CProbPrice *ProbPrices = p->ProbPrices;
p->matchPriceCount = 0;
- for (i = kStartPosModelIndex; i < kNumFullDistances; i++)
+ for (i = kStartPosModelIndex / 2; i < kNumFullDistances / 2; i++)
{
unsigned posSlot = GetPosSlot1(i);
- unsigned footerBits = ((posSlot >> 1) - 1);
+ unsigned footerBits = (posSlot >> 1) - 1;
unsigned base = ((2 | (posSlot & 1)) << footerBits);
+ const CLzmaProb *probs = p->posEncoders + (size_t)base * 2;
// tempPrices[i] = RcTree_ReverseGetPrice(p->posEncoders + base, footerBits, i - base, p->ProbPrices);
-
- const CLzmaProb *probs = p->posEncoders + base;
UInt32 price = 0;
unsigned m = 1;
- unsigned symbol = i - base;
+ unsigned sym = i;
+ unsigned offset = (unsigned)1 << footerBits;
+ base += i;
+
+ if (footerBits)
do
{
- unsigned bit = symbol & 1;
- symbol >>= 1;
+ unsigned bit = sym & 1;
+ sym >>= 1;
price += GET_PRICEa(probs[m], bit);
m = (m << 1) + bit;
}
while (--footerBits);
- tempPrices[i] = price;
+
+ {
+ unsigned prob = probs[m];
+ tempPrices[base ] = price + GET_PRICEa_0(prob);
+ tempPrices[base + offset] = price + GET_PRICEa_1(prob);
+ }
}
- for (lenToPosState = 0; lenToPosState < kNumLenToPosStates; lenToPosState++)
+ for (lps = 0; lps < kNumLenToPosStates; lps++)
{
- unsigned posSlot;
- const CLzmaProb *encoder = p->posSlotEncoder[lenToPosState];
- UInt32 *posSlotPrices = p->posSlotPrices[lenToPosState];
- unsigned distTableSize = p->distTableSize;
- const CLzmaProb *probs = encoder;
- for (posSlot = 0; posSlot < distTableSize; posSlot += 2)
+ unsigned slot;
+ unsigned distTableSize2 = (p->distTableSize + 1) >> 1;
+ UInt32 *posSlotPrices = p->posSlotPrices[lps];
+ const CLzmaProb *probs = p->posSlotEncoder[lps];
+
+ for (slot = 0; slot < distTableSize2; slot++)
{
- // posSlotPrices[posSlot] = RcTree_GetPrice(encoder, kNumPosSlotBits, posSlot, p->ProbPrices);
- UInt32 price = 0;
+ // posSlotPrices[slot] = RcTree_GetPrice(encoder, kNumPosSlotBits, slot, p->ProbPrices);
+ UInt32 price;
unsigned bit;
- unsigned symbol = (posSlot >> 1) + (1 << (kNumPosSlotBits - 1));
- UInt32 prob;
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[symbol], bit);
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[symbol], bit);
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[symbol], bit);
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[symbol], bit);
- bit = symbol & 1; symbol >>= 1; price += GET_PRICEa(probs[symbol], bit);
- prob = probs[(posSlot >> 1) + (1 << (kNumPosSlotBits - 1))];
- posSlotPrices[posSlot ] = price + GET_PRICEa_0(prob);
- posSlotPrices[posSlot + 1] = price + GET_PRICEa_1(prob);
+ unsigned sym = slot + (1 << (kNumPosSlotBits - 1));
+ unsigned prob;
+ bit = sym & 1; sym >>= 1; price = GET_PRICEa(probs[sym], bit);
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[sym], bit);
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[sym], bit);
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[sym], bit);
+ bit = sym & 1; sym >>= 1; price += GET_PRICEa(probs[sym], bit);
+ prob = probs[(size_t)slot + (1 << (kNumPosSlotBits - 1))];
+ posSlotPrices[(size_t)slot * 2 ] = price + GET_PRICEa_0(prob);
+ posSlotPrices[(size_t)slot * 2 + 1] = price + GET_PRICEa_1(prob);
}
- for (posSlot = kEndPosModelIndex; posSlot < distTableSize; posSlot++)
- posSlotPrices[posSlot] += ((UInt32)(((posSlot >> 1) - 1) - kNumAlignBits) << kNumBitPriceShiftBits);
-
+
{
- UInt32 *distancesPrices = p->distancesPrices[lenToPosState];
+ UInt32 delta = ((UInt32)((kEndPosModelIndex / 2 - 1) - kNumAlignBits) << kNumBitPriceShiftBits);
+ for (slot = kEndPosModelIndex / 2; slot < distTableSize2; slot++)
{
- distancesPrices[0] = posSlotPrices[0];
- distancesPrices[1] = posSlotPrices[1];
- distancesPrices[2] = posSlotPrices[2];
- distancesPrices[3] = posSlotPrices[3];
+ posSlotPrices[(size_t)slot * 2 ] += delta;
+ posSlotPrices[(size_t)slot * 2 + 1] += delta;
+ delta += ((UInt32)1 << kNumBitPriceShiftBits);
}
+ }
+
+ {
+ UInt32 *dp = p->distancesPrices[lps];
+
+ dp[0] = posSlotPrices[0];
+ dp[1] = posSlotPrices[1];
+ dp[2] = posSlotPrices[2];
+ dp[3] = posSlotPrices[3];
+
for (i = 4; i < kNumFullDistances; i += 2)
{
UInt32 slotPrice = posSlotPrices[GetPosSlot1(i)];
- distancesPrices[i ] = slotPrice + tempPrices[i];
- distancesPrices[i + 1] = slotPrice + tempPrices[i + 1];
+ dp[i ] = slotPrice + tempPrices[i];
+ dp[i + 1] = slotPrice + tempPrices[i + 1];
}
}
}
+ // }
}
@@ -2243,10 +2410,7 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
if (len != 1)
{
LenEnc_Encode(&p->repLenProbs, &p->rc, len - LZMA_MATCH_LEN_MIN, posState);
- if (!p->fastMode)
- if (--p->repLenEnc.counters[posState] == 0)
- LenPriceEnc_UpdateTable(&p->repLenEnc, posState, &p->repLenProbs, p->ProbPrices);
-
+ --p->repLenEncCounter;
p->state = kRepNextStates[p->state];
}
}
@@ -2258,9 +2422,7 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
p->state = kMatchNextStates[p->state];
LenEnc_Encode(&p->lenProbs, &p->rc, len - LZMA_MATCH_LEN_MIN, posState);
- if (!p->fastMode)
- if (--p->lenEnc.counters[posState] == 0)
- LenPriceEnc_UpdateTable(&p->lenEnc, posState, &p->lenProbs, p->ProbPrices);
+ // --p->lenEnc.counter;
dist -= LZMA_NUM_REPS;
p->reps[3] = p->reps[2];
@@ -2272,17 +2434,17 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
GetPosSlot(dist, posSlot);
// RcTree_Encode_PosSlot(&p->rc, p->posSlotEncoder[GetLenToPosState(len)], posSlot);
{
- UInt32 symbol = posSlot + (1 << kNumPosSlotBits);
+ UInt32 sym = (UInt32)posSlot + (1 << kNumPosSlotBits);
range = p->rc.range;
probs = p->posSlotEncoder[GetLenToPosState(len)];
do
{
- CLzmaProb *prob = probs + (symbol >> kNumPosSlotBits);
- UInt32 bit = (symbol >> (kNumPosSlotBits - 1)) & 1;
- symbol <<= 1;
+ CLzmaProb *prob = probs + (sym >> kNumPosSlotBits);
+ UInt32 bit = (sym >> (kNumPosSlotBits - 1)) & 1;
+ sym <<= 1;
RC_BIT(&p->rc, prob, bit);
}
- while (symbol < (1 << kNumPosSlotBits * 2));
+ while (sym < (1 << kNumPosSlotBits * 2));
p->rc.range = range;
}
@@ -2293,7 +2455,7 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
if (dist < kNumFullDistances)
{
unsigned base = ((2 | (posSlot & 1)) << footerBits);
- RcTree_ReverseEncode(&p->rc, p->posEncoders + base, footerBits, dist - base);
+ RcTree_ReverseEncode(&p->rc, p->posEncoders + base, footerBits, (unsigned)(dist /* - base */));
}
else
{
@@ -2329,14 +2491,14 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
bit = dist & 1; dist >>= 1; RC_BIT(&p->rc, p->posAlignEncoder + m, bit); m = (m << 1) + bit;
bit = dist & 1; RC_BIT(&p->rc, p->posAlignEncoder + m, bit);
p->rc.range = range;
- p->alignPriceCount++;
+ // p->alignPriceCount++;
}
}
}
}
}
- nowPos32 += len;
+ nowPos32 += (UInt32)len;
p->additionalOffset -= len;
if (p->additionalOffset == 0)
@@ -2345,10 +2507,27 @@ static SRes LzmaEnc_CodeOneBlock(CLzmaEnc *p, UInt32 maxPackSize, UInt32 maxUnpa
if (!p->fastMode)
{
- if (p->matchPriceCount >= (1 << 7))
+ /*
+ if (p->alignPriceCount >= 16) // kAlignTableSize
+ FillAlignPrices(p);
+ if (p->matchPriceCount >= 128)
FillDistancesPrices(p);
- if (p->alignPriceCount >= kAlignTableSize)
+ if (p->lenEnc.counter <= 0)
+ LenPriceEnc_UpdateTables(&p->lenEnc, 1 << p->pb, &p->lenProbs, p->ProbPrices);
+ */
+ if (p->matchPriceCount >= 64)
+ {
FillAlignPrices(p);
+ // { int y; for (y = 0; y < 100; y++) {
+ FillDistancesPrices(p);
+ // }}
+ LenPriceEnc_UpdateTables(&p->lenEnc, 1 << p->pb, &p->lenProbs, p->ProbPrices);
+ }
+ if (p->repLenEncCounter <= 0)
+ {
+ p->repLenEncCounter = REP_LEN_COUNT;
+ LenPriceEnc_UpdateTables(&p->repLenEnc, 1 << p->pb, &p->repLenProbs, p->ProbPrices);
+ }
}
if (p->matchFinder.GetNumAvailableBytes(p->matchFinderObj) == 0)
@@ -2488,12 +2667,19 @@ void LzmaEnc_Init(CLzmaEnc *p)
p->optEnd = 0;
p->optCur = 0;
+
+ {
+ for (i = 0; i < kNumOpts; i++)
+ p->opt[i].price = kInfinityPrice;
+ }
+
p->additionalOffset = 0;
p->pbMask = (1 << p->pb) - 1;
p->lpMask = ((UInt32)0x100 << p->lp) - ((unsigned)0x100 >> p->lc);
}
+
void LzmaEnc_InitPrices(CLzmaEnc *p)
{
if (!p->fastMode)
@@ -2505,6 +2691,9 @@ void LzmaEnc_InitPrices(CLzmaEnc *p)
p->lenEnc.tableSize =
p->repLenEnc.tableSize =
p->numFastBytes + 1 - LZMA_MATCH_LEN_MIN;
+
+ p->repLenEncCounter = REP_LEN_COUNT;
+
LenPriceEnc_UpdateTables(&p->lenEnc, 1 << p->pb, &p->lenProbs, p->ProbPrices);
LenPriceEnc_UpdateTables(&p->repLenEnc, 1 << p->pb, &p->repLenProbs, p->ProbPrices);
}
@@ -2581,7 +2770,7 @@ typedef struct
ISeqOutStream vt;
Byte *data;
SizeT rem;
- Bool overflow;
+ BoolInt overflow;
} CLzmaEnc_SeqOutStreamBuf;
static size_t SeqOutStreamBuf_Write(const ISeqOutStream *pp, const void *data, size_t size)
@@ -2613,7 +2802,7 @@ const Byte *LzmaEnc_GetCurBuf(CLzmaEncHandle pp)
}
-SRes LzmaEnc_CodeOneMemBlock(CLzmaEncHandle pp, Bool reInit,
+SRes LzmaEnc_CodeOneMemBlock(CLzmaEncHandle pp, BoolInt reInit,
Byte *dest, size_t *destLen, UInt32 desiredPackSize, UInt32 *unpackSize)
{
CLzmaEnc *p = (CLzmaEnc *)pp;
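
Note: the LzmaEnc.c hunks above fold the old per-posState LenPriceEnc_UpdateTable refresh into a single LenPriceEnc_UpdateTables pass (posState 0 is computed, the shared high-length prices are memcpy'd to the remaining states), and they defer the price-table rebuilds: FillAlignPrices, FillDistancesPrices and the length tables now refresh only when matchPriceCount reaches 64, and the rep-length table when repLenEncCounter runs out. A minimal sketch of that counter-gated refresh pattern, with hypothetical names (the real triggers are p->matchPriceCount and p->repLenEncCounter):

/* Counter-gated price refresh, sketched with hypothetical names. */
#define REFRESH_THRESHOLD 64          /* the hunk above uses 64 for matchPriceCount */

typedef struct
{
  unsigned matchPriceCount;           /* incremented as matches are coded */
  int repLenCounter;                  /* counts down to the next rep-length refresh */
} CPriceRefresh;

static void RefreshPricesIfNeeded(CPriceRefresh *pr)
{
  if (pr->matchPriceCount >= REFRESH_THRESHOLD)
  {
    pr->matchPriceCount = 0;
    /* FillAlignPrices(p); FillDistancesPrices(p);
       LenPriceEnc_UpdateTables(&p->lenEnc, ...); */
  }
  if (pr->repLenCounter <= 0)
  {
    pr->repLenCounter = REFRESH_THRESHOLD;   /* the real encoder reloads from REP_LEN_COUNT here */
    /* LenPriceEnc_UpdateTables(&p->repLenEnc, ...); */
  }
}
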
diff --git a/C/MtCoder.c b/C/MtCoder.c
index ddc7c02..5667f2d 100644
--- a/C/MtCoder.c
+++ b/C/MtCoder.c
@@ -1,5 +1,5 @@
/* MtCoder.c -- Multi-thread Coder
-2018-02-21 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -119,7 +119,7 @@ static SRes ThreadFunc2(CMtCoderThread *t)
unsigned bi;
SRes res;
SRes res2;
- Bool finished;
+ BoolInt finished;
unsigned bufIndex;
size_t size;
const Byte *inData;
@@ -294,7 +294,7 @@ static SRes ThreadFunc2(CMtCoderThread *t)
if (++wi >= mtc->numBlocksMax)
wi = 0;
{
- Bool isReady;
+ BoolInt isReady;
CriticalSection_Enter(&mtc->cs);
@@ -547,7 +547,7 @@ SRes MtCoder_Code(CMtCoder *p)
{
const CMtCoderBlock *block = &p->blocks[bi];
unsigned bufIndex = block->bufIndex;
- Bool finished = block->finished;
+ BoolInt finished = block->finished;
if (res == SZ_OK && block->res != SZ_OK)
res = block->res;
diff --git a/C/MtCoder.h b/C/MtCoder.h
index 7982e84..603329d 100644
--- a/C/MtCoder.h
+++ b/C/MtCoder.h
@@ -1,5 +1,5 @@
/* MtCoder.h -- Multi-thread Coder
-2018-02-21 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#ifndef __MT_CODER_H
#define __MT_CODER_H
@@ -67,7 +67,7 @@ typedef struct
{
SRes res;
unsigned bufIndex;
- Bool finished;
+ BoolInt finished;
} CMtCoderBlock;
@@ -97,7 +97,7 @@ typedef struct _CMtCoder
CAutoResetEvent readEvent;
CSemaphore blocksSemaphore;
- Bool stopReading;
+ BoolInt stopReading;
SRes readRes;
#ifdef MTCODER__USE_WRITE_THREAD
diff --git a/C/MtDec.c b/C/MtDec.c
index 60d31b0..374e4d3 100644
--- a/C/MtDec.c
+++ b/C/MtDec.c
@@ -1,5 +1,5 @@
/* MtDec.c -- Multi-thread Decoder
-2018-03-02 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -197,7 +197,7 @@ static SRes FullRead(ISeqInStream *stream, Byte *data, size_t *processedSize)
}
-static SRes MtDec_GetError_Spec(CMtDec *p, UInt64 interruptIndex, Bool *wasInterrupted)
+static SRes MtDec_GetError_Spec(CMtDec *p, UInt64 interruptIndex, BoolInt *wasInterrupted)
{
SRes res;
CriticalSection_Enter(&p->mtProgress.cs);
@@ -207,7 +207,7 @@ static SRes MtDec_GetError_Spec(CMtDec *p, UInt64 interruptIndex, Bool *wasInter
return res;
}
-static SRes MtDec_Progress_GetError_Spec(CMtDec *p, UInt64 inSize, UInt64 outSize, UInt64 interruptIndex, Bool *wasInterrupted)
+static SRes MtDec_Progress_GetError_Spec(CMtDec *p, UInt64 inSize, UInt64 outSize, UInt64 interruptIndex, BoolInt *wasInterrupted)
{
SRes res;
CriticalSection_Enter(&p->mtProgress.cs);
@@ -271,9 +271,9 @@ static WRes ThreadFunc2(CMtDecThread *t)
for (;;)
{
SRes res, codeRes;
- Bool wasInterrupted, isAllocError, overflow, finish;
+ BoolInt wasInterrupted, isAllocError, overflow, finish;
SRes threadingErrorSRes;
- Bool needCode, needWrite, needContinue;
+ BoolInt needCode, needWrite, needContinue;
size_t inDataSize_Start;
UInt64 inDataSize;
@@ -289,7 +289,7 @@ static WRes ThreadFunc2(CMtDecThread *t)
Byte *afterEndData = NULL;
size_t afterEndData_Size = 0;
- Bool canCreateNewThread = False;
+ BoolInt canCreateNewThread = False;
// CMtDecCallbackInfo parse;
CMtDecThread *nextThread;
@@ -629,7 +629,7 @@ static WRes ThreadFunc2(CMtDecThread *t)
if (res == SZ_OK && needCode && codeRes == SZ_OK)
{
- Bool isStartBlock = True;
+ BoolInt isStartBlock = True;
CMtDecBufLink *link = (CMtDecBufLink *)t->inBuf;
for (;;)
@@ -691,9 +691,9 @@ static WRes ThreadFunc2(CMtDecThread *t)
RINOK_THREAD(Event_Wait(&t->canWrite));
{
- Bool isErrorMode = False;
- Bool canRecode = True;
- Bool needWriteToStream = needWrite;
+ BoolInt isErrorMode = False;
+ BoolInt canRecode = True;
+ BoolInt needWriteToStream = needWrite;
if (p->exitThread) return 0; // it's never executed in normal cases
diff --git a/C/MtDec.h b/C/MtDec.h
index b445bc9..9864cc8 100644
--- a/C/MtDec.h
+++ b/C/MtDec.h
@@ -1,5 +1,5 @@
/* MtDec.h -- Multi-thread Decoder
-2018-03-02 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#ifndef __MT_DEC_H
#define __MT_DEC_H
@@ -76,7 +76,7 @@ typedef struct
// out
EMtDecParseState state;
- Bool canCreateNewThread;
+ BoolInt canCreateNewThread;
UInt64 outPos; // check it (size_t)
} CMtDecCallbackInfo;
@@ -107,11 +107,11 @@ typedef struct
if (*canRecode), we didn't flush current block data, so we still can decode current block later.
*/
SRes (*Write)(void *p, unsigned coderIndex,
- Bool needWriteToStream,
+ BoolInt needWriteToStream,
const Byte *src, size_t srcSize,
// int srcFinished,
- Bool *needContinue,
- Bool *canRecode);
+ BoolInt *needContinue,
+ BoolInt *canRecode);
} IMtDecCallback;
@@ -140,22 +140,22 @@ typedef struct _CMtDec
size_t allocatedBufsSize;
- Bool exitThread;
+ BoolInt exitThread;
WRes exitThreadWRes;
UInt64 blockIndex;
- Bool isAllocError;
- Bool overflow;
+ BoolInt isAllocError;
+ BoolInt overflow;
SRes threadingErrorSRes;
- Bool needContinue;
+ BoolInt needContinue;
// CAutoResetEvent finishedEvent;
SRes readRes;
SRes codeRes;
- Bool wasInterrupted;
+ BoolInt wasInterrupted;
unsigned numStartedThreads_Limit;
unsigned numStartedThreads;
@@ -164,14 +164,14 @@ typedef struct _CMtDec
size_t crossStart;
size_t crossEnd;
UInt64 readProcessed;
- Bool readWasFinished;
+ BoolInt readWasFinished;
UInt64 inProcessed;
unsigned filledThreadStart;
unsigned numFilledThreads;
#ifndef _7ZIP_ST
- Bool needInterrupt;
+ BoolInt needInterrupt;
UInt64 interruptIndex;
CMtProgress mtProgress;
CMtDecThread threads[MTDEC__THREADS_MAX];
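
Note: the Bool -> BoolInt renames running through LzmaEnc.c, MtCoder.* and MtDec.* above (and Ppmd7, Xz and the C++ wrappers below) are a mechanical type rename, presumably to avoid collisions with platform headers that also define Bool (X11's Xlib.h, for one). The typedef itself is not part of the hunks shown here; as far as I can tell it lives in C/7zTypes.h and amounts to this sketch:

/* Sketch of the BoolInt definition (assumed to match C/7zTypes.h, which is
   outside the hunks shown in this diff). */
typedef int BoolInt;
/* typedef BoolInt Bool;  -- the old spelling, no longer used by the C sources */
#define True 1
#define False 0
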
diff --git a/C/Ppmd7.c b/C/Ppmd7.c
index ef93cb2..80e7de9 100644
--- a/C/Ppmd7.c
+++ b/C/Ppmd7.c
@@ -1,5 +1,5 @@
/* Ppmd7.c -- PPMdH codec
-2017-04-03 : Igor Pavlov : Public domain
+2018-07-04 : Igor Pavlov : Public domain
This code is based on PPMd var.H (2001): Dmitry Shkarin : Public domain */
#include "Precomp.h"
@@ -95,7 +95,7 @@ void Ppmd7_Free(CPpmd7 *p, ISzAllocPtr alloc)
p->Base = 0;
}
-Bool Ppmd7_Alloc(CPpmd7 *p, UInt32 size, ISzAllocPtr alloc)
+BoolInt Ppmd7_Alloc(CPpmd7 *p, UInt32 size, ISzAllocPtr alloc)
{
if (!p->Base || p->Size != size)
{
@@ -342,7 +342,7 @@ void Ppmd7_Init(CPpmd7 *p, unsigned maxOrder)
p->DummySee.Count = 64; /* unused */
}
-static CTX_PTR CreateSuccessors(CPpmd7 *p, Bool skip)
+static CTX_PTR CreateSuccessors(CPpmd7 *p, BoolInt skip)
{
CPpmd_State upState;
CTX_PTR c = p->MinContext;
diff --git a/C/Ppmd7.h b/C/Ppmd7.h
index ee2c035..cce93f1 100644
--- a/C/Ppmd7.h
+++ b/C/Ppmd7.h
@@ -1,5 +1,5 @@
/* Ppmd7.h -- PPMdH compression codec
-2017-04-03 : Igor Pavlov : Public domain
+2018-07-04 : Igor Pavlov : Public domain
This code is based on PPMd var.H (2001): Dmitry Shkarin : Public domain */
/* This code supports virtual RangeDecoder and includes the implementation
@@ -60,7 +60,7 @@ typedef struct
} CPpmd7;
void Ppmd7_Construct(CPpmd7 *p);
-Bool Ppmd7_Alloc(CPpmd7 *p, UInt32 size, ISzAllocPtr alloc);
+BoolInt Ppmd7_Alloc(CPpmd7 *p, UInt32 size, ISzAllocPtr alloc);
void Ppmd7_Free(CPpmd7 *p, ISzAllocPtr alloc);
void Ppmd7_Init(CPpmd7 *p, unsigned maxOrder);
#define Ppmd7_WasAllocated(p) ((p)->Base != NULL)
@@ -115,7 +115,7 @@ typedef struct
} CPpmd7z_RangeDec;
void Ppmd7z_RangeDec_CreateVTable(CPpmd7z_RangeDec *p);
-Bool Ppmd7z_RangeDec_Init(CPpmd7z_RangeDec *p);
+BoolInt Ppmd7z_RangeDec_Init(CPpmd7z_RangeDec *p);
#define Ppmd7z_RangeDec_IsFinishedOK(p) ((p)->Code == 0)
int Ppmd7_DecodeSymbol(CPpmd7 *p, const IPpmd7_RangeDec *rc);
diff --git a/C/Ppmd7Dec.c b/C/Ppmd7Dec.c
index 3023b67..2026407 100644
--- a/C/Ppmd7Dec.c
+++ b/C/Ppmd7Dec.c
@@ -1,5 +1,5 @@
/* Ppmd7Dec.c -- PPMdH Decoder
-2017-04-03 : Igor Pavlov : Public domain
+2018-07-04 : Igor Pavlov : Public domain
This code is based on PPMd var.H (2001): Dmitry Shkarin : Public domain */
#include "Precomp.h"
@@ -8,7 +8,7 @@ This code is based on PPMd var.H (2001): Dmitry Shkarin : Public domain */
#define kTopValue (1 << 24)
-Bool Ppmd7z_RangeDec_Init(CPpmd7z_RangeDec *p)
+BoolInt Ppmd7z_RangeDec_Init(CPpmd7z_RangeDec *p)
{
unsigned i;
p->Code = 0;
diff --git a/C/Util/7z/7zMain.c b/C/Util/7z/7zMain.c
index 82aac89..3e86076 100644
--- a/C/Util/7z/7zMain.c
+++ b/C/Util/7z/7zMain.c
@@ -1,5 +1,5 @@
/* 7zMain.c - Test application for 7z Decoder
-2018-04-19 : Igor Pavlov : Public domain */
+2018-08-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -354,7 +354,7 @@ static void PrintError(char *s)
PrintLF();
}
-static void GetAttribString(UInt32 wa, Bool isDir, char *s)
+static void GetAttribString(UInt32 wa, BoolInt isDir, char *s)
{
#ifdef USE_WINDOWS_FILE
s[0] = (char)(((wa & FILE_ATTRIBUTE_DIRECTORY) != 0 || isDir) ? 'D' : '.');
diff --git a/C/Util/Lzma/LzmaUtil.c b/C/Util/Lzma/LzmaUtil.c
index cf88c77..82130e8 100644
--- a/C/Util/Lzma/LzmaUtil.c
+++ b/C/Util/Lzma/LzmaUtil.c
@@ -1,5 +1,5 @@
/* LzmaUtil.c -- Test application for LZMA compression
-2017-04-27 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "../../Precomp.h"
@@ -177,7 +177,7 @@ static int main2(int numArgs, const char *args[], char *rs)
char c;
int res;
int encodeMode;
- Bool useOutFile = False;
+ BoolInt useOutFile = False;
FileSeqInStream_CreateVTable(&inStream);
File_Construct(&inStream.file);
diff --git a/C/Util/SfxSetup/SfxSetup.c b/C/Util/SfxSetup/SfxSetup.c
index bfbf430..7e00240 100644
--- a/C/Util/SfxSetup/SfxSetup.c
+++ b/C/Util/SfxSetup/SfxSetup.c
@@ -1,5 +1,5 @@
/* SfxSetup.c - 7z SFX Setup
-2017-04-04 : Igor Pavlov : Public domain */
+2018-08-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -127,7 +127,7 @@ static WRes MyCreateDir(const WCHAR *name)
#define kSignatureSearchLimit (1 << 22)
-static Bool FindSignature(CSzFile *stream, UInt64 *resPos)
+static BoolInt FindSignature(CSzFile *stream, UInt64 *resPos)
{
Byte buf[kBufferSize];
size_t numPrevBytes = 0;
@@ -163,7 +163,7 @@ static Bool FindSignature(CSzFile *stream, UInt64 *resPos)
}
}
-static Bool DoesFileOrDirExist(const WCHAR *path)
+static BoolInt DoesFileOrDirExist(const WCHAR *path)
{
WIN32_FIND_DATAW fd;
HANDLE handle;
@@ -254,7 +254,7 @@ int APIENTRY WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
DWORD winRes;
const wchar_t *cmdLineParams;
const char *errorMessage = NULL;
- Bool useShellExecute = True;
+ BoolInt useShellExecute = True;
DWORD exitCode = 0;
LoadSecurityDlls();
@@ -287,7 +287,7 @@ int APIENTRY WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
cmdLineParams = GetCommandLineW();
#ifndef UNDER_CE
{
- Bool quoteMode = False;
+ BoolInt quoteMode = False;
for (;; cmdLineParams++)
{
wchar_t c = *cmdLineParams;
diff --git a/C/Xz.h b/C/Xz.h
index 7b88f51..fad56a3 100644
--- a/C/Xz.h
+++ b/C/Xz.h
@@ -1,5 +1,5 @@
/* Xz.h - Xz interface
-2018-02-28 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#ifndef __XZ_H
#define __XZ_H
@@ -53,7 +53,7 @@ typedef struct
#define XzBlock_HasUnsupportedFlags(p) (((p)->flags & ~(XZ_BF_NUM_FILTERS_MASK | XZ_BF_PACK_SIZE | XZ_BF_UNPACK_SIZE)) != 0)
SRes XzBlock_Parse(CXzBlock *p, const Byte *header);
-SRes XzBlock_ReadHeader(CXzBlock *p, ISeqInStream *inStream, Bool *isIndex, UInt32 *headerSizeRes);
+SRes XzBlock_ReadHeader(CXzBlock *p, ISeqInStream *inStream, BoolInt *isIndex, UInt32 *headerSizeRes);
/* ---------- xz stream ---------- */
@@ -186,10 +186,10 @@ typedef struct
Byte *outBuf;
size_t outBufSize;
size_t outWritten; // is equal to lzmaDecoder.dicPos (in outBuf mode)
- Bool wasFinished;
+ BoolInt wasFinished;
SRes res;
ECoderStatus status;
- // Bool SingleBufMode;
+ // BoolInt SingleBufMode;
int finished[MIXCODER_NUM_FILTERS_MAX - 1];
size_t pos[MIXCODER_NUM_FILTERS_MAX - 1];
@@ -241,9 +241,9 @@ typedef struct
CXzCheck check;
CSha256 sha;
- Bool parseMode;
- Bool headerParsedOk;
- Bool decodeToStreamSignature;
+ BoolInt parseMode;
+ BoolInt headerParsedOk;
+ BoolInt decodeToStreamSignature;
unsigned decodeOnlyOneBlock;
Byte *outBuf;
@@ -335,7 +335,7 @@ SRes XzUnpacker_CodeFull(CXzUnpacker *p, Byte *dest, SizeT *destLen,
const Byte *src, SizeT *srcLen,
ECoderFinishMode finishMode, ECoderStatus *status);
-Bool XzUnpacker_IsStreamWasFinished(const CXzUnpacker *p);
+BoolInt XzUnpacker_IsStreamWasFinished(const CXzUnpacker *p);
/*
XzUnpacker_GetExtraSize() returns the number of unconfirmed bytes,
@@ -365,7 +365,7 @@ UInt64 XzUnpacker_GetExtraSize(const CXzUnpacker *p);
*/
void XzUnpacker_PrepareToRandomBlockDecoding(CXzUnpacker *p);
-Bool XzUnpacker_IsBlockFinished(const CXzUnpacker *p);
+BoolInt XzUnpacker_IsBlockFinished(const CXzUnpacker *p);
#define XzUnpacker_GetPackSizeForIndex(p) ((p)->packSize + (p)->blockHeaderSize + XzFlags_GetCheckSize((p)->streamFlags))
@@ -378,7 +378,7 @@ typedef struct
{
size_t inBufSize_ST;
size_t outStep_ST;
- Bool ignoreErrors;
+ BoolInt ignoreErrors;
#ifndef _7ZIP_ST
unsigned numThreads;
diff --git a/C/XzDec.c b/C/XzDec.c
index ebf1983..da6c2e7 100644
--- a/C/XzDec.c
+++ b/C/XzDec.c
@@ -1,5 +1,5 @@
/* XzDec.c -- Xz Decode
-2018-04-24 : Igor Pavlov : Public domain */
+2018-12-29 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -341,7 +341,7 @@ static SRes SbState_SetFromMethod(IStateCoder *p, ISzAllocPtr alloc)
typedef struct
{
CLzma2Dec decoder;
- Bool outBufMode;
+ BoolInt outBufMode;
} CLzma2Dec_Spec;
@@ -637,8 +637,8 @@ static SRes MixCoder_Code(CMixCoder *p,
for (;;)
{
- Bool processed = False;
- Bool allFinished = True;
+ BoolInt processed = False;
+ BoolInt allFinished = True;
SRes resMain = SZ_OK;
unsigned i;
@@ -761,7 +761,7 @@ SRes Xz_ParseHeader(CXzStreamFlags *p, const Byte *buf)
return XzFlags_IsSupported(*p) ? SZ_OK : SZ_ERROR_UNSUPPORTED;
}
-static Bool Xz_CheckFooter(CXzStreamFlags flags, UInt64 indexSize, const Byte *buf)
+static BoolInt Xz_CheckFooter(CXzStreamFlags flags, UInt64 indexSize, const Byte *buf)
{
return indexSize == (((UInt64)GetUi32(buf + 4) + 1) << 2)
&& GetUi32(buf) == CrcCalc(buf + 4, 6)
@@ -775,7 +775,7 @@ static Bool Xz_CheckFooter(CXzStreamFlags flags, UInt64 indexSize, const Byte *b
if (s == 0) return SZ_ERROR_ARCHIVE; pos += s; }
-static Bool XzBlock_AreSupportedFilters(const CXzBlock *p)
+static BoolInt XzBlock_AreSupportedFilters(const CXzBlock *p)
{
unsigned numFilters = XzBlock_GetNumFilters(p) - 1;
unsigned i;
@@ -866,7 +866,7 @@ SRes XzBlock_Parse(CXzBlock *p, const Byte *header)
static SRes XzDecMix_Init(CMixCoder *p, const CXzBlock *block, Byte *outBuf, size_t outBufSize)
{
unsigned i;
- Bool needReInit = True;
+ BoolInt needReInit = True;
unsigned numFilters = XzBlock_GetNumFilters(block);
if (numFilters == p->numCoders && ((p->outBuf && outBuf) || (!p->outBuf && !outBuf)))
@@ -999,8 +999,8 @@ SRes XzUnpacker_Code(CXzUnpacker *p, Byte *dest, SizeT *destLen,
SRes res;
ECoderFinishMode finishMode2 = finishMode;
- Bool srcFinished2 = srcFinished;
- Bool destFinish = False;
+ BoolInt srcFinished2 = srcFinished;
+ BoolInt destFinish = False;
if (p->block.packSize != (UInt64)(Int64)-1)
{
@@ -1346,12 +1346,12 @@ SRes XzUnpacker_CodeFull(CXzUnpacker *p, Byte *dest, SizeT *destLen,
}
-Bool XzUnpacker_IsBlockFinished(const CXzUnpacker *p)
+BoolInt XzUnpacker_IsBlockFinished(const CXzUnpacker *p)
{
return (p->state == XZ_STATE_BLOCK_HEADER) && (p->pos == 0);
}
-Bool XzUnpacker_IsStreamWasFinished(const CXzUnpacker *p)
+BoolInt XzUnpacker_IsStreamWasFinished(const CXzUnpacker *p)
{
return (p->state == XZ_STATE_STREAM_PADDING) && (((UInt32)p->padSize & 3) == 0);
}
@@ -1423,18 +1423,18 @@ typedef struct
size_t outCodeSize;
ECoderStatus status;
SRes codeRes;
- Bool skipMode;
- // Bool finishedWithMark;
+ BoolInt skipMode;
+ // BoolInt finishedWithMark;
EMtDecParseState parseState;
- Bool parsing_Truncated;
- Bool atBlockHeader;
+ BoolInt parsing_Truncated;
+ BoolInt atBlockHeader;
CXzStreamFlags streamFlags;
// UInt64 numFinishedStreams
UInt64 numStreams;
UInt64 numTotalBlocks;
UInt64 numBlocks;
- Bool dec_created;
+ BoolInt dec_created;
CXzUnpacker dec;
Byte mtPad[1 << 7];
@@ -1458,14 +1458,14 @@ typedef struct
ICompressProgress *progress;
// CXzStatInfo *stat;
- Bool finishMode;
- Bool outSize_Defined;
+ BoolInt finishMode;
+ BoolInt outSize_Defined;
UInt64 outSize;
UInt64 outProcessed;
UInt64 inProcessed;
UInt64 readProcessed;
- Bool readWasFinished;
+ BoolInt readWasFinished;
SRes readRes;
SRes writeRes;
@@ -1473,14 +1473,14 @@ typedef struct
size_t outBufSize;
Byte *inBuf;
size_t inBufSize;
- Bool dec_created;
+
CXzUnpacker dec;
ECoderStatus status;
SRes codeRes;
#ifndef _7ZIP_ST
- Bool mainDecoderWasCalled;
+ BoolInt mainDecoderWasCalled;
// int statErrorDefined;
int finishedDecoderIndex;
@@ -1494,12 +1494,12 @@ typedef struct
// UInt64 numBadBlocks;
SRes mainErrorCode;
- Bool isBlockHeaderState_Parse;
- Bool isBlockHeaderState_Write;
+ BoolInt isBlockHeaderState_Parse;
+ BoolInt isBlockHeaderState_Write;
UInt64 outProcessed_Parse;
- Bool parsing_Truncated;
+ BoolInt parsing_Truncated;
- Bool mtc_WasConstructed;
+ BoolInt mtc_WasConstructed;
CMtDec mtc;
CXzDecMtThread coders[MTDEC__THREADS_MAX];
#endif
@@ -1525,7 +1525,8 @@ CXzDecMtHandle XzDecMt_Create(ISzAllocPtr alloc, ISzAllocPtr allocMid)
p->outBufSize = 0;
p->inBuf = NULL;
p->inBufSize = 0;
- p->dec_created = False;
+
+ XzUnpacker_Construct(&p->dec, &p->alignOffsetAlloc.vt);
p->unpackBlockMaxSize = 0;
@@ -1573,11 +1574,7 @@ static void XzDecMt_FreeOutBufs(CXzDecMt *p)
static void XzDecMt_FreeSt(CXzDecMt *p)
{
- if (p->dec_created)
- {
- XzUnpacker_Free(&p->dec);
- p->dec_created = False;
- }
+ XzUnpacker_Free(&p->dec);
if (p->outBuf)
{
@@ -1968,11 +1965,11 @@ static SRes XzDecMt_Callback_Code(void *pp, unsigned coderIndex,
#define XZDECMT_STREAM_WRITE_STEP (1 << 24)
static SRes XzDecMt_Callback_Write(void *pp, unsigned coderIndex,
- Bool needWriteToStream,
+ BoolInt needWriteToStream,
const Byte *src, size_t srcSize,
// int srcFinished,
- Bool *needContinue,
- Bool *canRecode)
+ BoolInt *needContinue,
+ BoolInt *canRecode)
{
CXzDecMt *me = (CXzDecMt *)pp;
const CXzDecMtThread *coder = &me->coders[coderIndex];
@@ -2302,7 +2299,7 @@ void XzStatInfo_Clear(CXzStatInfo *p)
static SRes XzDecMt_Decode_ST(CXzDecMt *p
#ifndef _7ZIP_ST
- , Bool tMode
+ , BoolInt tMode
#endif
, CXzStatInfo *stat)
{
@@ -2358,7 +2355,7 @@ static SRes XzDecMt_Decode_ST(CXzDecMt *p
for (;;)
{
SizeT outSize;
- Bool finished;
+ BoolInt finished;
ECoderFinishMode finishMode;
SizeT inProcessed;
ECoderStatus status;
@@ -2466,7 +2463,7 @@ static SRes XzStatInfo_SetStat(const CXzUnpacker *dec,
int finishMode,
UInt64 readProcessed, UInt64 inProcessed,
SRes res, ECoderStatus status,
- Bool decodingTruncated,
+ BoolInt decodingTruncated,
CXzStatInfo *stat)
{
UInt64 extraSize;
@@ -2531,7 +2528,7 @@ SRes XzDecMt_Decode(CXzDecMtHandle pp,
{
CXzDecMt *p = (CXzDecMt *)pp;
#ifndef _7ZIP_ST
- Bool tMode;
+ BoolInt tMode;
#endif
XzStatInfo_Clear(stat);
@@ -2564,13 +2561,7 @@ SRes XzDecMt_Decode(CXzDecMtHandle pp,
p->codeRes = 0;
p->status = CODER_STATUS_NOT_SPECIFIED;
- if (!p->dec_created)
- {
- XzUnpacker_Construct(&p->dec, &p->alignOffsetAlloc.vt);
- p->dec_created = True;
- }
XzUnpacker_Init(&p->dec);
-
*isMT = False;
@@ -2600,6 +2591,8 @@ SRes XzDecMt_Decode(CXzDecMtHandle pp,
{
IMtDecCallback vt;
+      // we just free ST buffers here
+      // but we still keep state variables that were set in XzUnpacker_Init()
XzDecMt_FreeSt(p);
p->outProcessed_Parse = 0;
@@ -2636,7 +2629,7 @@ SRes XzDecMt_Decode(CXzDecMtHandle pp,
vt.Write = XzDecMt_Callback_Write;
{
- Bool needContinue;
+ BoolInt needContinue;
SRes res = MtDec_Code(&p->mtc);
@@ -2665,7 +2658,7 @@ SRes XzDecMt_Decode(CXzDecMtHandle pp,
if (!needContinue)
{
SRes codeRes;
- Bool truncated = False;
+ BoolInt truncated = False;
ECoderStatus status;
CXzUnpacker *dec;
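
Note: the XzDecMt hunks above drop the lazy dec_created flag; XzUnpacker_Construct now runs once in XzDecMt_Create, so XzDecMt_FreeSt and XzDecMt_Decode no longer need to guard construction. A minimal sketch of that construct-at-create-time shape, with hypothetical names:

#include <stdlib.h>

/* Hypothetical handle that constructs its decoder when the handle is created,
   instead of tracking a *_created flag and constructing on first use. */
typedef struct { int initialized; } Unpacker;
typedef struct { Unpacker dec; } DecHandle;

static DecHandle *DecHandle_Create(void)
{
  DecHandle *h = (DecHandle *)malloc(sizeof(*h));
  if (h)
    h->dec.initialized = 1;   /* constructed once; Free/Decode never re-check */
  return h;
}
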
diff --git a/C/XzEnc.c b/C/XzEnc.c
index 432cbfe..1f512a5 100644
--- a/C/XzEnc.c
+++ b/C/XzEnc.c
@@ -1,5 +1,5 @@
/* XzEnc.c -- Xz Encode
-2018-04-28 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -814,7 +814,7 @@ static SRes Xz_CompressBlock(
SRes res;
Byte *outBuf = NULL;
size_t outSize = 0;
- Bool useStream = (fp || inStream);
+ BoolInt useStream = (fp || inStream);
// useStream = True;
if (!useStream)
@@ -940,7 +940,7 @@ typedef struct
#ifndef _7ZIP_ST
unsigned checkType;
ISeqOutStream *outStream;
- Bool mtCoder_WasConstructed;
+ BoolInt mtCoder_WasConstructed;
CMtCoder mtCoder;
CXzEncBlockInfo EncBlocks[MTCODER__BLOCKS_MAX];
#endif
diff --git a/C/XzIn.c b/C/XzIn.c
index 42da1de..792a617 100644
--- a/C/XzIn.c
+++ b/C/XzIn.c
@@ -1,5 +1,5 @@
/* XzIn.c - Xz input
-2018-02-02 : Igor Pavlov : Public domain */
+2018-07-04 : Igor Pavlov : Public domain */
#include "Precomp.h"
@@ -28,7 +28,7 @@ SRes Xz_ReadHeader(CXzStreamFlags *p, ISeqInStream *inStream)
{ unsigned s = Xz_ReadVarInt(buf + pos, size - pos, res); \
if (s == 0) return SZ_ERROR_ARCHIVE; pos += s; }
-SRes XzBlock_ReadHeader(CXzBlock *p, ISeqInStream *inStream, Bool *isIndex, UInt32 *headerSizeRes)
+SRes XzBlock_ReadHeader(CXzBlock *p, ISeqInStream *inStream, BoolInt *isIndex, UInt32 *headerSizeRes)
{
Byte header[XZ_BLOCK_HEADER_SIZE_MAX];
unsigned headerSize;
diff --git a/CPP/7zip/Aes.mak b/CPP/7zip/Aes.mak
index 4d5e98b..3d0d877 100644
--- a/CPP/7zip/Aes.mak
+++ b/CPP/7zip/Aes.mak
@@ -1,7 +1,7 @@
C_OBJS = $(C_OBJS) \
$O\Aes.obj
-!IF "$(CPU)" != "IA64" && "$(CPU)" != "MIPS" && "$(CPU)" != "ARM" && "$(CPU)" != "ARM64"
+!IF "$(PLATFORM)" != "ia64" && "$(PLATFORM)" != "mips" && "$(PLATFORM)" != "arm" && "$(PLATFORM)" != "arm64"
ASM_OBJS = $(ASM_OBJS) \
$O\AesOpt.obj
!ENDIF
diff --git a/CPP/7zip/Archive/7z/7zHandler.cpp b/CPP/7zip/Archive/7z/7zHandler.cpp
index a3b0bce..45dda9c 100644
--- a/CPP/7zip/Archive/7z/7zHandler.cpp
+++ b/CPP/7zip/Archive/7z/7zHandler.cpp
@@ -236,6 +236,13 @@ STDMETHODIMP CHandler::GetArchiveProperty(PROPID propID, PROPVARIANT *value)
prop = v;
break;
}
+
+ case kpidReadOnly:
+ {
+ if (!_db.CanUpdate())
+ prop = true;
+ break;
+ }
}
prop.Detach(value);
return S_OK;
diff --git a/CPP/7zip/Archive/7z/7zHandlerOut.cpp b/CPP/7zip/Archive/7z/7zHandlerOut.cpp
index f0474bb..62587e8 100644
--- a/CPP/7zip/Archive/7z/7zHandlerOut.cpp
+++ b/CPP/7zip/Archive/7z/7zHandlerOut.cpp
@@ -267,6 +267,9 @@ STDMETHODIMP CHandler::UpdateItems(ISequentialOutStream *outStream, UInt32 numIt
db = &_db;
#endif
+ if (db && !db->CanUpdate())
+ return E_NOTIMPL;
+
/*
CMyComPtr<IArchiveGetRawProps> getRawProps;
updateCallback->QueryInterface(IID_IArchiveGetRawProps, (void **)&getRawProps);
diff --git a/CPP/7zip/Archive/7z/7zIn.cpp b/CPP/7zip/Archive/7z/7zIn.cpp
index bbc77b0..ae5ff19 100644
--- a/CPP/7zip/Archive/7z/7zIn.cpp
+++ b/CPP/7zip/Archive/7z/7zIn.cpp
@@ -1465,21 +1465,24 @@ void CDbEx::FillLinks()
}
if (indexInFolder != 0)
+ {
folderIndex++;
- /*
- if (indexInFolder != 0)
- ThrowIncorrect();
- */
+ // 18.06
+ ThereIsHeaderError = true;
+ // ThrowIncorrect();
+ }
for (;;)
{
if (folderIndex >= NumFolders)
return;
FolderStartFileIndex[folderIndex] = i;
- /*
if (NumUnpackStreamsVector[folderIndex] != 0)
- ThrowIncorrect();;
- */
+ {
+ // 18.06
+ ThereIsHeaderError = true;
+ // ThrowIncorrect();
+ }
folderIndex++;
}
}
diff --git a/CPP/7zip/Archive/7z/7zIn.h b/CPP/7zip/Archive/7z/7zIn.h
index c5fb909..bb0e474 100644
--- a/CPP/7zip/Archive/7z/7zIn.h
+++ b/CPP/7zip/Archive/7z/7zIn.h
@@ -257,6 +257,16 @@ struct CDbEx: public CDatabase
PhySize = 0;
}
+ bool CanUpdate() const
+ {
+ if (ThereIsHeaderError
+ || UnexpectedEnd
+ || StartHeaderWasRecovered
+ || UnsupportedFeatureError)
+ return false;
+ return true;
+ }
+
void FillLinks();
UInt64 GetFolderStreamPos(CNum folderIndex, unsigned indexInFolder) const
diff --git a/CPP/7zip/Archive/7z/7zUpdate.cpp b/CPP/7zip/Archive/7z/7zUpdate.cpp
index 44de9ac..5b156be 100644
--- a/CPP/7zip/Archive/7z/7zUpdate.cpp
+++ b/CPP/7zip/Archive/7z/7zUpdate.cpp
@@ -124,13 +124,13 @@ static int Parse_EXE(const Byte *buf, size_t size, CFilterMode *filterMode)
#define ELF_DATA_2LSB 1
#define ELF_DATA_2MSB 2
-static UInt16 Get16(const Byte *p, Bool be) { if (be) return (UInt16)GetBe16(p); return (UInt16)GetUi16(p); }
-static UInt32 Get32(const Byte *p, Bool be) { if (be) return GetBe32(p); return GetUi32(p); }
-// static UInt64 Get64(const Byte *p, Bool be) { if (be) return GetBe64(p); return GetUi64(p); }
+static UInt16 Get16(const Byte *p, BoolInt be) { if (be) return (UInt16)GetBe16(p); return (UInt16)GetUi16(p); }
+static UInt32 Get32(const Byte *p, BoolInt be) { if (be) return GetBe32(p); return GetUi32(p); }
+// static UInt64 Get64(const Byte *p, BoolInt be) { if (be) return GetBe64(p); return GetUi64(p); }
static int Parse_ELF(const Byte *buf, size_t size, CFilterMode *filterMode)
{
- Bool /* is32, */ be;
+ BoolInt /* is32, */ be;
UInt32 filterId;
if (size < 512 || buf[6] != 1) /* ver */
@@ -200,7 +200,7 @@ static unsigned Parse_MACH(const Byte *buf, size_t size, CFilterMode *filterMode
if (size < 512)
return 0;
- Bool /* mode64, */ be;
+ BoolInt /* mode64, */ be;
switch (GetUi32(buf))
{
case MACH_SIG_BE_32: /* mode64 = False; */ be = True; break;
@@ -239,7 +239,7 @@ static unsigned Parse_MACH(const Byte *buf, size_t size, CFilterMode *filterMode
#define RIFF_SIG 0x46464952
-static Bool Parse_WAV(const Byte *buf, size_t size, CFilterMode *filterMode)
+static BoolInt Parse_WAV(const Byte *buf, size_t size, CFilterMode *filterMode)
{
UInt32 subChunkSize, pos;
if (size < 0x2C)
@@ -285,7 +285,7 @@ static Bool Parse_WAV(const Byte *buf, size_t size, CFilterMode *filterMode)
return False;
}
-static Bool ParseFile(const Byte *buf, size_t size, CFilterMode *filterMode)
+static BoolInt ParseFile(const Byte *buf, size_t size, CFilterMode *filterMode)
{
filterMode->Id = 0;
filterMode->Delta = 0;
@@ -894,7 +894,7 @@ HRESULT CAnalysis::GetFilterGroup(UInt32 index, const CUpdateItem &ui, CFilterMo
// RINOK(Callback->SetOperationResult2(index, NUpdate::NOperationResult::kOK));
if (result == S_OK)
{
- Bool parseRes = ParseFile(Buffer, size, &filterModeTemp);
+ BoolInt parseRes = ParseFile(Buffer, size, &filterModeTemp);
if (parseRes && filterModeTemp.Delta == 0)
{
filterModeTemp.SetDelta();
@@ -1648,6 +1648,9 @@ HRESULT Update(
for (CNum fi = db->FolderStartFileIndex[i]; indexInFolder < numUnpackStreams; fi++)
{
+ if (fi >= db->Files.Size())
+ return E_FAIL;
+
const CFileItem &file = db->Files[fi];
if (file.HasStream)
{
diff --git a/CPP/7zip/Archive/Common/CoderMixer2.cpp b/CPP/7zip/Archive/Common/CoderMixer2.cpp
index d4d9949..baddddf 100644
--- a/CPP/7zip/Archive/Common/CoderMixer2.cpp
+++ b/CPP/7zip/Archive/Common/CoderMixer2.cpp
@@ -91,6 +91,7 @@ HRESULT CCoder::CheckDataAfterEnd(bool &dataAfterEnd_Error /* , bool &InternalPa
{
CMyComPtr<ICompressGetInStreamProcessedSize2> getInStreamProcessedSize2;
Coder2.QueryInterface(IID_ICompressGetInStreamProcessedSize2, (void **)&getInStreamProcessedSize2);
+ if (getInStreamProcessedSize2)
FOR_VECTOR (i, PackSizePointers)
{
if (!PackSizePointers[i])
diff --git a/CPP/7zip/Archive/LzmaHandler.cpp b/CPP/7zip/Archive/LzmaHandler.cpp
index 28079df..f13fca7 100644
--- a/CPP/7zip/Archive/LzmaHandler.cpp
+++ b/CPP/7zip/Archive/LzmaHandler.cpp
@@ -131,13 +131,7 @@ HRESULT CDecoder::Code(const CHeader &header, ISequentialOutStream *outStream,
if (header.FilterID > 1)
return E_NOTIMPL;
- {
- CMyComPtr<ICompressSetDecoderProperties2> setDecoderProperties;
- _lzmaDecoder.QueryInterface(IID_ICompressSetDecoderProperties2, &setDecoderProperties);
- if (!setDecoderProperties)
- return E_NOTIMPL;
- RINOK(setDecoderProperties->SetDecoderProperties2(header.LzmaProps, 5));
- }
+ RINOK(_lzmaDecoderSpec->SetDecoderProperties2(header.LzmaProps, 5));
bool filteredMode = (header.FilterID == 1);
@@ -357,24 +351,54 @@ API_FUNC_static_IsArc IsArc_Lzma86(const Byte *p, size_t size)
}
}
+
+
STDMETHODIMP CHandler::Open(IInStream *inStream, const UInt64 *, IArchiveOpenCallback *)
{
Close();
- const UInt32 kBufSize = 1 + 5 + 8 + 2;
+ const unsigned headerSize = GetHeaderSize();
+ const UInt32 kBufSize = 1 << 7;
Byte buf[kBufSize];
-
- RINOK(ReadStream_FALSE(inStream, buf, kBufSize));
-
+ size_t processedSize = kBufSize;
+ RINOK(ReadStream(inStream, buf, &processedSize));
+ if (processedSize < headerSize + 2)
+ return S_FALSE;
if (!_header.Parse(buf, _lzma86))
return S_FALSE;
- const Byte *start = buf + GetHeaderSize();
+ const Byte *start = buf + headerSize;
if (start[0] != 0 /* || (start[1] & 0x80) != 0 */ ) // empty stream with EOS is not 0x80
return S_FALSE;
-
+
RINOK(inStream->Seek(0, STREAM_SEEK_END, &_packSize));
- if (_packSize >= 24 && _header.Size == 0 && _header.FilterID == 0 && _header.LzmaProps[0] == 0)
+
+ SizeT srcLen = processedSize - headerSize;
+
+ if (srcLen > 10
+ && _header.Size == 0
+ // && _header.FilterID == 0
+ && _header.LzmaProps[0] == 0
+ )
return S_FALSE;
+
+ CDecoder state;
+ const UInt32 outLimit = 1 << 11;
+ Byte outBuf[outLimit];
+
+ SizeT outSize = outLimit;
+ if (outSize > _header.Size)
+ outSize = (SizeT)_header.Size;
+ SizeT destLen = outSize;
+ ELzmaStatus status;
+
+ SRes res = LzmaDecode(outBuf, &destLen, start, &srcLen,
+ _header.LzmaProps, 5, LZMA_FINISH_ANY,
+ &status, &g_Alloc);
+
+ if (res != SZ_OK)
+ if (res != SZ_ERROR_INPUT_EOF)
+ return S_FALSE;
+
_isArc = true;
_stream = inStream;
_seqStream = inStream;
diff --git a/CPP/7zip/Archive/XzHandler.cpp b/CPP/7zip/Archive/XzHandler.cpp
index 74cbca9..743271a 100644
--- a/CPP/7zip/Archive/XzHandler.cpp
+++ b/CPP/7zip/Archive/XzHandler.cpp
@@ -488,7 +488,7 @@ HRESULT CHandler::Open2(IInStream *inStream, /* UInt32 flags, */ IArchiveOpenCal
{
CXzBlock block;
- Bool isIndex;
+ BoolInt isIndex;
UInt32 headerSizeRes;
SRes res2 = XzBlock_ReadHeader(&block, &inStreamWrap.vt, &isIndex, &headerSizeRes);
if (res2 == SZ_OK && !isIndex)
@@ -820,7 +820,7 @@ static HRESULT DecodeBlock(CXzUnpackerCPP2 &xzu,
packRem -= inLen;
- Bool blockFinished = XzUnpacker_IsBlockFinished(&xzu.p);
+ BoolInt blockFinished = XzUnpacker_IsBlockFinished(&xzu.p);
if ((inLen == 0 && outLen == 0) || blockFinished)
{
diff --git a/CPP/7zip/Bundles/LzmaCon/makefile b/CPP/7zip/Bundles/LzmaCon/makefile
index 2609763..7374df9 100644
--- a/CPP/7zip/Bundles/LzmaCon/makefile
+++ b/CPP/7zip/Bundles/LzmaCon/makefile
@@ -55,5 +55,6 @@ C_OBJS = \
$O\Threads.obj \
!include "../../Crc.mak"
+!include "../../LzmaDec.mak"
!include "../../7zip.mak"
diff --git a/CPP/7zip/Crc.mak b/CPP/7zip/Crc.mak
index 19a7f7b..66b35c1 100644
--- a/CPP/7zip/Crc.mak
+++ b/CPP/7zip/Crc.mak
@@ -1,6 +1,6 @@
C_OBJS = $(C_OBJS) \
$O\7zCrc.obj
-!IF "$(CPU)" == "IA64" || "$(CPU)" == "MIPS" || "$(CPU)" == "ARM" || "$(CPU)" == "ARM64"
+!IF "$(PLATFORM)" == "ia64" || "$(PLATFORM)" == "mips" || "$(PLATFORM)" == "arm" || "$(PLATFORM)" == "arm64"
C_OBJS = $(C_OBJS) \
!ELSE
ASM_OBJS = $(ASM_OBJS) \
diff --git a/CPP/7zip/Crc64.mak b/CPP/7zip/Crc64.mak
index 1ac6a0c..6df9b40 100644
--- a/CPP/7zip/Crc64.mak
+++ b/CPP/7zip/Crc64.mak
@@ -1,6 +1,6 @@
C_OBJS = $(C_OBJS) \
$O\XzCrc64.obj
-!IF "$(CPU)" == "IA64" || "$(CPU)" == "MIPS" || "$(CPU)" == "ARM" || "$(CPU)" == "ARM64"
+!IF "$(PLATFORM)" == "ia64" || "$(PLATFORM)" == "mips" || "$(PLATFORM)" == "arm" || "$(PLATFORM)" == "arm64"
C_OBJS = $(C_OBJS) \
!ELSE
ASM_OBJS = $(ASM_OBJS) \
diff --git a/CPP/7zip/LzmaDec.mak b/CPP/7zip/LzmaDec.mak
index 3c0e7c5..9aa3086 100644
--- a/CPP/7zip/LzmaDec.mak
+++ b/CPP/7zip/LzmaDec.mak
@@ -1,4 +1,4 @@
-!IF "$(CPU)" == "AMD64"
+!IF "$(PLATFORM)" == "x64"
CFLAGS_C_SPEC = -D_LZMA_DEC_OPT
ASM_OBJS = $(ASM_OBJS) \
$O\LzmaDecOpt.obj
diff --git a/CPP/7zip/UI/Common/ArchiveCommandLine.cpp b/CPP/7zip/UI/Common/ArchiveCommandLine.cpp
index f14aafb..35dbd74 100644
--- a/CPP/7zip/UI/Common/ArchiveCommandLine.cpp
+++ b/CPP/7zip/UI/Common/ArchiveCommandLine.cpp
@@ -23,6 +23,7 @@
#include "../../../Common/StringConvert.h"
#include "../../../Common/StringToInt.h"
+#include "../../../Windows/ErrorMsg.h"
#include "../../../Windows/FileDir.h"
#include "../../../Windows/FileName.h"
#ifdef _WIN32
@@ -39,7 +40,9 @@
extern bool g_CaseSensitive;
extern bool g_PathTrailReplaceMode;
+#ifdef _7ZIP_LARGE_PAGES
bool g_LargePagesMode = false;
+#endif
#ifdef UNDER_CE
@@ -410,8 +413,19 @@ static void AddToCensorFromListFile(
UStringVector names;
if (!NFind::DoesFileExist(us2fs(fileName)))
throw CArcCmdLineException(kCannotFindListFile, fileName);
- if (!ReadNamesFromListFile(us2fs(fileName), names, codePage))
+ DWORD lastError = 0;
+ if (!ReadNamesFromListFile2(us2fs(fileName), names, codePage, lastError))
+ {
+ if (lastError != 0)
+ {
+ UString m;
+ m = "The file operation error for listfile";
+ m.Add_LF();
+ m += NError::MyFormatMessage(lastError);
+ throw CArcCmdLineException(m, fileName);
+ }
throw CArcCmdLineException(kIncorrectListFile, fileName);
+ }
if (renamePairs)
{
if ((names.Size() & 1) != 0)
diff --git a/CPP/7zip/UI/Common/ArchiveExtractCallback.cpp b/CPP/7zip/UI/Common/ArchiveExtractCallback.cpp
index 1119d1b..aae08ad 100644
--- a/CPP/7zip/UI/Common/ArchiveExtractCallback.cpp
+++ b/CPP/7zip/UI/Common/ArchiveExtractCallback.cpp
@@ -1182,7 +1182,9 @@ if (askExtractMode == NArchive::NExtract::NAskMode::kExtract && !_testMode)
bool needDelete = true;
if (needDelete)
{
+ if (NFind::DoesFileExist(fullProcessedPath))
if (!DeleteFileAlways(fullProcessedPath))
+ if (GetLastError() != ERROR_FILE_NOT_FOUND)
{
RINOK(SendMessageError_with_LastError(kCantDeleteOutputFile, fullProcessedPath));
return S_OK;
@@ -1368,13 +1370,35 @@ if (askExtractMode == NArchive::NExtract::NAskMode::kExtract && !_testMode)
// UInt64 ticks = GetCpuTicks();
bool res = _outFileStreamSpec->File.SetLength(_curSize);
_fileLengthWasSet = res;
- _outFileStreamSpec->File.SeekToBegin();
+
// ticks = GetCpuTicks() - ticks;
// printf("\nticks = %10d\n", (unsigned)ticks);
if (!res)
{
RINOK(SendMessageError_with_LastError(kCantSetFileLen, fullProcessedPath));
}
+
+ /*
+ _outFileStreamSpec->File.Close();
+ ticks = GetCpuTicks() - ticks;
+ printf("\nticks = %10d\n", (unsigned)ticks);
+ return S_FALSE;
+ */
+
+ /*
+    File.SetLength() on FAT (xp64) is fast, but then File.Close() can be slow
+    if we don't write any data.
+    File.SetLength() for a remote share file (exFAT) can be slow in some cases,
+    and Windows can return a "network error" after 1 minute,
+    while the remote file can still keep growing.
+ We need some way to detect such bad cases and disable PreAllocateOutFile mode.
+ */
+
+ res = _outFileStreamSpec->File.SeekToBegin();
+ if (!res)
+ {
+ RINOK(SendMessageError_with_LastError("Can not seek to begin of file", fullProcessedPath));
+ }
}
#ifdef SUPPORT_ALT_STREAMS
diff --git a/CPP/7zip/UI/Common/ArchiveName.cpp b/CPP/7zip/UI/Common/ArchiveName.cpp
index aa47f7a..b725024 100644
--- a/CPP/7zip/UI/Common/ArchiveName.cpp
+++ b/CPP/7zip/UI/Common/ArchiveName.cpp
@@ -2,6 +2,8 @@
#include "StdAfx.h"
+#include "../../../Common/Wildcard.h"
+
#include "../../../Windows/FileDir.h"
#include "../../../Windows/FileName.h"
@@ -11,7 +13,7 @@
using namespace NWindows;
using namespace NFile;
-UString CreateArchiveName(const NFind::CFileInfo &fi, bool keepName)
+static UString CreateArchiveName(const NFind::CFileInfo &fi, bool keepName)
{
FString resultName = fi.Name;
if (!fi.IsDir() && !keepName)
@@ -72,7 +74,75 @@ static FString CreateArchiveName2(const FString &path, bool fromPrev, bool keepN
return resultName;
}
-UString CreateArchiveName(const UString &path, bool fromPrev, bool keepName)
+
+UString CreateArchiveName(const UStringVector &paths, const NFind::CFileInfo *fi)
{
- return Get_Correct_FsFile_Name(fs2us(CreateArchiveName2(us2fs(path), fromPrev, keepName)));
+ bool keepName = false;
+ /*
+ if (paths.Size() == 1)
+ {
+ const UString &name = paths[0];
+ if (name.Len() > 4)
+ if (CompareFileNames(name.RightPtr(4), L".tar") == 0)
+ keepName = true;
+ }
+ */
+
+ UString name;
+ if (fi)
+ name = CreateArchiveName(*fi, keepName);
+ else
+ {
+ if (paths.IsEmpty())
+ return L"archive";
+ bool fromPrev = (paths.Size() > 1);
+ name = Get_Correct_FsFile_Name(fs2us(CreateArchiveName2(us2fs(paths.Front()), fromPrev, keepName)));
+ }
+
+ UString postfix;
+ UInt32 index = 1;
+
+ for (;;)
+ {
+      // we don't want cases where the archive would include itself,
+      // so we find the first available name for the archive
+ const UString name2 = name + postfix;
+ const UString name2_zip = name2 + L".zip";
+ const UString name2_7z = name2 + L".7z";
+ const UString name2_tar = name2 + L".tar";
+ const UString name2_wim = name2 + L".wim";
+
+ unsigned i = 0;
+
+ for (i = 0; i < paths.Size(); i++)
+ {
+ const UString &fn = paths[i];
+ NFind::CFileInfo fi2;
+
+ const NFind::CFileInfo *fp;
+ if (fi && paths.Size() == 1)
+ fp = fi;
+ else
+ {
+ if (!fi2.Find(us2fs(fn)))
+ continue;
+ fp = &fi2;
+ }
+ const UString fname = fs2us(fp->Name);
+ if ( 0 == CompareFileNames(fname, name2_zip)
+ || 0 == CompareFileNames(fname, name2_7z)
+ || 0 == CompareFileNames(fname, name2_tar)
+ || 0 == CompareFileNames(fname, name2_wim))
+ break;
+ }
+
+ if (i == paths.Size())
+ break;
+ index++;
+ postfix = "_";
+ postfix.Add_UInt32(index);
+ }
+
+ name += postfix;
+ return name;
}
diff --git a/CPP/7zip/UI/Common/ArchiveName.h b/CPP/7zip/UI/Common/ArchiveName.h
index 7b49c7b..ce2d192 100644
--- a/CPP/7zip/UI/Common/ArchiveName.h
+++ b/CPP/7zip/UI/Common/ArchiveName.h
@@ -3,11 +3,8 @@
#ifndef __ARCHIVE_NAME_H
#define __ARCHIVE_NAME_H
-#include "../../../Common/MyString.h"
-
#include "../../../Windows/FileFind.h"
-UString CreateArchiveName(const UString &path, bool fromPrev, bool keepName);
-UString CreateArchiveName(const NWindows::NFile::NFind::CFileInfo &fileInfo, bool keepName);
+UString CreateArchiveName(const UStringVector &paths, const NWindows::NFile::NFind::CFileInfo *fi = NULL);
#endif
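
The single CreateArchiveName() entry point declared above replaces the two older overloads that took a path string or a CFileInfo directly. A minimal caller sketch follows; the paths are hypothetical and only illustrate the call. The returned string is a base name without an extension, and, as the ArchiveName.cpp hunk above shows, a "_N" suffix is chosen when one of the input files is already named <name>.7z/.zip/.tar/.wim:

    // hypothetical usage of the new API; the paths are made up for illustration
    UStringVector paths;
    paths.Add(L"C:\\data\\reports");
    paths.Add(L"C:\\data\\notes.txt");
    // fi may be NULL (the default); with several paths the name is derived
    // from the first path, and a numeric suffix is added if a matching
    // <name>.7z / .zip / .tar / .wim is already among the inputs.
    const UString baseName = CreateArchiveName(paths);
    const UString arcPath = baseName + L".7z";   // the caller appends the extension
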
diff --git a/CPP/7zip/UI/Common/Bench.cpp b/CPP/7zip/UI/Common/Bench.cpp
index c0d0e54..20db0b1 100644
--- a/CPP/7zip/UI/Common/Bench.cpp
+++ b/CPP/7zip/UI/Common/Bench.cpp
@@ -522,10 +522,9 @@ class CBenchProgressInfo:
{
public:
CBenchProgressStatus *Status;
- HRESULT Res;
IBenchCallback *Callback;
- CBenchProgressInfo(): Callback(0) {}
+ CBenchProgressInfo(): Callback(NULL) {}
MY_UNKNOWN_IMP
STDMETHOD(SetRatioInfo)(const UInt64 *inSize, const UInt64 *outSize);
};
@@ -758,7 +757,7 @@ struct CEncoderInfo
fileData(NULL),
CheckCrc_Enc(true),
CheckCrc_Dec(true),
- outStreamSpec(0), callback(0), printCallback(0), propStreamSpec(0) {}
+ outStreamSpec(NULL), callback(NULL), printCallback(NULL), propStreamSpec(NULL) {}
#ifndef _7ZIP_ST
@@ -1144,7 +1143,7 @@ static const UInt32 kNumThreadsMax = (1 << 12);
struct CBenchEncoders
{
CEncoderInfo *encoders;
- CBenchEncoders(UInt32 num): encoders(0) { encoders = new CEncoderInfo[num]; }
+ CBenchEncoders(UInt32 num): encoders(NULL) { encoders = new CEncoderInfo[num]; }
~CBenchEncoders() { delete []encoders; }
};
@@ -1545,7 +1544,7 @@ struct CFreqThreads
CFreqInfo *Items;
UInt32 NumThreads;
- CFreqThreads(): Items(0), NumThreads(0) {}
+ CFreqThreads(): Items(NULL), NumThreads(0) {}
void WaitAll()
{
for (UInt32 i = 0; i < NumThreads; i++)
@@ -1603,7 +1602,7 @@ struct CCrcThreads
CCrcInfo *Items;
UInt32 NumThreads;
- CCrcThreads(): Items(0), NumThreads(0) {}
+ CCrcThreads(): Items(NULL), NumThreads(0) {}
void WaitAll()
{
for (UInt32 i = 0; i < NumThreads; i++)
@@ -1885,8 +1884,51 @@ AString GetProcessThreadsInfo(const NSystem::CProcessAffinity &ti)
}
+static void PrintSize(AString &s, UInt64 v)
+{
+ char c = 0;
+ if ((v & 0x3FF) == 0) { v >>= 10; c = 'K';
+ if ((v & 0x3FF) == 0) { v >>= 10; c = 'M';
+ if ((v & 0x3FF) == 0) { v >>= 10; c = 'G';
+ if ((v & 0x3FF) == 0) { v >>= 10; c = 'T';
+ }}}}
+ else
+ {
+ PrintHex(s, v);
+ return;
+ }
+ char temp[32];
+ ConvertUInt64ToString(v, temp);
+ s += temp;
+ if (c)
+ s += c;
+}
+
+
+#ifdef _7ZIP_LARGE_PAGES
+
extern bool g_LargePagesMode;
+extern "C"
+{
+ extern SIZE_T g_LargePageSize;
+}
+
+void Add_LargePages_String(AString &s)
+{
+ if (g_LargePagesMode || g_LargePageSize != 0)
+ {
+ s += " (LP-";
+ PrintSize(s, g_LargePageSize);
+ if (!g_LargePagesMode)
+ s += "-NA";
+ s += ")";
+ }
+}
+
+#endif
+
+
static void PrintRequirements(IBenchPrintCallback &f, const char *sizeString,
bool size_Defined, UInt64 size, const char *threadsString, UInt32 numThreads)
@@ -1898,8 +1940,15 @@ static void PrintRequirements(IBenchPrintCallback &f, const char *sizeString,
else
f.Print(" ?");
f.Print(" MB");
- if (g_LargePagesMode)
- f.Print(" LP");
+
+ #ifdef _7ZIP_LARGE_PAGES
+ {
+ AString s;
+ Add_LargePages_String(s);
+ f.Print(s);
+ }
+ #endif
+
f.Print(", # ");
f.Print(threadsString);
PrintNumber(f, numThreads, 3);
@@ -2539,26 +2588,7 @@ static const char * const k_PF[] =
#endif
-static void PrintSize(AString &s, UInt64 v)
-{
- char c = 0;
- if ((v & 0x3FF) == 0) { v >>= 10; c = 'K';
- if ((v & 0x3FF) == 0) { v >>= 10; c = 'M';
- if ((v & 0x3FF) == 0) { v >>= 10; c = 'G';
- if ((v & 0x3FF) == 0) { v >>= 10; c = 'T';
- }}}}
- else
- {
- PrintHex(s, v);
- return;
- }
- char temp[32];
- ConvertUInt64ToString(v, temp);
- s += temp;
- if (c)
- s += c;
-}
-
+
static void PrintPage(AString &s, UInt32 v)
{
@@ -2707,8 +2737,9 @@ void GetCpuName(AString &s)
#endif
- if (g_LargePagesMode)
- s += " (LP)";
+ #ifdef _7ZIP_LARGE_PAGES
+ Add_LargePages_String(s);
+ #endif
}
@@ -2968,6 +2999,9 @@ HRESULT Bench(
UInt64 start = ::GetTimeCount();
UInt32 sum = (UInt32)start;
sum = CountCpuFreq(sum, (UInt32)(numMilCommands * 1000000 / kNumFreqCommands), g_BenchCpuFreqTemp);
+ if (sum == 0xF1541213)
+ if (printCallback)
+ printCallback->Print("");
const UInt64 realDelta = ::GetTimeCount() - start;
start = realDelta;
if (start == 0)
@@ -2984,7 +3018,7 @@ HRESULT Bench(
else
{
// PrintNumber(*printCallback, start, 0);
- PrintNumber(*printCallback, mipsVal, 5 + ((sum == 0xF1541213) ? 1 : 0));
+ PrintNumber(*printCallback, mipsVal, 5);
}
}
/*
diff --git a/CPP/7zip/UI/Common/Bench.h b/CPP/7zip/UI/Common/Bench.h
index 1990aab..1d052aa 100644
--- a/CPP/7zip/UI/Common/Bench.h
+++ b/CPP/7zip/UI/Common/Bench.h
@@ -68,5 +68,10 @@ void GetSysInfo(AString &s1, AString &s2);
void GetCpuName(AString &s);
void GetCpuFeatures(AString &s);
+#ifdef _7ZIP_LARGE_PAGES
+void Add_LargePages_String(AString &s);
+#else
+// #define Add_LargePages_String
+#endif
#endif
diff --git a/CPP/7zip/UI/Common/EnumDirItems.cpp b/CPP/7zip/UI/Common/EnumDirItems.cpp
index 032e2ff..2c941e1 100644
--- a/CPP/7zip/UI/Common/EnumDirItems.cpp
+++ b/CPP/7zip/UI/Common/EnumDirItems.cpp
@@ -1084,3 +1084,13 @@ CMessagePathException::CMessagePathException(const char *a, const wchar_t *u)
(*this) += u;
}
}
+
+CMessagePathException::CMessagePathException(const wchar_t *a, const wchar_t *u)
+{
+ (*this) += a;
+ if (u)
+ {
+ Add_LF();
+ (*this) += u;
+ }
+}
diff --git a/CPP/7zip/UI/Common/EnumDirItems.h b/CPP/7zip/UI/Common/EnumDirItems.h
index 6220500..ae1d226 100644
--- a/CPP/7zip/UI/Common/EnumDirItems.h
+++ b/CPP/7zip/UI/Common/EnumDirItems.h
@@ -22,6 +22,7 @@ HRESULT EnumerateItems(
struct CMessagePathException: public UString
{
CMessagePathException(const char *a, const wchar_t *u = NULL);
+ CMessagePathException(const wchar_t *a, const wchar_t *u = NULL);
};
diff --git a/CPP/7zip/UI/Common/HashCalc.cpp b/CPP/7zip/UI/Common/HashCalc.cpp
index 9c0a1d0..822018d 100644
--- a/CPP/7zip/UI/Common/HashCalc.cpp
+++ b/CPP/7zip/UI/Common/HashCalc.cpp
@@ -230,7 +230,7 @@ HRESULT HashCalc(
unsigned i;
CHashBundle hb;
RINOK(hb.SetMethods(EXTERNAL_CODECS_LOC_VARS options.Methods));
- hb.Init();
+ // hb.Init();
hb.NumErrors = dirItems.Stat.NumErrors;
diff --git a/CPP/7zip/UI/Common/HashCalc.h b/CPP/7zip/UI/Common/HashCalc.h
index 77373b8..524bd3b 100644
--- a/CPP/7zip/UI/Common/HashCalc.h
+++ b/CPP/7zip/UI/Common/HashCalc.h
@@ -51,9 +51,13 @@ struct CHashBundle: public IHashCalc
UInt64 CurSize;
+ UString MainName;
+ UString FirstFileName;
+
HRESULT SetMethods(DECL_EXTERNAL_CODECS_LOC_VARS const UStringVector &methods);
- void Init()
+ // void Init() {}
+ CHashBundle()
{
NumDirs = NumFiles = NumAltStreams = FilesSize = AltStreamsSize = NumErrors = 0;
}
@@ -76,7 +80,7 @@ struct CHashBundle: public IHashCalc
virtual HRESULT GetStream(const wchar_t *name, bool isFolder) x; \
virtual HRESULT OpenFileError(const FString &path, DWORD systemError) x; \
virtual HRESULT SetOperationResult(UInt64 fileSize, const CHashBundle &hb, bool showHash) x; \
- virtual HRESULT AfterLastFile(const CHashBundle &hb) x; \
+ virtual HRESULT AfterLastFile(CHashBundle &hb) x; \
struct IHashCallbackUI: public IDirItemsCallback
{
diff --git a/CPP/7zip/UI/Common/OpenArchive.cpp b/CPP/7zip/UI/Common/OpenArchive.cpp
index 9549269..d72454c 100644
--- a/CPP/7zip/UI/Common/OpenArchive.cpp
+++ b/CPP/7zip/UI/Common/OpenArchive.cpp
@@ -563,6 +563,8 @@ HRESULT CArc::GetItemPathToParent(UInt32 index, UInt32 parent, UStringVector &pa
UInt32 parentType = 0;
RINOK(GetRawProps->GetParent(curIndex, &curParent, &parentType));
+  // 18.06: fixed: we don't want to split the name into parts
+ /*
if (parentType != NParentType::kAltStream)
{
for (;;)
@@ -576,6 +578,7 @@ HRESULT CArc::GetItemPathToParent(UInt32 index, UInt32 parent, UStringVector &pa
s.DeleteFrom(pos);
}
}
+ */
parts.Insert(0, s);
@@ -2013,7 +2016,7 @@ HRESULT CArc::OpenStream2(const COpenOptions &op)
}
else
{
- const CArcInfoEx &ai = op.codecs->Formats[formatIndex];
+ const CArcInfoEx &ai = op.codecs->Formats[(unsigned)formatIndex];
if (ai.FindExtension(extension) >= 0)
{
if (ai.Flags_FindSignature() && searchMarkerInHandler)
diff --git a/CPP/7zip/UI/Common/Update.cpp b/CPP/7zip/UI/Common/Update.cpp
index fc1eede..2f1b365 100644
--- a/CPP/7zip/UI/Common/Update.cpp
+++ b/CPP/7zip/UI/Common/Update.cpp
@@ -288,29 +288,27 @@ void CArchivePath::ParseFromPath(const UString &path, EArcNameMode mode)
if (mode == k_ArcNameMode_Add)
return;
- if (mode == k_ArcNameMode_Exact)
- {
- BaseExtension.Empty();
- return;
- }
- int dotPos = Name.ReverseFind_Dot();
- if (dotPos < 0)
- return;
- if ((unsigned)dotPos == Name.Len() - 1)
+ if (mode != k_ArcNameMode_Exact)
{
- Name.DeleteBack();
- BaseExtension.Empty();
- return;
- }
- const UString ext = Name.Ptr(dotPos + 1);
- if (BaseExtension.IsEqualTo_NoCase(ext))
- {
- BaseExtension = ext;
- Name.DeleteFrom(dotPos);
+ int dotPos = Name.ReverseFind_Dot();
+ if (dotPos < 0)
+ return;
+ if ((unsigned)dotPos == Name.Len() - 1)
+ Name.DeleteBack();
+ else
+ {
+ const UString ext = Name.Ptr(dotPos + 1);
+ if (BaseExtension.IsEqualTo_NoCase(ext))
+ {
+ BaseExtension = ext;
+ Name.DeleteFrom(dotPos);
+ return;
+ }
+ }
}
- else
- BaseExtension.Empty();
+
+ BaseExtension.Empty();
}
UString CArchivePath::GetFinalPath() const
@@ -327,6 +325,7 @@ UString CArchivePath::GetFinalPath() const
UString CArchivePath::GetFinalVolPath() const
{
UString path = GetPathWithoutExt();
+ // if BaseExtension is empty, we must ignore VolExtension also.
if (!BaseExtension.IsEmpty())
{
path += '.';
@@ -1166,7 +1165,7 @@ HRESULT UpdateArchive(
{
errorInfo.SystemError = ERROR_ACCESS_DENIED;
errorInfo.Message = "The file is read-only";
- errorInfo.FileNames.Add(arcPath);
+ errorInfo.FileNames.Add(us2fs(arcPath));
return errorInfo.Get_HRESULT_Error();
}
@@ -1377,6 +1376,31 @@ HRESULT UpdateArchive(
unsigned ci;
+
+  // self-inclusion protection
+ if (options.DeleteAfterCompressing)
+ {
+ for (ci = 0; ci < options.Commands.Size(); ci++)
+ {
+ CArchivePath &ap = options.Commands[ci].ArchivePath;
+ const FString path = us2fs(ap.GetFinalPath());
+      // maybe we must compare absolute paths here
+ FOR_VECTOR (i, dirItems.Items)
+ {
+ const FString phyPath = dirItems.GetPhyPath(i);
+ if (phyPath == path)
+ {
+ UString s;
+ s = "It is not allowed to include archive to itself";
+ s.Add_LF();
+ s += path;
+ throw s;
+ }
+ }
+ }
+ }
+
+
for (ci = 0; ci < options.Commands.Size(); ci++)
{
CArchivePath &ap = options.Commands[ci].ArchivePath;
@@ -1562,26 +1586,39 @@ HRESULT UpdateArchive(
}
CCurrentDirRestorer curDirRestorer;
+
+ AStringVector paths;
+ AStringVector names;
for (i = 0; i < fullPaths.Size(); i++)
{
const UString arcPath2 = fs2us(fullPaths[i]);
const UString fileName = ExtractFileNameFromPath(arcPath2);
- const AString path (GetAnsiString(arcPath2));
- const AString name (GetAnsiString(fileName));
+ paths.Add(GetAnsiString(arcPath2));
+ names.Add(GetAnsiString(fileName));
+ // const AString path (GetAnsiString(arcPath2));
+ // const AString name (GetAnsiString(fileName));
// Warning!!! MAPISendDocuments function changes Current directory
// fnSend(0, ";", (LPSTR)(LPCSTR)path, (LPSTR)(LPCSTR)name, 0);
+ }
- MapiFileDesc f;
+ CRecordVector<MapiFileDesc> files;
+ files.ClearAndSetSize(paths.Size());
+
+ for (i = 0; i < paths.Size(); i++)
+ {
+ MapiFileDesc &f = files[i];
memset(&f, 0, sizeof(f));
f.nPosition = 0xFFFFFFFF;
- f.lpszPathName = (char *)(const char *)path;
- f.lpszFileName = (char *)(const char *)name;
-
+ f.lpszPathName = (char *)(const char *)paths[i];
+ f.lpszFileName = (char *)(const char *)names[i];
+ }
+
+ {
MapiMessage m;
memset(&m, 0, sizeof(m));
- m.nFileCount = 1;
- m.lpFiles = &f;
+ m.nFileCount = files.Size();
+ m.lpFiles = &files.Front();
const AString addr (GetAnsiString(options.EMailAddress));
MapiRecipDesc rec;
diff --git a/CPP/7zip/UI/Console/HashCon.cpp b/CPP/7zip/UI/Console/HashCon.cpp
index ec8e6dc..3ade0fd 100644
--- a/CPP/7zip/UI/Console/HashCon.cpp
+++ b/CPP/7zip/UI/Console/HashCon.cpp
@@ -332,7 +332,7 @@ void CHashCallbackConsole::PrintProperty(const char *name, UInt64 value)
*_so << name << s << endl;
}
-HRESULT CHashCallbackConsole::AfterLastFile(const CHashBundle &hb)
+HRESULT CHashCallbackConsole::AfterLastFile(CHashBundle &hb)
{
ClosePercents2();
diff --git a/CPP/7zip/UI/Console/List.cpp b/CPP/7zip/UI/Console/List.cpp
index ebcabb6..c56e2e2 100644
--- a/CPP/7zip/UI/Console/List.cpp
+++ b/CPP/7zip/UI/Console/List.cpp
@@ -1072,18 +1072,24 @@ HRESULT ListArchives(CCodecs *codecs,
errorCode = ERROR_FILE_NOT_FOUND;
lastError = HRESULT_FROM_WIN32(lastError);;
g_StdOut.Flush();
- *g_ErrStream << endl << kError << NError::MyFormatMessage(errorCode) << endl;
- g_ErrStream->NormalizePrint_UString(arcPath);
- *g_ErrStream << endl << endl;
+ if (g_ErrStream)
+ {
+ *g_ErrStream << endl << kError << NError::MyFormatMessage(errorCode) << endl;
+ g_ErrStream->NormalizePrint_UString(arcPath);
+ *g_ErrStream << endl << endl;
+ }
numErrors++;
continue;
}
if (fi.IsDir())
{
g_StdOut.Flush();
- *g_ErrStream << endl << kError;
- g_ErrStream->NormalizePrint_UString(arcPath);
- *g_ErrStream << " is not a file" << endl << endl;
+ if (g_ErrStream)
+ {
+ *g_ErrStream << endl << kError;
+ g_ErrStream->NormalizePrint_UString(arcPath);
+ *g_ErrStream << " is not a file" << endl << endl;
+ }
numErrors++;
continue;
}
@@ -1133,24 +1139,28 @@ HRESULT ListArchives(CCodecs *codecs,
{
if (result == E_ABORT)
return result;
+ if (result != S_FALSE)
+ lastError = result;
g_StdOut.Flush();
- *g_ErrStream << endl << kError;
- g_ErrStream->NormalizePrint_UString(arcPath);
- *g_ErrStream << " : ";
- if (result == S_FALSE)
+ if (g_ErrStream)
{
- Print_OpenArchive_Error(*g_ErrStream, codecs, arcLink);
- }
- else
- {
- lastError = result;
- *g_ErrStream << "opening : ";
- if (result == E_OUTOFMEMORY)
- *g_ErrStream << "Can't allocate required memory";
+ *g_ErrStream << endl << kError;
+ g_ErrStream->NormalizePrint_UString(arcPath);
+ *g_ErrStream << " : ";
+ if (result == S_FALSE)
+ {
+ Print_OpenArchive_Error(*g_ErrStream, codecs, arcLink);
+ }
else
- *g_ErrStream << NError::MyFormatMessage(result);
+ {
+ *g_ErrStream << "opening : ";
+ if (result == E_OUTOFMEMORY)
+ *g_ErrStream << "Can't allocate required memory";
+ else
+ *g_ErrStream << NError::MyFormatMessage(result);
+ }
+ *g_ErrStream << endl;
}
- *g_ErrStream << endl;
numErrors++;
continue;
}
diff --git a/CPP/7zip/UI/Console/Main.cpp b/CPP/7zip/UI/Console/Main.cpp
index 8f2825c..906fa91 100644
--- a/CPP/7zip/UI/Console/Main.cpp
+++ b/CPP/7zip/UI/Console/Main.cpp
@@ -24,6 +24,7 @@
#include "../../../Windows/TimeUtils.h"
#include "../Common/ArchiveCommandLine.h"
+#include "../Common/Bench.h"
#include "../Common/ExitCode.h"
#include "../Common/Extract.h"
@@ -56,8 +57,6 @@ using namespace NCommandLineParser;
HINSTANCE g_hInstance = 0;
#endif
-extern bool g_LargePagesMode;
-
extern CStdOutStream *g_StdStream;
extern CStdOutStream *g_ErrStream;
@@ -236,7 +235,7 @@ static void PrintWarningsPaths(const CErrorPathCodes &pc, CStdOutStream &so)
{
FOR_VECTOR(i, pc.Paths)
{
- so.NormalizePrint_UString(pc.Paths[i]);
+ so.NormalizePrint_UString(fs2us(pc.Paths[i]));
so << " : ";
so << NError::MyFormatMessage(pc.Codes[i]) << endl;
}
@@ -376,8 +375,13 @@ static void PrintMemUsage(const char *s, UInt64 val)
*g_StdStream << " " << s << " Memory =";
PrintNum(SHIFT_SIZE_VALUE(val, 20), 7);
*g_StdStream << " MB";
- if (g_LargePagesMode)
- *g_StdStream << " (LP)";
+
+ #ifdef _7ZIP_LARGE_PAGES
+ AString lp;
+ Add_LargePages_String(lp);
+ if (!lp.IsEmpty())
+ *g_StdStream << lp;
+ #endif
}
EXTERN_C_BEGIN
@@ -911,7 +915,7 @@ int Main2(
{
hashCalc = &hb;
ThrowException_if_Error(hb.SetMethods(EXTERNAL_CODECS_VARS_L options.HashMethods));
- hb.Init();
+ // hb.Init();
}
hresultMain = Extract(
diff --git a/CPP/7zip/UI/FileManager/ExtractCallback.cpp b/CPP/7zip/UI/FileManager/ExtractCallback.cpp
index 6433e91..f230594 100644
--- a/CPP/7zip/UI/FileManager/ExtractCallback.cpp
+++ b/CPP/7zip/UI/FileManager/ExtractCallback.cpp
@@ -759,11 +759,15 @@ STDMETHODIMP CExtractCallbackImp::AskWrite(
destPathResultTemp = fs2us(destPathSys);
}
else
+ {
+ if (NFind::DoesFileExist(destPathSys))
if (!NDir::DeleteFileAlways(destPathSys))
+ if (GetLastError() != ERROR_FILE_NOT_FOUND)
{
RINOK(MessageError("can not delete output file", destPathSys));
return E_ABORT;
}
+ }
}
*writeAnswer = BoolToInt(true);
return StringToBstr(destPathResultTemp, destPathResult);
diff --git a/CPP/7zip/UI/GUI/ExtractGUI.cpp b/CPP/7zip/UI/GUI/ExtractGUI.cpp
index 37aa45b..99db743 100644
--- a/CPP/7zip/UI/GUI/ExtractGUI.cpp
+++ b/CPP/7zip/UI/GUI/ExtractGUI.cpp
@@ -99,8 +99,10 @@ HRESULT CThreadExtracting::ProcessVirt()
CDecompressStat Stat;
#ifndef _SFX
+ /*
if (HashBundle)
HashBundle->Init();
+ */
#endif
HRESULT res = Extract(codecs,
@@ -119,7 +121,7 @@ HRESULT CThreadExtracting::ProcessVirt()
{
AddValuePair(Pairs, IDS_ARCHIVES_COLON, Stat.NumArchives);
AddSizeValuePair(Pairs, IDS_PROP_PACKED_SIZE, Stat.PackSize);
- AddHashBundleRes(Pairs, *HashBundle, UString());
+ AddHashBundleRes(Pairs, *HashBundle);
}
else if (Options->TestMode)
{
diff --git a/CPP/7zip/UI/GUI/HashGUI.h b/CPP/7zip/UI/GUI/HashGUI.h
index d6caa53..b626823 100644
--- a/CPP/7zip/UI/GUI/HashGUI.h
+++ b/CPP/7zip/UI/GUI/HashGUI.h
@@ -18,10 +18,10 @@ void AddValuePair(CPropNameValPairs &pairs, UINT resourceID, UInt64 value);
void AddSizeValue(UString &s, UInt64 value);
void AddSizeValuePair(CPropNameValPairs &pairs, UINT resourceID, UInt64 value);
-void AddHashBundleRes(CPropNameValPairs &s, const CHashBundle &hb, const UString &firstFileName);
-void AddHashBundleRes(UString &s, const CHashBundle &hb, const UString &firstFileName);
+void AddHashBundleRes(CPropNameValPairs &s, const CHashBundle &hb);
+void AddHashBundleRes(UString &s, const CHashBundle &hb);
void ShowHashResults(const CPropNameValPairs &propPairs, HWND hwnd);
-void ShowHashResults(const CHashBundle &hb, const UString &firstFileName, HWND hwnd);
+void ShowHashResults(const CHashBundle &hb, HWND hwnd);
#endif
diff --git a/CPP/Build.mak b/CPP/Build.mak
index 28d5eca..b3ea8e2 100644
--- a/CPP/Build.mak
+++ b/CPP/Build.mak
@@ -4,19 +4,20 @@ LIBS = $(LIBS) oleaut32.lib ole32.lib
CFLAGS = $(CFLAGS) -DUNICODE -D_UNICODE
!ENDIF
-# CFLAGS = $(CFLAGS) -FAsc -Fa$O/Asm/
-
!IFNDEF O
-!IFDEF CPU
-O=$(CPU)
+!IFDEF PLATFORM
+O=$(PLATFORM)
!ELSE
-O=O
+O=o
!ENDIF
!ENDIF
-!IF "$(CPU)" == "AMD64"
+# CFLAGS = $(CFLAGS) -FAsc -Fa$O/asm/
+
+
+!IF "$(PLATFORM)" == "x64"
MY_ML = ml64 -Dx64 -WX
-!ELSEIF "$(CPU)" == "ARM"
+!ELSEIF "$(PLATFORM)" == "arm"
MY_ML = armasm -WX
!ELSE
MY_ML = ml -WX
@@ -29,16 +30,16 @@ RFLAGS = $(RFLAGS) -dUNDER_CE
LFLAGS = $(LFLAGS) /ENTRY:mainACRTStartup
!ENDIF
!ELSE
-!IFNDEF NEW_COMPILER
+!IFDEF OLD_COMPILER
LFLAGS = $(LFLAGS) -OPT:NOWIN98
!ENDIF
-!IF "$(CPU)" != "ARM" && "$(CPU)" != "ARM64"
+!IF "$(PLATFORM)" != "arm" && "$(PLATFORM)" != "arm64"
CFLAGS = $(CFLAGS) -Gr
!ENDIF
LIBS = $(LIBS) user32.lib advapi32.lib shell32.lib
!ENDIF
-!IF "$(CPU)" == "ARM"
+!IF "$(PLATFORM)" == "arm"
COMPL_ASM = $(MY_ML) $** $O/$(*B).obj
!ELSE
COMPL_ASM = $(MY_ML) -c -Fo$O/ $**
@@ -46,19 +47,19 @@ COMPL_ASM = $(MY_ML) -c -Fo$O/ $**
CFLAGS = $(CFLAGS) -nologo -c -Fo$O/ -W4 -WX -EHsc -Gy -GR- -GF
-!IFDEF MY_STATIC_LINK
+!IFDEF MY_DYNAMIC_LINK
+CFLAGS = $(CFLAGS) -MD
+!ELSE
!IFNDEF MY_SINGLE_THREAD
CFLAGS = $(CFLAGS) -MT
!ENDIF
-!ELSE
-CFLAGS = $(CFLAGS) -MD
!ENDIF
-!IFDEF NEW_COMPILER
+!IFNDEF OLD_COMPILER
CFLAGS = $(CFLAGS) -GS- -Zc:forScope -Zc:wchar_t
!IFNDEF UNDER_CE
CFLAGS = $(CFLAGS) -MP2
-!IFNDEF CPU
+!IFNDEF PLATFORM
# CFLAGS = $(CFLAGS) -arch:IA32
!ENDIF
!ENDIF
@@ -66,7 +67,13 @@ CFLAGS = $(CFLAGS) -MP2
CFLAGS = $(CFLAGS)
!ENDIF
-!IF "$(CPU)" == "AMD64"
+!IFNDEF UNDER_CE
+!IF "$(PLATFORM)" == "arm"
+CFLAGS = $(CFLAGS) -D_ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE
+!ENDIF
+!ENDIF
+
+!IF "$(PLATFORM)" == "x64"
CFLAGS_O1 = $(CFLAGS) -O1
!ELSE
CFLAGS_O1 = $(CFLAGS) -O1
@@ -82,7 +89,7 @@ LFLAGS = $(LFLAGS) /LARGEADDRESSAWARE
!IFDEF DEF_FILE
LFLAGS = $(LFLAGS) -DLL -DEF:$(DEF_FILE)
!ELSE
-!IF defined(MY_FIXED) && "$(CPU)" != "ARM" && "$(CPU)" != "ARM64"
+!IF defined(MY_FIXED) && "$(PLATFORM)" != "arm" && "$(PLATFORM)" != "arm64"
LFLAGS = $(LFLAGS) /FIXED
!ELSE
LFLAGS = $(LFLAGS) /FIXED:NO
@@ -91,7 +98,7 @@ LFLAGS = $(LFLAGS) /FIXED:NO
!ENDIF
-# !IF "$(CPU)" == "AMD64"
+# !IF "$(PLATFORM)" == "x64"
!IFDEF SUB_SYS_VER
@@ -131,10 +138,10 @@ clean:
$O:
if not exist "$O" mkdir "$O"
-$O/Asm:
- if not exist "$O/Asm" mkdir "$O/Asm"
+$O/asm:
+ if not exist "$O/asm" mkdir "$O/asm"
-$(PROGPATH): $O $O/Asm $(OBJS) $(DEF_FILE)
+$(PROGPATH): $O $O/asm $(OBJS) $(DEF_FILE)
link $(LFLAGS) -out:$(PROGPATH) $(OBJS) $(LIBS)
!IFNDEF NO_DEFAULT_RES
diff --git a/CPP/Common/ListFileUtils.cpp b/CPP/Common/ListFileUtils.cpp
index f22680b..e40b2d0 100644
--- a/CPP/Common/ListFileUtils.cpp
+++ b/CPP/Common/ListFileUtils.cpp
@@ -25,14 +25,21 @@ static void AddName(UStringVector &strings, UString &s)
strings.Add(s);
}
-bool ReadNamesFromListFile(CFSTR fileName, UStringVector &strings, UINT codePage)
+bool ReadNamesFromListFile2(CFSTR fileName, UStringVector &strings, UINT codePage, DWORD &lastError)
{
+ lastError = 0;
NWindows::NFile::NIO::CInFile file;
if (!file.Open(fileName))
+ {
+ lastError = ::GetLastError();
return false;
+ }
UInt64 fileSize;
if (!file.GetLength(fileSize))
+ {
+ lastError = ::GetLastError();
return false;
+ }
if (fileSize >= ((UInt32)1 << 31) - 32)
return false;
UString u;
@@ -43,7 +50,10 @@ bool ReadNamesFromListFile(CFSTR fileName, UStringVector &strings, UINT codePage
CByteArr buf((size_t)fileSize);
UInt32 processed;
if (!file.Read(buf, (UInt32)fileSize, processed))
+ {
+ lastError = ::GetLastError();
return false;
+ }
if (processed != fileSize)
return false;
file.Close();
@@ -74,7 +84,10 @@ bool ReadNamesFromListFile(CFSTR fileName, UStringVector &strings, UINT codePage
char *p = s.GetBuf((unsigned)fileSize);
UInt32 processed;
if (!file.Read(p, (UInt32)fileSize, processed))
+ {
+ lastError = ::GetLastError();
return false;
+ }
if (processed != fileSize)
return false;
file.Close();
diff --git a/CPP/Common/ListFileUtils.h b/CPP/Common/ListFileUtils.h
index ec32d8e..a4f0d16 100644
--- a/CPP/Common/ListFileUtils.h
+++ b/CPP/Common/ListFileUtils.h
@@ -9,6 +9,10 @@
#define MY__CP_UTF16 1200
#define MY__CP_UTF16BE 1201
-bool ReadNamesFromListFile(CFSTR fileName, UStringVector &strings, UINT codePage = CP_OEMCP);
+// bool ReadNamesFromListFile(CFSTR fileName, UStringVector &strings, UINT codePage = CP_OEMCP);
+
+ // = CP_OEMCP
+bool ReadNamesFromListFile2(CFSTR fileName, UStringVector &strings, UINT codePage,
+ DWORD &lastError);
#endif
diff --git a/CPP/Common/MyString.h b/CPP/Common/MyString.h
index f484ad2..45cea98 100644
--- a/CPP/Common/MyString.h
+++ b/CPP/Common/MyString.h
@@ -307,6 +307,7 @@ public:
void ReplaceOneCharAtPos(unsigned pos, char c) { _chars[pos] = c; }
+ char *GetBuf() { return _chars; }
/* GetBuf(minLen): provides the buffer that can store
at least (minLen) characters and additional null terminator.
9.35: GetBuf doesn't preserve old characters and terminator */
diff --git a/DOC/7zFormat.txt b/DOC/7zFormat.txt
index 6b8678f..9239e93 100644
--- a/DOC/7zFormat.txt
+++ b/DOC/7zFormat.txt
@@ -1,4 +1,4 @@
-7z Format description (4.59)
+7z Format description (18.06)
----------------------------
This file contains description of 7z archive format.
@@ -175,7 +175,7 @@ SignatureHeader
ArchiveVersion
{
BYTE Major; // now = 0
- BYTE Minor; // now = 2
+ BYTE Minor; // now = 4
};
UINT32 StartHeaderCRC;
@@ -399,7 +399,7 @@ FilesInfo
UINT64 DataIndex
[]
for(Definded Items)
- UINT64 Time
+ REAL_UINT64 Time
[]
kNames: (0x11)
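
The minor-version bump above (2 -> 4) lives in the fixed 32-byte SignatureHeader at the very start of every .7z file. For orientation, a rough sketch of that header as a C struct is shown below; it is illustrative only (the struct name is assumed, and real readers parse the bytes individually, little-endian and unaligned, rather than mapping a struct over the file):

    /* illustrative sketch only (CSignatureHeader_Sketch is an assumed name);
       types are the SDK's Byte / UInt32 / UInt64 from 7zTypes.h */
    typedef struct
    {
      Byte Signature[6];        /* '7', 'z', 0xBC, 0xAF, 0x27, 0x1C */
      Byte Major;               /* now = 0 */
      Byte Minor;               /* now = 4 */
      UInt32 StartHeaderCRC;    /* CRC of the three StartHeader fields below */
      UInt64 NextHeaderOffset;  /* StartHeader */
      UInt64 NextHeaderSize;    /*     "       */
      UInt32 NextHeaderCRC;     /*     "       */
    } CSignatureHeader_Sketch;
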
diff --git a/DOC/Methods.txt b/DOC/Methods.txt
index 11adcb0..6d0641b 100644
--- a/DOC/Methods.txt
+++ b/DOC/Methods.txt
@@ -1,8 +1,8 @@
7-Zip method IDs for 7z and xz archives
---------------------------------------
-Version: 17.01
-Date: 2017-05-27
+Version: 18.06
+Date: 2018-06-30
Each compression or crypto method in 7z is associated with unique binary value (ID).
The length of ID in bytes is arbitrary but it can not exceed 63 bits (8 bytes).
@@ -132,6 +132,12 @@ List of defined IDs
04 - LZ4
05 - LZ5
06 - LIZARD
+
+      12 xx - reserved (Denis Anisimov)
+
+ 01 - WavPack2
+ FE - eSplitter
+ FF - RawSplitter
06.. - Crypto
diff --git a/DOC/lzma-history.txt b/DOC/lzma-history.txt
index c53e3bd..c4ea98d 100644
--- a/DOC/lzma-history.txt
+++ b/DOC/lzma-history.txt
@@ -1,6 +1,20 @@
HISTORY of the LZMA SDK
-----------------------
+18.06 2018-12-30
+-------------------------
+- The speed for LZMA/LZMA2 compressing was increased by 3-10%,
+ and there are minor changes in compression ratio.
+- Some bugs were fixed.
+- The bug in 7-Zip 18.02-18.05 was fixed:
+  There was a memory leak in the multithreaded xz decoder - XzDecMt_Decode(),
+  if the xz stream contains only one block.
+- Changes in the MSVS compiler makefiles:
+  - the makefiles now use the "PLATFORM" macro name with values (x64, x86, arm64)
+    instead of the "CPU" macro name with values (AMD64, ARM64).
+  - the makefiles now use the static version of the run-time library by default.
+
+
18.05 2018-04-30
-------------------------
- The speed for LZMA/LZMA2 compressing was increased
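
A short usage note on the PLATFORM change recorded in the 18.06 entry above: with the updated makefiles (see the CPP/Build.mak and *.mak hunks in this change), the target platform is taken from the PLATFORM macro instead of CPU. Under a Visual Studio x64 developer prompt, PLATFORM is typically already set to x64 by the environment; otherwise it can be passed on the nmake command line, for example (hypothetical invocation):

    cd CPP\7zip\Bundles\LzmaCon
    nmake PLATFORM=x64
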
diff --git a/DOC/lzma-sdk.txt b/DOC/lzma-sdk.txt
index 01521e9..97fc26d 100644
--- a/DOC/lzma-sdk.txt
+++ b/DOC/lzma-sdk.txt
@@ -1,4 +1,4 @@
-LZMA SDK 18.05
+LZMA SDK 18.06
--------------
LZMA SDK provides the documentation, samples, header files,
diff --git a/Java/Tukaani/Android.bp b/Java/Tukaani/Android.bp
deleted file mode 100644
index 2ce803b..0000000
--- a/Java/Tukaani/Android.bp
+++ /dev/null
@@ -1,23 +0,0 @@
-//
-// Copyright (C) 2011 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-java_library_static {
- name: "xz-java",
-
- srcs: ["src/**/*.java"],
- java_resource_dirs: ["src"],
-
- sdk_version: "current",
-}
diff --git a/Java/Tukaani/COPYING b/Java/Tukaani/COPYING
deleted file mode 100644
index c1d404d..0000000
--- a/Java/Tukaani/COPYING
+++ /dev/null
@@ -1,10 +0,0 @@
-
-Licensing of XZ for Java
-========================
-
- All the files in this package have been written by Lasse Collin
- and/or Igor Pavlov. All these files have been put into the
- public domain. You can do whatever you want with these files.
-
- This software is provided "as is", without any warranty.
-
diff --git a/Java/Tukaani/NEWS b/Java/Tukaani/NEWS
deleted file mode 100644
index 88ecd75..0000000
--- a/Java/Tukaani/NEWS
+++ /dev/null
@@ -1,56 +0,0 @@
-
-XZ for Java release notes
-=========================
-
-1.5 (2014-03-08)
-
- * Fix a wrong assertion in BCJ decoders.
-
- * Use a field instead of reallocating a temporary one-byte buffer
- in read() and write() implementations in several classes.
-
-1.4 (2013-09-22)
-
- * Add LZMAInputStream for decoding .lzma files and raw LZMA streams.
-
-1.3 (2013-05-12)
-
- * Fix a data corruption bug when flushing the LZMA2 encoder or
- when using a preset dictionary.
-
- * Make information about the XZ Block positions and sizes available
- in SeekableXZInputStream by adding the following public functions:
- - int getStreamCount()
- - int getBlockCount()
- - long getBlockPos(int blockNumber)
- - long getBlockSize(int blockNumber)
- - long getBlockCompPos(int blockNumber)
- - long getBlockCompSize(int blockNumber)
- - int getBlockCheckType(int blockNumber)
- - int getBlockNumber(long pos)
- - void seekToBlock(int blockNumber)
-
- * Minor improvements to javadoc comments were made.
-
-1.2 (2013-01-29)
-
- * Use fields instead of reallocating frequently-needed temporary
- objects in the LZMA encoder.
-
- * Fix the contents of xz-${version}-sources.jar.
-
- * Add OSGi attributes to xz.jar.
-
-1.1 (2012-07-05)
-
- * The depthLimit argument in the LZMA2Options constructor is
- no longer ignored.
-
- * LZMA2Options() can no longer throw UnsupportedOptionsException.
-
- * Fix bugs in the preset dictionary support in the LZMA2 encoder.
-
-1.0 (2011-10-22)
-
- * The first stable release
-
diff --git a/Java/Tukaani/README b/Java/Tukaani/README
deleted file mode 100644
index c230299..0000000
--- a/Java/Tukaani/README
+++ /dev/null
@@ -1,50 +0,0 @@
-
-XZ for Java
-===========
-
-Introduction
-
- This aims to be a complete implementation of XZ data compression
- in pure Java. Features:
- - Full support for the .xz file format specification version 1.0.4
- - Single-threaded streamed compression and decompression
- - Single-threaded decompression with limited random access support
- - Raw streams (no .xz headers) for advanced users, including LZMA2
- with preset dictionary
-
- Threading is planned but it is unknown when it will be implemented.
-
- For the latest source code, see the project home page:
-
- http://tukaani.org/xz/java.html
-
- The source code is compatible with Java 1.4 and later.
-
-Building
-
- It is recommended to use Apache Ant. Type "ant" to compile the
- classes and create the .jar files. Type "ant doc" to build the
- javadoc HTML documentation. Note that building the documentation
- will download a small file named "package-list" from Oracle to
- enable linking to the documentation of the standard Java classes.
-
- If you cannot or don't want to use Ant, just compile all .java
- files under the "src" directory.
-
-Demo programs
-
- You can test compression with XZEncDemo, which compresses from
- standard input to standard output:
-
- java -jar build/jar/XZEncDemo.jar < foo.txt > foo.txt.xz
-
- You can test decompression with XZDecDemo, which decompresses to
- standard output:
-
- java -jar build/jar/XZDecDemo.jar foo.txt.xz
-
-Reporting bugs
-
- Report bugs to <lasse.collin@tukaani.org> or visit the IRC channel
- #tukaani on Freenode and talk to Larhzu.
-
diff --git a/Java/Tukaani/THANKS b/Java/Tukaani/THANKS
deleted file mode 100644
index f345069..0000000
--- a/Java/Tukaani/THANKS
+++ /dev/null
@@ -1,16 +0,0 @@
-
-Thanks
-======
-
-People (in alphabetical order):
- - Stefan Bodewig
- - Carl Hasselskog
- - Arunesh Mathur
- - Jim Meyering
- - Benoit Nadeau
- - Christian Schlichtherle
- - Alyosha Vasilieva
-
-Companies:
- - Red Hat
-
diff --git a/Java/Tukaani/build.properties b/Java/Tukaani/build.properties
deleted file mode 100644
index 64ceac4..0000000
--- a/Java/Tukaani/build.properties
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# build.properties
-#
-# Author: Lasse Collin <lasse.collin@tukaani.org>
-#
-# This file has been put into the public domain.
-# You can do whatever you want with this file.
-#
-
-title = XZ data compression
-homepage = http://tukaani.org/xz/java.html
-version = 1.5
-
-debug = false
-sourcever = 1.4
-src_dir = src
-
-build_dir = build
-dist_dir = ${build_dir}/dist
-dist_file = ${dist_dir}/xz-java-${version}.zip
-classes_dir = ${build_dir}/classes
-jar_dir = ${build_dir}/jar
-doc_dir = ${build_dir}/doc
-
-extdoc_url = http://docs.oracle.com/javase/7/docs/api
-extdoc_dir = extdoc
-
-pom_template = maven/pom_template.xml
-maven_dir = ${build_dir}/maven
diff --git a/Java/Tukaani/build.xml b/Java/Tukaani/build.xml
deleted file mode 100644
index f22e6d6..0000000
--- a/Java/Tukaani/build.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
- build.xml
-
- Author: Lasse Collin <lasse.collin@tukaani.org>
-
- This file has been put into the public domain.
- You can do whatever you want with this file.
--->
-
-<project name="XZ" default="jar">
-
- <property file="build.properties"/>
-
- <target name="clean"
- description="Deletes generated files except 'extdoc/package-list'">
- <delete dir="${build_dir}"/>
- </target>
-
- <target name="distclean" depends="clean"
- description="Deletes all generated files">
- <delete dir="extdoc"/>
- </target>
-
- <target name="dist" description="Creates a source package (.zip)">
- <mkdir dir="${dist_dir}"/>
- <zip destfile="${dist_file}">
- <fileset dir="." includesfile="fileset-misc.txt"/>
- <fileset dir="." includesfile="fileset-src.txt"/>
- </zip>
- </target>
-
- <target name="doc"
- description="Generates HTML documentation with javadoc">
- <mkdir dir="${doc_dir}"/>
- <mkdir dir="${extdoc_dir}"/>
- <get src="${extdoc_url}/package-list"
- dest="${extdoc_dir}/package-list" skipexisting="true"/>
- <javadoc sourcepath="${src_dir}" destdir="${doc_dir}"
- source="${sourcever}" packagenames="org.tukaani.xz"
- windowtitle="XZ data compression"
- linkoffline="${extdoc_url} ${extdoc_dir}"/>
- </target>
-
- <target name="compile" description="Compiles the classes">
- <mkdir dir="${classes_dir}"/>
- <javac srcdir="." sourcepath="${src_dir}" destdir="${classes_dir}"
- includeAntRuntime="false" debug="${debug}"
- source="${sourcever}" includesfile="fileset-src.txt"
- excludes="**/package-info.java">
- <compilerarg compiler="modern" value="-Xlint"/>
- </javac>
- </target>
-
- <target name="jar" depends="compile"
- description="Creates JAR packages">
- <mkdir dir="${jar_dir}"/>
-
- <jar destfile="${jar_dir}/xz.jar" basedir="${classes_dir}"
- includes="org/tukaani/xz/**">
- <manifest>
- <attribute name="Implementation-Title" value="${title}"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-URL" value="${homepage}"/>
- <attribute name="Sealed" value="true"/>
- <!-- Attributes required for OSGi bundles. -->
- <attribute name="Bundle-ManifestVersion" value="2"/>
- <attribute name="Bundle-SymbolicName" value="org.tukaani.xz"/>
- <attribute name="Bundle-Version" value="${version}"/>
- <attribute name="Export-Package" value="org.tukaani.xz"/>
- <attribute name="Bundle-Name" value="${title}"/>
- <attribute name="Bundle-DocURL" value="${homepage}"/>
- </manifest>
- </jar>
-
- <jar destfile="${jar_dir}/XZEncDemo.jar" basedir="${classes_dir}"
- includes="XZEncDemo.class">
- <manifest>
- <attribute name="Main-Class" value="XZEncDemo"/>
- <attribute name="Class-Path" value="xz.jar"/>
- </manifest>
- </jar>
-
- <jar destfile="${jar_dir}/XZDecDemo.jar" basedir="${classes_dir}"
- includes="XZDecDemo.class">
- <manifest>
- <attribute name="Main-Class" value="XZDecDemo"/>
- <attribute name="Class-Path" value="xz.jar"/>
- </manifest>
- </jar>
-
- <jar destfile="${jar_dir}/XZSeekEncDemo.jar" basedir="${classes_dir}"
- includes="XZSeekEncDemo.class">
- <manifest>
- <attribute name="Main-Class" value="XZSeekEncDemo"/>
- <attribute name="Class-Path" value="xz.jar"/>
- </manifest>
- </jar>
-
- <jar destfile="${jar_dir}/XZSeekDecDemo.jar" basedir="${classes_dir}"
- includes="XZSeekDecDemo.class">
- <manifest>
- <attribute name="Main-Class" value="XZSeekDecDemo"/>
- <attribute name="Class-Path" value="xz.jar"/>
- </manifest>
- </jar>
-
- <jar destfile="${jar_dir}/LZMADecDemo.jar" basedir="${classes_dir}"
- includes="LZMADecDemo.class">
- <manifest>
- <attribute name="Main-Class" value="LZMADecDemo"/>
- <attribute name="Class-Path" value="xz.jar"/>
- </manifest>
- </jar>
- </target>
-
- <!-- It's an ugly quick hack. Maybe some day there will be a cleaner
- version (e.g. by using Maven). -->
- <target name="maven" depends="dist, doc, jar"
- description="Creates the files for a Maven repository">
- <mkdir dir="${maven_dir}"/>
-
- <copy file="${pom_template}" tofile="${maven_dir}/xz-${version}.pom"
- overwrite="true">
- <filterset>
- <filter token="VERSION" value="${version}"/>
- <filter token="TITLE" value="${title}"/>
- <filter token="HOMEPAGE" value="${homepage}"/>
- </filterset>
- </copy>
-
- <copy file="${jar_dir}/xz.jar" tofile="${maven_dir}/xz-${version}.jar"
- preservelastmodified="true" overwrite="true"/>
-
- <jar destfile="${maven_dir}/xz-${version}-javadoc.jar"
- basedir="${doc_dir}"/>
-
- <jar destfile="${maven_dir}/xz-${version}-sources.jar"
- basedir="${src_dir}" includes="org/tukaani/xz/**"/>
- </target>
-
-</project>
diff --git a/Java/Tukaani/fileset-misc.txt b/Java/Tukaani/fileset-misc.txt
deleted file mode 100644
index 9d16359..0000000
--- a/Java/Tukaani/fileset-misc.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-README
-NEWS
-COPYING
-THANKS
-build.xml
-build.properties
-fileset-src.txt
-fileset-misc.txt
-.gitignore
-maven/README
-maven/pom_template.xml
diff --git a/Java/Tukaani/fileset-src.txt b/Java/Tukaani/fileset-src.txt
deleted file mode 100644
index c7181fb..0000000
--- a/Java/Tukaani/fileset-src.txt
+++ /dev/null
@@ -1,100 +0,0 @@
-src/LZMADecDemo.java
-src/XZDecDemo.java
-src/XZEncDemo.java
-src/XZSeekDecDemo.java
-src/XZSeekEncDemo.java
-src/org/tukaani/xz/ARMOptions.java
-src/org/tukaani/xz/ARMThumbOptions.java
-src/org/tukaani/xz/BCJCoder.java
-src/org/tukaani/xz/BCJDecoder.java
-src/org/tukaani/xz/BCJEncoder.java
-src/org/tukaani/xz/BCJOptions.java
-src/org/tukaani/xz/BlockInputStream.java
-src/org/tukaani/xz/BlockOutputStream.java
-src/org/tukaani/xz/CorruptedInputException.java
-src/org/tukaani/xz/CountingInputStream.java
-src/org/tukaani/xz/CountingOutputStream.java
-src/org/tukaani/xz/DeltaCoder.java
-src/org/tukaani/xz/DeltaDecoder.java
-src/org/tukaani/xz/DeltaEncoder.java
-src/org/tukaani/xz/DeltaInputStream.java
-src/org/tukaani/xz/DeltaOptions.java
-src/org/tukaani/xz/DeltaOutputStream.java
-src/org/tukaani/xz/FilterCoder.java
-src/org/tukaani/xz/FilterDecoder.java
-src/org/tukaani/xz/FilterEncoder.java
-src/org/tukaani/xz/FilterOptions.java
-src/org/tukaani/xz/FinishableOutputStream.java
-src/org/tukaani/xz/FinishableWrapperOutputStream.java
-src/org/tukaani/xz/IA64Options.java
-src/org/tukaani/xz/IndexIndicatorException.java
-src/org/tukaani/xz/LZMA2Coder.java
-src/org/tukaani/xz/LZMA2Decoder.java
-src/org/tukaani/xz/LZMA2Encoder.java
-src/org/tukaani/xz/LZMA2InputStream.java
-src/org/tukaani/xz/LZMA2Options.java
-src/org/tukaani/xz/LZMA2OutputStream.java
-src/org/tukaani/xz/LZMAInputStream.java
-src/org/tukaani/xz/MemoryLimitException.java
-src/org/tukaani/xz/PowerPCOptions.java
-src/org/tukaani/xz/RawCoder.java
-src/org/tukaani/xz/SPARCOptions.java
-src/org/tukaani/xz/SeekableFileInputStream.java
-src/org/tukaani/xz/SeekableInputStream.java
-src/org/tukaani/xz/SeekableXZInputStream.java
-src/org/tukaani/xz/SimpleInputStream.java
-src/org/tukaani/xz/SimpleOutputStream.java
-src/org/tukaani/xz/SingleXZInputStream.java
-src/org/tukaani/xz/UncompressedLZMA2OutputStream.java
-src/org/tukaani/xz/UnsupportedOptionsException.java
-src/org/tukaani/xz/X86Options.java
-src/org/tukaani/xz/XZ.java
-src/org/tukaani/xz/XZFormatException.java
-src/org/tukaani/xz/XZIOException.java
-src/org/tukaani/xz/XZInputStream.java
-src/org/tukaani/xz/XZOutputStream.java
-src/org/tukaani/xz/check/CRC32.java
-src/org/tukaani/xz/check/CRC64.java
-src/org/tukaani/xz/check/Check.java
-src/org/tukaani/xz/check/None.java
-src/org/tukaani/xz/check/SHA256.java
-src/org/tukaani/xz/common/DecoderUtil.java
-src/org/tukaani/xz/common/EncoderUtil.java
-src/org/tukaani/xz/common/StreamFlags.java
-src/org/tukaani/xz/common/Util.java
-src/org/tukaani/xz/delta/DeltaCoder.java
-src/org/tukaani/xz/delta/DeltaDecoder.java
-src/org/tukaani/xz/delta/DeltaEncoder.java
-src/org/tukaani/xz/index/BlockInfo.java
-src/org/tukaani/xz/index/IndexBase.java
-src/org/tukaani/xz/index/IndexDecoder.java
-src/org/tukaani/xz/index/IndexEncoder.java
-src/org/tukaani/xz/index/IndexHash.java
-src/org/tukaani/xz/index/IndexRecord.java
-src/org/tukaani/xz/lz/BT4.java
-src/org/tukaani/xz/lz/CRC32Hash.java
-src/org/tukaani/xz/lz/HC4.java
-src/org/tukaani/xz/lz/Hash234.java
-src/org/tukaani/xz/lz/LZDecoder.java
-src/org/tukaani/xz/lz/LZEncoder.java
-src/org/tukaani/xz/lz/Matches.java
-src/org/tukaani/xz/lzma/LZMACoder.java
-src/org/tukaani/xz/lzma/LZMADecoder.java
-src/org/tukaani/xz/lzma/LZMAEncoder.java
-src/org/tukaani/xz/lzma/LZMAEncoderFast.java
-src/org/tukaani/xz/lzma/LZMAEncoderNormal.java
-src/org/tukaani/xz/lzma/Optimum.java
-src/org/tukaani/xz/lzma/State.java
-src/org/tukaani/xz/package-info.java
-src/org/tukaani/xz/rangecoder/RangeCoder.java
-src/org/tukaani/xz/rangecoder/RangeDecoder.java
-src/org/tukaani/xz/rangecoder/RangeDecoderFromBuffer.java
-src/org/tukaani/xz/rangecoder/RangeDecoderFromStream.java
-src/org/tukaani/xz/rangecoder/RangeEncoder.java
-src/org/tukaani/xz/simple/ARM.java
-src/org/tukaani/xz/simple/ARMThumb.java
-src/org/tukaani/xz/simple/IA64.java
-src/org/tukaani/xz/simple/PowerPC.java
-src/org/tukaani/xz/simple/SPARC.java
-src/org/tukaani/xz/simple/SimpleFilter.java
-src/org/tukaani/xz/simple/X86.java
diff --git a/Java/Tukaani/maven/README b/Java/Tukaani/maven/README
deleted file mode 100644
index 2692a97..0000000
--- a/Java/Tukaani/maven/README
+++ /dev/null
@@ -1,2 +0,0 @@
-The pom_template.xml is for a Maven repository but it's not meant
-for building the project. Note that build.xml will replace @foo@ tags.
diff --git a/Java/Tukaani/maven/pom_template.xml b/Java/Tukaani/maven/pom_template.xml
deleted file mode 100644
index 1adeece..0000000
--- a/Java/Tukaani/maven/pom_template.xml
+++ /dev/null
@@ -1,58 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
- Author: Lasse Collin <lasse.collin@tukaani.org>
-
- This file has been put into the public domain.
- You can do whatever you want with this file.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
- http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <groupId>org.tukaani</groupId>
- <artifactId>xz</artifactId>
- <version>@VERSION@</version>
- <packaging>jar</packaging>
-
- <name>XZ for Java</name>
- <description>@TITLE@</description>
- <url>@HOMEPAGE@</url>
-
- <licenses>
- <license>
- <name>Public Domain</name>
- <comments>You can do whatever you want with this package.</comments>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <scm>
- <url>http://git.tukaani.org/?p=xz-java.git</url>
- <connection>scm:git:http://git.tukaani.org/xz-java.git</connection>
- </scm>
-
- <developers>
- <developer>
- <name>Lasse Collin</name>
- <email>lasse.collin@tukaani.org</email>
- </developer>
- </developers>
-
- <contributors>
- <contributor>
- <!-- According to Maven docs, it's good to only list those people
- as <developers> that should be contacted if someone wants
- to talk with an upstream developer. Thus, Igor Pavlov is
- marked as a <contributor> even though XZ for Java simply
- couldn't exist without Igor Pavlov's code. -->
- <name>Igor Pavlov</name>
- <url>http://7-zip.org/</url>
- </contributor>
- </contributors>
-
-</project>
diff --git a/Java/Tukaani/src/LZMADecDemo.java b/Java/Tukaani/src/LZMADecDemo.java
deleted file mode 100644
index 1098725..0000000
--- a/Java/Tukaani/src/LZMADecDemo.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * LZMADecDemo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-import java.io.*;
-import org.tukaani.xz.*;
-
-/**
- * Decompresses .lzma files to standard output. If no arguments are given,
- * reads from standard input.
- *
- * NOTE: For most purposes, .lzma is a legacy format and usually you should
- * use .xz instead.
- */
-class LZMADecDemo {
- public static void main(String[] args) {
- byte[] buf = new byte[8192];
- String name = null;
-
- try {
- if (args.length == 0) {
- name = "standard input";
-
- // No need to use BufferedInputStream with System.in which
- // seems to be fast with one-byte reads.
- InputStream in = new LZMAInputStream(System.in);
-
- int size;
- while ((size = in.read(buf)) != -1)
- System.out.write(buf, 0, size);
-
- } else {
- // Read from files given on the command line.
- for (int i = 0; i < args.length; ++i) {
- name = args[i];
- InputStream in = new FileInputStream(name);
-
- try {
- // In contrast to other classes in org.tukaani.xz,
- // LZMAInputStream doesn't do buffering internally
- // and reads one byte at a time. BufferedInputStream
- // gives a huge performance improvement here but even
- // then it's slower than the other input streams from
- // org.tukaani.xz.
- in = new BufferedInputStream(in);
- in = new LZMAInputStream(in);
-
- int size;
- while ((size = in.read(buf)) != -1)
- System.out.write(buf, 0, size);
-
- } finally {
- // Close FileInputStream (directly or indirectly
- // via LZMAInputStream, it doesn't matter).
- in.close();
- }
- }
- }
- } catch (FileNotFoundException e) {
- System.err.println("LZMADecDemo: Cannot open " + name + ": "
- + e.getMessage());
- System.exit(1);
-
- } catch (EOFException e) {
- System.err.println("LZMADecDemo: Unexpected end of input on "
- + name);
- System.exit(1);
-
- } catch (IOException e) {
- System.err.println("LZMADecDemo: Error decompressing from "
- + name + ": " + e.getMessage());
- System.exit(1);
- }
- }
-}
diff --git a/Java/Tukaani/src/XZDecDemo.java b/Java/Tukaani/src/XZDecDemo.java
deleted file mode 100644
index 6876eea..0000000
--- a/Java/Tukaani/src/XZDecDemo.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * XZDecDemo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-import java.io.*;
-import org.tukaani.xz.*;
-
-/**
- * Decompresses .xz files to standard output. If no arguments are given,
- * reads from standard input.
- */
-class XZDecDemo {
- public static void main(String[] args) {
- byte[] buf = new byte[8192];
- String name = null;
-
- try {
- if (args.length == 0) {
- name = "standard input";
- InputStream in = new XZInputStream(System.in);
-
- int size;
- while ((size = in.read(buf)) != -1)
- System.out.write(buf, 0, size);
-
- } else {
- // Read from files given on the command line.
- for (int i = 0; i < args.length; ++i) {
- name = args[i];
- InputStream in = new FileInputStream(name);
-
- try {
- // Since XZInputStream does some buffering internally
- // anyway, BufferedInputStream doesn't seem to be
- // needed here to improve performance.
- // in = new BufferedInputStream(in);
- in = new XZInputStream(in);
-
- int size;
- while ((size = in.read(buf)) != -1)
- System.out.write(buf, 0, size);
-
- } finally {
- // Close FileInputStream (directly or indirectly
- // via XZInputStream, it doesn't matter).
- in.close();
- }
- }
- }
- } catch (FileNotFoundException e) {
- System.err.println("XZDecDemo: Cannot open " + name + ": "
- + e.getMessage());
- System.exit(1);
-
- } catch (EOFException e) {
- System.err.println("XZDecDemo: Unexpected end of input on "
- + name);
- System.exit(1);
-
- } catch (IOException e) {
- System.err.println("XZDecDemo: Error decompressing from "
- + name + ": " + e.getMessage());
- System.exit(1);
- }
- }
-}
diff --git a/Java/Tukaani/src/XZEncDemo.java b/Java/Tukaani/src/XZEncDemo.java
deleted file mode 100644
index e9ae38a..0000000
--- a/Java/Tukaani/src/XZEncDemo.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * XZEncDemo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-import java.io.*;
-import org.tukaani.xz.*;
-
-/**
- * Compresses a single file from standard input to standard output into
- * the .xz file format.
- * <p>
- * One optional argument is supported: LZMA2 preset level which is an integer
- * in the range [0, 9]. The default is 6.
- */
-class XZEncDemo {
- public static void main(String[] args) throws Exception {
- LZMA2Options options = new LZMA2Options();
-
- if (args.length >= 1)
- options.setPreset(Integer.parseInt(args[0]));
-
- System.err.println("Encoder memory usage: "
- + options.getEncoderMemoryUsage() + " KiB");
- System.err.println("Decoder memory usage: "
- + options.getDecoderMemoryUsage() + " KiB");
-
- XZOutputStream out = new XZOutputStream(System.out, options);
-
- byte[] buf = new byte[8192];
- int size;
- while ((size = System.in.read(buf)) != -1)
- out.write(buf, 0, size);
-
- out.finish();
- }
-}
diff --git a/Java/Tukaani/src/XZSeekDecDemo.java b/Java/Tukaani/src/XZSeekDecDemo.java
deleted file mode 100644
index 5c54a87..0000000
--- a/Java/Tukaani/src/XZSeekDecDemo.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * XZSeekDecDemo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-import java.io.*;
-import org.tukaani.xz.*;
-
-/**
- * Decompresses a .xz file in random access mode to standard output.
- * <p>
- * Arguments: filename [offset1 length1] [offset2 length2] ...
- * <p>
- * If only the filename is given, the whole file is decompressed. The only
- * difference from XZDecDemo is that this will still use the random access code.
- * <p>
- * If one or more offset-length pairs are given, then for each pair,
- * <code>length</code> bytes are decompressed starting from
- * <code>offset</code>.
- */
-class XZSeekDecDemo {
- public static void main(String[] args) throws Exception {
- SeekableFileInputStream file = new SeekableFileInputStream(args[0]);
- SeekableXZInputStream in = new SeekableXZInputStream(file);
-
- System.err.println("Number of XZ Streams: " + in.getStreamCount());
- System.err.println("Number of XZ Blocks: " + in.getBlockCount());
-
- System.err.println("Uncompressed size: " + in.length() + " B");
-
- System.err.println("Largest XZ Block size: "
- + in.getLargestBlockSize() + " B");
-
- System.err.print("List of Check IDs:");
- int checkTypes = in.getCheckTypes();
- for (int i = 0; i < 16; ++i)
- if ((checkTypes & (1 << i)) != 0)
- System.err.print(" " + i);
- System.err.println();
-
- System.err.println("Index memory usage: "
- + in.getIndexMemoryUsage() + " KiB");
-
- byte[] buf = new byte[8192];
- if (args.length == 1) {
- int size;
- while ((size = in.read(buf)) != -1)
- System.out.write(buf, 0, size);
- } else {
- for (int i = 1; i < args.length; i += 2) {
- int pos = Integer.parseInt(args[i]);
- int len = Integer.parseInt(args[i + 1]);
-
- in.seek(pos);
-
- while (len > 0) {
- int size = Math.min(len, buf.length);
- size = in.read(buf, 0, size);
-
- if (size == -1) {
- System.err.println("Error: End of file reached");
- System.exit(1);
- }
-
- System.out.write(buf, 0, size);
- len -= size;
- }
- }
- }
- }
-}
diff --git a/Java/Tukaani/src/XZSeekEncDemo.java b/Java/Tukaani/src/XZSeekEncDemo.java
deleted file mode 100644
index 157e788..0000000
--- a/Java/Tukaani/src/XZSeekEncDemo.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * XZSeekEncDemo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-import java.io.*;
-import org.tukaani.xz.*;
-
-/**
- * Compresses a single file from standard input to standard output into
- * a random-accessible .xz file.
- * <p>
- * Arguments: [preset [block size]]
- * <p>
- * Preset is an LZMA2 preset level which is an integer in the range [0, 9].
- * The default is 6.
- * <p>
- * Block size specifies the amount of uncompressed data to store per
- * XZ Block. The default is 1 MiB (1048576 bytes). Bigger means better
- * compression ratio. Smaller means faster random access.
- */
-class XZSeekEncDemo {
- public static void main(String[] args) throws Exception {
- LZMA2Options options = new LZMA2Options();
-
- if (args.length >= 1)
- options.setPreset(Integer.parseInt(args[0]));
-
- int blockSize = 1024 * 1024;
- if (args.length >= 2)
- blockSize = Integer.parseInt(args[1]);
-
- options.setDictSize(Math.min(options.getDictSize(),
- Math.max(LZMA2Options.DICT_SIZE_MIN,
- blockSize)));
-
- System.err.println("Encoder memory usage: "
- + options.getEncoderMemoryUsage() + " KiB");
- System.err.println("Decoder memory usage: "
- + options.getDecoderMemoryUsage() + " KiB");
- System.err.println("Block size: " + blockSize + " B");
-
- XZOutputStream out = new XZOutputStream(System.out, options);
-
- byte[] buf = new byte[8192];
- int left = blockSize;
-
- while (true) {
- int size = System.in.read(buf, 0, Math.min(buf.length, left));
- if (size == -1)
- break;
-
- out.write(buf, 0, size);
- left -= size;
-
- if (left == 0) {
- out.endBlock();
- left = blockSize;
- }
- }
-
- out.finish();
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/ARMOptions.java b/Java/Tukaani/src/org/tukaani/xz/ARMOptions.java
deleted file mode 100644
index 9577101..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/ARMOptions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * ARMOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.ARM;
-
-/**
- * BCJ filter for little endian ARM instructions.
- */
-public class ARMOptions extends BCJOptions {
- private static final int ALIGNMENT = 4;
-
- public ARMOptions() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new ARM(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new ARM(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.ARM_FILTER_ID);
- }
-}
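A BCJ options class such as ARMOptions is meant to be a non-last filter (the BCJ coders removed further below report lastOK() as false), so it is normally chained in front of LZMA2 when writing a .xz file. The following is only a minimal sketch of that chaining; it assumes the XZOutputStream constructor that accepts a FilterOptions[] array, which is not shown in this diff, and the class name ArmXzSketch and the output file name are made up for illustration.

    import java.io.FileOutputStream;
    import org.tukaani.xz.ARMOptions;
    import org.tukaani.xz.FilterOptions;
    import org.tukaani.xz.LZMA2Options;
    import org.tukaani.xz.XZOutputStream;

    class ArmXzSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical example: compress ARM code from stdin into arm.xz.
            // The BCJ filter only rewrites branch addresses; LZMA2 does the
            // actual compression, so it must be the last filter in the chain.
            LZMA2Options lzma2 = new LZMA2Options();
            lzma2.setPreset(6);
            FilterOptions[] chain = { new ARMOptions(), lzma2 };

            XZOutputStream out = new XZOutputStream(
                    new FileOutputStream("arm.xz"), chain);

            byte[] buf = new byte[8192];
            int size;
            while ((size = System.in.read(buf)) != -1)
                out.write(buf, 0, size);

            out.finish();
            out.close();
        }
    }
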
diff --git a/Java/Tukaani/src/org/tukaani/xz/ARMThumbOptions.java b/Java/Tukaani/src/org/tukaani/xz/ARMThumbOptions.java
deleted file mode 100644
index 60eb6ec..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/ARMThumbOptions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * ARMThumbOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.ARMThumb;
-
-/**
- * BCJ filter for little endian ARM-Thumb instructions.
- */
-public class ARMThumbOptions extends BCJOptions {
- private static final int ALIGNMENT = 2;
-
- public ARMThumbOptions() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new ARMThumb(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new ARMThumb(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.ARMTHUMB_FILTER_ID);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BCJCoder.java b/Java/Tukaani/src/org/tukaani/xz/BCJCoder.java
deleted file mode 100644
index 81862f7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BCJCoder.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * BCJCoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-abstract class BCJCoder implements FilterCoder {
- public static final long X86_FILTER_ID = 0x04;
- public static final long POWERPC_FILTER_ID = 0x05;
- public static final long IA64_FILTER_ID = 0x06;
- public static final long ARM_FILTER_ID = 0x07;
- public static final long ARMTHUMB_FILTER_ID = 0x08;
- public static final long SPARC_FILTER_ID = 0x09;
-
- public static boolean isBCJFilterID(long filterID) {
- return filterID >= 0x04 && filterID <= 0x09;
- }
-
- public boolean changesSize() {
- return false;
- }
-
- public boolean nonLastOK() {
- return true;
- }
-
- public boolean lastOK() {
- return false;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BCJDecoder.java b/Java/Tukaani/src/org/tukaani/xz/BCJDecoder.java
deleted file mode 100644
index f8a6ae2..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BCJDecoder.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * BCJDecoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.*;
-
-class BCJDecoder extends BCJCoder implements FilterDecoder {
- private final long filterID;
- private final int startOffset;
-
- BCJDecoder(long filterID, byte[] props)
- throws UnsupportedOptionsException {
- assert isBCJFilterID(filterID);
- this.filterID = filterID;
-
- if (props.length == 0) {
- startOffset = 0;
- } else if (props.length == 4) {
- int n = 0;
- for (int i = 0; i < 4; ++i)
- n |= (props[i] & 0xFF) << (i * 8);
-
- startOffset = n;
- } else {
- throw new UnsupportedOptionsException(
- "Unsupported BCJ filter properties");
- }
- }
-
- public int getMemoryUsage() {
- return SimpleInputStream.getMemoryUsage();
- }
-
- public InputStream getInputStream(InputStream in) {
- SimpleFilter simpleFilter = null;
-
- if (filterID == X86_FILTER_ID)
- simpleFilter = new X86(false, startOffset);
- else if (filterID == POWERPC_FILTER_ID)
- simpleFilter = new PowerPC(false, startOffset);
- else if (filterID == IA64_FILTER_ID)
- simpleFilter = new IA64(false, startOffset);
- else if (filterID == ARM_FILTER_ID)
- simpleFilter = new ARM(false, startOffset);
- else if (filterID == ARMTHUMB_FILTER_ID)
- simpleFilter = new ARMThumb(false, startOffset);
- else if (filterID == SPARC_FILTER_ID)
- simpleFilter = new SPARC(false, startOffset);
- else
- assert false;
-
- return new SimpleInputStream(in, simpleFilter);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BCJEncoder.java b/Java/Tukaani/src/org/tukaani/xz/BCJEncoder.java
deleted file mode 100644
index 136bbb7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BCJEncoder.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * BCJEncoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-class BCJEncoder extends BCJCoder implements FilterEncoder {
- private final BCJOptions options;
- private final long filterID;
- private final byte[] props;
-
- BCJEncoder(BCJOptions options, long filterID) {
- assert isBCJFilterID(filterID);
- int startOffset = options.getStartOffset();
-
- if (startOffset == 0) {
- props = new byte[0];
- } else {
- props = new byte[4];
- for (int i = 0; i < 4; ++i)
- props[i] = (byte)(startOffset >>> (i * 8));
- }
-
- this.filterID = filterID;
- this.options = (BCJOptions)options.clone();
- }
-
- public long getFilterID() {
- return filterID;
- }
-
- public byte[] getFilterProps() {
- return props;
- }
-
- public boolean supportsFlushing() {
- return false;
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return options.getOutputStream(out);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BCJOptions.java b/Java/Tukaani/src/org/tukaani/xz/BCJOptions.java
deleted file mode 100644
index 705a2c0..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BCJOptions.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * BCJOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-abstract class BCJOptions extends FilterOptions {
- private final int alignment;
- int startOffset = 0;
-
- BCJOptions(int alignment) {
- this.alignment = alignment;
- }
-
- /**
- * Sets the start offset for the address conversions.
- * Normally this is useless so you shouldn't use this function.
- * The default value is <code>0</code>.
- */
- public void setStartOffset(int startOffset)
- throws UnsupportedOptionsException {
- if ((startOffset & (alignment - 1)) != 0)
- throw new UnsupportedOptionsException(
- "Start offset must be a multiple of " + alignment);
-
- this.startOffset = startOffset;
- }
-
- /**
- * Gets the start offset.
- */
- public int getStartOffset() {
- return startOffset;
- }
-
- public int getEncoderMemoryUsage() {
- return SimpleOutputStream.getMemoryUsage();
- }
-
- public int getDecoderMemoryUsage() {
- return SimpleInputStream.getMemoryUsage();
- }
-
- public Object clone() {
- try {
- return super.clone();
- } catch (CloneNotSupportedException e) {
- assert false;
- throw new RuntimeException();
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BlockInputStream.java b/Java/Tukaani/src/org/tukaani/xz/BlockInputStream.java
deleted file mode 100644
index d1e72af..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BlockInputStream.java
+++ /dev/null
@@ -1,284 +0,0 @@
-/*
- * BlockInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.util.Arrays;
-import org.tukaani.xz.common.DecoderUtil;
-import org.tukaani.xz.check.Check;
-
-class BlockInputStream extends InputStream {
- private final DataInputStream inData;
- private final CountingInputStream inCounted;
- private InputStream filterChain;
- private final Check check;
- private final boolean verifyCheck;
-
- private long uncompressedSizeInHeader = -1;
- private long compressedSizeInHeader = -1;
- private long compressedSizeLimit;
- private final int headerSize;
- private long uncompressedSize = 0;
- private boolean endReached = false;
-
- private final byte[] tempBuf = new byte[1];
-
- public BlockInputStream(InputStream in,
- Check check, boolean verifyCheck,
- int memoryLimit,
- long unpaddedSizeInIndex,
- long uncompressedSizeInIndex)
- throws IOException, IndexIndicatorException {
- this.check = check;
- this.verifyCheck = verifyCheck;
- inData = new DataInputStream(in);
-
- byte[] buf = new byte[DecoderUtil.BLOCK_HEADER_SIZE_MAX];
-
- // Block Header Size or Index Indicator
- inData.readFully(buf, 0, 1);
-
- // See if this begins the Index field.
- if (buf[0] == 0x00)
- throw new IndexIndicatorException();
-
- // Read the rest of the Block Header.
- headerSize = 4 * ((buf[0] & 0xFF) + 1);
- inData.readFully(buf, 1, headerSize - 1);
-
- // Validate the CRC32.
- if (!DecoderUtil.isCRC32Valid(buf, 0, headerSize - 4, headerSize - 4))
- throw new CorruptedInputException("XZ Block Header is corrupt");
-
- // Check for reserved bits in Block Flags.
- if ((buf[1] & 0x3C) != 0)
- throw new UnsupportedOptionsException(
- "Unsupported options in XZ Block Header");
-
- // Memory for the Filter Flags field
- int filterCount = (buf[1] & 0x03) + 1;
- long[] filterIDs = new long[filterCount];
- byte[][] filterProps = new byte[filterCount][];
-
- // Use a stream to parse the fields after the Block Flags field.
- // Exclude the CRC32 field at the end.
- ByteArrayInputStream bufStream = new ByteArrayInputStream(
- buf, 2, headerSize - 6);
-
- try {
- // Set the maximum valid compressed size. This is overridden
- // by the value from the Compressed Size field if it is present.
- compressedSizeLimit = (DecoderUtil.VLI_MAX & ~3)
- - headerSize - check.getSize();
-
- // Decode and validate Compressed Size if the relevant flag
- // is set in Block Flags.
- if ((buf[1] & 0x40) != 0x00) {
- compressedSizeInHeader = DecoderUtil.decodeVLI(bufStream);
-
- if (compressedSizeInHeader == 0
- || compressedSizeInHeader > compressedSizeLimit)
- throw new CorruptedInputException();
-
- compressedSizeLimit = compressedSizeInHeader;
- }
-
- // Decode Uncompressed Size if the relevant flag is set
- // in Block Flags.
- if ((buf[1] & 0x80) != 0x00)
- uncompressedSizeInHeader = DecoderUtil.decodeVLI(bufStream);
-
- // Decode Filter Flags.
- for (int i = 0; i < filterCount; ++i) {
- filterIDs[i] = DecoderUtil.decodeVLI(bufStream);
-
- long filterPropsSize = DecoderUtil.decodeVLI(bufStream);
- if (filterPropsSize > bufStream.available())
- throw new CorruptedInputException();
-
- filterProps[i] = new byte[(int)filterPropsSize];
- bufStream.read(filterProps[i]);
- }
-
- } catch (IOException e) {
- throw new CorruptedInputException("XZ Block Header is corrupt");
- }
-
- // Check that the remaining bytes are zero.
- for (int i = bufStream.available(); i > 0; --i)
- if (bufStream.read() != 0x00)
- throw new UnsupportedOptionsException(
- "Unsupported options in XZ Block Header");
-
- // Validate the Block Header against the Index when doing
- // random access reading.
- if (unpaddedSizeInIndex != -1) {
- // Compressed Data must be at least one byte, so if Block Header
- // and Check alone take as much or more space than the size
- // stored in the Index, the file is corrupt.
- int headerAndCheckSize = headerSize + check.getSize();
- if (headerAndCheckSize >= unpaddedSizeInIndex)
- throw new CorruptedInputException(
- "XZ Index does not match a Block Header");
-
- // The compressed size calculated from Unpadded Size must
- // match the value stored in the Compressed Size field in
- // the Block Header.
- long compressedSizeFromIndex
- = unpaddedSizeInIndex - headerAndCheckSize;
- if (compressedSizeFromIndex > compressedSizeLimit
- || (compressedSizeInHeader != -1
- && compressedSizeInHeader != compressedSizeFromIndex))
- throw new CorruptedInputException(
- "XZ Index does not match a Block Header");
-
- // The uncompressed size stored in the Index must match
- // the value stored in the Uncompressed Size field in
- // the Block Header.
- if (uncompressedSizeInHeader != -1
- && uncompressedSizeInHeader != uncompressedSizeInIndex)
- throw new CorruptedInputException(
- "XZ Index does not match a Block Header");
-
- // For further validation, pretend that the values from the Index
- // were stored in the Block Header.
- compressedSizeLimit = compressedSizeFromIndex;
- compressedSizeInHeader = compressedSizeFromIndex;
- uncompressedSizeInHeader = uncompressedSizeInIndex;
- }
-
- // Check if the Filter IDs are supported, decode
- // the Filter Properties, and check that they are
- // supported by this decoder implementation.
- FilterDecoder[] filters = new FilterDecoder[filterIDs.length];
-
- for (int i = 0; i < filters.length; ++i) {
- if (filterIDs[i] == LZMA2Coder.FILTER_ID)
- filters[i] = new LZMA2Decoder(filterProps[i]);
-
- else if (filterIDs[i] == DeltaCoder.FILTER_ID)
- filters[i] = new DeltaDecoder(filterProps[i]);
-
- else if (BCJDecoder.isBCJFilterID(filterIDs[i]))
- filters[i] = new BCJDecoder(filterIDs[i], filterProps[i]);
-
- else
- throw new UnsupportedOptionsException(
- "Unknown Filter ID " + filterIDs[i]);
- }
-
- RawCoder.validate(filters);
-
- // Check the memory usage limit.
- if (memoryLimit >= 0) {
- int memoryNeeded = 0;
- for (int i = 0; i < filters.length; ++i)
- memoryNeeded += filters[i].getMemoryUsage();
-
- if (memoryNeeded > memoryLimit)
- throw new MemoryLimitException(memoryNeeded, memoryLimit);
- }
-
- // Use an input size counter to calculate
- // the size of the Compressed Data field.
- inCounted = new CountingInputStream(in);
-
- // Initialize the filter chain.
- filterChain = inCounted;
- for (int i = filters.length - 1; i >= 0; --i)
- filterChain = filters[i].getInputStream(filterChain);
- }
-
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- public int read(byte[] buf, int off, int len) throws IOException {
- if (endReached)
- return -1;
-
- int ret = filterChain.read(buf, off, len);
-
- if (ret > 0) {
- if (verifyCheck)
- check.update(buf, off, ret);
-
- uncompressedSize += ret;
-
- // Catch invalid values.
- long compressedSize = inCounted.getSize();
- if (compressedSize < 0
- || compressedSize > compressedSizeLimit
- || uncompressedSize < 0
- || (uncompressedSizeInHeader != -1
- && uncompressedSize > uncompressedSizeInHeader))
- throw new CorruptedInputException();
-
- // Check the Block integrity as soon as possible:
- // - The filter chain shouldn't return less than requested
- // unless it hit the end of the input.
- // - If the uncompressed size is known, we know when there
- // shouldn't be more data coming. We still need to read
- // one byte to let the filter chain catch errors and to
- // let it read end of payload marker(s).
- if (ret < len || uncompressedSize == uncompressedSizeInHeader) {
- if (filterChain.read() != -1)
- throw new CorruptedInputException();
-
- validate();
- endReached = true;
- }
- } else if (ret == -1) {
- validate();
- endReached = true;
- }
-
- return ret;
- }
-
- private void validate() throws IOException {
- long compressedSize = inCounted.getSize();
-
- // Validate Compressed Size and Uncompressed Size if they were
- // present in Block Header.
- if ((compressedSizeInHeader != -1
- && compressedSizeInHeader != compressedSize)
- || (uncompressedSizeInHeader != -1
- && uncompressedSizeInHeader != uncompressedSize))
- throw new CorruptedInputException();
-
- // Block Padding bytes must be zeros.
- while ((compressedSize++ & 3) != 0)
- if (inData.readUnsignedByte() != 0x00)
- throw new CorruptedInputException();
-
- // Validate the integrity check if verifyCheck is true.
- byte[] storedCheck = new byte[check.getSize()];
- inData.readFully(storedCheck);
- if (verifyCheck && !Arrays.equals(check.finish(), storedCheck))
- throw new CorruptedInputException("Integrity check ("
- + check.getName() + ") does not match");
- }
-
- public int available() throws IOException {
- return filterChain.available();
- }
-
- public long getUnpaddedSize() {
- return headerSize + inCounted.getSize() + check.getSize();
- }
-
- public long getUncompressedSize() {
- return uncompressedSize;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/BlockOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/BlockOutputStream.java
deleted file mode 100644
index 03fd0a9..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/BlockOutputStream.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * BlockOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.OutputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import org.tukaani.xz.common.EncoderUtil;
-import org.tukaani.xz.check.Check;
-
-class BlockOutputStream extends FinishableOutputStream {
- private final OutputStream out;
- private final CountingOutputStream outCounted;
- private FinishableOutputStream filterChain;
- private final Check check;
-
- private final int headerSize;
- private final long compressedSizeLimit;
- private long uncompressedSize = 0;
-
- private final byte[] tempBuf = new byte[1];
-
- public BlockOutputStream(OutputStream out, FilterEncoder[] filters,
- Check check) throws IOException {
- this.out = out;
- this.check = check;
-
- // Initialize the filter chain.
- outCounted = new CountingOutputStream(out);
- filterChain = outCounted;
- for (int i = filters.length - 1; i >= 0; --i)
- filterChain = filters[i].getOutputStream(filterChain);
-
- // Prepare to encode the Block Header field.
- ByteArrayOutputStream bufStream = new ByteArrayOutputStream();
-
- // Write a dummy Block Header Size field. The real value is written
- // once everything else except CRC32 has been written.
- bufStream.write(0x00);
-
- // Write Block Flags. Storing Compressed Size or Uncompressed Size
- // isn't supported for now.
- bufStream.write(filters.length - 1);
-
- // List of Filter Flags
- for (int i = 0; i < filters.length; ++i) {
- EncoderUtil.encodeVLI(bufStream, filters[i].getFilterID());
- byte[] filterProps = filters[i].getFilterProps();
- EncoderUtil.encodeVLI(bufStream, filterProps.length);
- bufStream.write(filterProps);
- }
-
- // Header Padding
- while ((bufStream.size() & 3) != 0)
- bufStream.write(0x00);
-
- byte[] buf = bufStream.toByteArray();
-
- // Total size of the Block Header: Take the size of the CRC32 field
- // into account.
- headerSize = buf.length + 4;
-
- // This is just a sanity check.
- if (headerSize > EncoderUtil.BLOCK_HEADER_SIZE_MAX)
- throw new UnsupportedOptionsException();
-
- // Block Header Size
- buf[0] = (byte)(buf.length / 4);
-
- // Write the Block Header field to the output stream.
- out.write(buf);
- EncoderUtil.writeCRC32(out, buf);
-
- // Calculate the maximum allowed size of the Compressed Data field.
- // It is hard to exceed it so this is mostly to be pedantic.
- compressedSizeLimit = (EncoderUtil.VLI_MAX & ~3)
- - headerSize - check.getSize();
- }
-
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- public void write(byte[] buf, int off, int len) throws IOException {
- filterChain.write(buf, off, len);
- check.update(buf, off, len);
- uncompressedSize += len;
- validate();
- }
-
- public void flush() throws IOException {
- filterChain.flush();
- validate();
- }
-
- public void finish() throws IOException {
- // Finish the Compressed Data field.
- filterChain.finish();
- validate();
-
- // Block Padding
- for (long i = outCounted.getSize(); (i & 3) != 0; ++i)
- out.write(0x00);
-
- // Check
- out.write(check.finish());
- }
-
- private void validate() throws IOException {
- long compressedSize = outCounted.getSize();
-
- // It is very hard to trigger this exception.
- // This is just to be pedantic.
- if (compressedSize < 0 || compressedSize > compressedSizeLimit
- || uncompressedSize < 0)
- throw new XZIOException("XZ Stream has grown too big");
- }
-
- public long getUnpaddedSize() {
- return headerSize + outCounted.getSize() + check.getSize();
- }
-
- public long getUncompressedSize() {
- return uncompressedSize;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/CorruptedInputException.java b/Java/Tukaani/src/org/tukaani/xz/CorruptedInputException.java
deleted file mode 100644
index d7d9520..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/CorruptedInputException.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * CorruptedInputException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * Thrown when the compressed input data is corrupt.
- * However, it is possible that some or all of the data
- * already read from the input stream was corrupt too.
- */
-public class CorruptedInputException extends XZIOException {
- private static final long serialVersionUID = 3L;
-
- /**
- * Creates a new CorruptedInputException with
- * the default error detail message.
- */
- public CorruptedInputException() {
- super("Compressed data is corrupt");
- }
-
- /**
- * Creates a new CorruptedInputException with
- * the specified error detail message.
- *
- * @param s error detail message
- */
- public CorruptedInputException(String s) {
- super(s);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/CountingInputStream.java b/Java/Tukaani/src/org/tukaani/xz/CountingInputStream.java
deleted file mode 100644
index ce0935a..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/CountingInputStream.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * CountingInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.FilterInputStream;
-import java.io.InputStream;
-import java.io.IOException;
-
-/**
- * Counts the number of bytes read from an input stream.
- */
-class CountingInputStream extends FilterInputStream {
- private long size = 0;
-
- public CountingInputStream(InputStream in) {
- super(in);
- }
-
- public int read() throws IOException {
- int ret = in.read();
- if (ret != -1 && size >= 0)
- ++size;
-
- return ret;
- }
-
- public int read(byte[] b, int off, int len) throws IOException {
- int ret = in.read(b, off, len);
- if (ret > 0 && size >= 0)
- size += ret;
-
- return ret;
- }
-
- public long getSize() {
- return size;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/CountingOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/CountingOutputStream.java
deleted file mode 100644
index 9b3eef3..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/CountingOutputStream.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * CountingOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.OutputStream;
-import java.io.IOException;
-
-/**
- * Counts the number of bytes written to an output stream.
- * <p>
- * The <code>finish</code> method does nothing.
- * This is <code>FinishableOutputStream</code> instead
- * of <code>OutputStream</code> solely because it allows
- * using this as the output stream for a chain of raw filters.
- */
-class CountingOutputStream extends FinishableOutputStream {
- private final OutputStream out;
- private long size = 0;
-
- public CountingOutputStream(OutputStream out) {
- this.out = out;
- }
-
- public void write(int b) throws IOException {
- out.write(b);
- if (size >= 0)
- ++size;
- }
-
- public void write(byte[] b, int off, int len) throws IOException {
- out.write(b, off, len);
- if (size >= 0)
- size += len;
- }
-
- public void flush() throws IOException {
- out.flush();
- }
-
- public void close() throws IOException {
- out.close();
- }
-
- public long getSize() {
- return size;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaCoder.java b/Java/Tukaani/src/org/tukaani/xz/DeltaCoder.java
deleted file mode 100644
index 808834c..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaCoder.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * DeltaCoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-abstract class DeltaCoder implements FilterCoder {
- public static final long FILTER_ID = 0x03;
-
- public boolean changesSize() {
- return false;
- }
-
- public boolean nonLastOK() {
- return true;
- }
-
- public boolean lastOK() {
- return false;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaDecoder.java b/Java/Tukaani/src/org/tukaani/xz/DeltaDecoder.java
deleted file mode 100644
index 445d178..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaDecoder.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * DeltaDecoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-
-class DeltaDecoder extends DeltaCoder implements FilterDecoder {
- private final int distance;
-
- DeltaDecoder(byte[] props) throws UnsupportedOptionsException {
- if (props.length != 1)
- throw new UnsupportedOptionsException(
- "Unsupported Delta filter properties");
-
- distance = (props[0] & 0xFF) + 1;
- }
-
- public int getMemoryUsage() {
- return 1;
- }
-
- public InputStream getInputStream(InputStream in) {
- return new DeltaInputStream(in, distance);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaEncoder.java b/Java/Tukaani/src/org/tukaani/xz/DeltaEncoder.java
deleted file mode 100644
index 384afe4..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaEncoder.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * DeltaEncoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-class DeltaEncoder extends DeltaCoder implements FilterEncoder {
- private final DeltaOptions options;
- private final byte[] props = new byte[1];
-
- DeltaEncoder(DeltaOptions options) {
- props[0] = (byte)(options.getDistance() - 1);
- this.options = (DeltaOptions)options.clone();
- }
-
- public long getFilterID() {
- return FILTER_ID;
- }
-
- public byte[] getFilterProps() {
- return props;
- }
-
- public boolean supportsFlushing() {
- return true;
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return options.getOutputStream(out);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaInputStream.java b/Java/Tukaani/src/org/tukaani/xz/DeltaInputStream.java
deleted file mode 100644
index 56478f5..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaInputStream.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * DeltaInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.IOException;
-import org.tukaani.xz.delta.DeltaDecoder;
-
-/**
- * Decodes raw Delta-filtered data (no XZ headers).
- * <p>
- * The delta filter doesn't change the size of the data and thus it
- * cannot have an end-of-payload marker. It will simply decode until
- * its input stream indicates end of input.
- */
-public class DeltaInputStream extends InputStream {
- /**
- * Smallest supported delta calculation distance.
- */
- public static final int DISTANCE_MIN = 1;
-
- /**
- * Largest supported delta calculation distance.
- */
- public static final int DISTANCE_MAX = 256;
-
- private InputStream in;
- private final DeltaDecoder delta;
-
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Creates a new Delta decoder with the given delta calculation distance.
- *
- * @param in input stream from which Delta filtered data
- * is read
- *
- * @param distance delta calculation distance, must be in the
- * range [<code>DISTANCE_MIN</code>,
- * <code>DISTANCE_MAX</code>]
- */
- public DeltaInputStream(InputStream in, int distance) {
- // Check for null because otherwise null isn't detected
- // in this constructor.
- if (in == null)
- throw new NullPointerException();
-
- this.in = in;
- this.delta = new DeltaDecoder(distance);
- }
-
- /**
- * Decode the next byte from this input stream.
- *
- * @return the next decoded byte, or <code>-1</code> to indicate
- * the end of input on the input stream <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decode into an array of bytes.
- * <p>
- * This calls <code>in.read(buf, off, len)</code> and defilters the
- * returned data.
- *
- * @param buf target buffer for decoded data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the input stream <code>in</code>
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws IOException may be thrown by underlying input
- * stream <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- int size;
- try {
- size = in.read(buf, off, len);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- if (size == -1)
- return -1;
-
- delta.decode(buf, off, size);
- return size;
- }
-
- /**
- * Calls <code>in.available()</code>.
- *
- * @return the value returned by <code>in.available()</code>
- */
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- return in.available();
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
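Since DeltaInputStream decodes a raw stream with no headers, the caller has to supply the same delta distance that was used for encoding. A minimal sketch; the file name delta.raw and the distance 4 are hypothetical.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.tukaani.xz.DeltaInputStream;

    class RawDeltaDecodeSketch {
        public static void main(String[] args) throws Exception {
            // The distance (4 here) is an assumption; a raw Delta stream
            // carries no header from which it could be read back.
            InputStream in = new DeltaInputStream(
                    new FileInputStream("delta.raw"), 4);

            byte[] buf = new byte[8192];
            int size;
            while ((size = in.read(buf)) != -1)
                System.out.write(buf, 0, size);

            in.close();
            System.out.flush();
        }
    }
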
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaOptions.java b/Java/Tukaani/src/org/tukaani/xz/DeltaOptions.java
deleted file mode 100644
index 145130b..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaOptions.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * DeltaOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-
-/**
- * Delta filter options. The Delta filter can be used only as a non-last
- * filter in the chain, for example Delta + LZMA2.
- * <p>
- * Currently only simple byte-wise delta is supported. The only option
- * is the delta distance, which you should set to match your data.
- * It's not possible to provide a generic default value for it.
- * <p>
- * For example, with distance = 2 and eight-byte input
- * A1 B1 A2 B3 A3 B5 A4 B7, the output will be A1 B1 01 02 01 02 01 02.
- * <p>
- * The Delta filter can be good with uncompressed bitmap images. It can
- * also help with PCM audio, although special-purpose compressors like
- * FLAC will give a much smaller result at much better compression speed.
- */
-public class DeltaOptions extends FilterOptions {
- /**
- * Smallest supported delta calculation distance.
- */
- public static final int DISTANCE_MIN = 1;
-
- /**
- * Largest supported delta calculation distance.
- */
- public static final int DISTANCE_MAX = 256;
-
- private int distance = DISTANCE_MIN;
-
- /**
- * Creates new Delta options and sets the delta distance to 1 byte.
- */
- public DeltaOptions() {}
-
- /**
- * Creates new Delta options and sets the distance to the given value.
- */
- public DeltaOptions(int distance) throws UnsupportedOptionsException {
- setDistance(distance);
- }
-
- /**
- * Sets the delta distance in bytes. The new distance must be in
- * the range [DISTANCE_MIN, DISTANCE_MAX].
- */
- public void setDistance(int distance) throws UnsupportedOptionsException {
- if (distance < DISTANCE_MIN || distance > DISTANCE_MAX)
- throw new UnsupportedOptionsException(
- "Delta distance must be in the range [" + DISTANCE_MIN
- + ", " + DISTANCE_MAX + "]: " + distance);
-
- this.distance = distance;
- }
-
- /**
- * Gets the delta distance.
- */
- public int getDistance() {
- return distance;
- }
-
- public int getEncoderMemoryUsage() {
- return DeltaOutputStream.getMemoryUsage();
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new DeltaOutputStream(out, this);
- }
-
- public int getDecoderMemoryUsage() {
- return 1;
- }
-
- public InputStream getInputStream(InputStream in) {
- return new DeltaInputStream(in, distance);
- }
-
- FilterEncoder getFilterEncoder() {
- return new DeltaEncoder(this);
- }
-
- public Object clone() {
- try {
- return super.clone();
- } catch (CloneNotSupportedException e) {
- assert false;
- throw new RuntimeException();
- }
- }
-}
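The eight-byte example in the DeltaOptions javadoc can be reproduced with the raw encoder these options expose. This sketch uses the FinishableWrapperOutputStream removed further below to adapt a plain ByteArrayOutputStream, and should print A1 B1 01 02 01 02 01 02 if the distance-2 encoding behaves as described above.

    import java.io.ByteArrayOutputStream;
    import org.tukaani.xz.DeltaOptions;
    import org.tukaani.xz.FinishableOutputStream;
    import org.tukaani.xz.FinishableWrapperOutputStream;

    class DeltaWorkedExample {
        public static void main(String[] args) throws Exception {
            byte[] input = { (byte)0xA1, (byte)0xB1, (byte)0xA2, (byte)0xB3,
                             (byte)0xA3, (byte)0xB5, (byte)0xA4, (byte)0xB7 };

            // Raw (headerless) Delta encoding with distance 2.
            ByteArrayOutputStream raw = new ByteArrayOutputStream();
            FinishableOutputStream enc = new DeltaOptions(2).getOutputStream(
                    new FinishableWrapperOutputStream(raw));
            enc.write(input);
            enc.finish();

            // The first two bytes pass through unchanged; from then on each
            // byte is the difference to the byte two positions earlier.
            for (byte b : raw.toByteArray())
                System.out.printf("%02X ", b & 0xFF);
            System.out.println();
        }
    }
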
diff --git a/Java/Tukaani/src/org/tukaani/xz/DeltaOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/DeltaOutputStream.java
deleted file mode 100644
index bd880db..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/DeltaOutputStream.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * DeltaOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.IOException;
-import org.tukaani.xz.delta.DeltaEncoder;
-
-class DeltaOutputStream extends FinishableOutputStream {
- private static final int FILTER_BUF_SIZE = 4096;
-
- private FinishableOutputStream out;
- private final DeltaEncoder delta;
- private final byte[] filterBuf = new byte[FILTER_BUF_SIZE];
-
- private boolean finished = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- static int getMemoryUsage() {
- return 1 + FILTER_BUF_SIZE / 1024;
- }
-
- DeltaOutputStream(FinishableOutputStream out, DeltaOptions options) {
- this.out = out;
- delta = new DeltaEncoder(options.getDistance());
- }
-
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- public void write(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished");
-
- try {
- while (len > FILTER_BUF_SIZE) {
- delta.encode(buf, off, FILTER_BUF_SIZE, filterBuf);
- out.write(filterBuf);
- off += FILTER_BUF_SIZE;
- len -= FILTER_BUF_SIZE;
- }
-
- delta.encode(buf, off, len, filterBuf);
- out.write(filterBuf, 0, len);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public void flush() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- out.flush();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public void finish() throws IOException {
- if (!finished) {
- if (exception != null)
- throw exception;
-
- try {
- out.finish();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- finished = true;
- }
- }
-
- public void close() throws IOException {
- if (out != null) {
- try {
- out.close();
- } catch (IOException e) {
- if (exception == null)
- exception = e;
- }
-
- out = null;
- }
-
- if (exception != null)
- throw exception;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/FilterCoder.java b/Java/Tukaani/src/org/tukaani/xz/FilterCoder.java
deleted file mode 100644
index 1e95e37..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FilterCoder.java
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * FilterCoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-interface FilterCoder {
- boolean changesSize();
- boolean nonLastOK();
- boolean lastOK();
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/FilterDecoder.java b/Java/Tukaani/src/org/tukaani/xz/FilterDecoder.java
deleted file mode 100644
index 8e2d006..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FilterDecoder.java
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * FilterDecoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-
-interface FilterDecoder extends FilterCoder {
- int getMemoryUsage();
- InputStream getInputStream(InputStream in);
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/FilterEncoder.java b/Java/Tukaani/src/org/tukaani/xz/FilterEncoder.java
deleted file mode 100644
index 4558aad..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FilterEncoder.java
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * FilterEncoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-interface FilterEncoder extends FilterCoder {
- long getFilterID();
- byte[] getFilterProps();
- boolean supportsFlushing();
- FinishableOutputStream getOutputStream(FinishableOutputStream out);
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/FilterOptions.java b/Java/Tukaani/src/org/tukaani/xz/FilterOptions.java
deleted file mode 100644
index a2398b4..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FilterOptions.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * FilterOptions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.IOException;
-
-/**
- * Base class for filter-specific options classes.
- */
-public abstract class FilterOptions implements Cloneable {
- /**
- * Gets how much memory the encoder will need with
- * the given filter chain. This function simply calls
- * <code>getEncoderMemoryUsage()</code> for every filter
- * in the array and returns the sum of the returned values.
- */
- public static int getEncoderMemoryUsage(FilterOptions[] options) {
- int m = 0;
-
- for (int i = 0; i < options.length; ++i)
- m += options[i].getEncoderMemoryUsage();
-
- return m;
- }
-
- /**
- * Gets how much memory the decoder will need with
- * the given filter chain. This function simply calls
- * <code>getDecoderMemoryUsage()</code> for every filter
- * in the array and returns the sum of the returned values.
- */
- public static int getDecoderMemoryUsage(FilterOptions[] options) {
- int m = 0;
-
- for (int i = 0; i < options.length; ++i)
- m += options[i].getDecoderMemoryUsage();
-
- return m;
- }
-
- /**
- * Gets how much memory the encoder will need with these options.
- */
- public abstract int getEncoderMemoryUsage();
-
- /**
- * Gets a raw (no XZ headers) encoder output stream using these options.
- * Raw streams are an advanced feature. In most cases you want to store
- * the compressed data in the .xz container format instead of using
- * a raw stream. To use this filter in a .xz file, pass this object
- * to XZOutputStream.
- */
- public abstract FinishableOutputStream getOutputStream(
- FinishableOutputStream out);
-
- /**
- * Gets how much memory the decoder will need to decompress the data
- * that was encoded with these options.
- */
- public abstract int getDecoderMemoryUsage();
-
- /**
- * Gets a raw (no XZ headers) decoder input stream using these options.
- */
- public abstract InputStream getInputStream(InputStream in)
- throws IOException;
-
- abstract FilterEncoder getFilterEncoder();
-
- FilterOptions() {}
-}
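The two static helpers above only sum the per-filter estimates, so checking a whole chain against a memory budget is one call per direction. A minimal sketch; the chain itself is an arbitrary example.

    import org.tukaani.xz.DeltaOptions;
    import org.tukaani.xz.FilterOptions;
    import org.tukaani.xz.LZMA2Options;

    class ChainMemorySketch {
        public static void main(String[] args) throws Exception {
            // Arbitrary example chain: Delta with distance 4, then LZMA2.
            FilterOptions[] chain = { new DeltaOptions(4), new LZMA2Options() };

            // Both values are estimates in KiB, as in the demo programs above.
            System.err.println("Encoder memory usage: "
                    + FilterOptions.getEncoderMemoryUsage(chain) + " KiB");
            System.err.println("Decoder memory usage: "
                    + FilterOptions.getDecoderMemoryUsage(chain) + " KiB");
        }
    }
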
diff --git a/Java/Tukaani/src/org/tukaani/xz/FinishableOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/FinishableOutputStream.java
deleted file mode 100644
index b360628..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FinishableOutputStream.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * FinishableOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.OutputStream;
-import java.io.IOException;
-
-/**
- * Output stream that supports finishing without closing
- * the underlying stream.
- */
-public abstract class FinishableOutputStream extends OutputStream {
- /**
- * Finish the stream without closing the underlying stream.
- * No more data may be written to the stream after finishing.
- * <p>
- * The <code>finish</code> method of <code>FinishableOutputStream</code>
- * does nothing. Subclasses should override it if they need finishing
- * support, which is the case, for example, with compressors.
- *
- * @throws IOException
- */
- public void finish() throws IOException {}
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/FinishableWrapperOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/FinishableWrapperOutputStream.java
deleted file mode 100644
index 2e0ac99..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/FinishableWrapperOutputStream.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * FinishableWrapperOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.OutputStream;
-import java.io.IOException;
-
-/**
- * Wraps an output stream to a finishable output stream for use with
- * raw encoders. This is not needed for XZ compression and thus most
- * people will never need this.
- */
-public class FinishableWrapperOutputStream extends FinishableOutputStream {
- /**
- * The {@link java.io.OutputStream OutputStream} that has been
- * wrapped into a FinishableWrapperOutputStream.
- */
- protected OutputStream out;
-
- /**
- * Creates a new output stream which supports finishing.
- * The <code>finish()</code> method will do nothing.
- */
- public FinishableWrapperOutputStream(OutputStream out) {
- this.out = out;
- }
-
- /**
- * Calls {@link java.io.OutputStream#write(int) out.write(b)}.
- */
- public void write(int b) throws IOException {
- out.write(b);
- }
-
- /**
- * Calls {@link java.io.OutputStream#write(byte[]) out.write(buf)}.
- */
- public void write(byte[] buf) throws IOException {
- out.write(buf);
- }
-
- /**
- * Calls {@link java.io.OutputStream#write(byte[],int,int)
- out.write(buf, off, len)}.
- */
- public void write(byte[] buf, int off, int len) throws IOException {
- out.write(buf, off, len);
- }
-
- /**
- * Calls {@link java.io.OutputStream#flush() out.flush()}.
- */
- public void flush() throws IOException {
- out.flush();
- }
-
- /**
- * Calls {@link java.io.OutputStream#close() out.close()}.
- */
- public void close() throws IOException {
- out.close();
- }
-}
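The wrapper exists so that a plain OutputStream can terminate a raw filter chain, for example to write a raw LZMA2 stream (no XZ headers) into a ByteArrayOutputStream and read it back with LZMA2InputStream. A minimal sketch only: it assumes LZMA2Options implements the raw getOutputStream() declared by FilterOptions and that the decoder is given the same dictionary size the encoder used.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import org.tukaani.xz.FinishableOutputStream;
    import org.tukaani.xz.FinishableWrapperOutputStream;
    import org.tukaani.xz.LZMA2InputStream;
    import org.tukaani.xz.LZMA2Options;

    class RawLzma2RoundTrip {
        public static void main(String[] args) throws Exception {
            LZMA2Options options = new LZMA2Options();

            // Raw encode: the wrapper adapts ByteArrayOutputStream so it can
            // sit at the end of the filter chain.
            ByteArrayOutputStream compressed = new ByteArrayOutputStream();
            FinishableOutputStream enc = options.getOutputStream(
                    new FinishableWrapperOutputStream(compressed));
            enc.write("hello, hello, hello".getBytes("UTF-8"));
            enc.finish();

            // Raw decode: the dictionary size is not stored in a raw stream,
            // so it is passed in explicitly.
            InputStream dec = new LZMA2InputStream(
                    new ByteArrayInputStream(compressed.toByteArray()),
                    options.getDictSize());

            int b;
            while ((b = dec.read()) != -1)
                System.out.write(b);
            System.out.flush();
        }
    }
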
diff --git a/Java/Tukaani/src/org/tukaani/xz/IA64Options.java b/Java/Tukaani/src/org/tukaani/xz/IA64Options.java
deleted file mode 100644
index ba57870..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/IA64Options.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * IA64Options
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.IA64;
-
-/**
- * BCJ filter for Itanium (IA-64) instructions.
- */
-public class IA64Options extends BCJOptions {
- private static final int ALIGNMENT = 16;
-
- public IA64Options() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new IA64(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new IA64(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.IA64_FILTER_ID);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/IndexIndicatorException.java b/Java/Tukaani/src/org/tukaani/xz/IndexIndicatorException.java
deleted file mode 100644
index fc6bc03..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/IndexIndicatorException.java
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * IndexIndicatorException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-class IndexIndicatorException extends Exception {
- private static final long serialVersionUID = 1L;
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2Coder.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2Coder.java
deleted file mode 100644
index b0963b7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2Coder.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * LZMA2Coder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-abstract class LZMA2Coder implements FilterCoder {
- public static final long FILTER_ID = 0x21;
-
- public boolean changesSize() {
- return true;
- }
-
- public boolean nonLastOK() {
- return false;
- }
-
- public boolean lastOK() {
- return true;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2Decoder.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2Decoder.java
deleted file mode 100644
index 82075c2..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2Decoder.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * LZMA2Decoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-
-class LZMA2Decoder extends LZMA2Coder implements FilterDecoder {
- private int dictSize;
-
- LZMA2Decoder(byte[] props) throws UnsupportedOptionsException {
- // Up to 1.5 GiB dictionary is supported. The bigger ones
- // are too big for int.
- if (props.length != 1 || (props[0] & 0xFF) > 37)
- throw new UnsupportedOptionsException(
- "Unsupported LZMA2 properties");
-
- dictSize = 2 | (props[0] & 1);
- dictSize <<= (props[0] >>> 1) + 11;
- }
-
- public int getMemoryUsage() {
- return LZMA2InputStream.getMemoryUsage(dictSize);
- }
-
- public InputStream getInputStream(InputStream in) {
- return new LZMA2InputStream(in, dictSize);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2Encoder.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2Encoder.java
deleted file mode 100644
index 7c7facc..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2Encoder.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * LZMA2Encoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import org.tukaani.xz.lzma.LZMAEncoder;
-
-class LZMA2Encoder extends LZMA2Coder implements FilterEncoder {
- private final LZMA2Options options;
- private final byte[] props = new byte[1];
-
- LZMA2Encoder(LZMA2Options options) {
- if (options.getPresetDict() != null)
- throw new IllegalArgumentException(
- "XZ doesn't support a preset dictionary for now");
-
- if (options.getMode() == LZMA2Options.MODE_UNCOMPRESSED) {
- props[0] = (byte)0;
- } else {
- int d = Math.max(options.getDictSize(),
- LZMA2Options.DICT_SIZE_MIN);
- props[0] = (byte)(LZMAEncoder.getDistSlot(d - 1) - 23);
- }
-
- // Make a private copy so that the caller is free to change its copy.
- this.options = (LZMA2Options)options.clone();
- }
-
- public long getFilterID() {
- return FILTER_ID;
- }
-
- public byte[] getFilterProps() {
- return props;
- }
-
- public boolean supportsFlushing() {
- return true;
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return options.getOutputStream(out);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2InputStream.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2InputStream.java
deleted file mode 100644
index 4e865c6..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2InputStream.java
+++ /dev/null
@@ -1,358 +0,0 @@
-/*
- * LZMA2InputStream
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import org.tukaani.xz.lz.LZDecoder;
-import org.tukaani.xz.rangecoder.RangeDecoderFromBuffer;
-import org.tukaani.xz.lzma.LZMADecoder;
-
-/**
- * Decompresses a raw LZMA2 stream (no XZ headers).
- */
-public class LZMA2InputStream extends InputStream {
- /**
- * Smallest valid LZMA2 dictionary size.
- * <p>
- * Very tiny dictionaries would be a performance problem, so
- * the minimum is 4 KiB.
- */
- public static final int DICT_SIZE_MIN = 4096;
-
- /**
- * Largest dictionary size supported by this implementation.
- * <p>
- * The LZMA2 algorithm allows dictionaries up to one byte less than 4 GiB.
- * This implementation supports only 16 bytes less than 2 GiB for raw
- * LZMA2 streams, and for .xz files the maximum is 1.5 GiB. This
- * limitation is due to Java using signed 32-bit integers for array
- * indexing. The limitation shouldn't matter much in practice since such
- * huge dictionaries are not normally used.
- */
- public static final int DICT_SIZE_MAX = Integer.MAX_VALUE & ~15;
-
- private static final int COMPRESSED_SIZE_MAX = 1 << 16;
-
- private DataInputStream in;
-
- private final LZDecoder lz;
- private final RangeDecoderFromBuffer rc
- = new RangeDecoderFromBuffer(COMPRESSED_SIZE_MAX);
- private LZMADecoder lzma;
-
- private int uncompressedSize = 0;
- private boolean isLZMAChunk;
-
- private boolean needDictReset = true;
- private boolean needProps = true;
- private boolean endReached = false;
-
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Gets approximate decompressor memory requirements as kibibytes for
- * the given dictionary size.
- *
- * @param dictSize LZMA2 dictionary size as bytes, must be
- * in the range [<code>DICT_SIZE_MIN</code>,
- * <code>DICT_SIZE_MAX</code>]
- *
- * @return approximate memory requirements as kibibytes (KiB)
- */
- public static int getMemoryUsage(int dictSize) {
- // The base state is around 30-40 KiB (probabilities etc.),
- // range decoder needs COMPRESSED_SIZE_MAX bytes for buffering,
- // and LZ decoder needs a dictionary buffer.
- return 40 + COMPRESSED_SIZE_MAX / 1024 + getDictSize(dictSize) / 1024;
- }
-
- private static int getDictSize(int dictSize) {
- if (dictSize < DICT_SIZE_MIN || dictSize > DICT_SIZE_MAX)
- throw new IllegalArgumentException(
- "Unsupported dictionary size " + dictSize);
-
- // Round dictionary size upward to a multiple of 16. This way LZMA
- // can use LZDecoder.getPos() for calculating LZMA's posMask.
- // Note that this check is needed only for raw LZMA2 streams; it is
- // redundant with .xz.
- return (dictSize + 15) & ~15;
- }
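As a rough worked example of the estimate above (the figures follow directly from the formula, and the 40 KiB base is itself an approximation):

    // dictSize = 8 MiB is already a multiple of 16, so getDictSize() returns it unchanged:
    // 40 + 65536 / 1024 + 8388608 / 1024 = 40 + 64 + 8192
    int kib = LZMA2InputStream.getMemoryUsage(8 << 20);   // about 8296 KiB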
-
- /**
- * Creates a new input stream that decompresses raw LZMA2 data
- * from <code>in</code>.
- * <p>
- * The caller needs to know the dictionary size used when compressing;
- * the dictionary size isn't stored as part of a raw LZMA2 stream.
- * <p>
- * Specifying a dictionary size that is too small will prevent decompressing
- * the stream. Specifying a dictionary that is too big is a waste of memory,
- * but decompression will still work.
- * <p>
- * There is no need to specify a dictionary bigger than
- * the uncompressed size of the data even if a bigger dictionary
- * was used when compressing. If you know the uncompressed size
- * of the data, this might allow saving some memory.
- *
- * @param in input stream from which LZMA2-compressed
- * data is read
- *
- * @param dictSize LZMA2 dictionary size as bytes, must be
- * in the range [<code>DICT_SIZE_MIN</code>,
- * <code>DICT_SIZE_MAX</code>]
- */
- public LZMA2InputStream(InputStream in, int dictSize) {
- this(in, dictSize, null);
- }
-
- /**
- * Creates a new LZMA2 decompressor using a preset dictionary.
- * <p>
- * This is like <code>LZMA2InputStream(InputStream, int)</code> except
- * that the dictionary may be initialized using a preset dictionary.
- * If a preset dictionary was used when compressing the data, the
- * same preset dictionary must be provided when decompressing.
- *
- * @param in input stream from which LZMA2-compressed
- * data is read
- *
- * @param dictSize LZMA2 dictionary size as bytes, must be
- * in the range [<code>DICT_SIZE_MIN</code>,
- * <code>DICT_SIZE_MAX</code>]
- *
- * @param presetDict preset dictionary or <code>null</code>
- * to use no preset dictionary
- */
- public LZMA2InputStream(InputStream in, int dictSize, byte[] presetDict) {
- // Check for null because otherwise null isn't detected
- // in this constructor.
- if (in == null)
- throw new NullPointerException();
-
- this.in = new DataInputStream(in);
- this.lz = new LZDecoder(getDictSize(dictSize), presetDict);
-
- if (presetDict != null && presetDict.length > 0)
- needDictReset = false;
- }
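A minimal decompression sketch built on the constructor above (the 8 MiB dictionary size is an assumption for the example; java.io and org.tukaani.xz imports are assumed):

    static void decodeRawLzma2(InputStream rawIn, OutputStream dst) throws IOException {
        // A raw LZMA2 stream does not record its dictionary size, so the value
        // used by the encoder (8 MiB here) has to be known by the caller.
        LZMA2InputStream in = new LZMA2InputStream(rawIn, 8 << 20);
        byte[] buf = new byte[8192];
        for (int n; (n = in.read(buf)) != -1; )
            dst.write(buf, 0, n);
        in.close();
    }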
-
- /**
- * Decompresses the next byte from this input stream.
- * <p>
- * Reading lots of data with <code>read()</code> from this input stream
- * may be inefficient. Wrap it in <code>java.io.BufferedInputStream</code>
- * if you need to read lots of data one byte at a time.
- *
- * @return the next decompressed byte, or <code>-1</code>
- * to indicate the end of the compressed stream
- *
- * @throws CorruptedInputException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decompresses into an array of bytes.
- * <p>
- * If <code>len</code> is zero, no bytes are read and <code>0</code>
- * is returned. Otherwise this will block until <code>len</code>
- * bytes have been decompressed, the end of the LZMA2 stream is reached,
- * or an exception is thrown.
- *
- * @param buf target buffer for uncompressed data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of uncompressed bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the compressed stream
- *
- * @throws CorruptedInputException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- if (endReached)
- return -1;
-
- try {
- int size = 0;
-
- while (len > 0) {
- if (uncompressedSize == 0) {
- decodeChunkHeader();
- if (endReached)
- return size == 0 ? -1 : size;
- }
-
- int copySizeMax = Math.min(uncompressedSize, len);
-
- if (!isLZMAChunk) {
- lz.copyUncompressed(in, copySizeMax);
- } else {
- lz.setLimit(copySizeMax);
- lzma.decode();
- if (!rc.isInBufferOK())
- throw new CorruptedInputException();
- }
-
- int copiedSize = lz.flush(buf, off);
- off += copiedSize;
- len -= copiedSize;
- size += copiedSize;
- uncompressedSize -= copiedSize;
-
- if (uncompressedSize == 0)
- if (!rc.isFinished() || lz.hasPending())
- throw new CorruptedInputException();
- }
-
- return size;
-
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- private void decodeChunkHeader() throws IOException {
- int control = in.readUnsignedByte();
-
- if (control == 0x00) {
- endReached = true;
- return;
- }
-
- if (control >= 0xE0 || control == 0x01) {
- needProps = true;
- needDictReset = false;
- lz.reset();
- } else if (needDictReset) {
- throw new CorruptedInputException();
- }
-
- if (control >= 0x80) {
- isLZMAChunk = true;
-
- uncompressedSize = (control & 0x1F) << 16;
- uncompressedSize += in.readUnsignedShort() + 1;
-
- int compressedSize = in.readUnsignedShort() + 1;
-
- if (control >= 0xC0) {
- needProps = false;
- decodeProps();
-
- } else if (needProps) {
- throw new CorruptedInputException();
-
- } else if (control >= 0xA0) {
- lzma.reset();
- }
-
- rc.prepareInputBuffer(in, compressedSize);
-
- } else if (control > 0x02) {
- throw new CorruptedInputException();
-
- } else {
- isLZMAChunk = false;
- uncompressedSize = in.readUnsignedShort() + 1;
- }
- }
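In summary, the control byte decoded above selects the chunk type; the concrete values below are illustrative, not taken from a real stream:

    // 0x00        end of the LZMA2 stream
    // 0x01, 0x02  uncompressed chunk, with or without a dictionary reset
    // 0x80-0xFF   LZMA chunk: the reset level grows with the control byte
    //             (state reset at 0xA0+, new props at 0xC0+, dictionary reset
    //             at 0xE0+) and bits 0-4 hold the high bits of (uncompressed size - 1)
    // Example: control 0xE0 followed by size fields 0x0FFF and 0x01FF and props
    // byte 0x5D describes an LZMA chunk with a dictionary reset and new
    // properties, 0x1000 uncompressed bytes, 0x200 compressed bytes, and
    // lc = 3, lp = 0, pb = 2.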
-
- private void decodeProps() throws IOException {
- int props = in.readUnsignedByte();
-
- if (props > (4 * 5 + 4) * 9 + 8)
- throw new CorruptedInputException();
-
- int pb = props / (9 * 5);
- props -= pb * 9 * 5;
- int lp = props / 9;
- int lc = props - lp * 9;
-
- if (lc + lp > 4)
- throw new CorruptedInputException();
-
- lzma = new LZMADecoder(lz, rc, lc, lp, pb);
- }
-
- /**
- * Returns the number of uncompressed bytes that can be read
- * without blocking. The value is returned with an assumption
- * that the compressed input data will be valid. If the compressed
- * data is corrupt, <code>CorruptedInputException</code> may get
- * thrown before the number of bytes claimed to be available have
- * been read from this input stream.
- * <p>
- * In LZMA2InputStream, the return value will be non-zero when the
- * decompressor is in the middle of an LZMA2 chunk. The return value
- * will then be the number of uncompressed bytes remaining from that
- * chunk.
- *
- * @return the number of uncompressed bytes that can be read
- * without blocking
- */
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- return uncompressedSize;
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2Options.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2Options.java
deleted file mode 100644
index 42777f0..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2Options.java
+++ /dev/null
@@ -1,581 +0,0 @@
-/*
- * LZMA2Options
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.IOException;
-import org.tukaani.xz.lz.LZEncoder;
-import org.tukaani.xz.lzma.LZMAEncoder;
-
-/**
- * LZMA2 compression options.
- * <p>
- * While this allows setting the LZMA2 compression options in detail,
- * often you only need <code>LZMA2Options()</code> or
- * <code>LZMA2Options(int)</code>.
- */
-public class LZMA2Options extends FilterOptions {
- /**
- * Minimum valid compression preset level is 0.
- */
- public static final int PRESET_MIN = 0;
-
- /**
- * Maximum valid compression preset level is 9.
- */
- public static final int PRESET_MAX = 9;
-
- /**
- * Default compression preset level is 6.
- */
- public static final int PRESET_DEFAULT = 6;
-
- /**
- * Minimum dictionary size is 4 KiB.
- */
- public static final int DICT_SIZE_MIN = 4096;
-
- /**
- * Maximum dictionary size for compression is 768 MiB.
- * <p>
- * The decompressor supports bigger dictionaries, up to almost 2 GiB.
- * With HC4 the encoder would support dictionaries bigger than 768 MiB.
- * The 768 MiB limit comes from the current implementation of BT4 where
- * we would otherwise hit the limits of signed ints in array indexing.
- * <p>
- * If you really need bigger dictionary for decompression,
- * use {@link LZMA2InputStream} directly.
- */
- public static final int DICT_SIZE_MAX = 768 << 20;
-
- /**
- * The default dictionary size is 8 MiB.
- */
- public static final int DICT_SIZE_DEFAULT = 8 << 20;
-
- /**
- * Maximum value for lc + lp is 4.
- */
- public static final int LC_LP_MAX = 4;
-
- /**
- * The default number of literal context bits is 3.
- */
- public static final int LC_DEFAULT = 3;
-
- /**
- * The default number of literal position bits is 0.
- */
- public static final int LP_DEFAULT = 0;
-
- /**
- * Maximum value for pb is 4.
- */
- public static final int PB_MAX = 4;
-
- /**
- * The default number of position bits is 2.
- */
- public static final int PB_DEFAULT = 2;
-
- /**
- * Compression mode: uncompressed.
- * The data is wrapped into a LZMA2 stream without compression.
- */
- public static final int MODE_UNCOMPRESSED = 0;
-
- /**
- * Compression mode: fast.
- * This is usually combined with a hash chain match finder.
- */
- public static final int MODE_FAST = LZMAEncoder.MODE_FAST;
-
- /**
- * Compression mode: normal.
- * This is usually combined with a binary tree match finder.
- */
- public static final int MODE_NORMAL = LZMAEncoder.MODE_NORMAL;
-
- /**
- * Minimum value for <code>niceLen</code> is 8.
- */
- public static final int NICE_LEN_MIN = 8;
-
- /**
- * Maximum value for <code>niceLen</code> is 273.
- */
- public static final int NICE_LEN_MAX = 273;
-
- /**
- * Match finder: Hash Chain 2-3-4
- */
- public static final int MF_HC4 = LZEncoder.MF_HC4;
-
- /**
- * Match finder: Binary tree 2-3-4
- */
- public static final int MF_BT4 = LZEncoder.MF_BT4;
-
- private static final int[] presetToDictSize = {
- 1 << 18, 1 << 20, 1 << 21, 1 << 22, 1 << 22,
- 1 << 23, 1 << 23, 1 << 24, 1 << 25, 1 << 26 };
-
- private static final int[] presetToDepthLimit = { 4, 8, 24, 48 };
-
- private int dictSize;
- private byte[] presetDict = null;
- private int lc;
- private int lp;
- private int pb;
- private int mode;
- private int niceLen;
- private int mf;
- private int depthLimit;
-
- /**
- * Creates new LZMA2 options and sets them to the default values.
- * This is equivalent to <code>LZMA2Options(PRESET_DEFAULT)</code>.
- */
- public LZMA2Options() {
- try {
- setPreset(PRESET_DEFAULT);
- } catch (UnsupportedOptionsException e) {
- assert false;
- throw new RuntimeException();
- }
- }
-
- /**
- * Creates new LZMA2 options and sets them to the given preset.
- *
- * @throws UnsupportedOptionsException
- * <code>preset</code> is not supported
- */
- public LZMA2Options(int preset) throws UnsupportedOptionsException {
- setPreset(preset);
- }
-
- /**
- * Creates new LZMA2 options and sets them to the given custom values.
- *
- * @throws UnsupportedOptionsException
- * unsupported options were specified
- */
- public LZMA2Options(int dictSize, int lc, int lp, int pb, int mode,
- int niceLen, int mf, int depthLimit)
- throws UnsupportedOptionsException {
- setDictSize(dictSize);
- setLcLp(lc, lp);
- setPb(pb);
- setMode(mode);
- setNiceLen(niceLen);
- setMatchFinder(mf);
- setDepthLimit(depthLimit);
- }
-
- /**
- * Sets the compression options to the given preset.
- * <p>
- * The presets 0-3 are fast presets with medium compression.
- * The presets 4-6 are fairly slow presets with high compression.
- * The default preset (<code>PRESET_DEFAULT</code>) is 6.
- * <p>
- * The presets 7-9 are like the preset 6 but use bigger dictionaries
- * and have higher compressor and decompressor memory requirements.
- * Unless the uncompressed size of the file exceeds 8&nbsp;MiB,
- * 16&nbsp;MiB, or 32&nbsp;MiB, it is a waste of memory to use the
- * presets 7, 8, or 9, respectively.
- *
- * @throws UnsupportedOptionsException
- * <code>preset</code> is not supported
- */
- public void setPreset(int preset) throws UnsupportedOptionsException {
- if (preset < 0 || preset > 9)
- throw new UnsupportedOptionsException(
- "Unsupported preset: " + preset);
-
- lc = LC_DEFAULT;
- lp = LP_DEFAULT;
- pb = PB_DEFAULT;
- dictSize = presetToDictSize[preset];
-
- if (preset <= 3) {
- mode = MODE_FAST;
- mf = MF_HC4;
- niceLen = preset <= 1 ? 128 : NICE_LEN_MAX;
- depthLimit = presetToDepthLimit[preset];
- } else {
- mode = MODE_NORMAL;
- mf = MF_BT4;
- niceLen = (preset == 4) ? 16 : (preset == 5) ? 32 : 64;
- depthLimit = 0;
- }
- }
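The preset branches above can be spelled out explicitly; a small sketch showing the equivalence for the default preset (the values are read off the preset-6 branch, nothing beyond what the method sets):

    static void presetSixEquivalents() throws UnsupportedOptionsException {
        // 8 MiB dictionary, lc = 3, lp = 0, pb = 2, MODE_NORMAL,
        // niceLen = 64, MF_BT4, depthLimit = 0 (automatic).
        LZMA2Options a = new LZMA2Options();    // PRESET_DEFAULT is 6
        LZMA2Options b = new LZMA2Options(6);
        LZMA2Options c = new LZMA2Options(8 << 20, 3, 0, 2,
                                          LZMA2Options.MODE_NORMAL, 64,
                                          LZMA2Options.MF_BT4, 0);
    }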
-
- /**
- * Sets the dictionary size in bytes.
- * <p>
- * The dictionary (or history buffer) holds the most recently seen
- * uncompressed data. Bigger dictionary usually means better compression.
- * However, using a dictionary bigger than the size of the uncompressed
- * data is a waste of memory.
- * <p>
- * Any value in the range [DICT_SIZE_MIN, DICT_SIZE_MAX] is valid,
- * but sizes of 2^n and 2^n&nbsp;+&nbsp;2^(n-1) bytes are somewhat
- * recommended.
- *
- * @throws UnsupportedOptionsException
- * <code>dictSize</code> is not supported
- */
- public void setDictSize(int dictSize) throws UnsupportedOptionsException {
- if (dictSize < DICT_SIZE_MIN)
- throw new UnsupportedOptionsException(
- "LZMA2 dictionary size must be at least 4 KiB: "
- + dictSize + " B");
-
- if (dictSize > DICT_SIZE_MAX)
- throw new UnsupportedOptionsException(
- "LZMA2 dictionary size must not exceed "
- + (DICT_SIZE_MAX >> 20) + " MiB: " + dictSize + " B");
-
- this.dictSize = dictSize;
- }
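A short fragment illustrating the recommended 2^n and 2^n + 2^(n-1) sizes (checked exception handling is omitted in this sketch):

    LZMA2Options opts = new LZMA2Options();
    opts.setDictSize(1 << 22);                 // 4 MiB, a 2^n size
    opts.setDictSize((1 << 22) + (1 << 21));   // 6 MiB, a 2^n + 2^(n-1) size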
-
- /**
- * Gets the dictionary size in bytes.
- */
- public int getDictSize() {
- return dictSize;
- }
-
- /**
- * Sets a preset dictionary. Use null to disable the use of
- * a preset dictionary. By default there is no preset dictionary.
- * <p>
- * <b>The .xz format doesn't support a preset dictionary for now.
- * Do not set a preset dictionary unless you use raw LZMA2.</b>
- * <p>
- * A preset dictionary can be useful when compressing many similar,
- * relatively small chunks of data independently from each other.
- * A preset dictionary should contain typical strings that occur in
- * the files being compressed. The most probable strings should be
- * near the end of the preset dictionary. The preset dictionary used
- * for compression is also needed for decompression.
- */
- public void setPresetDict(byte[] presetDict) {
- this.presetDict = presetDict;
- }
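A raw-LZMA2-only sketch of the round trip described above; sharedDict is an assumed byte[] with the most probable strings near its end:

    LZMA2Options opts = new LZMA2Options();
    opts.setPresetDict(sharedDict);
    // Compression then goes through opts.getOutputStream(...); the matching
    // decompressor is new LZMA2InputStream(in, opts.getDictSize(), sharedDict).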
-
- /**
- * Gets the preset dictionary.
- */
- public byte[] getPresetDict() {
- return presetDict;
- }
-
- /**
- * Sets the number of literal context bits and literal position bits.
- * <p>
- * The sum of <code>lc</code> and <code>lp</code> is limited to 4.
- * Trying to exceed it will throw an exception. This function lets
- * you change both at the same time.
- *
- * @throws UnsupportedOptionsException
- * <code>lc</code> and <code>lp</code>
- * are invalid
- */
- public void setLcLp(int lc, int lp) throws UnsupportedOptionsException {
- if (lc < 0 || lp < 0 || lc > LC_LP_MAX || lp > LC_LP_MAX
- || lc + lp > LC_LP_MAX)
- throw new UnsupportedOptionsException(
- "lc + lp must not exceed " + LC_LP_MAX + ": "
- + lc + " + " + lp);
-
- this.lc = lc;
- this.lp = lp;
- }
-
- /**
- * Sets the number of literal context bits.
- * <p>
- * All bytes that cannot be encoded as matches are encoded as literals.
- * That is, literals are simply 8-bit bytes that are encoded one at
- * a time.
- * <p>
- * The literal coding makes an assumption that the highest <code>lc</code>
- * bits of the previous uncompressed byte correlate with the next byte.
- * For example, in typical English text, an upper-case letter is often
- * followed by a lower-case letter, and a lower-case letter is usually
- * followed by another lower-case letter. In the US-ASCII character set,
- * the highest three bits are 010 for upper-case letters and 011 for
- * lower-case letters. When <code>lc</code> is at least 3, the literal
- * coding can take advantage of this property in the uncompressed data.
- * <p>
- * The default value (3) is usually good. If you want maximum compression,
- * try <code>setLc(4)</code>. Sometimes it helps a little, and sometimes it
- * makes compression worse. If it makes it worse, test for example
- * <code>setLc(2)</code> too.
- *
- * @throws UnsupportedOptionsException
- * <code>lc</code> is invalid, or the sum
- * of <code>lc</code> and <code>lp</code>
- * exceeds LC_LP_MAX
- */
- public void setLc(int lc) throws UnsupportedOptionsException {
- setLcLp(lc, lp);
- }
-
- /**
- * Sets the number of literal position bits.
- * <p>
- * This affects what kind of alignment in the uncompressed data is
- * assumed when encoding literals. See {@link #setPb(int) setPb} for
- * more information about alignment.
- *
- * @throws UnsupportedOptionsException
- * <code>lp</code> is invalid, or the sum
- * of <code>lc</code> and <code>lp</code>
- * exceeds LC_LP_MAX
- */
- public void setLp(int lp) throws UnsupportedOptionsException {
- setLcLp(lc, lp);
- }
-
- /**
- * Gets the number of literal context bits.
- */
- public int getLc() {
- return lc;
- }
-
- /**
- * Gets the number of literal position bits.
- */
- public int getLp() {
- return lp;
- }
-
- /**
- * Sets the number of position bits.
- * <p>
- * This affects what kind of alignment in the uncompressed data is
- * assumed in general. The default (2) means four-byte alignment
- * (2^<code>pb</code> = 2^2 = 4), which is often a good choice when
- * there's no better guess.
- * <p>
- * When the alignment is known, setting the number of position bits
- * accordingly may reduce the file size a little. For example with text
- * files having one-byte alignment (US-ASCII, ISO-8859-*, UTF-8), using
- * <code>setPb(0)</code> can improve compression slightly. For UTF-16
- * text, <code>setPb(1)</code> is a good choice. If the alignment is
- * an odd number like 3 bytes, <code>setPb(0)</code> might be the best
- * choice.
- * <p>
- * Even though the assumed alignment can be adjusted with
- * <code>setPb</code> and <code>setLp</code>, LZMA2 still slightly favors
- * 16-byte alignment. It might be worth taking into account when designing
- * file formats that are likely to be often compressed with LZMA2.
- *
- * @throws UnsupportedOptionsException
- * <code>pb</code> is invalid
- */
- public void setPb(int pb) throws UnsupportedOptionsException {
- if (pb < 0 || pb > PB_MAX)
- throw new UnsupportedOptionsException(
- "pb must not exceed " + PB_MAX + ": " + pb);
-
- this.pb = pb;
- }
-
- /**
- * Gets the number of position bits.
- */
- public int getPb() {
- return pb;
- }
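A tuning fragment following the guidance in setLc and setPb above (the choices are examples for plain text, not universally better settings):

    static LZMA2Options textTuned() throws UnsupportedOptionsException {
        LZMA2Options opts = new LZMA2Options();
        opts.setPb(0);   // one-byte alignment: US-ASCII, ISO-8859-*, UTF-8
        opts.setLc(4);   // sometimes helps on text; fall back to 3 or 2 if it hurts
        return opts;
    }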
-
- /**
- * Sets the compression mode.
- * <p>
- * This specifies the method to analyze the data produced by
- * a match finder. The default is <code>MODE_FAST</code> for presets
- * 0-3 and <code>MODE_NORMAL</code> for presets 4-9.
- * <p>
- * Usually <code>MODE_FAST</code> is used with Hash Chain match finders
- * and <code>MODE_NORMAL</code> with Binary Tree match finders. This is
- * also what the presets do.
- * <p>
- * The special mode <code>MODE_UNCOMPRESSED</code> doesn't try to
- * compress the data at all (and doesn't use a match finder) and will
- * simply wrap it in uncompressed LZMA2 chunks.
- *
- * @throws UnsupportedOptionsException
- * <code>mode</code> is not supported
- */
- public void setMode(int mode) throws UnsupportedOptionsException {
- if (mode < MODE_UNCOMPRESSED || mode > MODE_NORMAL)
- throw new UnsupportedOptionsException(
- "Unsupported compression mode: " + mode);
-
- this.mode = mode;
- }
-
- /**
- * Gets the compression mode.
- */
- public int getMode() {
- return mode;
- }
-
- /**
- * Sets the nice length of matches.
- * Once a match of at least <code>niceLen</code> bytes is found,
- * the algorithm stops looking for better matches. Higher values tend
- * to give better compression at the expense of speed. The default
- * depends on the preset.
- *
- * @throws UnsupportedOptionsException
- * <code>niceLen</code> is invalid
- */
- public void setNiceLen(int niceLen) throws UnsupportedOptionsException {
- if (niceLen < NICE_LEN_MIN)
- throw new UnsupportedOptionsException(
- "Minimum nice length of matches is "
- + NICE_LEN_MIN + " bytes: " + niceLen);
-
- if (niceLen > NICE_LEN_MAX)
- throw new UnsupportedOptionsException(
- "Maximum nice length of matches is " + NICE_LEN_MAX
- + ": " + niceLen);
-
- this.niceLen = niceLen;
- }
-
- /**
- * Gets the nice length of matches.
- */
- public int getNiceLen() {
- return niceLen;
- }
-
- /**
- * Sets the match finder type.
- * <p>
- * The match finder has a major effect on compression speed, memory usage,
- * and compression ratio. Usually Hash Chain match finders are faster
- * than Binary Tree match finders. The default depends on the preset:
- * 0-3 use <code>MF_HC4</code> and 4-9 use <code>MF_BT4</code>.
- *
- * @throws UnsupportedOptionsException
- * <code>mf</code> is not supported
- */
- public void setMatchFinder(int mf) throws UnsupportedOptionsException {
- if (mf != MF_HC4 && mf != MF_BT4)
- throw new UnsupportedOptionsException(
- "Unsupported match finder: " + mf);
-
- this.mf = mf;
- }
-
- /**
- * Gets the match finder type.
- */
- public int getMatchFinder() {
- return mf;
- }
-
- /**
- * Sets the match finder search depth limit.
- * <p>
- * The default is a special value of <code>0</code> which indicates that
- * the depth limit should be automatically calculated by the selected
- * match finder from the nice length of matches.
- * <p>
- * A reasonable depth limit is 4-100 for Hash Chain match finders and
- * 16-1000 for Binary Tree match finders. Using very high values can
- * make the compressor extremely slow with some files. Avoid settings
- * higher than 1000 unless you are prepared to interrupt the compression
- * in case it is taking far too long.
- *
- * @throws UnsupportedOptionsException
- * <code>depthLimit</code> is invalid
- */
- public void setDepthLimit(int depthLimit)
- throws UnsupportedOptionsException {
- if (depthLimit < 0)
- throw new UnsupportedOptionsException(
- "Depth limit cannot be negative: " + depthLimit);
-
- this.depthLimit = depthLimit;
- }
-
- /**
- * Gets the match finder search depth limit.
- */
- public int getDepthLimit() {
- return depthLimit;
- }
-
- public int getEncoderMemoryUsage() {
- return (mode == MODE_UNCOMPRESSED)
- ? UncompressedLZMA2OutputStream.getMemoryUsage()
- : LZMA2OutputStream.getMemoryUsage(this);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- if (mode == MODE_UNCOMPRESSED)
- return new UncompressedLZMA2OutputStream(out);
-
- return new LZMA2OutputStream(out, this);
- }
-
- /**
- * Gets how much memory the LZMA2 decoder will need to decompress the data
- * that was encoded with these options and stored in a .xz file.
- * <p>
- * The returned value may be bigger than the value returned by a direct call
- * to {@link LZMA2InputStream#getMemoryUsage(int)} if the dictionary size
- * is not 2^n or 2^n&nbsp;+&nbsp;2^(n-1) bytes. This is because the .xz
- * headers store the dictionary size in such a format and other values
- * are rounded up to the next such value. Such rounding is harmless, except
- * that it might waste some memory if an unusual dictionary size is used.
- * <p>
- * If you use raw LZMA2 streams and an unusual dictionary size, call
- * {@link LZMA2InputStream#getMemoryUsage} directly to get raw decoder
- * memory requirements.
- */
- public int getDecoderMemoryUsage() {
- // Round the dictionary size up to the next 2^n or 2^n + 2^(n-1).
- int d = dictSize - 1;
- d |= d >>> 2;
- d |= d >>> 3;
- d |= d >>> 4;
- d |= d >>> 8;
- d |= d >>> 16;
- return LZMA2InputStream.getMemoryUsage(d + 1);
- }
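A worked example of the rounding above (the numbers follow from getMemoryUsage and getDictSize quoted earlier in this diff):

    // A 5 MiB dictionary is not of the 2^n or 2^n + 2^(n-1) form, so the bit
    // trick rounds it up to 6 MiB (2^22 + 2^21):
    //   getDecoderMemoryUsage()                  = 40 + 64 + 6144 = 6248 KiB
    //   LZMA2InputStream.getMemoryUsage(5 << 20) = 40 + 64 + 5120 = 5224 KiB (raw LZMA2)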
-
- public InputStream getInputStream(InputStream in) throws IOException {
- return new LZMA2InputStream(in, dictSize);
- }
-
- FilterEncoder getFilterEncoder() {
- return new LZMA2Encoder(this);
- }
-
- public Object clone() {
- try {
- return super.clone();
- } catch (CloneNotSupportedException e) {
- assert false;
- throw new RuntimeException();
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMA2OutputStream.java b/Java/Tukaani/src/org/tukaani/xz/LZMA2OutputStream.java
deleted file mode 100644
index 5724d10..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMA2OutputStream.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * LZMA2OutputStream
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import org.tukaani.xz.lz.LZEncoder;
-import org.tukaani.xz.rangecoder.RangeEncoder;
-import org.tukaani.xz.lzma.LZMAEncoder;
-
-class LZMA2OutputStream extends FinishableOutputStream {
- static final int COMPRESSED_SIZE_MAX = 64 << 10;
-
- private FinishableOutputStream out;
- private final DataOutputStream outData;
-
- private final LZEncoder lz;
- private final RangeEncoder rc;
- private final LZMAEncoder lzma;
-
- private final int props; // Cannot change props on the fly for now.
- private boolean dictResetNeeded = true;
- private boolean stateResetNeeded = true;
- private boolean propsNeeded = true;
-
- private int pendingSize = 0;
- private boolean finished = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- private static int getExtraSizeBefore(int dictSize) {
- return COMPRESSED_SIZE_MAX > dictSize
- ? COMPRESSED_SIZE_MAX - dictSize : 0;
- }
-
- static int getMemoryUsage(LZMA2Options options) {
- // 64 KiB buffer for the range encoder + a little extra + LZMAEncoder
- int dictSize = options.getDictSize();
- int extraSizeBefore = getExtraSizeBefore(dictSize);
- return 70 + LZMAEncoder.getMemoryUsage(options.getMode(),
- dictSize, extraSizeBefore,
- options.getMatchFinder());
- }
-
- LZMA2OutputStream(FinishableOutputStream out, LZMA2Options options) {
- if (out == null)
- throw new NullPointerException();
-
- this.out = out;
- outData = new DataOutputStream(out);
- rc = new RangeEncoder(COMPRESSED_SIZE_MAX);
-
- int dictSize = options.getDictSize();
- int extraSizeBefore = getExtraSizeBefore(dictSize);
- lzma = LZMAEncoder.getInstance(rc,
- options.getLc(), options.getLp(), options.getPb(),
- options.getMode(),
- dictSize, extraSizeBefore, options.getNiceLen(),
- options.getMatchFinder(), options.getDepthLimit());
-
- lz = lzma.getLZEncoder();
-
- byte[] presetDict = options.getPresetDict();
- if (presetDict != null && presetDict.length > 0) {
- lz.setPresetDict(dictSize, presetDict);
- dictResetNeeded = false;
- }
-
- props = (options.getPb() * 5 + options.getLp()) * 9 + options.getLc();
- }
-
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- public void write(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- while (len > 0) {
- int used = lz.fillWindow(buf, off, len);
- off += used;
- len -= used;
- pendingSize += used;
-
- if (lzma.encodeForLZMA2())
- writeChunk();
- }
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- private void writeChunk() throws IOException {
- int compressedSize = rc.finish();
- int uncompressedSize = lzma.getUncompressedSize();
-
- assert compressedSize > 0 : compressedSize;
- assert uncompressedSize > 0 : uncompressedSize;
-
- // +2 because the header of a compressed chunk is 2 bytes
- // bigger than the header of an uncompressed chunk.
- if (compressedSize + 2 < uncompressedSize) {
- writeLZMA(uncompressedSize, compressedSize);
- } else {
- lzma.reset();
- uncompressedSize = lzma.getUncompressedSize();
- assert uncompressedSize > 0 : uncompressedSize;
- writeUncompressed(uncompressedSize);
- }
-
- pendingSize -= uncompressedSize;
- lzma.resetUncompressedSize();
- rc.reset();
- }
-
- private void writeLZMA(int uncompressedSize, int compressedSize)
- throws IOException {
- int control;
-
- if (propsNeeded) {
- if (dictResetNeeded)
- control = 0x80 + (3 << 5);
- else
- control = 0x80 + (2 << 5);
- } else {
- if (stateResetNeeded)
- control = 0x80 + (1 << 5);
- else
- control = 0x80;
- }
-
- control |= (uncompressedSize - 1) >>> 16;
- outData.writeByte(control);
-
- outData.writeShort(uncompressedSize - 1);
- outData.writeShort(compressedSize - 1);
-
- if (propsNeeded)
- outData.writeByte(props);
-
- rc.write(out);
-
- propsNeeded = false;
- stateResetNeeded = false;
- dictResetNeeded = false;
- }
-
- private void writeUncompressed(int uncompressedSize) throws IOException {
- while (uncompressedSize > 0) {
- int chunkSize = Math.min(uncompressedSize, COMPRESSED_SIZE_MAX);
- outData.writeByte(dictResetNeeded ? 0x01 : 0x02);
- outData.writeShort(chunkSize - 1);
- lz.copyUncompressed(out, uncompressedSize, chunkSize);
- uncompressedSize -= chunkSize;
- dictResetNeeded = false;
- }
-
- stateResetNeeded = true;
- }
-
- private void writeEndMarker() throws IOException {
- assert !finished;
-
- if (exception != null)
- throw exception;
-
- lz.setFinishing();
-
- try {
- while (pendingSize > 0) {
- lzma.encodeForLZMA2();
- writeChunk();
- }
-
- out.write(0x00);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- finished = true;
- }
-
- public void flush() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- lz.setFlushing();
-
- while (pendingSize > 0) {
- lzma.encodeForLZMA2();
- writeChunk();
- }
-
- out.flush();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public void finish() throws IOException {
- if (!finished) {
- writeEndMarker();
-
- try {
- out.finish();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- finished = true;
- }
- }
-
- public void close() throws IOException {
- if (out != null) {
- if (!finished) {
- try {
- writeEndMarker();
- } catch (IOException e) {}
- }
-
- try {
- out.close();
- } catch (IOException e) {
- if (exception == null)
- exception = e;
- }
-
- out = null;
- }
-
- if (exception != null)
- throw exception;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/LZMAInputStream.java b/Java/Tukaani/src/org/tukaani/xz/LZMAInputStream.java
deleted file mode 100644
index 9bbd261..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/LZMAInputStream.java
+++ /dev/null
@@ -1,569 +0,0 @@
-/*
- * LZMAInputStream
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import org.tukaani.xz.lz.LZDecoder;
-import org.tukaani.xz.rangecoder.RangeDecoderFromStream;
-import org.tukaani.xz.lzma.LZMADecoder;
-
-/**
- * Decompresses legacy .lzma files and raw LZMA streams (no .lzma header).
- * <p>
- * <b>IMPORTANT:</b> In contrast to other classes in this package, this class
- * reads data from its input stream one byte at a time. If the input stream
- * is for example {@link java.io.FileInputStream}, wrapping it into
- * {@link java.io.BufferedInputStream} tends to improve performance a lot.
- * This is not automatically done by this class because there may be use
- * cases where it is desired that this class doesn't read any bytes past
- * the end of the LZMA stream.
- * <p>
- * Even when using <code>BufferedInputStream</code>, the performance tends
- * to be worse (maybe 10-20&nbsp;% slower) than with {@link LZMA2InputStream}
- * or {@link XZInputStream} (when the .xz file contains LZMA2-compressed data).
- *
- * @since 1.4
- */
-public class LZMAInputStream extends InputStream {
- /**
- * Largest dictionary size supported by this implementation.
- * <p>
- * LZMA allows dictionaries up to one byte less than 4 GiB. This
- * implementation supports only 16 bytes less than 2 GiB. This
- * limitation is due to Java using signed 32-bit integers for array
- * indexing. The limitation shouldn't matter much in practice since so
- * huge dictionaries are not normally used.
- */
- public static final int DICT_SIZE_MAX = Integer.MAX_VALUE & ~15;
-
- private InputStream in;
- private LZDecoder lz;
- private RangeDecoderFromStream rc;
- private LZMADecoder lzma;
-
- private boolean endReached = false;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Number of uncompressed bytes left to be decompressed, or -1 if
- * the end marker is used.
- */
- private long remainingSize;
-
- private IOException exception = null;
-
- /**
- * Gets approximate decompressor memory requirements as kibibytes for
- * the given dictionary size and LZMA properties byte (lc, lp, and pb).
- *
- * @param dictSize LZMA dictionary size as bytes, should be
- * in the range [<code>0</code>,
- * <code>DICT_SIZE_MAX</code>]
- *
- * @param propsByte LZMA properties byte that encodes the values
- * of lc, lp, and pb
- *
- * @return approximate memory requirements as kibibytes (KiB)
- *
- * @throws UnsupportedOptionsException
- * if <code>dictSize</code> is outside
- * the range [<code>0</code>,
- * <code>DICT_SIZE_MAX</code>]
- *
- * @throws CorruptedInputException
- * if <code>propsByte</code> is invalid
- */
- public static int getMemoryUsage(int dictSize, byte propsByte)
- throws UnsupportedOptionsException, CorruptedInputException {
- if (dictSize < 0 || dictSize > DICT_SIZE_MAX)
- throw new UnsupportedOptionsException(
- "LZMA dictionary is too big for this implementation");
-
- int props = propsByte & 0xFF;
- if (props > (4 * 5 + 4) * 9 + 8)
- throw new CorruptedInputException("Invalid LZMA properties byte");
-
- props %= 9 * 5;
- int lp = props / 9;
- int lc = props - lp * 9;
-
- return getMemoryUsage(dictSize, lc, lp);
- }
-
- /**
- * Gets approximate decompressor memory requirements as kibibytes for
- * the given dictionary size, lc, and lp. Note that pb isn't needed.
- *
- * @param dictSize LZMA dictionary size as bytes, must be
- * in the range [<code>0</code>,
- * <code>DICT_SIZE_MAX</code>]
- *
- * @param lc number of literal context bits, must be
- * in the range [0, 8]
- *
- * @param lp number of literal position bits, must be
- * in the range [0, 4]
- *
- * @return approximate memory requirements as kibibytes (KiB)
- */
- public static int getMemoryUsage(int dictSize, int lc, int lp) {
- if (lc < 0 || lc > 8 || lp < 0 || lp > 4)
- throw new IllegalArgumentException("Invalid lc or lp");
-
- // Probability variables have the type "short". There are
- // 0x300 (768) probability variables in each literal subcoder.
- // The number of literal subcoders is 2^(lc + lp).
- //
- // Roughly 10 KiB for the base state + LZ decoder's dictionary buffer
- // + sizeof(short) * number of probability variables per literal subcoder
- // * number of literal subcoders
- return 10 + getDictSize(dictSize) / 1024
- + ((2 * 0x300) << (lc + lp)) / 1024;
- }
-
- private static int getDictSize(int dictSize) {
- if (dictSize < 0 || dictSize > DICT_SIZE_MAX)
- throw new IllegalArgumentException(
- "LZMA dictionary is too big for this implementation");
-
- // For performance reasons, use a 4 KiB dictionary if something
- // smaller was requested. It's a rare situation and the performance
- // difference isn't huge, and it starts to matter mostly when the
- // dictionary is just a few bytes. But we need to handle the special
- // case of dictSize == 0 anyway, which is an allowed value but in
- // practice means one-byte dictionary.
- //
- // Note that using a dictionary bigger than specified in the headers
- // can hide errors if there is a reference to data beyond the original
- // dictionary size but still within 4 KiB.
- if (dictSize < 4096)
- dictSize = 4096;
-
- // Round dictionary size upward to a multiple of 16. This way LZMA
- // can use LZDecoder.getPos() for calculating LZMA's posMask.
- return (dictSize + 15) & ~15;
- }
-
- /**
- * Creates a new .lzma file format decompressor without
- * a memory usage limit.
- *
- * @param in input stream from which .lzma data is read;
- * it might be a good idea to wrap it in
- * <code>BufferedInputStream</code>, see the
- * note at the top of this page
- *
- * @throws CorruptedInputException
- * file is corrupt or perhaps not in
- * the .lzma format at all
- *
- * @throws UnsupportedOptionsException
- * dictionary size or uncompressed size is too
- * big for this implementation
- *
- * @throws EOFException
- * file is truncated or perhaps not in
- * the .lzma format at all
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public LZMAInputStream(InputStream in) throws IOException {
- this(in, -1);
- }
-
- /**
- * Creates a new .lzma file format decompressor with an optional
- * memory usage limit.
- *
- * @param in input stream from which .lzma data is read;
- * it might be a good idea to wrap it in
- * <code>BufferedInputStream</code>, see the
- * note at the top of this page
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @throws CorruptedInputException
- * file is corrupt or perhaps not in
- * the .lzma format at all
- *
- * @throws UnsupportedOptionsException
- * dictionary size or uncompressed size is too
- * big for this implementation
- *
- * @throws MemoryLimitException
- * memory usage limit was exceeded
- *
- * @throws EOFException
- * file is truncated or perhaps not in
- * the .lzma format at all
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public LZMAInputStream(InputStream in, int memoryLimit)
- throws IOException {
- DataInputStream inData = new DataInputStream(in);
-
- // Properties byte (lc, lp, and pb)
- byte propsByte = inData.readByte();
-
- // Dictionary size is an unsigned 32-bit little endian integer.
- int dictSize = 0;
- for (int i = 0; i < 4; ++i)
- dictSize |= inData.readUnsignedByte() << (8 * i);
-
- // Uncompressed size is an unsigned 64-bit little endian integer.
- // The maximum 64-bit value is a special case (becomes -1 here)
- // which indicates that the end marker is used instead of knowing
- // the uncompressed size beforehand.
- long uncompSize = 0;
- for (int i = 0; i < 8; ++i)
- uncompSize |= (long)inData.readUnsignedByte() << (8 * i);
-
- // Check the memory usage limit.
- int memoryNeeded = getMemoryUsage(dictSize, propsByte);
- if (memoryLimit != -1 && memoryNeeded > memoryLimit)
- throw new MemoryLimitException(memoryNeeded, memoryLimit);
-
- initialize(in, uncompSize, propsByte, dictSize, null);
- }
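The header parsed above is one properties byte, a 32-bit little endian dictionary size, and a 64-bit little endian uncompressed size; for example, a header starting 5D 00 00 10 00 means lc=3, lp=0, pb=2 and a 1 MiB dictionary. A minimal usage sketch (the file name argument and the 64 MiB limit are arbitrary examples):

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import org.tukaani.xz.LZMAInputStream;

    public class LzmaCat {
        public static void main(String[] args) throws Exception {
            // BufferedInputStream matters: LZMAInputStream reads one byte at a time.
            // The memory limit argument is in KiB, so 64 * 1024 means 64 MiB.
            LZMAInputStream in = new LZMAInputStream(
                    new BufferedInputStream(new FileInputStream(args[0])), 64 * 1024);
            byte[] buf = new byte[8192];
            for (int n; (n = in.read(buf)) != -1; )
                System.out.write(buf, 0, n);
            in.close();
            System.out.flush();
        }
    }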
-
- /**
- * Creates a new input stream that decompresses raw LZMA data (no .lzma
- * header) from <code>in</code>.
- * <p>
- * The caller needs to know if the "end of payload marker (EOPM)" alias
- * "end of stream marker (EOS marker)" alias "end marker" present.
- * If the end marker isn't used, the caller must know the exact
- * uncompressed size of the stream.
- * <p>
- * The caller also needs to provide the LZMA properties byte that encodes
- * the number of literal context bits (lc), literal position bits (lp),
- * and position bits (pb).
- * <p>
- * The dictionary size used when compressing is also needed. Specifying
- * a dictionary size that is too small will prevent decompressing the stream.
- * Specifying a dictionary that is too big is a waste of memory, but
- * decompression will still work.
- * <p>
- * There is no need to specify a dictionary bigger than
- * the uncompressed size of the data even if a bigger dictionary
- * was used when compressing. If you know the uncompressed size
- * of the data, this might allow saving some memory.
- *
- * @param in input stream from which compressed
- * data is read
- *
- * @param uncompSize uncompressed size of the LZMA stream or -1
- * if the end marker is used in the LZMA stream
- *
- * @param propsByte LZMA properties byte that has the encoded
- * values for literal context bits (lc), literal
- * position bits (lp), and position bits (pb)
- *
- * @param dictSize dictionary size as bytes, must be in the range
- * [<code>0</code>, <code>DICT_SIZE_MAX</code>]
- *
- * @throws CorruptedInputException
- * if <code>propsByte</code> is invalid or
- * the first input byte is not 0x00
- *
- * @throws UnsupportedOptionsException
- * dictionary size or uncompressed size is too
- * big for this implementation
- *
- *
- */
- public LZMAInputStream(InputStream in, long uncompSize, byte propsByte,
- int dictSize) throws IOException {
- initialize(in, uncompSize, propsByte, dictSize, null);
- }
-
- /**
- * Creates a new input stream that decompresses raw LZMA data (no .lzma
- * header) from <code>in</code> optionally with a preset dictionary.
- *
- * @param in input stream from which LZMA-compressed
- * data is read
- *
- * @param uncompSize uncompressed size of the LZMA stream or -1
- * if the end marker is used in the LZMA stream
- *
- * @param propsByte LZMA properties byte that has the encoded
- * values for literal context bits (lc), literal
- * position bits (lp), and position bits (pb)
- *
- * @param dictSize dictionary size as bytes, must be in the range
- * [<code>0</code>, <code>DICT_SIZE_MAX</code>]
- *
- * @param presetDict preset dictionary or <code>null</code>
- * to use no preset dictionary
- *
- * @throws CorruptedInputException
- * if <code>propsByte</code> is invalid or
- * the first input byte is not 0x00
- *
- * @throws UnsupportedOptionsException
- * dictionary size or uncompressed size is too
- * big for this implementation
- *
- * @throws EOFException file is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public LZMAInputStream(InputStream in, long uncompSize, byte propsByte,
- int dictSize, byte[] presetDict)
- throws IOException {
- initialize(in, uncompSize, propsByte, dictSize, presetDict);
- }
-
- /**
- * Creates a new input stream that decompresses raw LZMA data (no .lzma
- * header) from <code>in</code> optionally with a preset dictionary.
- *
- * @param in input stream from which LZMA-compressed
- * data is read
- *
- * @param uncompSize uncompressed size of the LZMA stream or -1
- * if the end marker is used in the LZMA stream
- *
- * @param lc number of literal context bits, must be
- * in the range [0, 8]
- *
- * @param lp number of literal position bits, must be
- * in the range [0, 4]
- *
- * @param pb number of position bits, must be
- * in the range [0, 4]
- *
- * @param dictSize dictionary size as bytes, must be in the range
- * [<code>0</code>, <code>DICT_SIZE_MAX</code>]
- *
- * @param presetDict preset dictionary or <code>null</code>
- * to use no preset dictionary
- *
- * @throws CorruptedInputException
- * if the first input byte is not 0x00
- *
- * @throws EOFException file is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public LZMAInputStream(InputStream in, long uncompSize,
- int lc, int lp, int pb,
- int dictSize, byte[] presetDict)
- throws IOException {
- initialize(in, uncompSize, lc, lp, pb, dictSize, presetDict);
- }
-
- private void initialize(InputStream in, long uncompSize, byte propsByte,
- int dictSize, byte[] presetDict)
- throws IOException {
- // Validate the uncompressed size since the other "initialize" throws
- // IllegalArgumentException if uncompSize < -1.
- if (uncompSize < -1)
- throw new UnsupportedOptionsException(
- "Uncompressed size is too big");
-
- // Decode the properties byte. In contrast to LZMA2, there is no
- // limit of lc + lp <= 4.
- int props = propsByte & 0xFF;
- if (props > (4 * 5 + 4) * 9 + 8)
- throw new CorruptedInputException("Invalid LZMA properties byte");
-
- int pb = props / (9 * 5);
- props -= pb * 9 * 5;
- int lp = props / 9;
- int lc = props - lp * 9;
-
- // Validate the dictionary size since the other "initialize" throws
- // IllegalArgumentException if dictSize is not supported.
- if (dictSize < 0 || dictSize > DICT_SIZE_MAX)
- throw new UnsupportedOptionsException(
- "LZMA dictionary is too big for this implementation");
-
- initialize(in, uncompSize, lc, lp, pb, dictSize, presetDict);
- }
-
- private void initialize(InputStream in, long uncompSize,
- int lc, int lp, int pb,
- int dictSize, byte[] presetDict)
- throws IOException {
- // getDictSize validates dictSize and gives a message in
- // the exception too, so skip validating dictSize here.
- if (uncompSize < -1 || lc < 0 || lc > 8 || lp < 0 || lp > 4
- || pb < 0 || pb > 4)
- throw new IllegalArgumentException();
-
- this.in = in;
-
- // If uncompressed size is known, use it to avoid wasting memory for
- // a uselessly large dictionary buffer.
- dictSize = getDictSize(dictSize);
- if (uncompSize >= 0 && dictSize > uncompSize)
- dictSize = getDictSize((int)uncompSize);
-
- lz = new LZDecoder(getDictSize(dictSize), presetDict);
- rc = new RangeDecoderFromStream(in);
- lzma = new LZMADecoder(lz, rc, lc, lp, pb);
- remainingSize = uncompSize;
- }
-
- /**
- * Decompresses the next byte from this input stream.
- * <p>
- * Reading lots of data with <code>read()</code> from this input stream
- * may be inefficient. Wrap it in <code>java.io.BufferedInputStream</code>
- * if you need to read lots of data one byte at a time.
- *
- * @return the next decompressed byte, or <code>-1</code>
- * to indicate the end of the compressed stream
- *
- * @throws CorruptedInputException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decompresses into an array of bytes.
- * <p>
- * If <code>len</code> is zero, no bytes are read and <code>0</code>
- * is returned. Otherwise this will block until <code>len</code>
- * bytes have been decompressed, the end of the LZMA stream is reached,
- * or an exception is thrown.
- *
- * @param buf target buffer for uncompressed data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of uncompressed bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the compressed stream
- *
- * @throws CorruptedInputException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- if (endReached)
- return -1;
-
- try {
- int size = 0;
-
- while (len > 0) {
- // If uncompressed size is known and thus no end marker will
- // be present, set the limit so that the uncompressed size
- // won't be exceeded.
- int copySizeMax = len;
- if (remainingSize >= 0 && remainingSize < len)
- copySizeMax = (int)remainingSize;
-
- lz.setLimit(copySizeMax);
-
- // Decode into the dictionary buffer.
- try {
- lzma.decode();
- } catch (CorruptedInputException e) {
- // The end marker is encoded with a LZMA symbol that
- // indicates maximum match distance. This is larger
- // than any supported dictionary and thus causes
- // CorruptedInputException from LZDecoder.repeat.
- if (remainingSize != -1 || !lzma.endMarkerDetected())
- throw e;
-
- endReached = true;
-
- // The exception makes lzma.decode() miss the last range
- // decoder normalization, so do it here. This might
- // cause an IOException if it needs to read a byte
- // from the input stream.
- rc.normalize();
- }
-
- // Copy from the dictionary to buf.
- int copiedSize = lz.flush(buf, off);
- off += copiedSize;
- len -= copiedSize;
- size += copiedSize;
-
- if (remainingSize >= 0) {
- // Update the number of bytes left to be decompressed.
- remainingSize -= copiedSize;
- assert remainingSize >= 0;
-
- if (remainingSize == 0)
- endReached = true;
- }
-
- if (endReached) {
- // Checking these helps a lot when catching corrupt
- // or truncated .lzma files. LZMA Utils doesn't do
- // the first check and thus it accepts many invalid
- // files that this implementation and XZ Utils don't.
- if (!rc.isFinished() || lz.hasPending())
- throw new CorruptedInputException();
-
- return size == 0 ? -1 : size;
- }
- }
-
- return size;
-
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/MemoryLimitException.java b/Java/Tukaani/src/org/tukaani/xz/MemoryLimitException.java
deleted file mode 100644
index 9d766bd..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/MemoryLimitException.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * MemoryLimitException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * Thrown when the memory usage limit given to the XZ decompressor
- * would be exceeded.
- * <p>
- * The amount of memory required and the memory usage limit are
- * included in the error detail message in human readable format.
- */
-public class MemoryLimitException extends XZIOException {
- private static final long serialVersionUID = 3L;
-
- private final int memoryNeeded;
- private final int memoryLimit;
-
- /**
- * Creates a new MemoryLimitException.
- * <p>
- * The amount of memory needed and the memory usage limit are
- * included in the error detail message.
- *
- * @param memoryNeeded amount of memory needed as kibibytes (KiB)
- * @param memoryLimit specified memory usage limit as kibibytes (KiB)
- */
- public MemoryLimitException(int memoryNeeded, int memoryLimit) {
- super("" + memoryNeeded + " KiB of memory would be needed; limit was "
- + memoryLimit + " KiB");
-
- this.memoryNeeded = memoryNeeded;
- this.memoryLimit = memoryLimit;
- }
-
- /**
- * Gets how much memory is required to decompress the data.
- *
- * @return amount of memory needed as kibibytes (KiB)
- */
- public int getMemoryNeeded() {
- return memoryNeeded;
- }
-
- /**
- * Gets what the memory usage limit was at the time the exception
- * was created.
- *
- * @return memory usage limit as kibibytes (KiB)
- */
- public int getMemoryLimit() {
- return memoryLimit;
- }
-}
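A small fragment showing how a caller might react to the limit (fileIn is an assumed InputStream over a .lzma file, the 32 MiB limit is an example value, and other checked exceptions are not handled in this sketch):

    try {
        LZMAInputStream in = new LZMAInputStream(fileIn, 32 * 1024);  // limit in KiB
        // ... read from in ...
    } catch (MemoryLimitException e) {
        System.err.println(e.getMemoryNeeded() + " KiB needed, but only "
                           + e.getMemoryLimit() + " KiB allowed");
    }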
diff --git a/Java/Tukaani/src/org/tukaani/xz/PowerPCOptions.java b/Java/Tukaani/src/org/tukaani/xz/PowerPCOptions.java
deleted file mode 100644
index f36d361..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/PowerPCOptions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * PowerPCOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.PowerPC;
-
-/**
- * BCJ filter for big endian PowerPC instructions.
- */
-public class PowerPCOptions extends BCJOptions {
- private static final int ALIGNMENT = 4;
-
- public PowerPCOptions() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new PowerPC(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new PowerPC(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.POWERPC_FILTER_ID);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/RawCoder.java b/Java/Tukaani/src/org/tukaani/xz/RawCoder.java
deleted file mode 100644
index 12c7da8..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/RawCoder.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * RawCoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-class RawCoder {
- static void validate(FilterCoder[] filters)
- throws UnsupportedOptionsException {
- for (int i = 0; i < filters.length - 1; ++i)
- if (!filters[i].nonLastOK())
- throw new UnsupportedOptionsException(
- "Unsupported XZ filter chain");
-
- if (!filters[filters.length - 1].lastOK())
- throw new UnsupportedOptionsException(
- "Unsupported XZ filter chain");
-
- int changesSizeCount = 0;
- for (int i = 0; i < filters.length; ++i)
- if (filters[i].changesSize())
- ++changesSizeCount;
-
- if (changesSizeCount > 3)
- throw new UnsupportedOptionsException(
- "Unsupported XZ filter chain");
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/SPARCOptions.java b/Java/Tukaani/src/org/tukaani/xz/SPARCOptions.java
deleted file mode 100644
index 0f20677..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SPARCOptions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * SPARCOptions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.SPARC;
-
-/**
- * BCJ filter for SPARC.
- */
-public class SPARCOptions extends BCJOptions {
- private static final int ALIGNMENT = 4;
-
- public SPARCOptions() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new SPARC(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new SPARC(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.SPARC_FILTER_ID);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/SeekableFileInputStream.java b/Java/Tukaani/src/org/tukaani/xz/SeekableFileInputStream.java
deleted file mode 100644
index fe2d685..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SeekableFileInputStream.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * SeekableFileInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.File;
-import java.io.RandomAccessFile;
-import java.io.IOException;
-import java.io.FileNotFoundException;
-
-/**
- * Wraps a {@link java.io.RandomAccessFile RandomAccessFile}
- * in a SeekableInputStream.
- */
-public class SeekableFileInputStream extends SeekableInputStream {
- /**
- * The RandomAccessFile that has been wrapped
- * into a SeekableFileInputStream.
- */
- protected RandomAccessFile randomAccessFile;
-
- /**
- * Creates a new seekable input stream that reads from the specified file.
- */
- public SeekableFileInputStream(File file) throws FileNotFoundException {
- randomAccessFile = new RandomAccessFile(file, "r");
- }
-
- /**
- * Creates a new seekable input stream that reads from a file with
- * the specified name.
- */
- public SeekableFileInputStream(String name) throws FileNotFoundException {
- randomAccessFile = new RandomAccessFile(name, "r");
- }
-
- /**
- * Creates a new seekable input stream from an existing
- * <code>RandomAccessFile</code> object.
- */
- public SeekableFileInputStream(RandomAccessFile randomAccessFile) {
- this.randomAccessFile = randomAccessFile;
- }
-
- /**
- * Calls {@link RandomAccessFile#read() randomAccessFile.read()}.
- */
- public int read() throws IOException {
- return randomAccessFile.read();
- }
-
- /**
- * Calls {@link RandomAccessFile#read(byte[]) randomAccessFile.read(buf)}.
- */
- public int read(byte[] buf) throws IOException {
- return randomAccessFile.read(buf);
- }
-
- /**
- * Calls
- * {@link RandomAccessFile#read(byte[],int,int)
- * randomAccessFile.read(buf, off, len)}.
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- return randomAccessFile.read(buf, off, len);
- }
-
- /**
- * Calls {@link RandomAccessFile#close() randomAccessFile.close()}.
- */
- public void close() throws IOException {
- randomAccessFile.close();
- }
-
- /**
- * Calls {@link RandomAccessFile#length() randomAccessFile.length()}.
- */
- public long length() throws IOException {
- return randomAccessFile.length();
- }
-
- /**
- * Calls {@link RandomAccessFile#getFilePointer()
- * randomAccessFile.getFilePointer()}.
- */
- public long position() throws IOException {
- return randomAccessFile.getFilePointer();
- }
-
- /**
- * Calls {@link RandomAccessFile#seek(long) randomAccessFile.seek(long)}.
- */
- public void seek(long pos) throws IOException {
- randomAccessFile.seek(pos);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/SeekableInputStream.java b/Java/Tukaani/src/org/tukaani/xz/SeekableInputStream.java
deleted file mode 100644
index a2f908a..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SeekableInputStream.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * SeekableInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.IOException;
-
-/**
- * Input stream with random access support.
- */
-public abstract class SeekableInputStream extends InputStream {
- /**
- * Seeks <code>n</code> bytes forward in this stream.
- * <p>
- * This will not seek past the end of the file. If the current position
- * is already at or past the end of the file, this doesn't seek at all
- * and returns <code>0</code>. Otherwise, if skipping <code>n</code> bytes
- * would cause the position to exceed the stream size, this will do
- * the equivalent of <code>seek(length())</code> and the return value will
- * be adjusted accordingly.
- * <p>
- * If <code>n</code> is negative, the position isn't changed and
- * the return value is <code>0</code>. It doesn't seek backward
- * because it would conflict with the specification of
- * {@link java.io.InputStream#skip(long) InputStream.skip}.
- *
- * @return <code>0</code> if <code>n</code> is negative,
- * less than <code>n</code> if skipping <code>n</code>
- * bytes would seek past the end of the file,
- * <code>n</code> otherwise
- *
- * @throws IOException might be thrown by {@link #seek(long)}
- */
- public long skip(long n) throws IOException {
- if (n <= 0)
- return 0;
-
- long size = length();
- long pos = position();
- if (pos >= size)
- return 0;
-
- if (size - pos < n)
- n = size - pos;
-
- seek(pos + n);
- return n;
- }
-
- /**
- * Gets the size of the stream.
- */
- public abstract long length() throws IOException;
-
- /**
- * Gets the current position in the stream.
- */
- public abstract long position() throws IOException;
-
- /**
- * Seeks to the specified absolute position in the stream.
- * <p>
- * Seeking past the end of the file should be supported by the subclasses
- * unless there is a good reason to do otherwise. If one has seeked
- * past the end of the stream, <code>read</code> will return
- * <code>-1</code> to indicate end of stream.
- *
- * @param pos new read position in the stream
- *
- * @throws IOException if <code>pos</code> is negative or if
- * a stream-specific I/O error occurs
- */
- public abstract void seek(long pos) throws IOException;
-}
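
The skip() contract spelled out in the Javadoc above (never seek backward, never past the end of the file) is easiest to see with concrete numbers. A small sketch using SeekableFileInputStream from the previous hunk; the file name is a placeholder and the file is assumed to be at least a few kilobytes long.

    import org.tukaani.xz.SeekableFileInputStream;
    import org.tukaani.xz.SeekableInputStream;

    public class SkipSemanticsSketch {
        public static void main(String[] args) throws Exception {
            // "data.bin" is a placeholder for any existing regular file.
            SeekableInputStream in = new SeekableFileInputStream("data.bin");
            long size = in.length();

            in.seek(size - 10);
            System.out.println(in.skip(25));  // 10: clamped, equivalent to seek(length())
            System.out.println(in.skip(25));  // 0: already at the end of the file
            System.out.println(in.skip(-5));  // 0: negative n never seeks backward

            in.close();
        }
    }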
diff --git a/Java/Tukaani/src/org/tukaani/xz/SeekableXZInputStream.java b/Java/Tukaani/src/org/tukaani/xz/SeekableXZInputStream.java
deleted file mode 100644
index f929d40..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SeekableXZInputStream.java
+++ /dev/null
@@ -1,964 +0,0 @@
-/*
- * SeekableXZInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.util.Arrays;
-import java.util.ArrayList;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.io.EOFException;
-import org.tukaani.xz.common.DecoderUtil;
-import org.tukaani.xz.common.StreamFlags;
-import org.tukaani.xz.check.Check;
-import org.tukaani.xz.index.IndexDecoder;
-import org.tukaani.xz.index.BlockInfo;
-
-/**
- * Decompresses a .xz file in random access mode.
- * This supports decompressing concatenated .xz files.
- * <p>
- * Each .xz file consists of one or more Streams. Each Stream consists of zero
- * or more Blocks. Each Stream contains an Index of the Stream's Blocks.
- * The Indexes from all Streams are loaded in RAM by a constructor of this
- * class. A typical .xz file has only one Stream, and parsing its Index will
- * need only three or four seeks.
- * <p>
- * To make random access possible, the data in a .xz file must be split
- * into multiple Blocks of reasonable size. Decompression can only start at
- * a Block boundary. When seeking to an uncompressed position that is not at
- * a Block boundary, decompression starts at the beginning of the Block and
- * throws away data until the target position is reached. Thus, smaller Blocks
- * mean faster seeks to arbitrary uncompressed positions. On the other hand,
- * smaller Blocks mean worse compression. So one has to make a compromise
- * between random access speed and compression ratio.
- * <p>
- * Implementation note: This class uses linear search to locate the correct
- * Stream from the data structures in RAM. It was the simplest to implement
- * and should be fine as long as there aren't too many Streams. The correct
- * Block inside a Stream is located using binary search and thus is fast
- * even with a huge number of Blocks.
- *
- * <h4>Memory usage</h4>
- * <p>
- * The amount of memory needed for the Indexes is taken into account when
- * checking the memory usage limit. Each Stream is calculated to need at
- * least 1&nbsp;KiB of memory and each Block 16 bytes of memory, rounded up
- * to the next kibibyte. So unless the file has a huge number of Streams or
- * Blocks, these don't take a significant amount of memory.
- *
- * <h4>Creating random-accessible .xz files</h4>
- * <p>
- * When using {@link XZOutputStream}, a new Block can be started by calling
- * its {@link XZOutputStream#endBlock() endBlock} method. If you know
- * that the decompressor will only need to seek to certain uncompressed
- * positions, it can be a good idea to start a new Block at (some of) these
- * positions (and only at these positions to get better compression ratio).
- * <p>
- * liblzma in XZ Utils supports starting a new Block with
- * <code>LZMA_FULL_FLUSH</code>. XZ Utils 5.1.1alpha added threaded
- * compression which creates multi-Block .xz files. XZ Utils 5.1.1alpha
- * also added the option <code>--block-size=SIZE</code> to the xz command
- * line tool. XZ Utils 5.1.2alpha added a partial implementation of
- * <code>--block-list=SIZES</code> which allows specifying sizes of
- * individual Blocks.
- *
- * @see SeekableFileInputStream
- * @see XZInputStream
- * @see XZOutputStream
- */
-public class SeekableXZInputStream extends SeekableInputStream {
- /**
- * The input stream containing XZ compressed data.
- */
- private SeekableInputStream in;
-
- /**
- * Memory usage limit after the memory usage of the IndexDecoders has
- * been subtracted.
- */
- private final int memoryLimit;
-
- /**
- * Memory usage of the IndexDecoders.
- * <code>memoryLimit + indexMemoryUsage</code> equals the original
- * memory usage limit that was passed to the constructor.
- */
- private int indexMemoryUsage = 0;
-
- /**
- * List of IndexDecoders, one for each Stream in the file.
- * The list is in reverse order: The first element is
- * the last Stream in the file.
- */
- private final ArrayList streams = new ArrayList();
-
- /**
- * Bitmask of all Check IDs seen.
- */
- private int checkTypes = 0;
-
- /**
- * Uncompressed size of the file (all Streams).
- */
- private long uncompressedSize = 0;
-
- /**
- * Uncompressed size of the largest XZ Block in the file.
- */
- private long largestBlockSize = 0;
-
- /**
- * Number of XZ Blocks in the file.
- */
- private int blockCount = 0;
-
- /**
- * Size and position information about the current Block.
- * If there are no Blocks, all values will be <code>-1</code>.
- */
- private final BlockInfo curBlockInfo;
-
- /**
- * Temporary (and cached) information about the Block whose information
- * is queried via <code>getBlockPos</code> and related functions.
- */
- private final BlockInfo queriedBlockInfo;
-
- /**
- * Integrity Check in the current XZ Stream. The constructor leaves
- * this to point to the Check of the first Stream.
- */
- private Check check;
-
- /**
- * Flag indicating if the integrity checks will be verified.
- */
- private final boolean verifyCheck;
-
- /**
- * Decoder of the current XZ Block, if any.
- */
- private BlockInputStream blockDecoder = null;
-
- /**
- * Current uncompressed position.
- */
- private long curPos = 0;
-
- /**
- * Target position for seeking.
- */
- private long seekPos;
-
- /**
- * True when <code>seek(long)</code> has been called but the actual
- * seeking hasn't been done yet.
- */
- private boolean seekNeeded = false;
-
- /**
- * True when end of the file was reached. This can be cleared by
- * calling <code>seek(long)</code>.
- */
- private boolean endReached = false;
-
- /**
- * Pending exception from an earlier error.
- */
- private IOException exception = null;
-
- /**
- * Temporary buffer for read(). This avoids reallocating memory
- * on every read() call.
- */
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Creates a new seekable XZ decompressor without a memory usage limit.
- *
- * @param in seekable input stream containing one or more
- * XZ Streams; the whole input stream is used
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ data is corrupt or truncated
- *
- * @throws UnsupportedOptionsException
- * XZ headers seem valid but they specify
- * options not supported by this implementation
- *
- * @throws EOFException
- * less than 6 bytes of input was available
- * from <code>in</code>, or (unlikely) the size
- * of the underlying stream got smaller while
- * this was reading from it
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public SeekableXZInputStream(SeekableInputStream in)
- throws IOException {
- this(in, -1);
- }
-
- /**
- * Creates a new seekable XZ decompressor with an optional
- * memory usage limit.
- *
- * @param in seekable input stream containing one or more
- * XZ Streams; the whole input stream is used
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ data is corrupt or truncated
- *
- * @throws UnsupportedOptionsException
- * XZ headers seem valid but they specify
- * options not supported by this implementation
- *
- * @throws MemoryLimitException
- * decoded XZ Indexes would need more memory
- * than allowed by the memory usage limit
- *
- * @throws EOFException
- * less than 6 bytes of input was available
- * from <code>in</code>, or (unlikely) the size
- * of the underlying stream got smaller while
- * this was reading from it
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public SeekableXZInputStream(SeekableInputStream in, int memoryLimit)
- throws IOException {
- this(in, memoryLimit, true);
- }
-
- /**
- * Creates a new seekable XZ decompressor with an optional
- * memory usage limit and the ability to disable verification
- * of integrity checks.
- * <p>
- * Note that integrity check verification should almost never be disabled.
- * Possible reasons to disable integrity check verification:
- * <ul>
- * <li>Trying to recover data from a corrupt .xz file.</li>
- * <li>Speeding up decompression. This matters mostly with SHA-256
- * or with files that have compressed extremely well. It's recommended
- * that integrity checking isn't disabled for performance reasons
- * unless the file integrity is verified externally in some other
- * way.</li>
- * </ul>
- * <p>
- * <code>verifyCheck</code> only affects the integrity check of
- * the actual compressed data. The CRC32 fields in the headers
- * are always verified.
- *
- * @param in seekable input stream containing one or more
- * XZ Streams; the whole input stream is used
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @param verifyCheck if <code>true</code>, the integrity checks
- * will be verified; this should almost never
- * be set to <code>false</code>
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ data is corrupt or truncated
- *
- * @throws UnsupportedOptionsException
- * XZ headers seem valid but they specify
- * options not supported by this implementation
- *
- * @throws MemoryLimitException
- * decoded XZ Indexes would need more memory
- * than allowed by the memory usage limit
- *
- * @throws EOFException
- * less than 6 bytes of input was available
- * from <code>in</code>, or (unlikely) the size
- * of the underlying stream got smaller while
- * this was reading from it
- *
- * @throws IOException may be thrown by <code>in</code>
- *
- * @since 1.6
- */
- public SeekableXZInputStream(SeekableInputStream in, int memoryLimit,
- boolean verifyCheck)
- throws IOException {
- this.verifyCheck = verifyCheck;
- this.in = in;
- DataInputStream inData = new DataInputStream(in);
-
- // Check the magic bytes in the beginning of the file.
- {
- in.seek(0);
- byte[] buf = new byte[XZ.HEADER_MAGIC.length];
- inData.readFully(buf);
- if (!Arrays.equals(buf, XZ.HEADER_MAGIC))
- throw new XZFormatException();
- }
-
- // Get the file size and verify that it is a multiple of 4 bytes.
- long pos = in.length();
- if ((pos & 3) != 0)
- throw new CorruptedInputException(
- "XZ file size is not a multiple of 4 bytes");
-
- // Parse the headers starting from the end of the file.
- byte[] buf = new byte[DecoderUtil.STREAM_HEADER_SIZE];
- long streamPadding = 0;
-
- while (pos > 0) {
- if (pos < DecoderUtil.STREAM_HEADER_SIZE)
- throw new CorruptedInputException();
-
- // Read the potential Stream Footer.
- in.seek(pos - DecoderUtil.STREAM_HEADER_SIZE);
- inData.readFully(buf);
-
- // Skip Stream Padding four bytes at a time.
- // Skipping more at once would be faster,
- // but usually there isn't much Stream Padding.
- if (buf[8] == 0x00 && buf[9] == 0x00 && buf[10] == 0x00
- && buf[11] == 0x00) {
- streamPadding += 4;
- pos -= 4;
- continue;
- }
-
- // It's not Stream Padding. Update pos.
- pos -= DecoderUtil.STREAM_HEADER_SIZE;
-
- // Decode the Stream Footer and check if Backward Size
- // looks reasonable.
- StreamFlags streamFooter = DecoderUtil.decodeStreamFooter(buf);
- if (streamFooter.backwardSize >= pos)
- throw new CorruptedInputException(
- "Backward Size in XZ Stream Footer is too big");
-
- // Check that the Check ID is supported. Store it in case this
- // is the first Stream in the file.
- check = Check.getInstance(streamFooter.checkType);
-
- // Remember which Check IDs have been seen.
- checkTypes |= 1 << streamFooter.checkType;
-
- // Seek to the beginning of the Index.
- in.seek(pos - streamFooter.backwardSize);
-
- // Decode the Index field.
- IndexDecoder index;
- try {
- index = new IndexDecoder(in, streamFooter, streamPadding,
- memoryLimit);
- } catch (MemoryLimitException e) {
- // IndexDecoder doesn't know how much memory we had
- // already used, so we need to recreate the exception.
- assert memoryLimit >= 0;
- throw new MemoryLimitException(
- e.getMemoryNeeded() + indexMemoryUsage,
- memoryLimit + indexMemoryUsage);
- }
-
- // Update the memory usage and limit counters.
- indexMemoryUsage += index.getMemoryUsage();
- if (memoryLimit >= 0) {
- memoryLimit -= index.getMemoryUsage();
- assert memoryLimit >= 0;
- }
-
- // Remember the uncompressed size of the largest Block.
- if (largestBlockSize < index.getLargestBlockSize())
- largestBlockSize = index.getLargestBlockSize();
-
- // Calculate the offset to the beginning of this XZ Stream and
- // check that it looks sane.
- long off = index.getStreamSize() - DecoderUtil.STREAM_HEADER_SIZE;
- if (pos < off)
- throw new CorruptedInputException("XZ Index indicates "
- + "too big compressed size for the XZ Stream");
-
- // Seek to the beginning of this Stream.
- pos -= off;
- in.seek(pos);
-
- // Decode the Stream Header.
- inData.readFully(buf);
- StreamFlags streamHeader = DecoderUtil.decodeStreamHeader(buf);
-
- // Verify that the Stream Header matches the Stream Footer.
- if (!DecoderUtil.areStreamFlagsEqual(streamHeader, streamFooter))
- throw new CorruptedInputException(
- "XZ Stream Footer does not match Stream Header");
-
- // Update the total uncompressed size of the file and check that
- // it doesn't overflow.
- uncompressedSize += index.getUncompressedSize();
- if (uncompressedSize < 0)
- throw new UnsupportedOptionsException("XZ file is too big");
-
- // Update the Block count and check that it fits into an int.
- blockCount += index.getRecordCount();
- if (blockCount < 0)
- throw new UnsupportedOptionsException(
- "XZ file has over " + Integer.MAX_VALUE + " Blocks");
-
- // Add this Stream to the list of Streams.
- streams.add(index);
-
- // Reset to be ready to parse the next Stream.
- streamPadding = 0;
- }
-
- assert pos == 0;
-
- // Save it now that indexMemoryUsage has been subtracted from it.
- this.memoryLimit = memoryLimit;
-
- // Store the relative offsets of the Streams. This way we don't
- // need to recalculate them in this class when seeking; the
- // IndexDecoder instances will handle them.
- IndexDecoder prev = (IndexDecoder)streams.get(streams.size() - 1);
- for (int i = streams.size() - 2; i >= 0; --i) {
- IndexDecoder cur = (IndexDecoder)streams.get(i);
- cur.setOffsets(prev);
- prev = cur;
- }
-
- // Initialize curBlockInfo to point to the first Stream.
- // The blockNumber will be left to -1 so that .hasNext()
- // and .setNext() work to get the first Block when starting
- // to decompress from the beginning of the file.
- IndexDecoder first = (IndexDecoder)streams.get(streams.size() - 1);
- curBlockInfo = new BlockInfo(first);
-
- // queriedBlockInfo needs to be allocated too. The Stream used for
- // initialization doesn't matter though.
- queriedBlockInfo = new BlockInfo(first);
- }
-
- /**
- * Gets the types of integrity checks used in the .xz file.
- * Multiple checks are possible only if there are multiple
- * concatenated XZ Streams.
- * <p>
- * The returned value has a bit set for every check type that is present.
- * For example, if CRC64 and SHA-256 were used, the return value is
- * <code>(1&nbsp;&lt;&lt;&nbsp;XZ.CHECK_CRC64)
- * | (1&nbsp;&lt;&lt;&nbsp;XZ.CHECK_SHA256)</code>.
- */
- public int getCheckTypes() {
- return checkTypes;
- }
-
- /**
- * Gets the amount of memory in kibibytes (KiB) used by
- * the data structures needed to locate the XZ Blocks.
- * This is usually useless information but since it is calculated
- * for the memory usage limit anyway, it is nice to make it available too.
- */
- public int getIndexMemoryUsage() {
- return indexMemoryUsage;
- }
-
- /**
- * Gets the uncompressed size of the largest XZ Block in bytes.
- * This can be useful if you want to check that the file doesn't
- * have huge XZ Blocks which could make seeking to arbitrary offsets
- * very slow. Note that huge Blocks don't automatically mean that
- * seeking would be slow, for example, seeking to the beginning of
- * any Block is always fast.
- */
- public long getLargestBlockSize() {
- return largestBlockSize;
- }
-
- /**
- * Gets the number of Streams in the .xz file.
- *
- * @since 1.3
- */
- public int getStreamCount() {
- return streams.size();
- }
-
- /**
- * Gets the number of Blocks in the .xz file.
- *
- * @since 1.3
- */
- public int getBlockCount() {
- return blockCount;
- }
-
- /**
- * Gets the uncompressed start position of the given Block.
- *
- * @throws IndexOutOfBoundsException if
- * <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>.
- *
- * @since 1.3
- */
- public long getBlockPos(int blockNumber) {
- locateBlockByNumber(queriedBlockInfo, blockNumber);
- return queriedBlockInfo.uncompressedOffset;
- }
-
- /**
- * Gets the uncompressed size of the given Block.
- *
- * @throws IndexOutOfBoundsException if
- * <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>.
- *
- * @since 1.3
- */
- public long getBlockSize(int blockNumber) {
- locateBlockByNumber(queriedBlockInfo, blockNumber);
- return queriedBlockInfo.uncompressedSize;
- }
-
- /**
- * Gets the position where the given compressed Block starts in
- * the underlying .xz file.
- * This information is rarely useful to the users of this class.
- *
- * @throws IndexOutOfBoundsException if
- * <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>.
- *
- * @since 1.3
- */
- public long getBlockCompPos(int blockNumber) {
- locateBlockByNumber(queriedBlockInfo, blockNumber);
- return queriedBlockInfo.compressedOffset;
- }
-
- /**
- * Gets the compressed size of the given Block.
- * This together with the uncompressed size can be used to calculate
- * the compression ratio of the specific Block.
- *
- * @throws IndexOutOfBoundsException if
- * <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>.
- *
- * @since 1.3
- */
- public long getBlockCompSize(int blockNumber) {
- locateBlockByNumber(queriedBlockInfo, blockNumber);
- return (queriedBlockInfo.unpaddedSize + 3) & ~3;
- }
-
- /**
- * Gets integrity check type (Check ID) of the given Block.
- *
- * @throws IndexOutOfBoundsException if
- * <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>.
- *
- * @see #getCheckTypes()
- *
- * @since 1.3
- */
- public int getBlockCheckType(int blockNumber) {
- locateBlockByNumber(queriedBlockInfo, blockNumber);
- return queriedBlockInfo.getCheckType();
- }
-
- /**
- * Gets the number of the Block that contains the byte at the given
- * uncompressed position.
- *
- * @throws IndexOutOfBoundsException if
- * <code>pos&nbsp;&lt;&nbsp;0</code> or
- * <code>pos&nbsp;&gt;=&nbsp;length()</code>.
- *
- * @since 1.3
- */
- public int getBlockNumber(long pos) {
- locateBlockByPos(queriedBlockInfo, pos);
- return queriedBlockInfo.blockNumber;
- }
-
- /**
- * Decompresses the next byte from this input stream.
- *
- * @return the next decompressed byte, or <code>-1</code>
- * to indicate the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decompresses into an array of bytes.
- * <p>
- * If <code>len</code> is zero, no bytes are read and <code>0</code>
- * is returned. Otherwise this will try to decompress <code>len</code>
- * bytes of uncompressed data. Less than <code>len</code> bytes may
- * be read only in the following situations:
- * <ul>
- * <li>The end of the compressed data was reached successfully.</li>
- * <li>An error is detected after at least one but less than
- * <code>len</code> bytes have already been successfully
- * decompressed. The next call with non-zero <code>len</code>
- * will immediately throw the pending exception.</li>
- * <li>An exception is thrown.</li>
- * </ul>
- *
- * @param buf target buffer for uncompressed data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of uncompressed bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- int size = 0;
-
- try {
- if (seekNeeded)
- seek();
-
- if (endReached)
- return -1;
-
- while (len > 0) {
- if (blockDecoder == null) {
- seek();
- if (endReached)
- break;
- }
-
- int ret = blockDecoder.read(buf, off, len);
-
- if (ret > 0) {
- curPos += ret;
- size += ret;
- off += ret;
- len -= ret;
- } else if (ret == -1) {
- blockDecoder = null;
- }
- }
- } catch (IOException e) {
- // We know that the file isn't simply truncated because we could
- // parse the Indexes in the constructor. So convert EOFException
- // to CorruptedInputException.
- if (e instanceof EOFException)
- e = new CorruptedInputException();
-
- exception = e;
- if (size == 0)
- throw e;
- }
-
- return size;
- }
-
- /**
- * Returns the number of uncompressed bytes that can be read
- * without blocking. The value is returned with an assumption
- * that the compressed input data will be valid. If the compressed
- * data is corrupt, <code>CorruptedInputException</code> may get
- * thrown before the number of bytes claimed to be available have
- * been read from this input stream.
- *
- * @return the number of uncompressed bytes that can be read
- * without blocking
- */
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- if (endReached || seekNeeded || blockDecoder == null)
- return 0;
-
- return blockDecoder.available();
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-
- /**
- * Gets the uncompressed size of this input stream. If there are multiple
- * XZ Streams, the total uncompressed size of all XZ Streams is returned.
- */
- public long length() {
- return uncompressedSize;
- }
-
- /**
- * Gets the current uncompressed position in this input stream.
- *
- * @throws XZIOException if the stream has been closed
- */
- public long position() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- return seekNeeded ? seekPos : curPos;
- }
-
- /**
- * Seeks to the specified absolute uncompressed position in the stream.
- * This only stores the new position, so this function itself is always
- * very fast. The actual seek is done when <code>read</code> is called
- * to read at least one byte.
- * <p>
- * Seeking past the end of the stream is possible. In that case
- * <code>read</code> will return <code>-1</code> to indicate
- * the end of the stream.
- *
- * @param pos new uncompressed read position
- *
- * @throws XZIOException
- * if <code>pos</code> is negative, or
- * if stream has been closed
- */
- public void seek(long pos) throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (pos < 0)
- throw new XZIOException("Negative seek position: " + pos);
-
- seekPos = pos;
- seekNeeded = true;
- }
-
- /**
- * Seeks to the beginning of the given XZ Block.
- *
- * @throws XZIOException
- * if <code>blockNumber&nbsp;&lt;&nbsp;0</code> or
- * <code>blockNumber&nbsp;&gt;=&nbsp;getBlockCount()</code>,
- * or if stream has been closed
- *
- * @since 1.3
- */
- public void seekToBlock(int blockNumber) throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (blockNumber < 0 || blockNumber >= blockCount)
- throw new XZIOException("Invalid XZ Block number: " + blockNumber);
-
- // This implementation is a bit silly. Here we locate the uncompressed
- // offset of the specified Block, then when doing the actual seek in
- // seek(), we need to find the Block number based on seekPos.
- seekPos = getBlockPos(blockNumber);
- seekNeeded = true;
- }
-
- /**
- * Does the actual seeking. This is also called when <code>read</code>
- * needs a new Block to decode.
- */
- private void seek() throws IOException {
- // If seek(long) wasn't called, we simply need to get the next Block
- // from the same Stream. If there are no more Blocks in this Stream,
- // then we behave as if seek(long) had been called.
- if (!seekNeeded) {
- if (curBlockInfo.hasNext()) {
- curBlockInfo.setNext();
- initBlockDecoder();
- return;
- }
-
- seekPos = curPos;
- }
-
- seekNeeded = false;
-
- // Check if we are seeking to or past the end of the file.
- if (seekPos >= uncompressedSize) {
- curPos = seekPos;
- blockDecoder = null;
- endReached = true;
- return;
- }
-
- endReached = false;
-
- // Locate the Block that contains the uncompressed target position.
- locateBlockByPos(curBlockInfo, seekPos);
-
- // Seek in the underlying stream and create a new Block decoder
- // only if really needed. We can skip it if the current position
- // is already in the correct Block and the target position hasn't
- // been decompressed yet.
- //
- // NOTE: If curPos points to the beginning of this Block, it's
- // because it was left there after decompressing an earlier Block.
- // In that case, decoding of the current Block hasn't been started
- // yet. (Decoding of a Block won't be started until at least one
- // byte has been read from it.)
- if (!(curPos > curBlockInfo.uncompressedOffset && curPos <= seekPos)) {
- // Seek to the beginning of the Block.
- in.seek(curBlockInfo.compressedOffset);
-
- // Since it is possible that this Block is from a different
- // Stream than the previous Block, initialize a new Check.
- check = Check.getInstance(curBlockInfo.getCheckType());
-
- // Create a new Block decoder.
- initBlockDecoder();
- curPos = curBlockInfo.uncompressedOffset;
- }
-
- // If the target wasn't at a Block boundary, decompress and throw
- // away data to reach the target position.
- if (seekPos > curPos) {
- // NOTE: The "if" below is there just in case. In this situation,
- // blockDecoder.skip will always skip the requested amount
- // or throw an exception.
- long skipAmount = seekPos - curPos;
- if (blockDecoder.skip(skipAmount) != skipAmount)
- throw new CorruptedInputException();
-
- curPos = seekPos;
- }
- }
-
- /**
- * Locates the Block that contains the given uncompressed position.
- */
- private void locateBlockByPos(BlockInfo info, long pos) {
- if (pos < 0 || pos >= uncompressedSize)
- throw new IndexOutOfBoundsException(
- "Invalid uncompressed position: " + pos);
-
- // Locate the Stream that contains the target position.
- IndexDecoder index;
- for (int i = 0; ; ++i) {
- index = (IndexDecoder)streams.get(i);
- if (index.hasUncompressedOffset(pos))
- break;
- }
-
- // Locate the Block from the Stream that contains the target position.
- index.locateBlock(info, pos);
-
- assert (info.compressedOffset & 3) == 0;
- assert info.uncompressedSize > 0;
- assert pos >= info.uncompressedOffset;
- assert pos < info.uncompressedOffset + info.uncompressedSize;
- }
-
- /**
- * Locates the given Block and stores information about it
- * to <code>info</code>.
- */
- private void locateBlockByNumber(BlockInfo info, int blockNumber) {
- // Validate.
- if (blockNumber < 0 || blockNumber >= blockCount)
- throw new IndexOutOfBoundsException(
- "Invalid XZ Block number: " + blockNumber);
-
- // Skip the search if info already points to the correct Block.
- if (info.blockNumber == blockNumber)
- return;
-
- // Search the Stream that contains the given Block and then
- // search the Block from that Stream.
- for (int i = 0; ; ++i) {
- IndexDecoder index = (IndexDecoder)streams.get(i);
- if (index.hasRecord(blockNumber)) {
- index.setBlockInfo(info, blockNumber);
- return;
- }
- }
- }
-
- /**
- * Initializes a new BlockInputStream. This is a helper function for
- * <code>seek()</code>.
- */
- private void initBlockDecoder() throws IOException {
- try {
- // Set it to null first so that GC can collect it if memory
- // runs tight when initializing a new BlockInputStream.
- blockDecoder = null;
- blockDecoder = new BlockInputStream(
- in, check, verifyCheck, memoryLimit,
- curBlockInfo.unpaddedSize, curBlockInfo.uncompressedSize);
- } catch (MemoryLimitException e) {
- // BlockInputStream doesn't know how much memory we had
- // already used, so we need to recreate the exception.
- assert memoryLimit >= 0;
- throw new MemoryLimitException(
- e.getMemoryNeeded() + indexMemoryUsage,
- memoryLimit + indexMemoryUsage);
- } catch (IndexIndicatorException e) {
- // It cannot be Index so the file must be corrupt.
- throw new CorruptedInputException();
- }
- }
-}
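
SeekableXZInputStream, removed above, is the random-access entry point of this library. A minimal usage sketch built only from the constructors and methods documented in the deleted source; the file name is a placeholder, and seeking only pays off if the .xz file was written with more than one Block.

    import org.tukaani.xz.SeekableFileInputStream;
    import org.tukaani.xz.SeekableXZInputStream;

    public class RandomAccessXzSketch {
        public static void main(String[] args) throws Exception {
            // "big.xz" is a placeholder for a multi-Block .xz file.
            SeekableFileInputStream file = new SeekableFileInputStream("big.xz");
            SeekableXZInputStream xz = new SeekableXZInputStream(file);

            System.out.println("uncompressed size: " + xz.length());
            System.out.println("blocks:            " + xz.getBlockCount());
            System.out.println("largest block:     " + xz.getLargestBlockSize());

            // seek(long) only records the target; the real work happens lazily
            // on the next read(), as documented above.
            byte[] buf = new byte[4096];
            xz.seek(xz.length() / 2);
            int n = xz.read(buf, 0, buf.length);
            System.out.println("read " + n + " bytes from the middle of the data");

            xz.close();   // also closes the underlying SeekableFileInputStream
        }
    }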
diff --git a/Java/Tukaani/src/org/tukaani/xz/SimpleInputStream.java b/Java/Tukaani/src/org/tukaani/xz/SimpleInputStream.java
deleted file mode 100644
index afd40c7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SimpleInputStream.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * SimpleInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.IOException;
-import org.tukaani.xz.simple.SimpleFilter;
-
-class SimpleInputStream extends InputStream {
- private static final int FILTER_BUF_SIZE = 4096;
-
- private InputStream in;
- private final SimpleFilter simpleFilter;
-
- private final byte[] filterBuf = new byte[FILTER_BUF_SIZE];
- private int pos = 0;
- private int filtered = 0;
- private int unfiltered = 0;
-
- private boolean endReached = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- static int getMemoryUsage() {
- return 1 + FILTER_BUF_SIZE / 1024;
- }
-
- SimpleInputStream(InputStream in, SimpleFilter simpleFilter) {
- // Check for null because otherwise null isn't detected
- // in this constructor.
- if (in == null)
- throw new NullPointerException();
-
- // The simpleFilter argument comes from this package
- // so it is known to be non-null already.
- assert simpleFilter != null;
-
- this.in = in;
- this.simpleFilter = simpleFilter;
- }
-
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- try {
- int size = 0;
-
- while (true) {
- // Copy filtered data into the caller-provided buffer.
- int copySize = Math.min(filtered, len);
- System.arraycopy(filterBuf, pos, buf, off, copySize);
- pos += copySize;
- filtered -= copySize;
- off += copySize;
- len -= copySize;
- size += copySize;
-
- // If end of filterBuf was reached, move the pending data to
- // the beginning of the buffer so that more data can be
- // copied into filterBuf on the next loop iteration.
- if (pos + filtered + unfiltered == FILTER_BUF_SIZE) {
- System.arraycopy(filterBuf, pos, filterBuf, 0,
- filtered + unfiltered);
- pos = 0;
- }
-
- if (len == 0 || endReached)
- return size > 0 ? size : -1;
-
- assert filtered == 0;
-
- // Get more data into the temporary buffer.
- int inSize = FILTER_BUF_SIZE - (pos + filtered + unfiltered);
- inSize = in.read(filterBuf, pos + filtered + unfiltered,
- inSize);
-
- if (inSize == -1) {
- // Mark the remaining unfiltered bytes to be ready
- // to be copied out.
- endReached = true;
- filtered = unfiltered;
- unfiltered = 0;
- } else {
- // Filter the data in filterBuf.
- unfiltered += inSize;
- filtered = simpleFilter.code(filterBuf, pos, unfiltered);
- assert filtered <= unfiltered;
- unfiltered -= filtered;
- }
- }
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- return filtered;
- }
-
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
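
The read() loop in SimpleInputStream above juggles three regions inside the single 4096-byte filterBuf; the indices pos, filtered and unfiltered are easier to follow with the buffer layout drawn out. The sketch below is derived from the code above and is illustration only:

    // filterBuf layout maintained by SimpleInputStream.read():
    //
    //   index:   0 ..... pos ..... pos+filtered ..... pos+filtered+unfiltered ..... 4096
    //   region:  [ consumed | filtered | unfiltered | free ]
    //
    //   filtered   - bytes already run through simpleFilter.code(), ready to be
    //                copied out to the caller
    //   unfiltered - raw bytes read from `in` that the BCJ filter could not
    //                convert yet (it may need more look-ahead)
    //
    // When pos + filtered + unfiltered reaches FILTER_BUF_SIZE, the pending
    // filtered + unfiltered bytes are copied back to offset 0 so that the next
    // in.read() has room at the end of the buffer.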
diff --git a/Java/Tukaani/src/org/tukaani/xz/SimpleOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/SimpleOutputStream.java
deleted file mode 100644
index 771b1fb..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SimpleOutputStream.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * SimpleOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.IOException;
-import org.tukaani.xz.simple.SimpleFilter;
-
-class SimpleOutputStream extends FinishableOutputStream {
- private static final int FILTER_BUF_SIZE = 4096;
-
- private FinishableOutputStream out;
- private final SimpleFilter simpleFilter;
-
- private final byte[] filterBuf = new byte[FILTER_BUF_SIZE];
- private int pos = 0;
- private int unfiltered = 0;
-
- private IOException exception = null;
- private boolean finished = false;
-
- private final byte[] tempBuf = new byte[1];
-
- static int getMemoryUsage() {
- return 1 + FILTER_BUF_SIZE / 1024;
- }
-
- SimpleOutputStream(FinishableOutputStream out,
- SimpleFilter simpleFilter) {
- if (out == null)
- throw new NullPointerException();
-
- this.out = out;
- this.simpleFilter = simpleFilter;
- }
-
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- public void write(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- while (len > 0) {
- // Copy more unfiltered data into filterBuf.
- int copySize = Math.min(len, FILTER_BUF_SIZE - (pos + unfiltered));
- System.arraycopy(buf, off, filterBuf, pos + unfiltered, copySize);
- off += copySize;
- len -= copySize;
- unfiltered += copySize;
-
- // Filter the data in filterBuf.
- int filtered = simpleFilter.code(filterBuf, pos, unfiltered);
- assert filtered <= unfiltered;
- unfiltered -= filtered;
-
- // Write out the filtered data.
- try {
- out.write(filterBuf, pos, filtered);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- pos += filtered;
-
- // If end of filterBuf was reached, move the pending unfiltered
- // data to the beginning of the buffer so that more data can
- // be copied into filterBuf on the next loop iteration.
- if (pos + unfiltered == FILTER_BUF_SIZE) {
- System.arraycopy(filterBuf, pos, filterBuf, 0, unfiltered);
- pos = 0;
- }
- }
- }
-
- private void writePending() throws IOException {
- assert !finished;
-
- if (exception != null)
- throw exception;
-
- try {
- out.write(filterBuf, pos, unfiltered);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- finished = true;
- }
-
- public void flush() throws IOException {
- throw new UnsupportedOptionsException("Flushing is not supported");
- }
-
- public void finish() throws IOException {
- if (!finished) {
- // If it fails, don't call out.finish().
- writePending();
-
- try {
- out.finish();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
- }
-
- public void close() throws IOException {
- if (out != null) {
- if (!finished) {
- // out.close() must be called even if writePending() fails.
- // writePending() saves the possible exception so we can
- // ignore exceptions here.
- try {
- writePending();
- } catch (IOException e) {}
- }
-
- try {
- out.close();
- } catch (IOException e) {
- // If there is an earlier exception, the exception
- // from out.close() is lost.
- if (exception == null)
- exception = e;
- }
-
- out = null;
- }
-
- if (exception != null)
- throw exception;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/SingleXZInputStream.java b/Java/Tukaani/src/org/tukaani/xz/SingleXZInputStream.java
deleted file mode 100644
index f0c5a16..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/SingleXZInputStream.java
+++ /dev/null
@@ -1,375 +0,0 @@
-/*
- * SingleXZInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.io.EOFException;
-import org.tukaani.xz.common.DecoderUtil;
-import org.tukaani.xz.common.StreamFlags;
-import org.tukaani.xz.index.IndexHash;
-import org.tukaani.xz.check.Check;
-
-/**
- * Decompresses exactly one XZ Stream in streamed mode (no seeking).
- * The decompression stops after the first XZ Stream has been decompressed,
- * and the read position in the input stream is left at the first byte
- * after the end of the XZ Stream. This can be useful when XZ data has
- * been stored inside some other file format or protocol.
- * <p>
- * Unless you know what you are doing, don't use this class to decompress
- * standalone .xz files. For that purpose, use <code>XZInputStream</code>.
- *
- * <h4>When uncompressed size is known beforehand</h4>
- * <p>
- * If you are decompressing complete XZ streams and your application knows
- * exactly how much uncompressed data there should be, it is good to try
- * reading one more byte by calling <code>read()</code> and checking
- * that it returns <code>-1</code>. This way the decompressor will parse the
- * file footers and verify the integrity checks, giving the caller more
- * confidence that the uncompressed data is valid.
- *
- * @see XZInputStream
- */
-public class SingleXZInputStream extends InputStream {
- private InputStream in;
- private final int memoryLimit;
- private final StreamFlags streamHeaderFlags;
- private final Check check;
- private final boolean verifyCheck;
- private BlockInputStream blockDecoder = null;
- private final IndexHash indexHash = new IndexHash();
- private boolean endReached = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Reads the Stream Header into a buffer.
- * This is a helper function for the constructors.
- */
- private static byte[] readStreamHeader(InputStream in) throws IOException {
- byte[] streamHeader = new byte[DecoderUtil.STREAM_HEADER_SIZE];
- new DataInputStream(in).readFully(streamHeader);
- return streamHeader;
- }
-
- /**
- * Creates a new XZ decompressor that decompresses exactly one
- * XZ Stream from <code>in</code> without a memory usage limit.
- * <p>
- * This constructor reads and parses the XZ Stream Header (12 bytes)
- * from <code>in</code>. The header of the first Block is not read
- * until <code>read</code> is called.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public SingleXZInputStream(InputStream in) throws IOException {
- this(in, -1);
- }
-
- /**
- * Creates a new XZ decompressor that decompresses exactly one
- * XZ Stream from <code>in</code> with an optional memory usage limit.
- * <p>
- * This is identical to <code>SingleXZInputStream(InputStream)</code>
- * except that this also takes the <code>memoryLimit</code> argument.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public SingleXZInputStream(InputStream in, int memoryLimit)
- throws IOException {
- this(in, memoryLimit, true, readStreamHeader(in));
- }
-
- /**
- * Creates a new XZ decompressor that decompresses exactly one
- * XZ Stream from <code>in</code> with an optional memory usage limit
- * and ability to disable verification of integrity checks.
- * <p>
- * This is identical to <code>SingleXZInputStream(InputStream,int)</code>
- * except that this also takes the <code>verifyCheck</code> argument.
- * <p>
- * Note that integrity check verification should almost never be disabled.
- * Possible reasons to disable integrity check verification:
- * <ul>
- * <li>Trying to recover data from a corrupt .xz file.</li>
- * <li>Speeding up decompression. This matters mostly with SHA-256
- * or with files that have compressed extremely well. It's recommended
- * that integrity checking isn't disabled for performance reasons
- * unless the file integrity is verified externally in some other
- * way.</li>
- * </ul>
- * <p>
- * <code>verifyCheck</code> only affects the integrity check of
- * the actual compressed data. The CRC32 fields in the headers
- * are always verified.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @param verifyCheck if <code>true</code>, the integrity checks
- * will be verified; this should almost never
- * be set to <code>false</code>
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- *
- * @since 1.6
- */
- public SingleXZInputStream(InputStream in, int memoryLimit,
- boolean verifyCheck) throws IOException {
- this(in, memoryLimit, verifyCheck, readStreamHeader(in));
- }
-
- SingleXZInputStream(InputStream in, int memoryLimit, boolean verifyCheck,
- byte[] streamHeader) throws IOException {
- this.in = in;
- this.memoryLimit = memoryLimit;
- this.verifyCheck = verifyCheck;
- streamHeaderFlags = DecoderUtil.decodeStreamHeader(streamHeader);
- check = Check.getInstance(streamHeaderFlags.checkType);
- }
-
- /**
- * Gets the ID of the integrity check used in this XZ Stream.
- *
- * @return the Check ID specified in the XZ Stream Header
- */
- public int getCheckType() {
- return streamHeaderFlags.checkType;
- }
-
- /**
- * Gets the name of the integrity check used in this XZ Stream.
- *
- * @return the name of the check specified in the XZ Stream Header
- */
- public String getCheckName() {
- return check.getName();
- }
-
- /**
- * Decompresses the next byte from this input stream.
- * <p>
- * Reading lots of data with <code>read()</code> from this input stream
- * may be inefficient. Wrap it in {@link java.io.BufferedInputStream}
- * if you need to read lots of data one byte at a time.
- *
- * @return the next decompressed byte, or <code>-1</code>
- * to indicate the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decompresses into an array of bytes.
- * <p>
- * If <code>len</code> is zero, no bytes are read and <code>0</code>
- * is returned. Otherwise this will try to decompress <code>len</code>
- * bytes of uncompressed data. Less than <code>len</code> bytes may
- * be read only in the following situations:
- * <ul>
- * <li>The end of the compressed data was reached successfully.</li>
- * <li>An error is detected after at least one but less than <code>len</code>
- * bytes have already been successfully decompressed.
- * The next call with non-zero <code>len</code> will immediately
- * throw the pending exception.</li>
- * <li>An exception is thrown.</li>
- * </ul>
- *
- * @param buf target buffer for uncompressed data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of uncompressed bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- if (endReached)
- return -1;
-
- int size = 0;
-
- try {
- while (len > 0) {
- if (blockDecoder == null) {
- try {
- blockDecoder = new BlockInputStream(
- in, check, verifyCheck, memoryLimit, -1, -1);
- } catch (IndexIndicatorException e) {
- indexHash.validate(in);
- validateStreamFooter();
- endReached = true;
- return size > 0 ? size : -1;
- }
- }
-
- int ret = blockDecoder.read(buf, off, len);
-
- if (ret > 0) {
- size += ret;
- off += ret;
- len -= ret;
- } else if (ret == -1) {
- indexHash.add(blockDecoder.getUnpaddedSize(),
- blockDecoder.getUncompressedSize());
- blockDecoder = null;
- }
- }
- } catch (IOException e) {
- exception = e;
- if (size == 0)
- throw e;
- }
-
- return size;
- }
-
- private void validateStreamFooter() throws IOException {
- byte[] buf = new byte[DecoderUtil.STREAM_HEADER_SIZE];
- new DataInputStream(in).readFully(buf);
- StreamFlags streamFooterFlags = DecoderUtil.decodeStreamFooter(buf);
-
- if (!DecoderUtil.areStreamFlagsEqual(streamHeaderFlags,
- streamFooterFlags)
- || indexHash.getIndexSize() != streamFooterFlags.backwardSize)
- throw new CorruptedInputException(
- "XZ Stream Footer does not match Stream Header");
- }
-
- /**
- * Returns the number of uncompressed bytes that can be read
- * without blocking. The value is returned with an assumption
- * that the compressed input data will be valid. If the compressed
- * data is corrupt, <code>CorruptedInputException</code> may get
- * thrown before the number of bytes claimed to be available have
- * been read from this input stream.
- *
- * @return the number of uncompressed bytes that can be read
- * without blocking
- */
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- return blockDecoder == null ? 0 : blockDecoder.available();
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
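
SingleXZInputStream, deleted above, stops after exactly one XZ Stream and leaves the wrapped stream positioned at the first byte after that Stream, which is exactly what is needed when .xz data sits inside another container format. A minimal sketch under that assumption; the container layout (a 16-byte header followed by one XZ Stream and then other data) and the file name are hypothetical.

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.tukaani.xz.SingleXZInputStream;

    public class EmbeddedXzSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical container: 16-byte header, one XZ Stream, then more data.
            InputStream container = new FileInputStream("bundle.bin");
            new DataInputStream(container).skipBytes(16);   // skip the container header

            // 65536 KiB (64 MiB) memory usage limit; the limit is given in KiB.
            SingleXZInputStream xz = new SingleXZInputStream(container, 65536);

            byte[] buf = new byte[8192];
            int n;
            while ((n = xz.read(buf, 0, buf.length)) != -1) {
                // process n uncompressed bytes ...
            }

            // Note: xz.close() would also close 'container' (see close() above),
            // so it is skipped here; 'container' now points just past the XZ Stream.
        }
    }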
diff --git a/Java/Tukaani/src/org/tukaani/xz/UncompressedLZMA2OutputStream.java b/Java/Tukaani/src/org/tukaani/xz/UncompressedLZMA2OutputStream.java
deleted file mode 100644
index 1ff9675..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/UncompressedLZMA2OutputStream.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * UncompressedLZMA2OutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-class UncompressedLZMA2OutputStream extends FinishableOutputStream {
- private FinishableOutputStream out;
- private final DataOutputStream outData;
-
- private final byte[] uncompBuf
- = new byte[LZMA2OutputStream.COMPRESSED_SIZE_MAX];
- private int uncompPos = 0;
- private boolean dictResetNeeded = true;
-
- private boolean finished = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- static int getMemoryUsage() {
- // uncompBuf + a little extra
- return 70;
- }
-
- UncompressedLZMA2OutputStream(FinishableOutputStream out) {
- if (out == null)
- throw new NullPointerException();
-
- this.out = out;
- outData = new DataOutputStream(out);
- }
-
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- public void write(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- while (len > 0) {
- int copySize = Math.min(uncompBuf.length - uncompPos, len);
- System.arraycopy(buf, off, uncompBuf, uncompPos, copySize);
- len -= copySize;
- uncompPos += copySize;
-
- if (uncompPos == uncompBuf.length)
- writeChunk();
- }
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- private void writeChunk() throws IOException {
- outData.writeByte(dictResetNeeded ? 0x01 : 0x02);
- outData.writeShort(uncompPos - 1);
- outData.write(uncompBuf, 0, uncompPos);
- uncompPos = 0;
- dictResetNeeded = false;
- }
-
- private void writeEndMarker() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- if (uncompPos > 0)
- writeChunk();
-
- out.write(0x00);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public void flush() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- if (uncompPos > 0)
- writeChunk();
-
- out.flush();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- public void finish() throws IOException {
- if (!finished) {
- writeEndMarker();
-
- try {
- out.finish();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- finished = true;
- }
- }
-
- public void close() throws IOException {
- if (out != null) {
- if (!finished) {
- try {
- writeEndMarker();
- } catch (IOException e) {}
- }
-
- try {
- out.close();
- } catch (IOException e) {
- if (exception == null)
- exception = e;
- }
-
- out = null;
- }
-
- if (exception != null)
- throw exception;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/UnsupportedOptionsException.java b/Java/Tukaani/src/org/tukaani/xz/UnsupportedOptionsException.java
deleted file mode 100644
index 9aa16e8..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/UnsupportedOptionsException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * UnsupportedOptionsException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * Thrown when compression options not supported by this implementation
- * are detected. Some other implementation might support those options.
- */
-public class UnsupportedOptionsException extends XZIOException {
- private static final long serialVersionUID = 3L;
-
- /**
- * Creates a new UnsupportedOptionsException with null
- * as its error detail message.
- */
- public UnsupportedOptionsException() {}
-
- /**
- * Creates a new UnsupportedOptionsException with the given
- * error detail message.
- *
- * @param s error detail message
- */
- public UnsupportedOptionsException(String s) {
- super(s);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/X86Options.java b/Java/Tukaani/src/org/tukaani/xz/X86Options.java
deleted file mode 100644
index e6d241f..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/X86Options.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * X86Options
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import org.tukaani.xz.simple.X86;
-
-/**
- * BCJ filter for x86 (32-bit and 64-bit) instructions.
- */
-public class X86Options extends BCJOptions {
- private static final int ALIGNMENT = 1;
-
- public X86Options() {
- super(ALIGNMENT);
- }
-
- public FinishableOutputStream getOutputStream(FinishableOutputStream out) {
- return new SimpleOutputStream(out, new X86(true, startOffset));
- }
-
- public InputStream getInputStream(InputStream in) {
- return new SimpleInputStream(in, new X86(false, startOffset));
- }
-
- FilterEncoder getFilterEncoder() {
- return new BCJEncoder(this, BCJCoder.X86_FILTER_ID);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/XZ.java b/Java/Tukaani/src/org/tukaani/xz/XZ.java
deleted file mode 100644
index 4e0857f..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/XZ.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * XZ
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * XZ constants.
- */
-public class XZ {
- /**
- * XZ Header Magic Bytes begin an XZ file.
- * This can be useful to detect XZ compressed data.
- */
- public static final byte[] HEADER_MAGIC = {
- (byte)0xFD, '7', 'z', 'X', 'Z', '\0' };
-
- /**
- * XZ Footer Magic Bytes are the last bytes of an XZ Stream.
- */
- public static final byte[] FOOTER_MAGIC = { 'Y', 'Z' };
-
- /**
- * Integrity check ID indicating that no integrity check is calculated.
- * <p>
- * Omitting the integrity check is strongly discouraged except when
- * the integrity of the data will be verified by other means anyway,
- * and calculating the check twice would be useless.
- */
- public static final int CHECK_NONE = 0;
-
- /**
- * Integrity check ID for CRC32.
- */
- public static final int CHECK_CRC32 = 1;
-
- /**
- * Integrity check ID for CRC64.
- */
- public static final int CHECK_CRC64 = 4;
-
- /**
- * Integrity check ID for SHA-256.
- */
- public static final int CHECK_SHA256 = 10;
-
- private XZ() {}
-}
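As the Javadoc above notes, HEADER_MAGIC is handy for detecting XZ data before constructing a decompressor. A minimal sketch; the helper class name and file path are illustrative only, not part of the library:

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import org.tukaani.xz.XZ;

class XZSniff {
    // Returns true if the file starts with the 6-byte XZ Header Magic.
    static boolean looksLikeXZ(String path) throws IOException {
        DataInputStream in = new DataInputStream(new FileInputStream(path));
        try {
            byte[] head = new byte[XZ.HEADER_MAGIC.length];
            in.readFully(head);                        // EOFException if shorter
            return Arrays.equals(head, XZ.HEADER_MAGIC);
        } catch (EOFException e) {
            return false;                              // too short to be an .xz file
        } finally {
            in.close();
        }
    }
}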
diff --git a/Java/Tukaani/src/org/tukaani/xz/XZFormatException.java b/Java/Tukaani/src/org/tukaani/xz/XZFormatException.java
deleted file mode 100644
index 6f63020..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/XZFormatException.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * XZFormatException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * Thrown when the input data is not in the XZ format.
- */
-public class XZFormatException extends XZIOException {
- private static final long serialVersionUID = 3L;
-
- /**
- * Creates a new exception with the default error detail message.
- */
- public XZFormatException() {
- super("Input is not in the XZ format");
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/XZIOException.java b/Java/Tukaani/src/org/tukaani/xz/XZIOException.java
deleted file mode 100644
index 14675f5..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/XZIOException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * XZIOException
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-/**
- * Generic {@link java.io.IOException IOException} specific to this package.
- * The other IOExceptions in this package extend
- * from <code>XZIOException</code>.
- */
-public class XZIOException extends java.io.IOException {
- private static final long serialVersionUID = 3L;
-
- public XZIOException() {
- super();
- }
-
- public XZIOException(String s) {
- super(s);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/XZInputStream.java b/Java/Tukaani/src/org/tukaani/xz/XZInputStream.java
deleted file mode 100644
index 0d460ea..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/XZInputStream.java
+++ /dev/null
@@ -1,371 +0,0 @@
-/*
- * XZInputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.io.EOFException;
-import org.tukaani.xz.common.DecoderUtil;
-
-/**
- * Decompresses a .xz file in streamed mode (no seeking).
- * <p>
- * Use this to decompress regular standalone .xz files. This reads from
- * its input stream until the end of the input or until an error occurs.
- * This supports decompressing concatenated .xz files.
- *
- * <h4>Typical use cases</h4>
- * <p>
- * Getting an input stream to decompress a .xz file:
- * <p><blockquote><pre>
- * InputStream infile = new FileInputStream("foo.xz");
- * XZInputStream inxz = new XZInputStream(infile);
- * </pre></blockquote>
- * <p>
- * It's important to keep in mind that decompressor memory usage depends
- * on the settings used to compress the file. The worst-case memory usage
- * of XZInputStream is currently 1.5&nbsp;GiB. Still, very few files will
- * require more than about 65&nbsp;MiB because that's how much decompressing
- * a file created with the highest preset level will need, and only a few
- * people use settings other than the predefined presets.
- * <p>
- * It is possible to specify a memory usage limit for
- * <code>XZInputStream</code>. If decompression requires more memory than
- * the specified limit, MemoryLimitException will be thrown when reading
- * from the stream. For example, the following sets the memory usage limit
- * to 100&nbsp;MiB:
- * <p><blockquote><pre>
- * InputStream infile = new FileInputStream("foo.xz");
- * XZInputStream inxz = new XZInputStream(infile, 100 * 1024);
- * </pre></blockquote>
- *
- * <h4>When uncompressed size is known beforehand</h4>
- * <p>
- * If you are decompressing complete files and your application knows
- * exactly how much uncompressed data there should be, it is good to try
- * reading one more byte by calling <code>read()</code> and checking
- * that it returns <code>-1</code>. This way the decompressor will parse the
- * file footers and verify the integrity checks, giving the caller more
- * confidence that the uncompressed data is valid. (This advice seems to
- * apply to
- * {@link java.util.zip.GZIPInputStream java.util.zip.GZIPInputStream} too.)
- *
- * @see SingleXZInputStream
- */
-public class XZInputStream extends InputStream {
- private final int memoryLimit;
- private InputStream in;
- private SingleXZInputStream xzIn;
- private final boolean verifyCheck;
- private boolean endReached = false;
- private IOException exception = null;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Creates a new XZ decompressor without a memory usage limit.
- * <p>
- * This constructor reads and parses the XZ Stream Header (12 bytes)
- * from <code>in</code>. The header of the first Block is not read
- * until <code>read</code> is called.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public XZInputStream(InputStream in) throws IOException {
- this(in, -1);
- }
-
- /**
- * Creates a new XZ decompressor with an optional memory usage limit.
- * <p>
- * This is identical to <code>XZInputStream(InputStream)</code> except
- * that this also takes the <code>memoryLimit</code> argument.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public XZInputStream(InputStream in, int memoryLimit) throws IOException {
- this(in, memoryLimit, true);
- }
-
- /**
- * Creates a new XZ decompressor with an optional memory usage limit
- * and ability to disable verification of integrity checks.
- * <p>
- * This is identical to <code>XZInputStream(InputStream,int)</code> except
- * that this also takes the <code>verifyCheck</code> argument.
- * <p>
- * Note that integrity check verification should almost never be disabled.
- * Possible reasons to disable integrity check verification:
- * <ul>
- * <li>Trying to recover data from a corrupt .xz file.</li>
- * <li>Speeding up decompression. This matters mostly with SHA-256
- * or with files that have compressed extremely well. It's recommended
- * that integrity checking isn't disabled for performance reasons
- * unless the file integrity is verified externally in some other
- * way.</li>
- * </ul>
- * <p>
- * <code>verifyCheck</code> only affects the integrity check of
- * the actual compressed data. The CRC32 fields in the headers
- * are always verified.
- *
- * @param in input stream from which XZ-compressed
- * data is read
- *
- * @param memoryLimit memory usage limit in kibibytes (KiB)
- * or <code>-1</code> to impose no
- * memory usage limit
- *
- * @param verifyCheck if <code>true</code>, the integrity checks
- * will be verified; this should almost never
- * be set to <code>false</code>
- *
- * @throws XZFormatException
- * input is not in the XZ format
- *
- * @throws CorruptedInputException
- * XZ header CRC32 doesn't match
- *
- * @throws UnsupportedOptionsException
- * XZ header is valid but specifies options
- * not supported by this implementation
- *
- * @throws EOFException
- * less than 12 bytes of input was available
- * from <code>in</code>
- *
- * @throws IOException may be thrown by <code>in</code>
- *
- * @since 1.6
- */
- public XZInputStream(InputStream in, int memoryLimit, boolean verifyCheck)
- throws IOException {
- this.in = in;
- this.memoryLimit = memoryLimit;
- this.verifyCheck = verifyCheck;
- this.xzIn = new SingleXZInputStream(in, memoryLimit, verifyCheck);
- }
-
- /**
- * Decompresses the next byte from this input stream.
- * <p>
- * Reading lots of data with <code>read()</code> from this input stream
- * may be inefficient. Wrap it in {@link java.io.BufferedInputStream}
- * if you need to read lots of data one byte at a time.
- *
- * @return the next decompressed byte, or <code>-1</code>
- * to indicate the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read() throws IOException {
- return read(tempBuf, 0, 1) == -1 ? -1 : (tempBuf[0] & 0xFF);
- }
-
- /**
- * Decompresses into an array of bytes.
- * <p>
- * If <code>len</code> is zero, no bytes are read and <code>0</code>
- * is returned. Otherwise this will try to decompress <code>len</code>
- * bytes of uncompressed data. Less than <code>len</code> bytes may
- * be read only in the following situations:
- * <ul>
- * <li>The end of the compressed data was reached successfully.</li>
- * <li>An error is detected after at least one but less than <code>len</code>
- * bytes have already been successfully decompressed.
- * The next call with non-zero <code>len</code> will immediately
- * throw the pending exception.</li>
- * <li>An exception is thrown.</li>
- * </ul>
- *
- * @param buf target buffer for uncompressed data
- * @param off start offset in <code>buf</code>
- * @param len maximum number of uncompressed bytes to read
- *
- * @return number of bytes read, or <code>-1</code> to indicate
- * the end of the compressed stream
- *
- * @throws CorruptedInputException
- * @throws UnsupportedOptionsException
- * @throws MemoryLimitException
- *
- * @throws XZIOException if the stream has been closed
- *
- * @throws EOFException
- * compressed input is truncated or corrupt
- *
- * @throws IOException may be thrown by <code>in</code>
- */
- public int read(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (len == 0)
- return 0;
-
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- if (endReached)
- return -1;
-
- int size = 0;
-
- try {
- while (len > 0) {
- if (xzIn == null) {
- prepareNextStream();
- if (endReached)
- return size == 0 ? -1 : size;
- }
-
- int ret = xzIn.read(buf, off, len);
-
- if (ret > 0) {
- size += ret;
- off += ret;
- len -= ret;
- } else if (ret == -1) {
- xzIn = null;
- }
- }
- } catch (IOException e) {
- exception = e;
- if (size == 0)
- throw e;
- }
-
- return size;
- }
-
- private void prepareNextStream() throws IOException {
- DataInputStream inData = new DataInputStream(in);
- byte[] buf = new byte[DecoderUtil.STREAM_HEADER_SIZE];
-
- // The size of Stream Padding must be a multiple of four bytes,
- // all bytes zero.
- do {
- // First try to read one byte to see if we have reached the end
- // of the file.
- int ret = inData.read(buf, 0, 1);
- if (ret == -1) {
- endReached = true;
- return;
- }
-
- // Since we got one byte of input, there must be at least
- // three more available in a valid file.
- inData.readFully(buf, 1, 3);
-
- } while (buf[0] == 0 && buf[1] == 0 && buf[2] == 0 && buf[3] == 0);
-
- // Not all bytes are zero. In a valid file this indicates the
- // beginning of the next Stream. Read the rest of the Stream Header
- // and initialize the XZ decoder.
- inData.readFully(buf, 4, DecoderUtil.STREAM_HEADER_SIZE - 4);
-
- try {
- xzIn = new SingleXZInputStream(in, memoryLimit, verifyCheck, buf);
- } catch (XZFormatException e) {
- // Since this isn't the first .xz Stream, it is more
- // logical to tell that the data is corrupt.
- throw new CorruptedInputException(
- "Garbage after a valid XZ Stream");
- }
- }
-
- /**
- * Returns the number of uncompressed bytes that can be read
- * without blocking. The value is returned with an assumption
- * that the compressed input data will be valid. If the compressed
- * data is corrupt, <code>CorruptedInputException</code> may get
- * thrown before the number of bytes claimed to be available have
- * been read from this input stream.
- *
- * @return the number of uncompressed bytes that can be read
- * without blocking
- */
- public int available() throws IOException {
- if (in == null)
- throw new XZIOException("Stream closed");
-
- if (exception != null)
- throw exception;
-
- return xzIn == null ? 0 : xzIn.available();
- }
-
- /**
- * Closes the stream and calls <code>in.close()</code>.
- * If the stream was already closed, this does nothing.
- *
- * @throws IOException if thrown by <code>in.close()</code>
- */
- public void close() throws IOException {
- if (in != null) {
- try {
- in.close();
- } finally {
- in = null;
- }
- }
- }
-}
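Tying together the memory-limit and end-of-stream advice from the Javadoc above, a hedged sketch of decompressing a whole file; the 64 MiB limit, file names, and class name are examples only:

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.tukaani.xz.XZInputStream;

class XZDecompressDemo {
    public static void main(String[] args) throws IOException {
        InputStream infile = new FileInputStream("foo.xz");
        XZInputStream inxz = new XZInputStream(infile, 64 * 1024); // limit in KiB
        OutputStream outfile = new FileOutputStream("foo");
        try {
            byte[] buf = new byte[8192];
            int n;
            while ((n = inxz.read(buf)) != -1)
                outfile.write(buf, 0, n);
            // Reaching -1 means the decoder has parsed the Stream Footer(s) of
            // all concatenated Streams and verified their integrity checks.
        } finally {
            outfile.close();
            inxz.close();   // also closes infile
        }
    }
}

Reading until -1 is what makes the footer CRC32 and the check of the last Block get verified, as recommended above.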
diff --git a/Java/Tukaani/src/org/tukaani/xz/XZOutputStream.java b/Java/Tukaani/src/org/tukaani/xz/XZOutputStream.java
deleted file mode 100644
index 6a37fed..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/XZOutputStream.java
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * XZOutputStream
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz;
-
-import java.io.OutputStream;
-import java.io.IOException;
-import org.tukaani.xz.common.EncoderUtil;
-import org.tukaani.xz.common.StreamFlags;
-import org.tukaani.xz.check.Check;
-import org.tukaani.xz.index.IndexEncoder;
-
-/**
- * Compresses into the .xz file format.
- *
- * <h4>Examples</h4>
- * <p>
- * Getting an output stream to compress with LZMA2 using the default
- * settings and the default integrity check type (CRC64):
- * <p><blockquote><pre>
- * FileOutputStream outfile = new FileOutputStream("foo.xz");
- * XZOutputStream outxz = new XZOutputStream(outfile, new LZMA2Options());
- * </pre></blockquote>
- * <p>
- * Using the preset level <code>8</code> for LZMA2 (the default
- * is <code>6</code>) and SHA-256 instead of CRC64 for integrity checking:
- * <p><blockquote><pre>
- * XZOutputStream outxz = new XZOutputStream(outfile, new LZMA2Options(8),
- * XZ.CHECK_SHA256);
- * </pre></blockquote>
- * <p>
- * Using the x86 BCJ filter together with LZMA2 to compress x86 executables
- * and printing the memory usage information before creating the
- * XZOutputStream:
- * <p><blockquote><pre>
- * X86Options x86 = new X86Options();
- * LZMA2Options lzma2 = new LZMA2Options();
- * FilterOptions[] options = { x86, lzma2 };
- * System.out.println("Encoder memory usage: "
- * + FilterOptions.getEncoderMemoryUsage(options)
- * + " KiB");
- * System.out.println("Decoder memory usage: "
- * + FilterOptions.getDecoderMemoryUsage(options)
- * + " KiB");
- * XZOutputStream outxz = new XZOutputStream(outfile, options);
- * </pre></blockquote>
- */
-public class XZOutputStream extends FinishableOutputStream {
- private OutputStream out;
- private final StreamFlags streamFlags = new StreamFlags();
- private final Check check;
- private final IndexEncoder index = new IndexEncoder();
-
- private BlockOutputStream blockEncoder = null;
- private FilterEncoder[] filters;
-
- /**
- * True if the current filter chain supports flushing.
- * If it doesn't support flushing, <code>flush()</code>
- * will use <code>endBlock()</code> as a fallback.
- */
- private boolean filtersSupportFlushing;
-
- private IOException exception = null;
- private boolean finished = false;
-
- private final byte[] tempBuf = new byte[1];
-
- /**
- * Creates a new XZ compressor using one filter and CRC64 as
- * the integrity check. This constructor is equivalent to passing
- * a single-member FilterOptions array to
- * <code>XZOutputStream(OutputStream, FilterOptions[])</code>.
- *
- * @param out output stream to which the compressed data
- * will be written
- *
- * @param filterOptions
- * filter options to use
- *
- * @throws UnsupportedOptionsException
- * invalid filter chain
- *
- * @throws IOException may be thrown from <code>out</code>
- */
- public XZOutputStream(OutputStream out, FilterOptions filterOptions)
- throws IOException {
- this(out, filterOptions, XZ.CHECK_CRC64);
- }
-
- /**
- * Creates a new XZ compressor using one filter and the specified
- * integrity check type. This constructor is equivalent to
- * passing a single-member FilterOptions array to
- * <code>XZOutputStream(OutputStream, FilterOptions[], int)</code>.
- *
- * @param out output stream to which the compressed data
- * will be written
- *
- * @param filterOptions
- * filter options to use
- *
- * @param checkType type of the integrity check,
- * for example XZ.CHECK_CRC32
- *
- * @throws UnsupportedOptionsException
- * invalid filter chain
- *
- * @throws IOException may be thrown from <code>out</code>
- */
- public XZOutputStream(OutputStream out, FilterOptions filterOptions,
- int checkType) throws IOException {
- this(out, new FilterOptions[] { filterOptions }, checkType);
- }
-
- /**
- * Creates a new XZ compressor using 1-4 filters and CRC64 as
- * the integrity check. This constructor is equivalent to
- * <code>XZOutputStream(out, filterOptions, XZ.CHECK_CRC64)</code>.
- *
- * @param out output stream to which the compressed data
- * will be written
- *
- * @param filterOptions
- * array of filter options to use
- *
- * @throws UnsupportedOptionsException
- * invalid filter chain
- *
- * @throws IOException may be thrown from <code>out</code>
- */
- public XZOutputStream(OutputStream out, FilterOptions[] filterOptions)
- throws IOException {
- this(out, filterOptions, XZ.CHECK_CRC64);
- }
-
- /**
- * Creates a new XZ compressor using 1-4 filters and the specified
- * integrity check type.
- *
- * @param out output stream to which the compressed data
- * will be written
- *
- * @param filterOptions
- * array of filter options to use
- *
- * @param checkType type of the integrity check,
- * for example XZ.CHECK_CRC32
- *
- * @throws UnsupportedOptionsException
- * invalid filter chain
- *
- * @throws IOException may be thrown from <code>out</code>
- */
- public XZOutputStream(OutputStream out, FilterOptions[] filterOptions,
- int checkType) throws IOException {
- this.out = out;
- updateFilters(filterOptions);
-
- streamFlags.checkType = checkType;
- check = Check.getInstance(checkType);
-
- encodeStreamHeader();
- }
-
- /**
- * Updates the filter chain with a single filter.
- * This is equivalent to passing a single-member FilterOptions array
- * to <code>updateFilters(FilterOptions[])</code>.
- *
- * @param filterOptions
- * new filter to use
- *
- * @throws UnsupportedOptionsException
- * unsupported filter chain, or trying to change
- * the filter chain in the middle of a Block
- */
- public void updateFilters(FilterOptions filterOptions)
- throws XZIOException {
- FilterOptions[] opts = new FilterOptions[1];
- opts[0] = filterOptions;
- updateFilters(opts);
- }
-
- /**
- * Updates the filter chain with 1-4 filters.
- * <p>
- * Currently this cannot be used to update e.g. LZMA2 options in the
- * middle of an XZ Block. Use <code>endBlock()</code> to finish the
- * current XZ Block before calling this function. The new filter chain
- * will then be used for the next XZ Block.
- *
- * @param filterOptions
- * new filter chain to use
- *
- * @throws UnsupportedOptionsException
- * unsupported filter chain, or trying to change
- * the filter chain in the middle of a Block
- */
- public void updateFilters(FilterOptions[] filterOptions)
- throws XZIOException {
- if (blockEncoder != null)
- throw new UnsupportedOptionsException("Changing filter options "
- + "in the middle of a XZ Block not implemented");
-
- if (filterOptions.length < 1 || filterOptions.length > 4)
- throw new UnsupportedOptionsException(
- "XZ filter chain must be 1-4 filters");
-
- filtersSupportFlushing = true;
- FilterEncoder[] newFilters = new FilterEncoder[filterOptions.length];
- for (int i = 0; i < filterOptions.length; ++i) {
- newFilters[i] = filterOptions[i].getFilterEncoder();
- filtersSupportFlushing &= newFilters[i].supportsFlushing();
- }
-
- RawCoder.validate(newFilters);
- filters = newFilters;
- }
-
- /**
- * Writes one byte to be compressed.
- *
- * @throws XZIOException
- * XZ Stream has grown too big
- *
- * @throws XZIOException
- * <code>finish()</code> or <code>close()</code>
- * was already called
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void write(int b) throws IOException {
- tempBuf[0] = (byte)b;
- write(tempBuf, 0, 1);
- }
-
- /**
- * Writes an array of bytes to be compressed.
- * The compressors tend to do internal buffering and thus the written
- * data won't be readable from the compressed output immediately.
- * Use <code>flush()</code> to force everything written so far to
- * be written to the underlying output stream, but be aware that
- * flushing reduces compression ratio.
- *
- * @param buf buffer of bytes to be written
- * @param off start offset in <code>buf</code>
- * @param len number of bytes to write
- *
- * @throws XZIOException
- * XZ Stream has grown too big: total file size
- * about 8&nbsp;EiB or the Index field exceeds
- * 16&nbsp;GiB; you shouldn't reach these sizes
- * in practice
- *
- * @throws XZIOException
- * <code>finish()</code> or <code>close()</code>
- * was already called and len &gt; 0
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void write(byte[] buf, int off, int len) throws IOException {
- if (off < 0 || len < 0 || off + len < 0 || off + len > buf.length)
- throw new IndexOutOfBoundsException();
-
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- if (blockEncoder == null)
- blockEncoder = new BlockOutputStream(out, filters, check);
-
- blockEncoder.write(buf, off, len);
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- /**
- * Finishes the current XZ Block (but not the whole XZ Stream).
- * This doesn't flush the stream so it's possible that not all data will
- * be decompressible from the output stream when this function returns.
- * Call also <code>flush()</code> if flushing is wanted in addition to
- * finishing the current XZ Block.
- * <p>
- * If there is no unfinished Block open, this function will do nothing.
- * (No empty XZ Block will be created.)
- * <p>
- * This function can be useful, for example, to create
- * random-accessible .xz files.
- * <p>
- * Starting a new XZ Block means that the encoder state is reset.
- * Doing this very often will increase the size of the compressed
- * file a lot (more than plain <code>flush()</code> would do).
- *
- * @throws XZIOException
- * XZ Stream has grown too big
- *
- * @throws XZIOException
- * stream finished or closed
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void endBlock() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- // NOTE: Once there is threading with multiple Blocks, it's possible
- // that this function will be more like a barrier that returns
- // before the last Block has been finished.
- if (blockEncoder != null) {
- try {
- blockEncoder.finish();
- index.add(blockEncoder.getUnpaddedSize(),
- blockEncoder.getUncompressedSize());
- blockEncoder = null;
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
- }
-
- /**
- * Flushes the encoder and calls <code>out.flush()</code>.
- * All buffered pending data will then be decompressible from
- * the output stream.
- * <p>
- * Calling this function very often may increase the compressed
- * file size a lot. The filter chain options may affect the size
- * increase too. For example, with LZMA2 the HC4 match finder has a
- * smaller penalty with flushing than BT4.
- * <p>
- * Some filters don't support flushing. If the filter chain has
- * such a filter, <code>flush()</code> will call <code>endBlock()</code>
- * before flushing.
- *
- * @throws XZIOException
- * XZ Stream has grown too big
- *
- * @throws XZIOException
- * stream finished or closed
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void flush() throws IOException {
- if (exception != null)
- throw exception;
-
- if (finished)
- throw new XZIOException("Stream finished or closed");
-
- try {
- if (blockEncoder != null) {
- if (filtersSupportFlushing) {
- // This will eventually call out.flush() so
- // no need to do it here again.
- blockEncoder.flush();
- } else {
- endBlock();
- out.flush();
- }
- } else {
- out.flush();
- }
- } catch (IOException e) {
- exception = e;
- throw e;
- }
- }
-
- /**
- * Finishes compression without closing the underlying stream.
- * No more data can be written to this stream after finishing
- * (calling <code>write</code> with an empty buffer is OK).
- * <p>
- * Repeated calls to <code>finish()</code> do nothing unless
- * an exception was thrown by this stream earlier. In that case
- * the same exception is thrown again.
- * <p>
- * After finishing, the stream may be closed normally with
- * <code>close()</code>. If the stream will be closed anyway, there
- * usually is no need to call <code>finish()</code> separately.
- *
- * @throws XZIOException
- * XZ Stream has grown too big
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void finish() throws IOException {
- if (!finished) {
- // This checks for pending exceptions so we don't need to
- // worry about it here.
- endBlock();
-
- try {
- index.encode(out);
- encodeStreamFooter();
- } catch (IOException e) {
- exception = e;
- throw e;
- }
-
- // Set it to true only if everything goes fine. Setting it earlier
- // would cause repeated calls to finish() to do nothing instead of
- // throwing an exception to indicate an earlier error.
- finished = true;
- }
- }
-
- /**
- * Finishes compression and closes the underlying stream.
- * The underlying stream <code>out</code> is closed even if finishing
- * fails. If both finishing and closing fail, the exception thrown
- * by <code>finish()</code> is thrown and the exception from the failed
- * <code>out.close()</code> is lost.
- *
- * @throws XZIOException
- * XZ Stream has grown too big
- *
- * @throws IOException may be thrown by the underlying output stream
- */
- public void close() throws IOException {
- if (out != null) {
- // If finish() throws an exception, it stores the exception to
- // the variable "exception". So we can ignore the possible
- // exception here.
- try {
- finish();
- } catch (IOException e) {}
-
- try {
- out.close();
- } catch (IOException e) {
- // Remember the exception but only if there is no previous
- // pending exception.
- if (exception == null)
- exception = e;
- }
-
- out = null;
- }
-
- if (exception != null)
- throw exception;
- }
-
- private void encodeStreamFlags(byte[] buf, int off) {
- buf[off] = 0x00;
- buf[off + 1] = (byte)streamFlags.checkType;
- }
-
- private void encodeStreamHeader() throws IOException {
- out.write(XZ.HEADER_MAGIC);
-
- byte[] buf = new byte[2];
- encodeStreamFlags(buf, 0);
- out.write(buf);
-
- EncoderUtil.writeCRC32(out, buf);
- }
-
- private void encodeStreamFooter() throws IOException {
- byte[] buf = new byte[6];
- long backwardSize = index.getIndexSize() / 4 - 1;
- for (int i = 0; i < 4; ++i)
- buf[i] = (byte)(backwardSize >>> (i * 8));
-
- encodeStreamFlags(buf, 4);
-
- EncoderUtil.writeCRC32(out, buf);
- out.write(buf);
- out.write(XZ.FOOTER_MAGIC);
- }
-}
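Building on the endBlock() notes above, a sketch of producing two independently decompressible Blocks (useful for random access, at the cost of a larger file); the file name, preset 6, payload sizes, and class name are placeholders:

import java.io.FileOutputStream;
import java.io.IOException;
import org.tukaani.xz.LZMA2Options;
import org.tukaani.xz.XZOutputStream;

class TwoBlockDemo {
    public static void main(String[] args) throws IOException {
        byte[] firstPart = new byte[4096];     // placeholder payloads
        byte[] secondPart = new byte[4096];

        FileOutputStream outfile = new FileOutputStream("split.xz");
        XZOutputStream outxz = new XZOutputStream(outfile, new LZMA2Options(6));
        try {
            outxz.write(firstPart);
            outxz.endBlock();                  // finish Block 1; resets the encoder state
            outxz.write(secondPart);
            outxz.finish();                    // writes the Index and Stream Footer
        } finally {
            outxz.close();                     // closes outfile as well
        }
    }
}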
diff --git a/Java/Tukaani/src/org/tukaani/xz/check/CRC32.java b/Java/Tukaani/src/org/tukaani/xz/check/CRC32.java
deleted file mode 100644
index f182898..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/check/CRC32.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * CRC32
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.check;
-
-public class CRC32 extends Check {
- private final java.util.zip.CRC32 state = new java.util.zip.CRC32();
-
- public CRC32() {
- size = 4;
- name = "CRC32";
- }
-
- public void update(byte[] buf, int off, int len) {
- state.update(buf, off, len);
- }
-
- public byte[] finish() {
- long value = state.getValue();
- byte[] buf = { (byte)(value),
- (byte)(value >>> 8),
- (byte)(value >>> 16),
- (byte)(value >>> 24) };
- state.reset();
- return buf;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/check/CRC64.java b/Java/Tukaani/src/org/tukaani/xz/check/CRC64.java
deleted file mode 100644
index 02b15b7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/check/CRC64.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * CRC64
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.check;
-
-public class CRC64 extends Check {
- private static final long poly = 0xC96C5795D7870F42L;
- private static final long[] crcTable = new long[256];
-
- private long crc = -1;
-
- static {
- for (int b = 0; b < crcTable.length; ++b) {
- long r = b;
- for (int i = 0; i < 8; ++i) {
- if ((r & 1) == 1)
- r = (r >>> 1) ^ poly;
- else
- r >>>= 1;
- }
-
- crcTable[b] = r;
- }
- }
-
- public CRC64() {
- size = 8;
- name = "CRC64";
- }
-
- public void update(byte[] buf, int off, int len) {
- int end = off + len;
-
- while (off < end)
- crc = crcTable[(buf[off++] ^ (int)crc) & 0xFF] ^ (crc >>> 8);
- }
-
- public byte[] finish() {
- long value = ~crc;
- crc = -1;
-
- byte[] buf = new byte[8];
- for (int i = 0; i < buf.length; ++i)
- buf[i] = (byte)(value >> (i * 8));
-
- return buf;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/check/Check.java b/Java/Tukaani/src/org/tukaani/xz/check/Check.java
deleted file mode 100644
index 02c011e..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/check/Check.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Check
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.check;
-
-import org.tukaani.xz.XZ;
-import org.tukaani.xz.UnsupportedOptionsException;
-
-public abstract class Check {
- int size;
- String name;
-
- public abstract void update(byte[] buf, int off, int len);
- public abstract byte[] finish();
-
- public void update(byte[] buf) {
- update(buf, 0, buf.length);
- }
-
- public int getSize() {
- return size;
- }
-
- public String getName() {
- return name;
- }
-
- public static Check getInstance(int checkType)
- throws UnsupportedOptionsException {
- switch (checkType) {
- case XZ.CHECK_NONE:
- return new None();
-
- case XZ.CHECK_CRC32:
- return new CRC32();
-
- case XZ.CHECK_CRC64:
- return new CRC64();
-
- case XZ.CHECK_SHA256:
- try {
- return new SHA256();
- } catch (java.security.NoSuchAlgorithmException e) {}
-
- break;
- }
-
- throw new UnsupportedOptionsException(
- "Unsupported Check ID " + checkType);
- }
-}
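A short sketch of the factory above: look up a check by its XZ ID, feed it data, and collect the digest. The sample input and class name are arbitrary:

import org.tukaani.xz.UnsupportedOptionsException;
import org.tukaani.xz.XZ;
import org.tukaani.xz.check.Check;

class CheckDemo {
    public static void main(String[] args) throws UnsupportedOptionsException {
        Check check = Check.getInstance(XZ.CHECK_CRC32);
        byte[] data = "hello".getBytes();
        check.update(data);                  // may be called repeatedly
        byte[] digest = check.finish();      // 4 bytes for CRC32, little-endian
        System.out.println(check.getName() + ": " + digest.length + " bytes");
    }
}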
diff --git a/Java/Tukaani/src/org/tukaani/xz/check/None.java b/Java/Tukaani/src/org/tukaani/xz/check/None.java
deleted file mode 100644
index b07c8e6..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/check/None.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * None
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.check;
-
-public class None extends Check {
- public None() {
- size = 0;
- name = "None";
- }
-
- public void update(byte[] buf, int off, int len) {}
-
- public byte[] finish() {
- byte[] empty = new byte[0];
- return empty;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/check/SHA256.java b/Java/Tukaani/src/org/tukaani/xz/check/SHA256.java
deleted file mode 100644
index 66503c7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/check/SHA256.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * SHA256
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.check;
-
-public class SHA256 extends Check {
- private final java.security.MessageDigest sha256;
-
- public SHA256() throws java.security.NoSuchAlgorithmException {
- size = 32;
- name = "SHA-256";
- sha256 = java.security.MessageDigest.getInstance("SHA-256");
- }
-
- public void update(byte[] buf, int off, int len) {
- sha256.update(buf, off, len);
- }
-
- public byte[] finish() {
- byte[] buf = sha256.digest();
- sha256.reset();
- return buf;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/common/DecoderUtil.java b/Java/Tukaani/src/org/tukaani/xz/common/DecoderUtil.java
deleted file mode 100644
index 77ba441..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/common/DecoderUtil.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * DecoderUtil
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.common;
-
-import java.io.InputStream;
-import java.io.IOException;
-import java.io.EOFException;
-import java.util.zip.CRC32;
-import org.tukaani.xz.XZ;
-import org.tukaani.xz.XZFormatException;
-import org.tukaani.xz.CorruptedInputException;
-import org.tukaani.xz.UnsupportedOptionsException;
-
-public class DecoderUtil extends Util {
- public static boolean isCRC32Valid(byte[] buf, int off, int len,
- int ref_off) {
- CRC32 crc32 = new CRC32();
- crc32.update(buf, off, len);
- long value = crc32.getValue();
-
- for (int i = 0; i < 4; ++i)
- if ((byte)(value >>> (i * 8)) != buf[ref_off + i])
- return false;
-
- return true;
- }
-
- public static StreamFlags decodeStreamHeader(byte[] buf)
- throws IOException {
- for (int i = 0; i < XZ.HEADER_MAGIC.length; ++i)
- if (buf[i] != XZ.HEADER_MAGIC[i])
- throw new XZFormatException();
-
- if (!isCRC32Valid(buf, XZ.HEADER_MAGIC.length, 2,
- XZ.HEADER_MAGIC.length + 2))
- throw new CorruptedInputException("XZ Stream Header is corrupt");
-
- try {
- return decodeStreamFlags(buf, XZ.HEADER_MAGIC.length);
- } catch (UnsupportedOptionsException e) {
- throw new UnsupportedOptionsException(
- "Unsupported options in XZ Stream Header");
- }
- }
-
- public static StreamFlags decodeStreamFooter(byte[] buf)
- throws IOException {
- if (buf[10] != XZ.FOOTER_MAGIC[0] || buf[11] != XZ.FOOTER_MAGIC[1]) {
- // NOTE: The exception could be XZFormatException too.
- // It depends on the situation which one is better.
- throw new CorruptedInputException("XZ Stream Footer is corrupt");
- }
-
- if (!isCRC32Valid(buf, 4, 6, 0))
- throw new CorruptedInputException("XZ Stream Footer is corrupt");
-
- StreamFlags streamFlags;
- try {
- streamFlags = decodeStreamFlags(buf, 8);
- } catch (UnsupportedOptionsException e) {
- throw new UnsupportedOptionsException(
- "Unsupported options in XZ Stream Footer");
- }
-
- streamFlags.backwardSize = 0;
- for (int i = 0; i < 4; ++i)
- streamFlags.backwardSize |= (buf[i + 4] & 0xFF) << (i * 8);
-
- streamFlags.backwardSize = (streamFlags.backwardSize + 1) * 4;
-
- return streamFlags;
- }
-
- private static StreamFlags decodeStreamFlags(byte[] buf, int off)
- throws UnsupportedOptionsException {
- if (buf[off] != 0x00 || (buf[off + 1] & 0xFF) >= 0x10)
- throw new UnsupportedOptionsException();
-
- StreamFlags streamFlags = new StreamFlags();
- streamFlags.checkType = buf[off + 1];
-
- return streamFlags;
- }
-
- public static boolean areStreamFlagsEqual(StreamFlags a, StreamFlags b) {
- // backwardSize is intentionally not compared.
- return a.checkType == b.checkType;
- }
-
- public static long decodeVLI(InputStream in) throws IOException {
- int b = in.read();
- if (b == -1)
- throw new EOFException();
-
- long num = b & 0x7F;
- int i = 0;
-
- while ((b & 0x80) != 0x00) {
- if (++i >= VLI_SIZE_MAX)
- throw new CorruptedInputException();
-
- b = in.read();
- if (b == -1)
- throw new EOFException();
-
- if (b == 0x00)
- throw new CorruptedInputException();
-
- num |= (long)(b & 0x7F) << (i * 7);
- }
-
- return num;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/common/EncoderUtil.java b/Java/Tukaani/src/org/tukaani/xz/common/EncoderUtil.java
deleted file mode 100644
index 57f688b..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/common/EncoderUtil.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * EncoderUtil
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.common;
-
-import java.io.OutputStream;
-import java.io.IOException;
-import java.util.zip.CRC32;
-
-public class EncoderUtil extends Util {
- public static void writeCRC32(OutputStream out, byte[] buf)
- throws IOException {
- CRC32 crc32 = new CRC32();
- crc32.update(buf);
- long value = crc32.getValue();
-
- for (int i = 0; i < 4; ++i)
- out.write((byte)(value >>> (i * 8)));
- }
-
- public static void encodeVLI(OutputStream out, long num)
- throws IOException {
- while (num >= 0x80) {
- out.write((byte)(num | 0x80));
- num >>>= 7;
- }
-
- out.write((byte)num);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/common/StreamFlags.java b/Java/Tukaani/src/org/tukaani/xz/common/StreamFlags.java
deleted file mode 100644
index b306987..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/common/StreamFlags.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * StreamFlags
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.common;
-
-public class StreamFlags {
- public int checkType = -1;
- public long backwardSize = -1;
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/common/Util.java b/Java/Tukaani/src/org/tukaani/xz/common/Util.java
deleted file mode 100644
index c4324ce..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/common/Util.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Util
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.common;
-
-public class Util {
- public static final int STREAM_HEADER_SIZE = 12;
- public static final long BACKWARD_SIZE_MAX = 1L << 34;
- public static final int BLOCK_HEADER_SIZE_MAX = 1024;
- public static final long VLI_MAX = Long.MAX_VALUE;
- public static final int VLI_SIZE_MAX = 9;
-
- public static int getVLISize(long num) {
- int size = 0;
- do {
- ++size;
- num >>= 7;
- } while (num != 0);
-
- return size;
- }
-}
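To make the variable-length integer coding used by the Index concrete (7 payload bits per byte, high bit set on every byte except the last), a round-trip sketch through the helpers above; the class name is illustrative:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.tukaani.xz.common.DecoderUtil;
import org.tukaani.xz.common.EncoderUtil;
import org.tukaani.xz.common.Util;

class VLIDemo {
    public static void main(String[] args) throws IOException {
        long num = 300;                                  // encodes as 0xAC 0x02
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        EncoderUtil.encodeVLI(out, num);
        byte[] encoded = out.toByteArray();

        // The encoded length agrees with Util.getVLISize().
        System.out.println(encoded.length == Util.getVLISize(num));   // true

        long decoded = DecoderUtil.decodeVLI(new ByteArrayInputStream(encoded));
        System.out.println(decoded == num);                           // true
    }
}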
diff --git a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaCoder.java b/Java/Tukaani/src/org/tukaani/xz/delta/DeltaCoder.java
deleted file mode 100644
index d94eb66..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaCoder.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * DeltaCoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.delta;
-
-abstract class DeltaCoder {
- static final int DISTANCE_MIN = 1;
- static final int DISTANCE_MAX = 256;
- static final int DISTANCE_MASK = DISTANCE_MAX - 1;
-
- final int distance;
- final byte[] history = new byte[DISTANCE_MAX];
- int pos = 0;
-
- DeltaCoder(int distance) {
- if (distance < DISTANCE_MIN || distance > DISTANCE_MAX)
- throw new IllegalArgumentException();
-
- this.distance = distance;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaDecoder.java b/Java/Tukaani/src/org/tukaani/xz/delta/DeltaDecoder.java
deleted file mode 100644
index 154cbf3..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaDecoder.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * DeltaDecoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.delta;
-
-public class DeltaDecoder extends DeltaCoder {
- public DeltaDecoder(int distance) {
- super(distance);
- }
-
- public void decode(byte[] buf, int off, int len) {
- int end = off + len;
- for (int i = off; i < end; ++i) {
- buf[i] += history[(distance + pos) & DISTANCE_MASK];
- history[pos-- & DISTANCE_MASK] = buf[i];
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaEncoder.java b/Java/Tukaani/src/org/tukaani/xz/delta/DeltaEncoder.java
deleted file mode 100644
index 17accce..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/delta/DeltaEncoder.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * DeltaEncoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.delta;
-
-public class DeltaEncoder extends DeltaCoder {
- public DeltaEncoder(int distance) {
- super(distance);
- }
-
- public void encode(byte[] in, int in_off, int len, byte[] out) {
- for (int i = 0; i < len; ++i) {
- byte tmp = history[(distance + pos) & DISTANCE_MASK];
- history[pos-- & DISTANCE_MASK] = in[in_off + i];
- out[i] = (byte)(in[in_off + i] - tmp);
- }
- }
-}
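The two delta coders above are mirror images; a round-trip sketch with distance 1, using fresh coders so both history buffers start zeroed (the sample bytes and class name are arbitrary):

import org.tukaani.xz.delta.DeltaDecoder;
import org.tukaani.xz.delta.DeltaEncoder;

class DeltaDemo {
    public static void main(String[] args) {
        byte[] original = { 10, 12, 15, 15, 20 };
        byte[] delta = new byte[original.length];

        new DeltaEncoder(1).encode(original, 0, original.length, delta);
        // delta is now { 10, 2, 3, 0, 5 }: each byte minus the one before it.

        new DeltaDecoder(1).decode(delta, 0, delta.length);   // decodes in place
        // delta now holds the original bytes again.
    }
}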
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/BlockInfo.java b/Java/Tukaani/src/org/tukaani/xz/index/BlockInfo.java
deleted file mode 100644
index babae7f..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/BlockInfo.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * BlockInfo
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-import org.tukaani.xz.common.StreamFlags;
-
-public class BlockInfo {
- public int blockNumber = -1;
- public long compressedOffset = -1;
- public long uncompressedOffset = -1;
- public long unpaddedSize = -1;
- public long uncompressedSize = -1;
-
- IndexDecoder index;
-
- public BlockInfo(IndexDecoder indexOfFirstStream) {
- index = indexOfFirstStream;
- }
-
- public int getCheckType() {
- return index.getStreamFlags().checkType;
- }
-
- public boolean hasNext() {
- return index.hasRecord(blockNumber + 1);
- }
-
- public void setNext() {
- index.setBlockInfo(this, blockNumber + 1);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/IndexBase.java b/Java/Tukaani/src/org/tukaani/xz/index/IndexBase.java
deleted file mode 100644
index e556105..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/IndexBase.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * IndexBase
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-import org.tukaani.xz.common.Util;
-import org.tukaani.xz.XZIOException;
-
-abstract class IndexBase {
- private final XZIOException invalidIndexException;
- long blocksSum = 0;
- long uncompressedSum = 0;
- long indexListSize = 0;
- long recordCount = 0;
-
- IndexBase(XZIOException invalidIndexException) {
- this.invalidIndexException = invalidIndexException;
- }
-
- private long getUnpaddedIndexSize() {
- // Index Indicator + Number of Records + List of Records + CRC32
- return 1 + Util.getVLISize(recordCount) + indexListSize + 4;
- }
-
- public long getIndexSize() {
- return (getUnpaddedIndexSize() + 3) & ~3;
- }
-
- public long getStreamSize() {
- return Util.STREAM_HEADER_SIZE + blocksSum + getIndexSize()
- + Util.STREAM_HEADER_SIZE;
- }
-
- int getIndexPaddingSize() {
- return (int)((4 - getUnpaddedIndexSize()) & 3);
- }
-
- void add(long unpaddedSize, long uncompressedSize) throws XZIOException {
- blocksSum += (unpaddedSize + 3) & ~3;
- uncompressedSum += uncompressedSize;
- indexListSize += Util.getVLISize(unpaddedSize)
- + Util.getVLISize(uncompressedSize);
- ++recordCount;
-
- if (blocksSum < 0 || uncompressedSum < 0
- || getIndexSize() > Util.BACKWARD_SIZE_MAX
- || getStreamSize() < 0)
- throw invalidIndexException;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/IndexDecoder.java b/Java/Tukaani/src/org/tukaani/xz/index/IndexDecoder.java
deleted file mode 100644
index a3ae986..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/IndexDecoder.java
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * IndexDecoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-import java.io.IOException;
-import java.io.EOFException;
-import java.util.zip.CheckedInputStream;
-import org.tukaani.xz.common.DecoderUtil;
-import org.tukaani.xz.common.StreamFlags;
-import org.tukaani.xz.SeekableInputStream;
-import org.tukaani.xz.CorruptedInputException;
-import org.tukaani.xz.MemoryLimitException;
-import org.tukaani.xz.UnsupportedOptionsException;
-
-public class IndexDecoder extends IndexBase {
- private final StreamFlags streamFlags;
- private final long streamPadding;
- private final int memoryUsage;
-
- // Unpadded Size and Uncompressed Size fields
- private final long[] unpadded;
- private final long[] uncompressed;
-
- // Uncompressed size of the largest Block. It is used by
- // SeekableXZInputStream to find out the largest Block of the .xz file.
- private long largestBlockSize = 0;
-
- // Offsets relative to the beginning of the .xz file. These are all zero
- // for the first Stream in the file.
- private int recordOffset = 0;
- private long compressedOffset = 0;
- private long uncompressedOffset = 0;
-
- public IndexDecoder(SeekableInputStream in, StreamFlags streamFooterFlags,
- long streamPadding, int memoryLimit)
- throws IOException {
- super(new CorruptedInputException("XZ Index is corrupt"));
- this.streamFlags = streamFooterFlags;
- this.streamPadding = streamPadding;
-
- // If endPos is exceeded before the CRC32 field has been decoded,
- // the Index is corrupt.
- long endPos = in.position() + streamFooterFlags.backwardSize - 4;
-
- java.util.zip.CRC32 crc32 = new java.util.zip.CRC32();
- CheckedInputStream inChecked = new CheckedInputStream(in, crc32);
-
- // Index Indicator
- if (inChecked.read() != 0x00)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- try {
- // Number of Records
- long count = DecoderUtil.decodeVLI(inChecked);
-
- // Catch Record counts that are obviously too high to be valid.
- // This test isn't exact because it ignores Index Indicator,
- // Number of Records, and CRC32 fields, but this is good enough
- // to catch the most obvious problems.
- if (count >= streamFooterFlags.backwardSize / 2)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // If the Record count doesn't fit into an int, we cannot
- // allocate the arrays to hold the Records.
- if (count > Integer.MAX_VALUE)
- throw new UnsupportedOptionsException("XZ Index has over "
- + Integer.MAX_VALUE + " Records");
-
- // Calculate approximate memory requirements and check the
- // memory usage limit.
- memoryUsage = 1 + (int)((16L * count + 1023) / 1024);
- if (memoryLimit >= 0 && memoryUsage > memoryLimit)
- throw new MemoryLimitException(memoryUsage, memoryLimit);
-
- // Allocate the arrays for the Records.
- unpadded = new long[(int)count];
- uncompressed = new long[(int)count];
- int record = 0;
-
- // Decode the Records.
- for (int i = (int)count; i > 0; --i) {
- // Get the next Record.
- long unpaddedSize = DecoderUtil.decodeVLI(inChecked);
- long uncompressedSize = DecoderUtil.decodeVLI(inChecked);
-
- // Check that the input position stays sane. Since this is
- // checked only once per loop iteration instead of for
- // every input byte read, it's still possible that
- // EOFException gets thrown with corrupt input.
- if (in.position() > endPos)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // Add the new Record.
- unpadded[record] = blocksSum + unpaddedSize;
- uncompressed[record] = uncompressedSum + uncompressedSize;
- ++record;
- super.add(unpaddedSize, uncompressedSize);
- assert record == recordCount;
-
- // Remember the uncompressed size of the largest Block.
- if (largestBlockSize < uncompressedSize)
- largestBlockSize = uncompressedSize;
- }
- } catch (EOFException e) {
- // EOFException is caught just in case a corrupt input causes
- // DecoderUtil.decodeVLI to read too much at once.
- throw new CorruptedInputException("XZ Index is corrupt");
- }
-
- // Validate that the size of the Index field matches
- // Backward Size.
- int indexPaddingSize = getIndexPaddingSize();
- if (in.position() + indexPaddingSize != endPos)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // Index Padding
- while (indexPaddingSize-- > 0)
- if (inChecked.read() != 0x00)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // CRC32
- long value = crc32.getValue();
- for (int i = 0; i < 4; ++i)
- if (((value >>> (i * 8)) & 0xFF) != in.read())
- throw new CorruptedInputException("XZ Index is corrupt");
- }
-
- public void setOffsets(IndexDecoder prev) {
- // NOTE: SeekableXZInputStream checks that the total number of Blocks
- // in concatenated Streams fits into an int.
- recordOffset = prev.recordOffset + (int)prev.recordCount;
- compressedOffset = prev.compressedOffset
- + prev.getStreamSize() + prev.streamPadding;
- assert (compressedOffset & 3) == 0;
- uncompressedOffset = prev.uncompressedOffset + prev.uncompressedSum;
- }
-
- public int getMemoryUsage() {
- return memoryUsage;
- }
-
- public StreamFlags getStreamFlags() {
- return streamFlags;
- }
-
- public int getRecordCount() {
- // It was already checked in the constructor that it fits into an int.
- // Otherwise we couldn't have allocated the arrays.
- return (int)recordCount;
- }
-
- public long getUncompressedSize() {
- return uncompressedSum;
- }
-
- public long getLargestBlockSize() {
- return largestBlockSize;
- }
-
- public boolean hasUncompressedOffset(long pos) {
- return pos >= uncompressedOffset
- && pos < uncompressedOffset + uncompressedSum;
- }
-
- public boolean hasRecord(int blockNumber) {
- return blockNumber >= recordOffset
- && blockNumber < recordOffset + recordCount;
- }
-
- public void locateBlock(BlockInfo info, long target) {
- assert target >= uncompressedOffset;
- target -= uncompressedOffset;
- assert target < uncompressedSum;
-
- int left = 0;
- int right = unpadded.length - 1;
-
- while (left < right) {
- int i = left + (right - left) / 2;
-
- if (uncompressed[i] <= target)
- left = i + 1;
- else
- right = i;
- }
-
- setBlockInfo(info, recordOffset + left);
- }
-
- public void setBlockInfo(BlockInfo info, int blockNumber) {
- // The caller has checked that the given Block number is inside
- // this Index.
- assert blockNumber >= recordOffset;
- assert blockNumber - recordOffset < recordCount;
-
- info.index = this;
- info.blockNumber = blockNumber;
-
- int pos = blockNumber - recordOffset;
-
- if (pos == 0) {
- info.compressedOffset = 0;
- info.uncompressedOffset = 0;
- } else {
- info.compressedOffset = (unpadded[pos - 1] + 3) & ~3;
- info.uncompressedOffset = uncompressed[pos - 1];
- }
-
- info.unpaddedSize = unpadded[pos] - info.compressedOffset;
- info.uncompressedSize = uncompressed[pos] - info.uncompressedOffset;
-
- info.compressedOffset += compressedOffset
- + DecoderUtil.STREAM_HEADER_SIZE;
- info.uncompressedOffset += uncompressedOffset;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/IndexEncoder.java b/Java/Tukaani/src/org/tukaani/xz/index/IndexEncoder.java
deleted file mode 100644
index 9db40d1..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/IndexEncoder.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * IndexEncoder
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-import java.io.OutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.zip.CheckedOutputStream;
-import org.tukaani.xz.common.EncoderUtil;
-import org.tukaani.xz.XZIOException;
-
-public class IndexEncoder extends IndexBase {
- private final ArrayList records = new ArrayList();
-
- public IndexEncoder() {
- super(new XZIOException("XZ Stream or its Index has grown too big"));
- }
-
- public void add(long unpaddedSize, long uncompressedSize)
- throws XZIOException {
- super.add(unpaddedSize, uncompressedSize);
- records.add(new IndexRecord(unpaddedSize, uncompressedSize));
- }
-
- public void encode(OutputStream out) throws IOException {
- java.util.zip.CRC32 crc32 = new java.util.zip.CRC32();
- CheckedOutputStream outChecked = new CheckedOutputStream(out, crc32);
-
- // Index Indicator
- outChecked.write(0x00);
-
- // Number of Records
- EncoderUtil.encodeVLI(outChecked, recordCount);
-
- // List of Records
- for (Iterator i = records.iterator(); i.hasNext(); ) {
- IndexRecord record = (IndexRecord)i.next();
- EncoderUtil.encodeVLI(outChecked, record.unpadded);
- EncoderUtil.encodeVLI(outChecked, record.uncompressed);
- }
-
- // Index Padding
- for (int i = getIndexPaddingSize(); i > 0; --i)
- outChecked.write(0x00);
-
- // CRC32
- long value = crc32.getValue();
- for (int i = 0; i < 4; ++i)
- out.write((byte)(value >>> (i * 8)));
- }
-}
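
encode() above writes each Record as two variable-length integers via EncoderUtil.encodeVLI (imported from org.tukaani.xz.common, not shown in this excerpt). As a reference point, here is a minimal sketch of the .xz variable-length integer encoding: 7 bits per byte, least significant bits first, with the high bit set on every byte except the last. The class name is illustrative.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class VLISketch {
    // Encode an unsigned integer 7 bits at a time, low bits first,
    // setting the continuation bit 0x80 on every byte except the last one.
    static void encodeVLI(OutputStream out, long num) throws IOException {
        while (num >= 0x80) {
            out.write((int)(num & 0x7F) | 0x80);
            num >>>= 7;
        }
        out.write((int)num);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        encodeVLI(buf, 300);
        for (byte b : buf.toByteArray())
            System.out.printf("%02X ", b);   // prints "AC 02 "
        System.out.println();
    }
}
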
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/IndexHash.java b/Java/Tukaani/src/org/tukaani/xz/index/IndexHash.java
deleted file mode 100644
index ab168c6..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/IndexHash.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * IndexHash
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.zip.CheckedInputStream;
-import org.tukaani.xz.common.DecoderUtil;
-import org.tukaani.xz.XZIOException;
-import org.tukaani.xz.CorruptedInputException;
-
-public class IndexHash extends IndexBase {
- private org.tukaani.xz.check.Check hash;
-
- public IndexHash() {
- super(new CorruptedInputException());
-
- try {
- hash = new org.tukaani.xz.check.SHA256();
- } catch (java.security.NoSuchAlgorithmException e) {
- hash = new org.tukaani.xz.check.CRC32();
- }
- }
-
- public void add(long unpaddedSize, long uncompressedSize)
- throws XZIOException {
- super.add(unpaddedSize, uncompressedSize);
-
- ByteBuffer buf = ByteBuffer.allocate(2 * 8);
- buf.putLong(unpaddedSize);
- buf.putLong(uncompressedSize);
- hash.update(buf.array());
- }
-
- public void validate(InputStream in) throws IOException {
- // Index Indicator (0x00) has already been read by BlockInputStream
- // so add 0x00 to the CRC32 here.
- java.util.zip.CRC32 crc32 = new java.util.zip.CRC32();
- crc32.update('\0');
- CheckedInputStream inChecked = new CheckedInputStream(in, crc32);
-
- // Get and validate the Number of Records field.
- long storedRecordCount = DecoderUtil.decodeVLI(inChecked);
- if (storedRecordCount != recordCount)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // Decode and hash the Index field and compare it to
- // the hash value calculated from the decoded Blocks.
- IndexHash stored = new IndexHash();
- for (long i = 0; i < recordCount; ++i) {
- long unpaddedSize = DecoderUtil.decodeVLI(inChecked);
- long uncompressedSize = DecoderUtil.decodeVLI(inChecked);
-
- try {
- stored.add(unpaddedSize, uncompressedSize);
- } catch (XZIOException e) {
- throw new CorruptedInputException("XZ Index is corrupt");
- }
-
- if (stored.blocksSum > blocksSum
- || stored.uncompressedSum > uncompressedSum
- || stored.indexListSize > indexListSize)
- throw new CorruptedInputException("XZ Index is corrupt");
- }
-
- if (stored.blocksSum != blocksSum
- || stored.uncompressedSum != uncompressedSum
- || stored.indexListSize != indexListSize
- || !Arrays.equals(stored.hash.finish(), hash.finish()))
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // Index Padding
- DataInputStream inData = new DataInputStream(inChecked);
- for (int i = getIndexPaddingSize(); i > 0; --i)
- if (inData.readUnsignedByte() != 0x00)
- throw new CorruptedInputException("XZ Index is corrupt");
-
- // CRC32
- long value = crc32.getValue();
- for (int i = 0; i < 4; ++i)
- if (((value >>> (i * 8)) & 0xFF) != inData.readUnsignedByte())
- throw new CorruptedInputException("XZ Index is corrupt");
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/index/IndexRecord.java b/Java/Tukaani/src/org/tukaani/xz/index/IndexRecord.java
deleted file mode 100644
index 5f6ba0f..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/index/IndexRecord.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * IndexRecord
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.index;
-
-class IndexRecord {
- final long unpadded;
- final long uncompressed;
-
- IndexRecord(long unpadded, long uncompressed) {
- this.unpadded = unpadded;
- this.uncompressed = uncompressed;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/BT4.java b/Java/Tukaani/src/org/tukaani/xz/lz/BT4.java
deleted file mode 100644
index a73b666..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/BT4.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Binary Tree match finder with 2-, 3-, and 4-byte hashing
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-final class BT4 extends LZEncoder {
- private final Hash234 hash;
- private final int[] tree;
- private final Matches matches;
- private final int depthLimit;
-
- private final int cyclicSize;
- private int cyclicPos = -1;
- private int lzPos;
-
- static int getMemoryUsage(int dictSize) {
- return Hash234.getMemoryUsage(dictSize) + dictSize / (1024 / 8) + 10;
- }
-
- BT4(int dictSize, int beforeSizeMin, int readAheadMax,
- int niceLen, int matchLenMax, int depthLimit) {
- super(dictSize, beforeSizeMin, readAheadMax, niceLen, matchLenMax);
-
- cyclicSize = dictSize + 1;
- lzPos = cyclicSize;
-
- hash = new Hash234(dictSize);
- tree = new int[cyclicSize * 2];
-
-        // Subtracting 1 because the shortest match that this match
- // finder can find is 2 bytes, so there's no need to reserve
- // space for one-byte matches.
- matches = new Matches(niceLen - 1);
-
- this.depthLimit = depthLimit > 0 ? depthLimit : 16 + niceLen / 2;
- }
-
- private int movePos() {
- int avail = movePos(niceLen, 4);
-
- if (avail != 0) {
- if (++lzPos == Integer.MAX_VALUE) {
- int normalizationOffset = Integer.MAX_VALUE - cyclicSize;
- hash.normalize(normalizationOffset);
- normalize(tree, normalizationOffset);
- lzPos -= normalizationOffset;
- }
-
- if (++cyclicPos == cyclicSize)
- cyclicPos = 0;
- }
-
- return avail;
- }
-
- public Matches getMatches() {
- matches.count = 0;
-
- int matchLenLimit = matchLenMax;
- int niceLenLimit = niceLen;
- int avail = movePos();
-
- if (avail < matchLenLimit) {
- if (avail == 0)
- return matches;
-
- matchLenLimit = avail;
- if (niceLenLimit > avail)
- niceLenLimit = avail;
- }
-
- hash.calcHashes(buf, readPos);
- int delta2 = lzPos - hash.getHash2Pos();
- int delta3 = lzPos - hash.getHash3Pos();
- int currentMatch = hash.getHash4Pos();
- hash.updateTables(lzPos);
-
- int lenBest = 0;
-
- // See if the hash from the first two bytes found a match.
- // The hashing algorithm guarantees that if the first byte
- // matches, also the second byte does, so there's no need to
- // test the second byte.
- if (delta2 < cyclicSize && buf[readPos - delta2] == buf[readPos]) {
- lenBest = 2;
- matches.len[0] = 2;
- matches.dist[0] = delta2 - 1;
- matches.count = 1;
- }
-
- // See if the hash from the first three bytes found a match that
- // is different from the match possibly found by the two-byte hash.
- // Also here the hashing algorithm guarantees that if the first byte
- // matches, also the next two bytes do.
- if (delta2 != delta3 && delta3 < cyclicSize
- && buf[readPos - delta3] == buf[readPos]) {
- lenBest = 3;
- matches.dist[matches.count++] = delta3 - 1;
- delta2 = delta3;
- }
-
- // If a match was found, see how long it is.
- if (matches.count > 0) {
- while (lenBest < matchLenLimit && buf[readPos + lenBest - delta2]
- == buf[readPos + lenBest])
- ++lenBest;
-
- matches.len[matches.count - 1] = lenBest;
-
- // Return if it is long enough (niceLen or reached the end of
- // the dictionary).
- if (lenBest >= niceLenLimit) {
- skip(niceLenLimit, currentMatch);
- return matches;
- }
- }
-
- // Long enough match wasn't found so easily. Look for better matches
- // from the binary tree.
- if (lenBest < 3)
- lenBest = 3;
-
- int depth = depthLimit;
-
- int ptr0 = (cyclicPos << 1) + 1;
- int ptr1 = cyclicPos << 1;
- int len0 = 0;
- int len1 = 0;
-
- while (true) {
- int delta = lzPos - currentMatch;
-
- // Return if the search depth limit has been reached or
- // if the distance of the potential match exceeds the
- // dictionary size.
- if (depth-- == 0 || delta >= cyclicSize) {
- tree[ptr0] = 0;
- tree[ptr1] = 0;
- return matches;
- }
-
- int pair = (cyclicPos - delta
- + (delta > cyclicPos ? cyclicSize : 0)) << 1;
- int len = Math.min(len0, len1);
-
- if (buf[readPos + len - delta] == buf[readPos + len]) {
- while (++len < matchLenLimit)
- if (buf[readPos + len - delta] != buf[readPos + len])
- break;
-
- if (len > lenBest) {
- lenBest = len;
- matches.len[matches.count] = len;
- matches.dist[matches.count] = delta - 1;
- ++matches.count;
-
- if (len >= niceLenLimit) {
- tree[ptr1] = tree[pair];
- tree[ptr0] = tree[pair + 1];
- return matches;
- }
- }
- }
-
- if ((buf[readPos + len - delta] & 0xFF)
- < (buf[readPos + len] & 0xFF)) {
- tree[ptr1] = currentMatch;
- ptr1 = pair + 1;
- currentMatch = tree[ptr1];
- len1 = len;
- } else {
- tree[ptr0] = currentMatch;
- ptr0 = pair;
- currentMatch = tree[ptr0];
- len0 = len;
- }
- }
- }
-
- private void skip(int niceLenLimit, int currentMatch) {
- int depth = depthLimit;
-
- int ptr0 = (cyclicPos << 1) + 1;
- int ptr1 = cyclicPos << 1;
- int len0 = 0;
- int len1 = 0;
-
- while (true) {
- int delta = lzPos - currentMatch;
-
- if (depth-- == 0 || delta >= cyclicSize) {
- tree[ptr0] = 0;
- tree[ptr1] = 0;
- return;
- }
-
- int pair = (cyclicPos - delta
- + (delta > cyclicPos ? cyclicSize : 0)) << 1;
- int len = Math.min(len0, len1);
-
- if (buf[readPos + len - delta] == buf[readPos + len]) {
- // No need to look for longer matches than niceLenLimit
-                // because we are only updating the tree, not returning
-                // the matches found to the caller.
- do {
- if (++len == niceLenLimit) {
- tree[ptr1] = tree[pair];
- tree[ptr0] = tree[pair + 1];
- return;
- }
- } while (buf[readPos + len - delta] == buf[readPos + len]);
- }
-
- if ((buf[readPos + len - delta] & 0xFF)
- < (buf[readPos + len] & 0xFF)) {
- tree[ptr1] = currentMatch;
- ptr1 = pair + 1;
- currentMatch = tree[ptr1];
- len1 = len;
- } else {
- tree[ptr0] = currentMatch;
- ptr0 = pair;
- currentMatch = tree[ptr0];
- len0 = len;
- }
- }
- }
-
- public void skip(int len) {
- while (len-- > 0) {
- int niceLenLimit = niceLen;
- int avail = movePos();
-
- if (avail < niceLenLimit) {
- if (avail == 0)
- continue;
-
- niceLenLimit = avail;
- }
-
- hash.calcHashes(buf, readPos);
- int currentMatch = hash.getHash4Pos();
- hash.updateTables(lzPos);
-
- skip(niceLenLimit, currentMatch);
- }
- }
-}
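
Both BT4 above and the HC4 match finder later in this change map a match delta (how many positions back the candidate is) to a slot in a cyclic array using the same wrap-around expression. A small standalone sketch of that index math with illustrative sizes:

public class CyclicIndexSketch {
    public static void main(String[] args) {
        int cyclicSize = 8;   // illustrative; in the match finders it is dictSize + 1
        int cyclicPos = 2;    // current slot in the cyclic array

        for (int delta = 1; delta <= 5; ++delta) {
            int index = cyclicPos - delta + (delta > cyclicPos ? cyclicSize : 0);
            System.out.println("delta " + delta + " -> index " + index);
        }
        // prints indices 1, 0, 7, 6, 5: older positions wrap to the end of the array
    }
}
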
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/CRC32Hash.java b/Java/Tukaani/src/org/tukaani/xz/lz/CRC32Hash.java
deleted file mode 100644
index 2adfdbf..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/CRC32Hash.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * CRC32Hash
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-/**
- * Provides a CRC32 table using the polynomial from IEEE 802.3.
- */
-class CRC32Hash {
- private static final int CRC32_POLY = 0xEDB88320;
-
- static final int[] crcTable = new int[256];
-
- static {
- for (int i = 0; i < 256; ++i) {
- int r = i;
-
- for (int j = 0; j < 8; ++j) {
- if ((r & 1) != 0)
- r = (r >>> 1) ^ CRC32_POLY;
- else
- r >>>= 1;
- }
-
- crcTable[i] = r;
- }
- }
-}
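
The table built above is the standard reflected IEEE 802.3 CRC-32 table; the match finders use it only as a cheap byte-mixing function for hashing, but it can be cross-checked against java.util.zip.CRC32. A small self-contained sketch (class name illustrative):

import java.util.zip.CRC32;

public class CrcTableSketch {
    public static void main(String[] args) {
        // Same table construction as CRC32Hash above.
        int[] crcTable = new int[256];
        for (int i = 0; i < 256; ++i) {
            int r = i;
            for (int j = 0; j < 8; ++j)
                r = (r & 1) != 0 ? (r >>> 1) ^ 0xEDB88320 : r >>> 1;
            crcTable[i] = r;
        }

        // One table lookup performs a full CRC-32 of a single byte.
        for (int b = 0; b < 256; ++b) {
            int crc = 0xFFFFFFFF;
            crc = (crc >>> 8) ^ crcTable[(crc ^ b) & 0xFF];
            int viaTable = ~crc;

            CRC32 reference = new CRC32();
            reference.update(b);
            if (viaTable != (int)reference.getValue())
                throw new AssertionError("mismatch at byte " + b);
        }
        System.out.println("table matches java.util.zip.CRC32 for all single bytes");
    }
}
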
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/HC4.java b/Java/Tukaani/src/org/tukaani/xz/lz/HC4.java
deleted file mode 100644
index 0f49fd4..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/HC4.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Hash Chain match finder with 2-, 3-, and 4-byte hashing
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-final class HC4 extends LZEncoder {
- private final Hash234 hash;
- private final int[] chain;
- private final Matches matches;
- private final int depthLimit;
-
- private final int cyclicSize;
- private int cyclicPos = -1;
- private int lzPos;
-
- /**
- * Gets approximate memory usage of the match finder as kibibytes.
- */
- static int getMemoryUsage(int dictSize) {
- return Hash234.getMemoryUsage(dictSize) + dictSize / (1024 / 4) + 10;
- }
-
- /**
- * Creates a new LZEncoder with the HC4 match finder.
- * See <code>LZEncoder.getInstance</code> for parameter descriptions.
- */
- HC4(int dictSize, int beforeSizeMin, int readAheadMax,
- int niceLen, int matchLenMax, int depthLimit) {
- super(dictSize, beforeSizeMin, readAheadMax, niceLen, matchLenMax);
-
- hash = new Hash234(dictSize);
-
- // +1 because we need dictSize bytes of history + the current byte.
- cyclicSize = dictSize + 1;
- chain = new int[cyclicSize];
- lzPos = cyclicSize;
-
-        // Subtracting 1 because the shortest match that this match
- // finder can find is 2 bytes, so there's no need to reserve
- // space for one-byte matches.
- matches = new Matches(niceLen - 1);
-
- // Use a default depth limit if no other value was specified.
- // The default is just something based on experimentation;
- // it's nothing magic.
- this.depthLimit = (depthLimit > 0) ? depthLimit : 4 + niceLen / 4;
- }
-
- /**
- * Moves to the next byte, checks that there is enough available space,
- * and possibly normalizes the hash tables and the hash chain.
- *
- * @return number of bytes available, including the current byte
- */
- private int movePos() {
- int avail = movePos(4, 4);
-
- if (avail != 0) {
- if (++lzPos == Integer.MAX_VALUE) {
- int normalizationOffset = Integer.MAX_VALUE - cyclicSize;
- hash.normalize(normalizationOffset);
- normalize(chain, normalizationOffset);
- lzPos -= normalizationOffset;
- }
-
- if (++cyclicPos == cyclicSize)
- cyclicPos = 0;
- }
-
- return avail;
- }
-
- public Matches getMatches() {
- matches.count = 0;
- int matchLenLimit = matchLenMax;
- int niceLenLimit = niceLen;
- int avail = movePos();
-
- if (avail < matchLenLimit) {
- if (avail == 0)
- return matches;
-
- matchLenLimit = avail;
- if (niceLenLimit > avail)
- niceLenLimit = avail;
- }
-
- hash.calcHashes(buf, readPos);
- int delta2 = lzPos - hash.getHash2Pos();
- int delta3 = lzPos - hash.getHash3Pos();
- int currentMatch = hash.getHash4Pos();
- hash.updateTables(lzPos);
-
- chain[cyclicPos] = currentMatch;
-
- int lenBest = 0;
-
- // See if the hash from the first two bytes found a match.
- // The hashing algorithm guarantees that if the first byte
- // matches, also the second byte does, so there's no need to
- // test the second byte.
- if (delta2 < cyclicSize && buf[readPos - delta2] == buf[readPos]) {
- lenBest = 2;
- matches.len[0] = 2;
- matches.dist[0] = delta2 - 1;
- matches.count = 1;
- }
-
- // See if the hash from the first three bytes found a match that
- // is different from the match possibly found by the two-byte hash.
- // Also here the hashing algorithm guarantees that if the first byte
- // matches, also the next two bytes do.
- if (delta2 != delta3 && delta3 < cyclicSize
- && buf[readPos - delta3] == buf[readPos]) {
- lenBest = 3;
- matches.dist[matches.count++] = delta3 - 1;
- delta2 = delta3;
- }
-
- // If a match was found, see how long it is.
- if (matches.count > 0) {
- while (lenBest < matchLenLimit && buf[readPos + lenBest - delta2]
- == buf[readPos + lenBest])
- ++lenBest;
-
- matches.len[matches.count - 1] = lenBest;
-
- // Return if it is long enough (niceLen or reached the end of
- // the dictionary).
- if (lenBest >= niceLenLimit)
- return matches;
- }
-
- // Long enough match wasn't found so easily. Look for better matches
- // from the hash chain.
- if (lenBest < 3)
- lenBest = 3;
-
- int depth = depthLimit;
-
- while (true) {
- int delta = lzPos - currentMatch;
-
- // Return if the search depth limit has been reached or
- // if the distance of the potential match exceeds the
- // dictionary size.
- if (depth-- == 0 || delta >= cyclicSize)
- return matches;
-
- currentMatch = chain[cyclicPos - delta
- + (delta > cyclicPos ? cyclicSize : 0)];
-
- // Test the first byte and the first new byte that would give us
-            // a match that is at least one byte longer than lenBest. This way
-            // too-short matches get skipped quickly.
- if (buf[readPos + lenBest - delta] == buf[readPos + lenBest]
- && buf[readPos - delta] == buf[readPos]) {
- // Calculate the length of the match.
- int len = 0;
- while (++len < matchLenLimit)
- if (buf[readPos + len - delta] != buf[readPos + len])
- break;
-
- // Use the match if and only if it is better than the longest
- // match found so far.
- if (len > lenBest) {
- lenBest = len;
- matches.len[matches.count] = len;
- matches.dist[matches.count] = delta - 1;
- ++matches.count;
-
- // Return if it is long enough (niceLen or reached the
- // end of the dictionary).
- if (len >= niceLenLimit)
- return matches;
- }
- }
- }
- }
-
- public void skip(int len) {
- assert len >= 0;
-
- while (len-- > 0) {
- if (movePos() != 0) {
- // Update the hash chain and hash tables.
- hash.calcHashes(buf, readPos);
- chain[cyclicPos] = hash.getHash4Pos();
- hash.updateTables(lzPos);
- }
- }
- }
-}
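
HC4 above finds candidate matches by walking a hash chain: hash4Table remembers the most recent position that had a given hash value, and chain[cyclicPos] links back to the previous position with the same hash, so candidates are visited from newest to oldest. The sketch below shows the same idea in isolation with a toy one-byte "hash" and without the cyclic buffer; names and data are illustrative.

import java.util.Arrays;

public class HashChainSketch {
    public static void main(String[] args) {
        byte[] data = "abracadabra".getBytes();

        int[] hashHead = new int[256];        // most recent position per hash value
        int[] chain = new int[data.length];   // previous position with the same hash
        Arrays.fill(hashHead, -1);

        for (int pos = 0; pos < data.length; ++pos) {
            int h = data[pos] & 0xFF;         // toy hash: just the byte itself
            chain[pos] = hashHead[h];
            hashHead[h] = pos;
        }

        // Candidate match positions for the final 'a' (position 10),
        // visited from newest to oldest: 7 5 3 0
        for (int p = chain[10]; p != -1; p = chain[p])
            System.out.print(p + " ");
        System.out.println();
    }
}
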
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/Hash234.java b/Java/Tukaani/src/org/tukaani/xz/lz/Hash234.java
deleted file mode 100644
index 8253bc0..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/Hash234.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * 2-, 3-, and 4-byte hashing
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-final class Hash234 extends CRC32Hash {
- private static final int HASH_2_SIZE = 1 << 10;
- private static final int HASH_2_MASK = HASH_2_SIZE - 1;
-
- private static final int HASH_3_SIZE = 1 << 16;
- private static final int HASH_3_MASK = HASH_3_SIZE - 1;
-
- private final int hash4Mask;
-
- private final int[] hash2Table = new int[HASH_2_SIZE];
- private final int[] hash3Table = new int[HASH_3_SIZE];
- private final int[] hash4Table;
-
- private int hash2Value = 0;
- private int hash3Value = 0;
- private int hash4Value = 0;
-
- static int getHash4Size(int dictSize) {
- int h = dictSize - 1;
- h |= h >>> 1;
- h |= h >>> 2;
- h |= h >>> 4;
- h |= h >>> 8;
- h >>>= 1;
- h |= 0xFFFF;
- if (h > (1 << 24))
- h >>>= 1;
-
- return h + 1;
- }
-
- static int getMemoryUsage(int dictSize) {
- // Sizes of the hash arrays + a little extra
- return (HASH_2_SIZE + HASH_3_SIZE + getHash4Size(dictSize))
- / (1024 / 4) + 4;
- }
-
- Hash234(int dictSize) {
- hash4Table = new int[getHash4Size(dictSize)];
- hash4Mask = hash4Table.length - 1;
- }
-
- void calcHashes(byte[] buf, int off) {
- int temp = crcTable[buf[off] & 0xFF] ^ (buf[off + 1] & 0xFF);
- hash2Value = temp & HASH_2_MASK;
-
- temp ^= (buf[off + 2] & 0xFF) << 8;
- hash3Value = temp & HASH_3_MASK;
-
- temp ^= crcTable[buf[off + 3] & 0xFF] << 5;
- hash4Value = temp & hash4Mask;
- }
-
- int getHash2Pos() {
- return hash2Table[hash2Value];
- }
-
- int getHash3Pos() {
- return hash3Table[hash3Value];
- }
-
- int getHash4Pos() {
- return hash4Table[hash4Value];
- }
-
- void updateTables(int pos) {
- hash2Table[hash2Value] = pos;
- hash3Table[hash3Value] = pos;
- hash4Table[hash4Value] = pos;
- }
-
- void normalize(int normalizeOffset) {
- LZEncoder.normalize(hash2Table, normalizeOffset);
- LZEncoder.normalize(hash3Table, normalizeOffset);
- LZEncoder.normalize(hash4Table, normalizeOffset);
- }
-}
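
getHash4Size() above rounds the 4-byte hash table size to a power of two derived from the dictionary size, with a floor of 64 Ki entries (from h |= 0xFFFF) and one halving step for very large dictionaries. A standalone sketch that reproduces the function and prints a few sample values (class name illustrative):

public class Hash4SizeSketch {
    // Same computation as Hash234.getHash4Size() above.
    static int getHash4Size(int dictSize) {
        int h = dictSize - 1;
        h |= h >>> 1;
        h |= h >>> 2;
        h |= h >>> 4;
        h |= h >>> 8;
        h >>>= 1;
        h |= 0xFFFF;
        if (h > (1 << 24))
            h >>>= 1;

        return h + 1;
    }

    public static void main(String[] args) {
        System.out.println(getHash4Size(64 << 10));   // 65536 (the 64 Ki floor)
        System.out.println(getHash4Size(8 << 20));    // 4194304
        System.out.println(getHash4Size(64 << 20));   // 16777216
    }
}
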
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/LZDecoder.java b/Java/Tukaani/src/org/tukaani/xz/lz/LZDecoder.java
deleted file mode 100644
index 680fec1..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/LZDecoder.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * LZDecoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import org.tukaani.xz.CorruptedInputException;
-
-public final class LZDecoder {
- private final byte[] buf;
- private int start = 0;
- private int pos = 0;
- private int full = 0;
- private int limit = 0;
- private int pendingLen = 0;
- private int pendingDist = 0;
-
- public LZDecoder(int dictSize, byte[] presetDict) {
- buf = new byte[dictSize];
-
- if (presetDict != null) {
- pos = Math.min(presetDict.length, dictSize);
- full = pos;
- start = pos;
- System.arraycopy(presetDict, presetDict.length - pos, buf, 0, pos);
- }
- }
-
- public void reset() {
- start = 0;
- pos = 0;
- full = 0;
- limit = 0;
- buf[buf.length - 1] = 0x00;
- }
-
- public void setLimit(int outMax) {
- if (buf.length - pos <= outMax)
- limit = buf.length;
- else
- limit = pos + outMax;
- }
-
- public boolean hasSpace() {
- return pos < limit;
- }
-
- public boolean hasPending() {
- return pendingLen > 0;
- }
-
- public int getPos() {
- return pos;
- }
-
- public int getByte(int dist) {
- int offset = pos - dist - 1;
- if (dist >= pos)
- offset += buf.length;
-
- return buf[offset] & 0xFF;
- }
-
- public void putByte(byte b) {
- buf[pos++] = b;
-
- if (full < pos)
- full = pos;
- }
-
- public void repeat(int dist, int len) throws IOException {
- if (dist < 0 || dist >= full)
- throw new CorruptedInputException();
-
- int left = Math.min(limit - pos, len);
- pendingLen = len - left;
- pendingDist = dist;
-
- int back = pos - dist - 1;
- if (dist >= pos)
- back += buf.length;
-
- do {
- buf[pos++] = buf[back++];
- if (back == buf.length)
- back = 0;
- } while (--left > 0);
-
- if (full < pos)
- full = pos;
- }
-
- public void repeatPending() throws IOException {
- if (pendingLen > 0)
- repeat(pendingDist, pendingLen);
- }
-
- public void copyUncompressed(DataInputStream inData, int len)
- throws IOException {
- int copySize = Math.min(buf.length - pos, len);
- inData.readFully(buf, pos, copySize);
- pos += copySize;
-
- if (full < pos)
- full = pos;
- }
-
- public int flush(byte[] out, int outOff) {
- int copySize = pos - start;
- if (pos == buf.length)
- pos = 0;
-
- System.arraycopy(buf, start, out, outOff, copySize);
- start = pos;
-
- return copySize;
- }
-}
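
repeat() above implements the classic LZ77 copy: the source of the copy may overlap the destination, which is what lets a short history expand into a long run. A minimal sketch of that behaviour, ignoring the circular-buffer wrap-around and the pending-length bookkeeping handled by the real code; data and names are illustrative.

public class LzRepeatSketch {
    public static void main(String[] args) {
        byte[] buf = new byte[32];
        int pos = 0;

        for (byte b : "ab".getBytes())   // two bytes of history
            buf[pos++] = b;

        // repeat(dist = 1, len = 6): copy six bytes starting dist + 1 = 2 back.
        // The copy overlaps its own output, so "ab" grows into "abababab".
        int dist = 1;
        int len = 6;
        int back = pos - dist - 1;
        while (len-- > 0)
            buf[pos++] = buf[back++];

        System.out.println(new String(buf, 0, pos));   // abababab
    }
}
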
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/LZEncoder.java b/Java/Tukaani/src/org/tukaani/xz/lz/LZEncoder.java
deleted file mode 100644
index 267d7dd..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/LZEncoder.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * LZEncoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-import java.io.OutputStream;
-import java.io.IOException;
-
-public abstract class LZEncoder {
- public static final int MF_HC4 = 0x04;
- public static final int MF_BT4 = 0x14;
-
- /**
- * Number of bytes to keep available before the current byte
- * when moving the LZ window.
- */
- private final int keepSizeBefore;
-
- /**
- * Number of bytes that must be available, the current byte included,
- * to make hasEnoughData return true. Flushing and finishing are
- * naturally exceptions to this since there cannot be any data after
- * the end of the uncompressed input.
- */
- private final int keepSizeAfter;
-
- final int matchLenMax;
- final int niceLen;
-
- final byte[] buf;
-
- int readPos = -1;
- private int readLimit = -1;
- private boolean finishing = false;
- private int writePos = 0;
- private int pendingSize = 0;
-
- static void normalize(int[] positions, int normalizationOffset) {
- for (int i = 0; i < positions.length; ++i) {
- if (positions[i] <= normalizationOffset)
- positions[i] = 0;
- else
- positions[i] -= normalizationOffset;
- }
- }
-
- /**
- * Gets the size of the LZ window buffer that needs to be allocated.
- */
- private static int getBufSize(
- int dictSize, int extraSizeBefore, int extraSizeAfter,
- int matchLenMax) {
- int keepSizeBefore = extraSizeBefore + dictSize;
- int keepSizeAfter = extraSizeAfter + matchLenMax;
- int reserveSize = Math.min(dictSize / 2 + (256 << 10), 512 << 20);
- return keepSizeBefore + keepSizeAfter + reserveSize;
- }
-
- /**
- * Gets approximate memory usage of the LZEncoder base structure and
- * the match finder as kibibytes.
- */
- public static int getMemoryUsage(
- int dictSize, int extraSizeBefore, int extraSizeAfter,
- int matchLenMax, int mf) {
- // Buffer size + a little extra
- int m = getBufSize(dictSize, extraSizeBefore, extraSizeAfter,
- matchLenMax) / 1024 + 10;
-
- switch (mf) {
- case MF_HC4:
- m += HC4.getMemoryUsage(dictSize);
- break;
-
- case MF_BT4:
- m += BT4.getMemoryUsage(dictSize);
- break;
-
- default:
- throw new IllegalArgumentException();
- }
-
- return m;
- }
-
- /**
- * Creates a new LZEncoder.
- * <p>
- * @param dictSize dictionary size
- *
- * @param extraSizeBefore
- * number of bytes to keep available in the
- * history in addition to dictSize
- *
- * @param extraSizeAfter
- * number of bytes that must be available
- * after current position + matchLenMax
- *
- * @param niceLen if a match of at least <code>niceLen</code>
- * bytes is found, be happy with it and don't
- * stop looking for longer matches
- *
- * @param matchLenMax don't test for matches longer than
- * <code>matchLenMax</code> bytes
- *
- * @param mf match finder ID
- *
- * @param depthLimit match finder search depth limit
- */
- public static LZEncoder getInstance(
- int dictSize, int extraSizeBefore, int extraSizeAfter,
- int niceLen, int matchLenMax, int mf, int depthLimit) {
- switch (mf) {
- case MF_HC4:
- return new HC4(dictSize, extraSizeBefore, extraSizeAfter,
- niceLen, matchLenMax, depthLimit);
-
- case MF_BT4:
- return new BT4(dictSize, extraSizeBefore, extraSizeAfter,
- niceLen, matchLenMax, depthLimit);
- }
-
- throw new IllegalArgumentException();
- }
-
- /**
- * Creates a new LZEncoder. See <code>getInstance</code>.
- */
- LZEncoder(int dictSize, int extraSizeBefore, int extraSizeAfter,
- int niceLen, int matchLenMax) {
- buf = new byte[getBufSize(dictSize, extraSizeBefore, extraSizeAfter,
- matchLenMax)];
-
- keepSizeBefore = extraSizeBefore + dictSize;
- keepSizeAfter = extraSizeAfter + matchLenMax;
-
- this.matchLenMax = matchLenMax;
- this.niceLen = niceLen;
- }
-
- /**
- * Sets a preset dictionary. If a preset dictionary is wanted, this
- * function must be called immediately after creating the LZEncoder
- * before any data has been encoded.
- */
- public void setPresetDict(int dictSize, byte[] presetDict) {
- assert !isStarted();
- assert writePos == 0;
-
- if (presetDict != null) {
- // If the preset dictionary buffer is bigger than the dictionary
- // size, copy only the tail of the preset dictionary.
- int copySize = Math.min(presetDict.length, dictSize);
- int offset = presetDict.length - copySize;
- System.arraycopy(presetDict, offset, buf, 0, copySize);
- writePos += copySize;
- skip(copySize);
- }
- }
-
- /**
- * Moves data from the end of the buffer to the beginning, discarding
- * old data and making space for new input.
- */
- private void moveWindow() {
- // Align the move to a multiple of 16 bytes. LZMA2 needs this
- // because it uses the lowest bits from readPos to get the
- // alignment of the uncompressed data.
- int moveOffset = (readPos + 1 - keepSizeBefore) & ~15;
- int moveSize = writePos - moveOffset;
- System.arraycopy(buf, moveOffset, buf, 0, moveSize);
-
- readPos -= moveOffset;
- readLimit -= moveOffset;
- writePos -= moveOffset;
- }
-
- /**
- * Copies new data into the LZEncoder's buffer.
- */
- public int fillWindow(byte[] in, int off, int len) {
- assert !finishing;
-
- // Move the sliding window if needed.
- if (readPos >= buf.length - keepSizeAfter)
- moveWindow();
-
- // Try to fill the dictionary buffer. If it becomes full,
- // some of the input bytes may be left unused.
- if (len > buf.length - writePos)
- len = buf.length - writePos;
-
- System.arraycopy(in, off, buf, writePos, len);
- writePos += len;
-
- // Set the new readLimit but only if there's enough data to allow
- // encoding of at least one more byte.
- if (writePos >= keepSizeAfter)
- readLimit = writePos - keepSizeAfter;
-
- processPendingBytes();
-
- // Tell the caller how much input we actually copied into
- // the dictionary.
- return len;
- }
-
- /**
- * Process pending bytes remaining from preset dictionary initialization
- * or encoder flush operation.
- */
- private void processPendingBytes() {
- // After flushing or setting a preset dictionary there will be
-        // pending data that hasn't been run through the match finder yet.
- // Run it through the match finder now if there is enough new data
- // available (readPos < readLimit) that the encoder may encode at
- // least one more input byte. This way we don't waste any time
- // looping in the match finder (and marking the same bytes as
- // pending again) if the application provides very little new data
- // per write call.
- if (pendingSize > 0 && readPos < readLimit) {
- readPos -= pendingSize;
- int oldPendingSize = pendingSize;
- pendingSize = 0;
- skip(oldPendingSize);
- assert pendingSize < oldPendingSize;
- }
- }
-
- /**
- * Returns true if at least one byte has already been run through
- * the match finder.
- */
- public boolean isStarted() {
- return readPos != -1;
- }
-
- /**
- * Marks that all the input needs to be made available in
- * the encoded output.
- */
- public void setFlushing() {
- readLimit = writePos - 1;
- processPendingBytes();
- }
-
- /**
- * Marks that there is no more input remaining. The read position
- * can be advanced until the end of the data.
- */
- public void setFinishing() {
- readLimit = writePos - 1;
- finishing = true;
- processPendingBytes();
- }
-
- /**
- * Tests if there is enough input available to let the caller encode
- * at least one more byte.
- */
- public boolean hasEnoughData(int alreadyReadLen) {
- return readPos - alreadyReadLen < readLimit;
- }
-
- public void copyUncompressed(OutputStream out, int backward, int len)
- throws IOException {
- out.write(buf, readPos + 1 - backward, len);
- }
-
- /**
- * Get the number of bytes available, including the current byte.
- * <p>
- * Note that the result is undefined if <code>getMatches</code> or
- * <code>skip</code> hasn't been called yet and no preset dictionary
- * is being used.
- */
- public int getAvail() {
- assert isStarted();
- return writePos - readPos;
- }
-
- /**
- * Gets the lowest four bits of the absolute offset of the current byte.
- * Bits other than the lowest four are undefined.
- */
- public int getPos() {
- return readPos;
- }
-
- /**
- * Gets the byte from the given backward offset.
- * <p>
- * The current byte is at <code>0</code>, the previous byte
- * at <code>1</code> etc. To get a byte at zero-based distance,
-     * use <code>getByte(dist + 1)</code>.
- * <p>
- * This function is equivalent to <code>getByte(0, backward)</code>.
- */
- public int getByte(int backward) {
- return buf[readPos - backward] & 0xFF;
- }
-
- /**
- * Gets the byte from the given forward minus backward offset.
- * The forward offset is added to the current position. This lets
- * one read bytes ahead of the current byte.
- */
- public int getByte(int forward, int backward) {
- return buf[readPos + forward - backward] & 0xFF;
- }
-
- /**
- * Get the length of a match at the given distance.
- *
- * @param dist zero-based distance of the match to test
- * @param lenLimit don't test for a match longer than this
- *
- * @return length of the match; it is in the range [0, lenLimit]
- */
- public int getMatchLen(int dist, int lenLimit) {
- int backPos = readPos - dist - 1;
- int len = 0;
-
- while (len < lenLimit && buf[readPos + len] == buf[backPos + len])
- ++len;
-
- return len;
- }
-
- /**
- * Get the length of a match at the given distance and forward offset.
- *
- * @param forward forward offset
- * @param dist zero-based distance of the match to test
- * @param lenLimit don't test for a match longer than this
- *
- * @return length of the match; it is in the range [0, lenLimit]
- */
- public int getMatchLen(int forward, int dist, int lenLimit) {
- int curPos = readPos + forward;
- int backPos = curPos - dist - 1;
- int len = 0;
-
- while (len < lenLimit && buf[curPos + len] == buf[backPos + len])
- ++len;
-
- return len;
- }
-
- /**
- * Verifies that the matches returned by the match finder are valid.
- * This is meant to be used in an assert statement. This is totally
-     * useless for actual encoding since the match finder's results should
-     * naturally always be valid unless it is broken.
- *
- * @param matches return value from <code>getMatches</code>
- *
- * @return true if matches are valid, false if match finder is broken
- */
- public boolean verifyMatches(Matches matches) {
- int lenLimit = Math.min(getAvail(), matchLenMax);
-
- for (int i = 0; i < matches.count; ++i)
- if (getMatchLen(matches.dist[i], lenLimit) != matches.len[i])
- return false;
-
- return true;
- }
-
- /**
- * Moves to the next byte, checks if there is enough input available,
- * and returns the amount of input available.
- *
- * @param requiredForFlushing
- * minimum number of available bytes when
- * flushing; encoding may be continued with
- * new input after flushing
- * @param requiredForFinishing
- * minimum number of available bytes when
- * finishing; encoding must not be continued
- * after finishing or the match finder state
- * may be corrupt
- *
- * @return the number of bytes available or zero if there
- * is not enough input available
- */
- int movePos(int requiredForFlushing, int requiredForFinishing) {
- assert requiredForFlushing >= requiredForFinishing;
-
- ++readPos;
- int avail = writePos - readPos;
-
- if (avail < requiredForFlushing) {
- if (avail < requiredForFinishing || !finishing) {
- ++pendingSize;
- avail = 0;
- }
- }
-
- return avail;
- }
-
- /**
- * Runs match finder for the next byte and returns the matches found.
- */
- public abstract Matches getMatches();
-
- /**
- * Skips the given number of bytes in the match finder.
- */
- public abstract void skip(int len);
-}
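
moveWindow() above aligns the discard offset down to a multiple of 16 with & ~15, so the low four bits of readPos, which LZMA2 relies on for alignment, are the same before and after the move. A tiny sketch of that rounding with illustrative numbers:

public class MoveOffsetSketch {
    public static void main(String[] args) {
        int keepSizeBefore = 1000;   // illustrative value

        for (int readPos : new int[] { 1000, 1016, 1023, 1031 }) {
            int moveOffset = (readPos + 1 - keepSizeBefore) & ~15;
            System.out.println("readPos " + readPos + " -> moveOffset " + moveOffset
                    + ", low four bits after the move: " + ((readPos - moveOffset) & 15));
        }
        // moveOffset is 0, 16, 16, 32: always a multiple of 16, so
        // (readPos - moveOffset) keeps the same low four bits as readPos.
    }
}
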
diff --git a/Java/Tukaani/src/org/tukaani/xz/lz/Matches.java b/Java/Tukaani/src/org/tukaani/xz/lz/Matches.java
deleted file mode 100644
index 2fbee11..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lz/Matches.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Matches
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lz;
-
-public final class Matches {
- public final int[] len;
- public final int[] dist;
- public int count = 0;
-
- Matches(int countMax) {
- len = new int[countMax];
- dist = new int[countMax];
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMACoder.java b/Java/Tukaani/src/org/tukaani/xz/lzma/LZMACoder.java
deleted file mode 100644
index c31c9a6..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMACoder.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * LZMACoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-import org.tukaani.xz.rangecoder.RangeCoder;
-
-abstract class LZMACoder {
- static final int POS_STATES_MAX = 1 << 4;
-
- static final int MATCH_LEN_MIN = 2;
- static final int MATCH_LEN_MAX = MATCH_LEN_MIN + LengthCoder.LOW_SYMBOLS
- + LengthCoder.MID_SYMBOLS
- + LengthCoder.HIGH_SYMBOLS - 1;
-
- static final int DIST_STATES = 4;
- static final int DIST_SLOTS = 1 << 6;
- static final int DIST_MODEL_START = 4;
- static final int DIST_MODEL_END = 14;
- static final int FULL_DISTANCES = 1 << (DIST_MODEL_END / 2);
-
- static final int ALIGN_BITS = 4;
- static final int ALIGN_SIZE = 1 << ALIGN_BITS;
- static final int ALIGN_MASK = ALIGN_SIZE - 1;
-
- static final int REPS = 4;
-
- final int posMask;
-
- final int[] reps = new int[REPS];
- final State state = new State();
-
- final short[][] isMatch = new short[State.STATES][POS_STATES_MAX];
- final short[] isRep = new short[State.STATES];
- final short[] isRep0 = new short[State.STATES];
- final short[] isRep1 = new short[State.STATES];
- final short[] isRep2 = new short[State.STATES];
- final short[][] isRep0Long = new short[State.STATES][POS_STATES_MAX];
- final short[][] distSlots = new short[DIST_STATES][DIST_SLOTS];
- final short[][] distSpecial = { new short[2], new short[2],
- new short[4], new short[4],
- new short[8], new short[8],
- new short[16], new short[16],
- new short[32], new short[32] };
- final short[] distAlign = new short[ALIGN_SIZE];
-
- static final int getDistState(int len) {
- return len < DIST_STATES + MATCH_LEN_MIN
- ? len - MATCH_LEN_MIN
- : DIST_STATES - 1;
- }
-
- LZMACoder(int pb) {
- posMask = (1 << pb) - 1;
- }
-
- void reset() {
- reps[0] = 0;
- reps[1] = 0;
- reps[2] = 0;
- reps[3] = 0;
- state.reset();
-
- for (int i = 0; i < isMatch.length; ++i)
- RangeCoder.initProbs(isMatch[i]);
-
- RangeCoder.initProbs(isRep);
- RangeCoder.initProbs(isRep0);
- RangeCoder.initProbs(isRep1);
- RangeCoder.initProbs(isRep2);
-
- for (int i = 0; i < isRep0Long.length; ++i)
- RangeCoder.initProbs(isRep0Long[i]);
-
- for (int i = 0; i < distSlots.length; ++i)
- RangeCoder.initProbs(distSlots[i]);
-
- for (int i = 0; i < distSpecial.length; ++i)
- RangeCoder.initProbs(distSpecial[i]);
-
- RangeCoder.initProbs(distAlign);
- }
-
-
- abstract class LiteralCoder {
- private final int lc;
- private final int literalPosMask;
-
- LiteralCoder(int lc, int lp) {
- this.lc = lc;
- this.literalPosMask = (1 << lp) - 1;
- }
-
- final int getSubcoderIndex(int prevByte, int pos) {
- int low = prevByte >> (8 - lc);
- int high = (pos & literalPosMask) << lc;
- return low + high;
- }
-
-
- abstract class LiteralSubcoder {
- final short[] probs = new short[0x300];
-
- void reset() {
- RangeCoder.initProbs(probs);
- }
- }
- }
-
-
- abstract class LengthCoder {
- static final int LOW_SYMBOLS = 1 << 3;
- static final int MID_SYMBOLS = 1 << 3;
- static final int HIGH_SYMBOLS = 1 << 8;
-
- final short[] choice = new short[2];
- final short[][] low = new short[POS_STATES_MAX][LOW_SYMBOLS];
- final short[][] mid = new short[POS_STATES_MAX][MID_SYMBOLS];
- final short[] high = new short[HIGH_SYMBOLS];
-
- void reset() {
- RangeCoder.initProbs(choice);
-
- for (int i = 0; i < low.length; ++i)
- RangeCoder.initProbs(low[i]);
-
-            for (int i = 0; i < mid.length; ++i)
- RangeCoder.initProbs(mid[i]);
-
- RangeCoder.initProbs(high);
- }
- }
-}
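
getDistState() above selects which distance-slot probability set to use based on the match length: lengths 2 through 5 each get their own set, and all longer matches share the last one. A tiny standalone sketch of that mapping (class name illustrative):

public class DistStateSketch {
    static final int MATCH_LEN_MIN = 2;
    static final int DIST_STATES = 4;

    // Same mapping as LZMACoder.getDistState() above.
    static int getDistState(int len) {
        return len < DIST_STATES + MATCH_LEN_MIN ? len - MATCH_LEN_MIN
                                                 : DIST_STATES - 1;
    }

    public static void main(String[] args) {
        for (int len = 2; len <= 8; ++len)
            System.out.println("len " + len + " -> distState " + getDistState(len));
        // lengths 2, 3, 4, 5 map to states 0, 1, 2, 3; longer matches share state 3
    }
}
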
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMADecoder.java b/Java/Tukaani/src/org/tukaani/xz/lzma/LZMADecoder.java
deleted file mode 100644
index ccf1960..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMADecoder.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * LZMADecoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-import java.io.IOException;
-import org.tukaani.xz.lz.LZDecoder;
-import org.tukaani.xz.rangecoder.RangeDecoder;
-
-public final class LZMADecoder extends LZMACoder {
- private final LZDecoder lz;
- private final RangeDecoder rc;
- private final LiteralDecoder literalDecoder;
- private final LengthDecoder matchLenDecoder = new LengthDecoder();
- private final LengthDecoder repLenDecoder = new LengthDecoder();
-
- public LZMADecoder(LZDecoder lz, RangeDecoder rc, int lc, int lp, int pb) {
- super(pb);
- this.lz = lz;
- this.rc = rc;
- this.literalDecoder = new LiteralDecoder(lc, lp);
- reset();
- }
-
- public void reset() {
- super.reset();
- literalDecoder.reset();
- matchLenDecoder.reset();
- repLenDecoder.reset();
- }
-
- /**
-     * Returns true if the LZMA end marker was detected. It is encoded as
-     * the maximum match distance, which becomes -1 with signed ints. This
- * function is needed only for LZMA1. LZMA2 doesn't use the end marker
- * in the LZMA layer.
- */
- public boolean endMarkerDetected() {
- return reps[0] == -1;
- }
-
- public void decode() throws IOException {
- lz.repeatPending();
-
- while (lz.hasSpace()) {
- int posState = lz.getPos() & posMask;
-
- if (rc.decodeBit(isMatch[state.get()], posState) == 0) {
- literalDecoder.decode();
- } else {
- int len = rc.decodeBit(isRep, state.get()) == 0
- ? decodeMatch(posState)
- : decodeRepMatch(posState);
-
- // NOTE: With LZMA1 streams that have the end marker,
- // this will throw CorruptedInputException. LZMAInputStream
- // handles it specially.
- lz.repeat(reps[0], len);
- }
- }
-
- rc.normalize();
- }
-
- private int decodeMatch(int posState) throws IOException {
- state.updateMatch();
-
- reps[3] = reps[2];
- reps[2] = reps[1];
- reps[1] = reps[0];
-
- int len = matchLenDecoder.decode(posState);
- int distSlot = rc.decodeBitTree(distSlots[getDistState(len)]);
-
- if (distSlot < DIST_MODEL_START) {
- reps[0] = distSlot;
- } else {
- int limit = (distSlot >> 1) - 1;
- reps[0] = (2 | (distSlot & 1)) << limit;
-
- if (distSlot < DIST_MODEL_END) {
- reps[0] |= rc.decodeReverseBitTree(
- distSpecial[distSlot - DIST_MODEL_START]);
- } else {
- reps[0] |= rc.decodeDirectBits(limit - ALIGN_BITS)
- << ALIGN_BITS;
- reps[0] |= rc.decodeReverseBitTree(distAlign);
- }
- }
-
- return len;
- }
-
- private int decodeRepMatch(int posState) throws IOException {
- if (rc.decodeBit(isRep0, state.get()) == 0) {
- if (rc.decodeBit(isRep0Long[state.get()], posState) == 0) {
- state.updateShortRep();
- return 1;
- }
- } else {
- int tmp;
-
- if (rc.decodeBit(isRep1, state.get()) == 0) {
- tmp = reps[1];
- } else {
- if (rc.decodeBit(isRep2, state.get()) == 0) {
- tmp = reps[2];
- } else {
- tmp = reps[3];
- reps[3] = reps[2];
- }
-
- reps[2] = reps[1];
- }
-
- reps[1] = reps[0];
- reps[0] = tmp;
- }
-
- state.updateLongRep();
-
- return repLenDecoder.decode(posState);
- }
-
-
- private class LiteralDecoder extends LiteralCoder {
- private final LiteralSubdecoder[] subdecoders;
-
- LiteralDecoder(int lc, int lp) {
- super(lc, lp);
-
- subdecoders = new LiteralSubdecoder[1 << (lc + lp)];
- for (int i = 0; i < subdecoders.length; ++i)
- subdecoders[i] = new LiteralSubdecoder();
- }
-
- void reset() {
- for (int i = 0; i < subdecoders.length; ++i)
- subdecoders[i].reset();
- }
-
- void decode() throws IOException {
- int i = getSubcoderIndex(lz.getByte(0), lz.getPos());
- subdecoders[i].decode();
- }
-
-
- private class LiteralSubdecoder extends LiteralSubcoder {
- void decode() throws IOException {
- int symbol = 1;
-
- if (state.isLiteral()) {
- do {
- symbol = (symbol << 1) | rc.decodeBit(probs, symbol);
- } while (symbol < 0x100);
-
- } else {
- int matchByte = lz.getByte(reps[0]);
- int offset = 0x100;
- int matchBit;
- int bit;
-
- do {
- matchByte <<= 1;
- matchBit = matchByte & offset;
- bit = rc.decodeBit(probs, offset + matchBit + symbol);
- symbol = (symbol << 1) | bit;
- offset &= (0 - bit) ^ ~matchBit;
- } while (symbol < 0x100);
- }
-
- lz.putByte((byte)symbol);
- state.updateLiteral();
- }
- }
- }
-
-
- private class LengthDecoder extends LengthCoder {
- int decode(int posState) throws IOException {
- if (rc.decodeBit(choice, 0) == 0)
- return rc.decodeBitTree(low[posState]) + MATCH_LEN_MIN;
-
- if (rc.decodeBit(choice, 1) == 0)
- return rc.decodeBitTree(mid[posState])
- + MATCH_LEN_MIN + LOW_SYMBOLS;
-
- return rc.decodeBitTree(high)
- + MATCH_LEN_MIN + LOW_SYMBOLS + MID_SYMBOLS;
- }
- }
-}
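
The matched-literal path in LiteralSubdecoder.decode() above uses offset &= (0 - bit) ^ ~matchBit to keep the match byte's bits in the probability context only while the decoded bits still agree with them; once they disagree, offset (and therefore matchBit) stays 0 and decoding falls back to the plain literal contexts. The sketch below drives the same update with bits taken from a fixed byte instead of the range decoder; values and names are illustrative.

public class MatchedLiteralOffsetSketch {
    public static void main(String[] args) {
        int matchByte = 0b1011_0000;   // byte at the most recent match distance
        int literal   = 0b1010_1111;   // byte being "decoded"; first differs at bit 4

        int offset = 0x100;
        int symbol = 1;

        for (int i = 7; i >= 0; --i) {
            matchByte <<= 1;
            int matchBit = matchByte & offset;
            int bit = (literal >>> i) & 1;   // stand-in for rc.decodeBit(...)
            symbol = (symbol << 1) | bit;

            int oldOffset = offset;
            offset &= (0 - bit) ^ ~matchBit;
            System.out.println("bit " + i + ": matchBit=" + (matchBit != 0 ? 1 : 0)
                    + " decoded=" + bit + " offset 0x"
                    + Integer.toHexString(oldOffset) + " -> 0x"
                    + Integer.toHexString(offset));
        }
        // offset stays 0x100 for the first three (agreeing) bits,
        // becomes 0 at the first disagreement, and stays 0 afterwards.
    }
}
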
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoder.java b/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoder.java
deleted file mode 100644
index 4fdc198..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoder.java
+++ /dev/null
@@ -1,711 +0,0 @@
-/*
- * LZMAEncoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-import org.tukaani.xz.lz.LZEncoder;
-import org.tukaani.xz.lz.Matches;
-import org.tukaani.xz.rangecoder.RangeEncoder;
-
-public abstract class LZMAEncoder extends LZMACoder {
- public static final int MODE_FAST = 1;
- public static final int MODE_NORMAL = 2;
-
- /**
- * LZMA2 chunk is considered full when its uncompressed size exceeds
- * <code>LZMA2_UNCOMPRESSED_LIMIT</code>.
- * <p>
- * A compressed LZMA2 chunk can hold 2 MiB of uncompressed data.
- * A single LZMA symbol may indicate up to MATCH_LEN_MAX bytes
- * of data, so the LZMA2 chunk is considered full when there is
- * less space than MATCH_LEN_MAX bytes.
- */
- private static final int LZMA2_UNCOMPRESSED_LIMIT
- = (2 << 20) - MATCH_LEN_MAX;
-
- /**
- * LZMA2 chunk is considered full when its compressed size exceeds
- * <code>LZMA2_COMPRESSED_LIMIT</code>.
- * <p>
- * The maximum compressed size of a LZMA2 chunk is 64 KiB.
- * A single LZMA symbol might use 20 bytes of space even though
- * it usually takes just one byte or so. Two more bytes are needed
- * for LZMA2 uncompressed chunks (see LZMA2OutputStream.writeChunk).
- * Leave a little safety margin and use 26 bytes.
- */
- private static final int LZMA2_COMPRESSED_LIMIT = (64 << 10) - 26;
-
- private static final int DIST_PRICE_UPDATE_INTERVAL = FULL_DISTANCES;
- private static final int ALIGN_PRICE_UPDATE_INTERVAL = ALIGN_SIZE;
-
- private final RangeEncoder rc;
- final LZEncoder lz;
- final LiteralEncoder literalEncoder;
- final LengthEncoder matchLenEncoder;
- final LengthEncoder repLenEncoder;
- final int niceLen;
-
- private int distPriceCount = 0;
- private int alignPriceCount = 0;
-
- private final int distSlotPricesSize;
- private final int[][] distSlotPrices;
- private final int[][] fullDistPrices
- = new int[DIST_STATES][FULL_DISTANCES];
- private final int[] alignPrices = new int[ALIGN_SIZE];
-
- int back = 0;
- int readAhead = -1;
- private int uncompressedSize = 0;
-
- public static int getMemoryUsage(int mode, int dictSize,
- int extraSizeBefore, int mf) {
- int m = 80;
-
- switch (mode) {
- case MODE_FAST:
- m += LZMAEncoderFast.getMemoryUsage(
- dictSize, extraSizeBefore, mf);
- break;
-
- case MODE_NORMAL:
- m += LZMAEncoderNormal.getMemoryUsage(
- dictSize, extraSizeBefore, mf);
- break;
-
- default:
- throw new IllegalArgumentException();
- }
-
- return m;
- }
-
- public static LZMAEncoder getInstance(
- RangeEncoder rc, int lc, int lp, int pb, int mode,
- int dictSize, int extraSizeBefore,
- int niceLen, int mf, int depthLimit) {
- switch (mode) {
- case MODE_FAST:
- return new LZMAEncoderFast(rc, lc, lp, pb,
- dictSize, extraSizeBefore,
- niceLen, mf, depthLimit);
-
- case MODE_NORMAL:
- return new LZMAEncoderNormal(rc, lc, lp, pb,
- dictSize, extraSizeBefore,
- niceLen, mf, depthLimit);
- }
-
- throw new IllegalArgumentException();
- }
-
- /**
- * Gets an integer [0, 63] matching the highest two bits of an integer.
- * This is like bit scan reverse (BSR) on x86 except that this also
- * cares about the second highest bit.
- */
- public static int getDistSlot(int dist) {
- if (dist <= DIST_MODEL_START)
- return dist;
-
- int n = dist;
- int i = 31;
-
- if ((n & 0xFFFF0000) == 0) {
- n <<= 16;
- i = 15;
- }
-
- if ((n & 0xFF000000) == 0) {
- n <<= 8;
- i -= 8;
- }
-
- if ((n & 0xF0000000) == 0) {
- n <<= 4;
- i -= 4;
- }
-
- if ((n & 0xC0000000) == 0) {
- n <<= 2;
- i -= 2;
- }
-
- if ((n & 0x80000000) == 0)
- --i;
-
- return (i << 1) + ((dist >>> (i - 1)) & 1);
- }
-
- /**
- * Gets the next LZMA symbol.
- * <p>
- * There are three types of symbols: literal (a single byte),
- * repeated match, and normal match. The symbol is indicated
- * by the return value and by the variable <code>back</code>.
- * <p>
- * Literal: <code>back == -1</code> and return value is <code>1</code>.
- * The literal itself needs to be read from <code>lz</code> separately.
- * <p>
- * Repeated match: <code>back</code> is in the range [0, 3] and
- * the return value is the length of the repeated match.
- * <p>
-     * Normal match: <code>back - REPS</code> (<code>back - 4</code>)
- * is the distance of the match and the return value is the length
- * of the match.
- */
- abstract int getNextSymbol();
-
- LZMAEncoder(RangeEncoder rc, LZEncoder lz,
- int lc, int lp, int pb, int dictSize, int niceLen) {
- super(pb);
- this.rc = rc;
- this.lz = lz;
- this.niceLen = niceLen;
-
- literalEncoder = new LiteralEncoder(lc, lp);
- matchLenEncoder = new LengthEncoder(pb, niceLen);
- repLenEncoder = new LengthEncoder(pb, niceLen);
-
- distSlotPricesSize = getDistSlot(dictSize - 1) + 1;
- distSlotPrices = new int[DIST_STATES][distSlotPricesSize];
-
- reset();
- }
-
- public LZEncoder getLZEncoder() {
- return lz;
- }
-
- public void reset() {
- super.reset();
- literalEncoder.reset();
- matchLenEncoder.reset();
- repLenEncoder.reset();
- distPriceCount = 0;
- alignPriceCount = 0;
-
- uncompressedSize += readAhead + 1;
- readAhead = -1;
- }
-
- public int getUncompressedSize() {
- return uncompressedSize;
- }
-
- public void resetUncompressedSize() {
- uncompressedSize = 0;
- }
-
- /**
- * Compresses for LZMA2.
- *
- * @return true if the LZMA2 chunk became full, false otherwise
- */
- public boolean encodeForLZMA2() {
- if (!lz.isStarted() && !encodeInit())
- return false;
-
- while (uncompressedSize <= LZMA2_UNCOMPRESSED_LIMIT
- && rc.getPendingSize() <= LZMA2_COMPRESSED_LIMIT)
- if (!encodeSymbol())
- return false;
-
- return true;
- }
-
- private boolean encodeInit() {
- assert readAhead == -1;
- if (!lz.hasEnoughData(0))
- return false;
-
- // The first symbol must be a literal unless using
- // a preset dictionary. This code isn't run if using
- // a preset dictionary.
- skip(1);
- rc.encodeBit(isMatch[state.get()], 0, 0);
- literalEncoder.encodeInit();
-
- --readAhead;
- assert readAhead == -1;
-
- ++uncompressedSize;
- assert uncompressedSize == 1;
-
- return true;
- }
-
- private boolean encodeSymbol() {
- if (!lz.hasEnoughData(readAhead + 1))
- return false;
-
- int len = getNextSymbol();
-
- assert readAhead >= 0;
- int posState = (lz.getPos() - readAhead) & posMask;
-
- if (back == -1) {
- // Literal i.e. eight-bit byte
- assert len == 1;
- rc.encodeBit(isMatch[state.get()], posState, 0);
- literalEncoder.encode();
- } else {
- // Some type of match
- rc.encodeBit(isMatch[state.get()], posState, 1);
- if (back < REPS) {
- // Repeated match i.e. the same distance
- // has been used earlier.
- assert lz.getMatchLen(-readAhead, reps[back], len) == len;
- rc.encodeBit(isRep, state.get(), 1);
- encodeRepMatch(back, len, posState);
- } else {
- // Normal match
- assert lz.getMatchLen(-readAhead, back - REPS, len) == len;
- rc.encodeBit(isRep, state.get(), 0);
- encodeMatch(back - REPS, len, posState);
- }
- }
-
- readAhead -= len;
- uncompressedSize += len;
-
- return true;
- }
-
- private void encodeMatch(int dist, int len, int posState) {
- state.updateMatch();
- matchLenEncoder.encode(len, posState);
-
- int distSlot = getDistSlot(dist);
- rc.encodeBitTree(distSlots[getDistState(len)], distSlot);
-
- if (distSlot >= DIST_MODEL_START) {
- int footerBits = (distSlot >>> 1) - 1;
- int base = (2 | (distSlot & 1)) << footerBits;
- int distReduced = dist - base;
-
- if (distSlot < DIST_MODEL_END) {
- rc.encodeReverseBitTree(
- distSpecial[distSlot - DIST_MODEL_START],
- distReduced);
- } else {
- rc.encodeDirectBits(distReduced >>> ALIGN_BITS,
- footerBits - ALIGN_BITS);
- rc.encodeReverseBitTree(distAlign, distReduced & ALIGN_MASK);
- --alignPriceCount;
- }
- }
-
- reps[3] = reps[2];
- reps[2] = reps[1];
- reps[1] = reps[0];
- reps[0] = dist;
-
- --distPriceCount;
- }
-
- private void encodeRepMatch(int rep, int len, int posState) {
- if (rep == 0) {
- rc.encodeBit(isRep0, state.get(), 0);
- rc.encodeBit(isRep0Long[state.get()], posState, len == 1 ? 0 : 1);
- } else {
- int dist = reps[rep];
- rc.encodeBit(isRep0, state.get(), 1);
-
- if (rep == 1) {
- rc.encodeBit(isRep1, state.get(), 0);
- } else {
- rc.encodeBit(isRep1, state.get(), 1);
- rc.encodeBit(isRep2, state.get(), rep - 2);
-
- if (rep == 3)
- reps[3] = reps[2];
-
- reps[2] = reps[1];
- }
-
- reps[1] = reps[0];
- reps[0] = dist;
- }
-
- if (len == 1) {
- state.updateShortRep();
- } else {
- repLenEncoder.encode(len, posState);
- state.updateLongRep();
- }
- }
-
- Matches getMatches() {
- ++readAhead;
- Matches matches = lz.getMatches();
- assert lz.verifyMatches(matches);
- return matches;
- }
-
- void skip(int len) {
- readAhead += len;
- lz.skip(len);
- }
-
- int getAnyMatchPrice(State state, int posState) {
- return RangeEncoder.getBitPrice(isMatch[state.get()][posState], 1);
- }
-
- int getNormalMatchPrice(int anyMatchPrice, State state) {
- return anyMatchPrice
- + RangeEncoder.getBitPrice(isRep[state.get()], 0);
- }
-
- int getAnyRepPrice(int anyMatchPrice, State state) {
- return anyMatchPrice
- + RangeEncoder.getBitPrice(isRep[state.get()], 1);
- }
-
- int getShortRepPrice(int anyRepPrice, State state, int posState) {
- return anyRepPrice
- + RangeEncoder.getBitPrice(isRep0[state.get()], 0)
- + RangeEncoder.getBitPrice(isRep0Long[state.get()][posState],
- 0);
- }
-
- int getLongRepPrice(int anyRepPrice, int rep, State state, int posState) {
- int price = anyRepPrice;
-
- if (rep == 0) {
- price += RangeEncoder.getBitPrice(isRep0[state.get()], 0)
- + RangeEncoder.getBitPrice(
- isRep0Long[state.get()][posState], 1);
- } else {
- price += RangeEncoder.getBitPrice(isRep0[state.get()], 1);
-
- if (rep == 1)
- price += RangeEncoder.getBitPrice(isRep1[state.get()], 0);
- else
- price += RangeEncoder.getBitPrice(isRep1[state.get()], 1)
- + RangeEncoder.getBitPrice(isRep2[state.get()],
- rep - 2);
- }
-
- return price;
- }
-
- int getLongRepAndLenPrice(int rep, int len, State state, int posState) {
- int anyMatchPrice = getAnyMatchPrice(state, posState);
- int anyRepPrice = getAnyRepPrice(anyMatchPrice, state);
- int longRepPrice = getLongRepPrice(anyRepPrice, rep, state, posState);
- return longRepPrice + repLenEncoder.getPrice(len, posState);
- }
-
- int getMatchAndLenPrice(int normalMatchPrice,
- int dist, int len, int posState) {
- int price = normalMatchPrice
- + matchLenEncoder.getPrice(len, posState);
- int distState = getDistState(len);
-
- if (dist < FULL_DISTANCES) {
- price += fullDistPrices[distState][dist];
- } else {
-            // Note that distSlotPrices also includes
- // the price of direct bits.
- int distSlot = getDistSlot(dist);
- price += distSlotPrices[distState][distSlot]
- + alignPrices[dist & ALIGN_MASK];
- }
-
- return price;
- }
-
- private void updateDistPrices() {
- distPriceCount = DIST_PRICE_UPDATE_INTERVAL;
-
- for (int distState = 0; distState < DIST_STATES; ++distState) {
- for (int distSlot = 0; distSlot < distSlotPricesSize; ++distSlot)
- distSlotPrices[distState][distSlot]
- = RangeEncoder.getBitTreePrice(
- distSlots[distState], distSlot);
-
- for (int distSlot = DIST_MODEL_END; distSlot < distSlotPricesSize;
- ++distSlot) {
- int count = (distSlot >>> 1) - 1 - ALIGN_BITS;
- distSlotPrices[distState][distSlot]
- += RangeEncoder.getDirectBitsPrice(count);
- }
-
- for (int dist = 0; dist < DIST_MODEL_START; ++dist)
- fullDistPrices[distState][dist]
- = distSlotPrices[distState][dist];
- }
-
- int dist = DIST_MODEL_START;
- for (int distSlot = DIST_MODEL_START; distSlot < DIST_MODEL_END;
- ++distSlot) {
- int footerBits = (distSlot >>> 1) - 1;
- int base = (2 | (distSlot & 1)) << footerBits;
-
- int limit = distSpecial[distSlot - DIST_MODEL_START].length;
- for (int i = 0; i < limit; ++i) {
- int distReduced = dist - base;
- int price = RangeEncoder.getReverseBitTreePrice(
- distSpecial[distSlot - DIST_MODEL_START],
- distReduced);
-
- for (int distState = 0; distState < DIST_STATES; ++distState)
- fullDistPrices[distState][dist]
- = distSlotPrices[distState][distSlot] + price;
-
- ++dist;
- }
- }
-
- assert dist == FULL_DISTANCES;
- }
-
- private void updateAlignPrices() {
- alignPriceCount = ALIGN_PRICE_UPDATE_INTERVAL;
-
- for (int i = 0; i < ALIGN_SIZE; ++i)
- alignPrices[i] = RangeEncoder.getReverseBitTreePrice(distAlign,
- i);
- }
-
- /**
- * Updates the lookup tables used for calculating match distance
- * and length prices. The updating is skipped for performance reasons
- * if the tables haven't changed much since the previous update.
- */
- void updatePrices() {
- if (distPriceCount <= 0)
- updateDistPrices();
-
- if (alignPriceCount <= 0)
- updateAlignPrices();
-
- matchLenEncoder.updatePrices();
- repLenEncoder.updatePrices();
- }
-
-
- class LiteralEncoder extends LiteralCoder {
- private final LiteralSubencoder[] subencoders;
-
- LiteralEncoder(int lc, int lp) {
- super(lc, lp);
-
- subencoders = new LiteralSubencoder[1 << (lc + lp)];
- for (int i = 0; i < subencoders.length; ++i)
- subencoders[i] = new LiteralSubencoder();
- }
-
- void reset() {
- for (int i = 0; i < subencoders.length; ++i)
- subencoders[i].reset();
- }
-
- void encodeInit() {
- // When encoding the first byte of the stream, there is
- // no previous byte in the dictionary so the encode function
- // wouldn't work.
- assert readAhead >= 0;
- subencoders[0].encode();
- }
-
- void encode() {
- assert readAhead >= 0;
- int i = getSubcoderIndex(lz.getByte(1 + readAhead),
- lz.getPos() - readAhead);
- subencoders[i].encode();
- }
-
- int getPrice(int curByte, int matchByte,
- int prevByte, int pos, State state) {
- int price = RangeEncoder.getBitPrice(
- isMatch[state.get()][pos & posMask], 0);
-
- int i = getSubcoderIndex(prevByte, pos);
- price += state.isLiteral()
- ? subencoders[i].getNormalPrice(curByte)
- : subencoders[i].getMatchedPrice(curByte, matchByte);
-
- return price;
- }
-
- private class LiteralSubencoder extends LiteralSubcoder {
- void encode() {
- int symbol = lz.getByte(readAhead) | 0x100;
-
- if (state.isLiteral()) {
- int subencoderIndex;
- int bit;
-
- do {
- subencoderIndex = symbol >>> 8;
- bit = (symbol >>> 7) & 1;
- rc.encodeBit(probs, subencoderIndex, bit);
- symbol <<= 1;
- } while (symbol < 0x10000);
-
- } else {
- int matchByte = lz.getByte(reps[0] + 1 + readAhead);
- int offset = 0x100;
- int subencoderIndex;
- int matchBit;
- int bit;
-
- do {
- matchByte <<= 1;
- matchBit = matchByte & offset;
- subencoderIndex = offset + matchBit + (symbol >>> 8);
- bit = (symbol >>> 7) & 1;
- rc.encodeBit(probs, subencoderIndex, bit);
- symbol <<= 1;
- offset &= ~(matchByte ^ symbol);
- } while (symbol < 0x10000);
- }
-
- state.updateLiteral();
- }
-
- int getNormalPrice(int symbol) {
- int price = 0;
- int subencoderIndex;
- int bit;
-
- symbol |= 0x100;
-
- do {
- subencoderIndex = symbol >>> 8;
- bit = (symbol >>> 7) & 1;
- price += RangeEncoder.getBitPrice(probs[subencoderIndex],
- bit);
- symbol <<= 1;
- } while (symbol < (0x100 << 8));
-
- return price;
- }
-
- int getMatchedPrice(int symbol, int matchByte) {
- int price = 0;
- int offset = 0x100;
- int subencoderIndex;
- int matchBit;
- int bit;
-
- symbol |= 0x100;
-
- do {
- matchByte <<= 1;
- matchBit = matchByte & offset;
- subencoderIndex = offset + matchBit + (symbol >>> 8);
- bit = (symbol >>> 7) & 1;
- price += RangeEncoder.getBitPrice(probs[subencoderIndex],
- bit);
- symbol <<= 1;
- offset &= ~(matchByte ^ symbol);
- } while (symbol < (0x100 << 8));
-
- return price;
- }
- }
- }
-
-
- class LengthEncoder extends LengthCoder {
- /**
- * The prices are updated after at least
- * <code>PRICE_UPDATE_INTERVAL</code> many lengths
- * have been encoded with the same posState.
- */
- private static final int PRICE_UPDATE_INTERVAL = 32; // FIXME?
-
- private final int[] counters;
- private final int[][] prices;
-
- LengthEncoder(int pb, int niceLen) {
- int posStates = 1 << pb;
- counters = new int[posStates];
-
- // Always allocate at least LOW_SYMBOLS + MID_SYMBOLS because
- // it makes updatePrices slightly simpler. The prices aren't
- // usually needed anyway if niceLen < 18.
- int lenSymbols = Math.max(niceLen - MATCH_LEN_MIN + 1,
- LOW_SYMBOLS + MID_SYMBOLS);
- prices = new int[posStates][lenSymbols];
- }
-
- void reset() {
- super.reset();
-
- // Reset counters to zero to force price update before
- // the prices are needed.
- for (int i = 0; i < counters.length; ++i)
- counters[i] = 0;
- }
-
- void encode(int len, int posState) {
- len -= MATCH_LEN_MIN;
-
- if (len < LOW_SYMBOLS) {
- rc.encodeBit(choice, 0, 0);
- rc.encodeBitTree(low[posState], len);
- } else {
- rc.encodeBit(choice, 0, 1);
- len -= LOW_SYMBOLS;
-
- if (len < MID_SYMBOLS) {
- rc.encodeBit(choice, 1, 0);
- rc.encodeBitTree(mid[posState], len);
- } else {
- rc.encodeBit(choice, 1, 1);
- rc.encodeBitTree(high, len - MID_SYMBOLS);
- }
- }
-
- --counters[posState];
- }
-
- int getPrice(int len, int posState) {
- return prices[posState][len - MATCH_LEN_MIN];
- }
-
- void updatePrices() {
- for (int posState = 0; posState < counters.length; ++posState) {
- if (counters[posState] <= 0) {
- counters[posState] = PRICE_UPDATE_INTERVAL;
- updatePrices(posState);
- }
- }
- }
-
- private void updatePrices(int posState) {
- int choice0Price = RangeEncoder.getBitPrice(choice[0], 0);
-
- int i = 0;
- for (; i < LOW_SYMBOLS; ++i)
- prices[posState][i] = choice0Price
- + RangeEncoder.getBitTreePrice(low[posState], i);
-
- choice0Price = RangeEncoder.getBitPrice(choice[0], 1);
- int choice1Price = RangeEncoder.getBitPrice(choice[1], 0);
-
- for (; i < LOW_SYMBOLS + MID_SYMBOLS; ++i)
- prices[posState][i] = choice0Price + choice1Price
- + RangeEncoder.getBitTreePrice(mid[posState],
- i - LOW_SYMBOLS);
-
- choice1Price = RangeEncoder.getBitPrice(choice[1], 1);
-
- for (; i < prices[posState].length; ++i)
- prices[posState][i] = choice0Price + choice1Price
- + RangeEncoder.getBitTreePrice(high, i - LOW_SYMBOLS
- - MID_SYMBOLS);
- }
- }
-}
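
The LengthEncoder above routes each match length into one of three bit trees (low, mid, high). As a purely illustrative, standalone sketch — assuming the usual LZMA values MATCH_LEN_MIN = 2 and LOW_SYMBOLS = MID_SYMBOLS = 8, which are defined elsewhere in this diff — the mapping from a match length to its group and tree index works out like this:

// Hypothetical standalone sketch (not part of the library) mirroring the
// branch structure of LengthEncoder.encode() above, assuming
// MATCH_LEN_MIN = 2 and LOW_SYMBOLS = MID_SYMBOLS = 8.
public class LengthSlotDemo {
    static String slot(int len) {
        int l = len - 2;                    // len -= MATCH_LEN_MIN
        if (l < 8)
            return "low[" + l + "]";        // choice bit 0 + 3-bit tree
        l -= 8;
        if (l < 8)
            return "mid[" + l + "]";        // choice bits 1,0 + 3-bit tree
        return "high[" + (l - 8) + "]";     // choice bits 1,1 + 8-bit tree
    }

    public static void main(String[] args) {
        // Prints low[0], low[7], mid[0], mid[7], high[0], high[255].
        for (int len : new int[] { 2, 9, 10, 17, 18, 273 })
            System.out.println(len + " -> " + slot(len));
    }
}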
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderFast.java b/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderFast.java
deleted file mode 100644
index 072dd09..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderFast.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * LZMAEncoderFast
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-import org.tukaani.xz.lz.LZEncoder;
-import org.tukaani.xz.lz.Matches;
-import org.tukaani.xz.rangecoder.RangeEncoder;
-
-final class LZMAEncoderFast extends LZMAEncoder {
- private static final int EXTRA_SIZE_BEFORE = 1;
- private static final int EXTRA_SIZE_AFTER = MATCH_LEN_MAX - 1;
-
- private Matches matches = null;
-
- static int getMemoryUsage(int dictSize, int extraSizeBefore, int mf) {
- return LZEncoder.getMemoryUsage(
- dictSize, Math.max(extraSizeBefore, EXTRA_SIZE_BEFORE),
- EXTRA_SIZE_AFTER, MATCH_LEN_MAX, mf);
- }
-
- LZMAEncoderFast(RangeEncoder rc, int lc, int lp, int pb,
- int dictSize, int extraSizeBefore,
- int niceLen, int mf, int depthLimit) {
- super(rc, LZEncoder.getInstance(dictSize,
- Math.max(extraSizeBefore,
- EXTRA_SIZE_BEFORE),
- EXTRA_SIZE_AFTER,
- niceLen, MATCH_LEN_MAX,
- mf, depthLimit),
- lc, lp, pb, dictSize, niceLen);
- }
-
- private boolean changePair(int smallDist, int bigDist) {
- return smallDist < (bigDist >>> 7);
- }
-
- int getNextSymbol() {
- // Get the matches for the next byte unless readAhead indicates
- // that we already got the new matches during the previous call
- // to this function.
- if (readAhead == -1)
- matches = getMatches();
-
- back = -1;
-
- // Get the number of bytes available in the dictionary, but
- // not more than the maximum match length. If there aren't
- // enough bytes remaining to encode a match at all, return
- // immediately to encode this byte as a literal.
- int avail = Math.min(lz.getAvail(), MATCH_LEN_MAX);
- if (avail < MATCH_LEN_MIN)
- return 1;
-
- // Look for a match from the previous four match distances.
- int bestRepLen = 0;
- int bestRepIndex = 0;
- for (int rep = 0; rep < REPS; ++rep) {
- int len = lz.getMatchLen(reps[rep], avail);
- if (len < MATCH_LEN_MIN)
- continue;
-
- // If it is long enough, return it.
- if (len >= niceLen) {
- back = rep;
- skip(len - 1);
- return len;
- }
-
- // Remember the index and length of the best repeated match.
- if (len > bestRepLen) {
- bestRepIndex = rep;
- bestRepLen = len;
- }
- }
-
- int mainLen = 0;
- int mainDist = 0;
-
- if (matches.count > 0) {
- mainLen = matches.len[matches.count - 1];
- mainDist = matches.dist[matches.count - 1];
-
- if (mainLen >= niceLen) {
- back = mainDist + REPS;
- skip(mainLen - 1);
- return mainLen;
- }
-
- while (matches.count > 1
- && mainLen == matches.len[matches.count - 2] + 1) {
- if (!changePair(matches.dist[matches.count - 2], mainDist))
- break;
-
- --matches.count;
- mainLen = matches.len[matches.count - 1];
- mainDist = matches.dist[matches.count - 1];
- }
-
- if (mainLen == MATCH_LEN_MIN && mainDist >= 0x80)
- mainLen = 1;
- }
-
- if (bestRepLen >= MATCH_LEN_MIN) {
- if (bestRepLen + 1 >= mainLen
- || (bestRepLen + 2 >= mainLen && mainDist >= (1 << 9))
- || (bestRepLen + 3 >= mainLen && mainDist >= (1 << 15))) {
- back = bestRepIndex;
- skip(bestRepLen - 1);
- return bestRepLen;
- }
- }
-
- if (mainLen < MATCH_LEN_MIN || avail <= MATCH_LEN_MIN)
- return 1;
-
- // Get the next match. Test if it is better than the current match.
- // If so, encode the current byte as a literal.
- matches = getMatches();
-
- if (matches.count > 0) {
- int newLen = matches.len[matches.count - 1];
- int newDist = matches.dist[matches.count - 1];
-
- if ((newLen >= mainLen && newDist < mainDist)
- || (newLen == mainLen + 1
- && !changePair(mainDist, newDist))
- || newLen > mainLen + 1
- || (newLen + 1 >= mainLen
- && mainLen >= MATCH_LEN_MIN + 1
- && changePair(newDist, mainDist)))
- return 1;
- }
-
- int limit = Math.max(mainLen - 1, MATCH_LEN_MIN);
- for (int rep = 0; rep < REPS; ++rep)
- if (lz.getMatchLen(reps[rep], limit) == limit)
- return 1;
-
- back = mainDist + REPS;
- skip(mainLen - 2);
- return mainLen;
- }
-}
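
The fast encoder's changePair() heuristic above is terse: a shorter candidate match is preferred over a longer one only when its distance is less than 1/128 (bigDist >>> 7) of the longer match's distance. A purely illustrative restatement with made-up numbers:

// Illustrative restatement of the changePair() heuristic above; the class
// name and the sample distances are invented for this sketch only.
public class ChangePairDemo {
    static boolean changePair(int smallDist, int bigDist) {
        return smallDist < (bigDist >>> 7);
    }

    public static void main(String[] args) {
        System.out.println(changePair(500, 100000));   // true:  500 < 781
        System.out.println(changePair(2000, 100000));  // false: 2000 >= 781
    }
}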
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderNormal.java b/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderNormal.java
deleted file mode 100644
index 104afe3..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/LZMAEncoderNormal.java
+++ /dev/null
@@ -1,566 +0,0 @@
-/*
- * LZMAEncoderNormal
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-import org.tukaani.xz.lz.LZEncoder;
-import org.tukaani.xz.lz.Matches;
-import org.tukaani.xz.rangecoder.RangeEncoder;
-
-final class LZMAEncoderNormal extends LZMAEncoder {
- private static final int OPTS = 4096;
-
- private static final int EXTRA_SIZE_BEFORE = OPTS;
- private static final int EXTRA_SIZE_AFTER = OPTS;
-
- private final Optimum[] opts = new Optimum[OPTS];
- private int optCur = 0;
- private int optEnd = 0;
-
- private Matches matches;
-
- // These are fields solely to avoid allocating the objects again and
- // again on each function call.
- private final int[] repLens = new int[REPS];
- private final State nextState = new State();
-
- static int getMemoryUsage(int dictSize, int extraSizeBefore, int mf) {
- return LZEncoder.getMemoryUsage(dictSize,
- Math.max(extraSizeBefore, EXTRA_SIZE_BEFORE),
- EXTRA_SIZE_AFTER, MATCH_LEN_MAX, mf)
- + OPTS * 64 / 1024;
- }
-
- LZMAEncoderNormal(RangeEncoder rc, int lc, int lp, int pb,
- int dictSize, int extraSizeBefore,
- int niceLen, int mf, int depthLimit) {
- super(rc, LZEncoder.getInstance(dictSize,
- Math.max(extraSizeBefore,
- EXTRA_SIZE_BEFORE),
- EXTRA_SIZE_AFTER,
- niceLen, MATCH_LEN_MAX,
- mf, depthLimit),
- lc, lp, pb, dictSize, niceLen);
-
- for (int i = 0; i < OPTS; ++i)
- opts[i] = new Optimum();
- }
-
- public void reset() {
- optCur = 0;
- optEnd = 0;
- super.reset();
- }
-
- /**
- * Converts the opts array from backward indexes to forward indexes.
- * Then it will be simple to get the next symbol from the array
- * in later calls to <code>getNextSymbol()</code>.
- */
- private int convertOpts() {
- optEnd = optCur;
-
- int optPrev = opts[optCur].optPrev;
-
- do {
- Optimum opt = opts[optCur];
-
- if (opt.prev1IsLiteral) {
- opts[optPrev].optPrev = optCur;
- opts[optPrev].backPrev = -1;
- optCur = optPrev--;
-
- if (opt.hasPrev2) {
- opts[optPrev].optPrev = optPrev + 1;
- opts[optPrev].backPrev = opt.backPrev2;
- optCur = optPrev;
- optPrev = opt.optPrev2;
- }
- }
-
- int temp = opts[optPrev].optPrev;
- opts[optPrev].optPrev = optCur;
- optCur = optPrev;
- optPrev = temp;
- } while (optCur > 0);
-
- optCur = opts[0].optPrev;
- back = opts[optCur].backPrev;
- return optCur;
- }
-
- int getNextSymbol() {
- // If there are pending symbols from an earlier call to this
- // function, return those symbols first.
- if (optCur < optEnd) {
- int len = opts[optCur].optPrev - optCur;
- optCur = opts[optCur].optPrev;
- back = opts[optCur].backPrev;
- return len;
- }
-
- assert optCur == optEnd;
- optCur = 0;
- optEnd = 0;
- back = -1;
-
- if (readAhead == -1)
- matches = getMatches();
-
- // Get the number of bytes available in the dictionary, but
- // not more than the maximum match length. If there aren't
- // enough bytes remaining to encode a match at all, return
- // immediately to encode this byte as a literal.
- int avail = Math.min(lz.getAvail(), MATCH_LEN_MAX);
- if (avail < MATCH_LEN_MIN)
- return 1;
-
- // Get the lengths of repeated matches.
- int repBest = 0;
- for (int rep = 0; rep < REPS; ++rep) {
- repLens[rep] = lz.getMatchLen(reps[rep], avail);
-
- if (repLens[rep] < MATCH_LEN_MIN) {
- repLens[rep] = 0;
- continue;
- }
-
- if (repLens[rep] > repLens[repBest])
- repBest = rep;
- }
-
- // Return if the best repeated match is at least niceLen bytes long.
- if (repLens[repBest] >= niceLen) {
- back = repBest;
- skip(repLens[repBest] - 1);
- return repLens[repBest];
- }
-
- // Initialize mainLen and mainDist to the longest match found
- // by the match finder.
- int mainLen = 0;
- int mainDist = 0;
- if (matches.count > 0) {
- mainLen = matches.len[matches.count - 1];
- mainDist = matches.dist[matches.count - 1];
-
- // Return if it is at least niceLen bytes long.
- if (mainLen >= niceLen) {
- back = mainDist + REPS;
- skip(mainLen - 1);
- return mainLen;
- }
- }
-
- int curByte = lz.getByte(0);
- int matchByte = lz.getByte(reps[0] + 1);
-
- // If the match finder found no matches and this byte cannot be
- // encoded as a repeated match (short or long), we must return
- // to have the byte encoded as a literal.
- if (mainLen < MATCH_LEN_MIN && curByte != matchByte
- && repLens[repBest] < MATCH_LEN_MIN)
- return 1;
-
-
- int pos = lz.getPos();
- int posState = pos & posMask;
-
- // Calculate the price of encoding the current byte as a literal.
- {
- int prevByte = lz.getByte(1);
- int literalPrice = literalEncoder.getPrice(curByte, matchByte,
- prevByte, pos, state);
- opts[1].set1(literalPrice, 0, -1);
- }
-
- int anyMatchPrice = getAnyMatchPrice(state, posState);
- int anyRepPrice = getAnyRepPrice(anyMatchPrice, state);
-
- // If it is possible to encode this byte as a short rep, see if
- // it is cheaper than encoding it as a literal.
- if (matchByte == curByte) {
- int shortRepPrice = getShortRepPrice(anyRepPrice,
- state, posState);
- if (shortRepPrice < opts[1].price)
- opts[1].set1(shortRepPrice, 0, 0);
- }
-
- // Return if there is neither a normal nor a long repeated match. Use
- // a short match instead of a literal if it is possible and cheaper.
- optEnd = Math.max(mainLen, repLens[repBest]);
- if (optEnd < MATCH_LEN_MIN) {
- assert optEnd == 0 : optEnd;
- back = opts[1].backPrev;
- return 1;
- }
-
-
- // Update the lookup tables for distances and lengths before using
- // those price calculation functions. (The price functions above
- // don't need these tables.)
- updatePrices();
-
- // Initialize the state and reps of this position in opts[].
- // updateOptStateAndReps() will need these to get the new
- // state and reps for the next byte.
- opts[0].state.set(state);
- System.arraycopy(reps, 0, opts[0].reps, 0, REPS);
-
- // Initialize the prices for later opts that will be used below.
- for (int i = optEnd; i >= MATCH_LEN_MIN; --i)
- opts[i].reset();
-
- // Calculate the prices of repeated matches of all lengths.
- for (int rep = 0; rep < REPS; ++rep) {
- int repLen = repLens[rep];
- if (repLen < MATCH_LEN_MIN)
- continue;
-
- int longRepPrice = getLongRepPrice(anyRepPrice, rep,
- state, posState);
- do {
- int price = longRepPrice + repLenEncoder.getPrice(repLen,
- posState);
- if (price < opts[repLen].price)
- opts[repLen].set1(price, 0, rep);
- } while (--repLen >= MATCH_LEN_MIN);
- }
-
- // Calculate the prices of normal matches that are longer than rep0.
- {
- int len = Math.max(repLens[0] + 1, MATCH_LEN_MIN);
- if (len <= mainLen) {
- int normalMatchPrice = getNormalMatchPrice(anyMatchPrice,
- state);
-
- // Set i to the index of the shortest match that is
- // at least len bytes long.
- int i = 0;
- while (len > matches.len[i])
- ++i;
-
- while (true) {
- int dist = matches.dist[i];
- int price = getMatchAndLenPrice(normalMatchPrice,
- dist, len, posState);
- if (price < opts[len].price)
- opts[len].set1(price, 0, dist + REPS);
-
- if (len == matches.len[i])
- if (++i == matches.count)
- break;
-
- ++len;
- }
- }
- }
-
-
- avail = Math.min(lz.getAvail(), OPTS - 1);
-
- // Get matches for later bytes and optimize the use of LZMA symbols
- // by calculating the prices and picking the cheapest symbol
- // combinations.
- while (++optCur < optEnd) {
- matches = getMatches();
- if (matches.count > 0
- && matches.len[matches.count - 1] >= niceLen)
- break;
-
- --avail;
- ++pos;
- posState = pos & posMask;
-
- updateOptStateAndReps();
- anyMatchPrice = opts[optCur].price
- + getAnyMatchPrice(opts[optCur].state, posState);
- anyRepPrice = getAnyRepPrice(anyMatchPrice, opts[optCur].state);
-
- calc1BytePrices(pos, posState, avail, anyRepPrice);
-
- if (avail >= MATCH_LEN_MIN) {
- int startLen = calcLongRepPrices(pos, posState,
- avail, anyRepPrice);
- if (matches.count > 0)
- calcNormalMatchPrices(pos, posState, avail,
- anyMatchPrice, startLen);
- }
- }
-
- return convertOpts();
- }
-
- /**
- * Updates the state and reps for the current byte in the opts array.
- */
- private void updateOptStateAndReps() {
- int optPrev = opts[optCur].optPrev;
- assert optPrev < optCur;
-
- if (opts[optCur].prev1IsLiteral) {
- --optPrev;
-
- if (opts[optCur].hasPrev2) {
- opts[optCur].state.set(opts[opts[optCur].optPrev2].state);
- if (opts[optCur].backPrev2 < REPS)
- opts[optCur].state.updateLongRep();
- else
- opts[optCur].state.updateMatch();
- } else {
- opts[optCur].state.set(opts[optPrev].state);
- }
-
- opts[optCur].state.updateLiteral();
- } else {
- opts[optCur].state.set(opts[optPrev].state);
- }
-
- if (optPrev == optCur - 1) {
- // Must be either a short rep or a literal.
- assert opts[optCur].backPrev == 0 || opts[optCur].backPrev == -1;
-
- if (opts[optCur].backPrev == 0)
- opts[optCur].state.updateShortRep();
- else
- opts[optCur].state.updateLiteral();
-
- System.arraycopy(opts[optPrev].reps, 0,
- opts[optCur].reps, 0, REPS);
- } else {
- int back;
- if (opts[optCur].prev1IsLiteral && opts[optCur].hasPrev2) {
- optPrev = opts[optCur].optPrev2;
- back = opts[optCur].backPrev2;
- opts[optCur].state.updateLongRep();
- } else {
- back = opts[optCur].backPrev;
- if (back < REPS)
- opts[optCur].state.updateLongRep();
- else
- opts[optCur].state.updateMatch();
- }
-
- if (back < REPS) {
- opts[optCur].reps[0] = opts[optPrev].reps[back];
-
- int rep;
- for (rep = 1; rep <= back; ++rep)
- opts[optCur].reps[rep] = opts[optPrev].reps[rep - 1];
-
- for (; rep < REPS; ++rep)
- opts[optCur].reps[rep] = opts[optPrev].reps[rep];
- } else {
- opts[optCur].reps[0] = back - REPS;
- System.arraycopy(opts[optPrev].reps, 0,
- opts[optCur].reps, 1, REPS - 1);
- }
- }
- }
-
- /**
- * Calculates prices of a literal, a short rep, and literal + rep0.
- */
- private void calc1BytePrices(int pos, int posState,
- int avail, int anyRepPrice) {
- // This will be set to true if using a literal or a short rep.
- boolean nextIsByte = false;
-
- int curByte = lz.getByte(0);
- int matchByte = lz.getByte(opts[optCur].reps[0] + 1);
-
- // Try a literal.
- int literalPrice = opts[optCur].price
- + literalEncoder.getPrice(curByte, matchByte, lz.getByte(1),
- pos, opts[optCur].state);
- if (literalPrice < opts[optCur + 1].price) {
- opts[optCur + 1].set1(literalPrice, optCur, -1);
- nextIsByte = true;
- }
-
- // Try a short rep.
- if (matchByte == curByte && (opts[optCur + 1].optPrev == optCur
- || opts[optCur + 1].backPrev != 0)) {
- int shortRepPrice = getShortRepPrice(anyRepPrice,
- opts[optCur].state,
- posState);
- if (shortRepPrice <= opts[optCur + 1].price) {
- opts[optCur + 1].set1(shortRepPrice, optCur, 0);
- nextIsByte = true;
- }
- }
-
- // If neither a literal nor a short rep was the cheapest choice,
- // try literal + long rep0.
- if (!nextIsByte && matchByte != curByte && avail > MATCH_LEN_MIN) {
- int lenLimit = Math.min(niceLen, avail - 1);
- int len = lz.getMatchLen(1, opts[optCur].reps[0], lenLimit);
-
- if (len >= MATCH_LEN_MIN) {
- nextState.set(opts[optCur].state);
- nextState.updateLiteral();
- int nextPosState = (pos + 1) & posMask;
- int price = literalPrice
- + getLongRepAndLenPrice(0, len,
- nextState, nextPosState);
-
- int i = optCur + 1 + len;
- while (optEnd < i)
- opts[++optEnd].reset();
-
- if (price < opts[i].price)
- opts[i].set2(price, optCur, 0);
- }
- }
- }
-
- /**
- * Calculates prices of long rep and long rep + literal + rep0.
- */
- private int calcLongRepPrices(int pos, int posState,
- int avail, int anyRepPrice) {
- int startLen = MATCH_LEN_MIN;
- int lenLimit = Math.min(avail, niceLen);
-
- for (int rep = 0; rep < REPS; ++rep) {
- int len = lz.getMatchLen(opts[optCur].reps[rep], lenLimit);
- if (len < MATCH_LEN_MIN)
- continue;
-
- while (optEnd < optCur + len)
- opts[++optEnd].reset();
-
- int longRepPrice = getLongRepPrice(anyRepPrice, rep,
- opts[optCur].state, posState);
-
- for (int i = len; i >= MATCH_LEN_MIN; --i) {
- int price = longRepPrice
- + repLenEncoder.getPrice(i, posState);
- if (price < opts[optCur + i].price)
- opts[optCur + i].set1(price, optCur, rep);
- }
-
- if (rep == 0)
- startLen = len + 1;
-
- int len2Limit = Math.min(niceLen, avail - len - 1);
- int len2 = lz.getMatchLen(len + 1, opts[optCur].reps[rep],
- len2Limit);
-
- if (len2 >= MATCH_LEN_MIN) {
- // Rep
- int price = longRepPrice
- + repLenEncoder.getPrice(len, posState);
- nextState.set(opts[optCur].state);
- nextState.updateLongRep();
-
- // Literal
- int curByte = lz.getByte(len, 0);
- int matchByte = lz.getByte(0); // lz.getByte(len, len)
- int prevByte = lz.getByte(len, 1);
- price += literalEncoder.getPrice(curByte, matchByte, prevByte,
- pos + len, nextState);
- nextState.updateLiteral();
-
- // Rep0
- int nextPosState = (pos + len + 1) & posMask;
- price += getLongRepAndLenPrice(0, len2,
- nextState, nextPosState);
-
- int i = optCur + len + 1 + len2;
- while (optEnd < i)
- opts[++optEnd].reset();
-
- if (price < opts[i].price)
- opts[i].set3(price, optCur, rep, len, 0);
- }
- }
-
- return startLen;
- }
-
- /**
- * Calculates prices of a normal match and normal match + literal + rep0.
- */
- private void calcNormalMatchPrices(int pos, int posState, int avail,
- int anyMatchPrice, int startLen) {
- // If the longest match is so long that it would not fit into
- // the opts array, shorten the matches.
- if (matches.len[matches.count - 1] > avail) {
- matches.count = 0;
- while (matches.len[matches.count] < avail)
- ++matches.count;
-
- matches.len[matches.count++] = avail;
- }
-
- if (matches.len[matches.count - 1] < startLen)
- return;
-
- while (optEnd < optCur + matches.len[matches.count - 1])
- opts[++optEnd].reset();
-
- int normalMatchPrice = getNormalMatchPrice(anyMatchPrice,
- opts[optCur].state);
-
- int match = 0;
- while (startLen > matches.len[match])
- ++match;
-
- for (int len = startLen; ; ++len) {
- int dist = matches.dist[match];
-
- // Calculate the price of a match of len bytes from the nearest
- // possible distance.
- int matchAndLenPrice = getMatchAndLenPrice(normalMatchPrice,
- dist, len, posState);
- if (matchAndLenPrice < opts[optCur + len].price)
- opts[optCur + len].set1(matchAndLenPrice,
- optCur, dist + REPS);
-
- if (len != matches.len[match])
- continue;
-
- // Try match + literal + rep0. First get the length of the rep0.
- int len2Limit = Math.min(niceLen, avail - len - 1);
- int len2 = lz.getMatchLen(len + 1, dist, len2Limit);
-
- if (len2 >= MATCH_LEN_MIN) {
- nextState.set(opts[optCur].state);
- nextState.updateMatch();
-
- // Literal
- int curByte = lz.getByte(len, 0);
- int matchByte = lz.getByte(0); // lz.getByte(len, len)
- int prevByte = lz.getByte(len, 1);
- int price = matchAndLenPrice
- + literalEncoder.getPrice(curByte, matchByte,
- prevByte, pos + len,
- nextState);
- nextState.updateLiteral();
-
- // Rep0
- int nextPosState = (pos + len + 1) & posMask;
- price += getLongRepAndLenPrice(0, len2,
- nextState, nextPosState);
-
- int i = optCur + len + 1 + len2;
- while (optEnd < i)
- opts[++optEnd].reset();
-
- if (price < opts[i].price)
- opts[i].set3(price, optCur, dist + REPS, len, 0);
- }
-
- if (++match == matches.count)
- break;
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/Optimum.java b/Java/Tukaani/src/org/tukaani/xz/lzma/Optimum.java
deleted file mode 100644
index 845ac97..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/Optimum.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Optimum
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-final class Optimum {
- private static final int INFINITY_PRICE = 1 << 30;
-
- final State state = new State();
- final int[] reps = new int[LZMACoder.REPS];
-
- /**
- * Cumulative price of arriving to this byte.
- */
- int price;
-
- int optPrev;
- int backPrev;
- boolean prev1IsLiteral;
-
- boolean hasPrev2;
- int optPrev2;
- int backPrev2;
-
- /**
- * Resets the price.
- */
- void reset() {
- price = INFINITY_PRICE;
- }
-
- /**
- * Sets to indicate one LZMA symbol (literal, rep, or match).
- */
- void set1(int newPrice, int optCur, int back) {
- price = newPrice;
- optPrev = optCur;
- backPrev = back;
- prev1IsLiteral = false;
- }
-
- /**
- * Sets to indicate two LZMA symbols of which the first one is a literal.
- */
- void set2(int newPrice, int optCur, int back) {
- price = newPrice;
- optPrev = optCur + 1;
- backPrev = back;
- prev1IsLiteral = true;
- hasPrev2 = false;
- }
-
- /**
- * Sets to indicate three LZMA symbols of which the second one
- * is a literal.
- */
- void set3(int newPrice, int optCur, int back2, int len2, int back) {
- price = newPrice;
- optPrev = optCur + len2 + 1;
- backPrev = back;
- prev1IsLiteral = true;
- hasPrev2 = true;
- optPrev2 = optCur;
- backPrev2 = back2;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/lzma/State.java b/Java/Tukaani/src/org/tukaani/xz/lzma/State.java
deleted file mode 100644
index 0ece860..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/lzma/State.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * State
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.lzma;
-
-final class State {
- static final int STATES = 12;
-
- private static final int LIT_STATES = 7;
-
- private static final int LIT_LIT = 0;
- private static final int MATCH_LIT_LIT = 1;
- private static final int REP_LIT_LIT = 2;
- private static final int SHORTREP_LIT_LIT = 3;
- private static final int MATCH_LIT = 4;
- private static final int REP_LIT = 5;
- private static final int SHORTREP_LIT = 6;
- private static final int LIT_MATCH = 7;
- private static final int LIT_LONGREP = 8;
- private static final int LIT_SHORTREP = 9;
- private static final int NONLIT_MATCH = 10;
- private static final int NONLIT_REP = 11;
-
- private int state;
-
- State() {}
-
- State(State other) {
- state = other.state;
- }
-
- void reset() {
- state = LIT_LIT;
- }
-
- int get() {
- return state;
- }
-
- void set(State other) {
- state = other.state;
- }
-
- void updateLiteral() {
- if (state <= SHORTREP_LIT_LIT)
- state = LIT_LIT;
- else if (state <= LIT_SHORTREP)
- state -= 3;
- else
- state -= 6;
- }
-
- void updateMatch() {
- state = state < LIT_STATES ? LIT_MATCH : NONLIT_MATCH;
- }
-
- void updateLongRep() {
- state = state < LIT_STATES ? LIT_LONGREP : NONLIT_REP;
- }
-
- void updateShortRep() {
- state = state < LIT_STATES ? LIT_SHORTREP : NONLIT_REP;
- }
-
- boolean isLiteral() {
- return state < LIT_STATES;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/package-info.java b/Java/Tukaani/src/org/tukaani/xz/package-info.java
deleted file mode 100644
index 6d7c1b7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/package-info.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * XZ data compression support.
- *
- * <h4>Introduction</h4>
- * <p>
- * This aims to be a complete implementation of XZ data compression
- * in pure Java. Features:
- * <ul>
- * <li>Full support for the .xz file format specification version 1.0.4</li>
- * <li>Single-threaded streamed compression and decompression</li>
- * <li>Single-threaded decompression with limited random access support</li>
- * <li>Raw streams (no .xz headers) for advanced users, including LZMA2
- * with preset dictionary</li>
- * </ul>
- * <p>
- * Threading is planned but it is unknown when it will be implemented.
- * <p>
- * For the latest source code, see the
- * <a href="http://tukaani.org/xz/java.html">home page of XZ for Java</a>.
- *
- * <h4>Getting started</h4>
- * <p>
- * Start by reading the documentation of {@link org.tukaani.xz.XZOutputStream}
- * and {@link org.tukaani.xz.XZInputStream}.
- * If you use XZ inside another file format or protocol,
- * see also {@link org.tukaani.xz.SingleXZInputStream}.
- *
- * <h4>Licensing</h4>
- * <p>
- * XZ for Java has been put into the public domain, thus you can do
- * whatever you want with it. All the files in the package have been
- * written by Lasse Collin and/or Igor Pavlov.
- * <p>
- * This software is provided "as is", without any warranty.
- */
-package org.tukaani.xz;
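
Since the package documentation above points at XZOutputStream and XZInputStream as the entry points, a minimal round-trip sketch may help orient readers. It assumes only those two public classes plus LZMA2Options from the same package (LZMA2Options does not appear in this part of the diff), and it is a sketch rather than an official example:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.tukaani.xz.LZMA2Options;
import org.tukaani.xz.XZInputStream;
import org.tukaani.xz.XZOutputStream;

public class XZRoundTrip {
    public static void main(String[] args) throws IOException {
        byte[] original = "hello, xz".getBytes("UTF-8");

        // Compress into an in-memory .xz container with the default preset.
        ByteArrayOutputStream xz = new ByteArrayOutputStream();
        try (XZOutputStream out = new XZOutputStream(xz, new LZMA2Options())) {
            out.write(original);
        }

        // Decompress it again and print the recovered text.
        try (XZInputStream in =
                 new XZInputStream(new ByteArrayInputStream(xz.toByteArray()))) {
            ByteArrayOutputStream plain = new ByteArrayOutputStream();
            byte[] buf = new byte[4096];
            for (int n; (n = in.read(buf)) != -1; )
                plain.write(buf, 0, n);
            System.out.println(plain.toString("UTF-8"));
        }
    }
}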
diff --git a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeCoder.java b/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeCoder.java
deleted file mode 100644
index df9b0c4..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeCoder.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * RangeCoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.rangecoder;
-
-import java.util.Arrays;
-
-public abstract class RangeCoder {
- static final int SHIFT_BITS = 8;
- static final int TOP_MASK = 0xFF000000;
- static final int BIT_MODEL_TOTAL_BITS = 11;
- static final int BIT_MODEL_TOTAL = 1 << BIT_MODEL_TOTAL_BITS;
- static final short PROB_INIT = (short)(BIT_MODEL_TOTAL / 2);
- static final int MOVE_BITS = 5;
-
- public static final void initProbs(short[] probs) {
- Arrays.fill(probs, PROB_INIT);
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoder.java b/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoder.java
deleted file mode 100644
index e63532e..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoder.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * RangeDecoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.rangecoder;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-public abstract class RangeDecoder extends RangeCoder {
- int range = 0;
- int code = 0;
-
- public abstract void normalize() throws IOException;
-
- public int decodeBit(short[] probs, int index) throws IOException {
- normalize();
-
- int prob = probs[index];
- int bound = (range >>> BIT_MODEL_TOTAL_BITS) * prob;
- int bit;
-
- // Compare code and bound as if they were unsigned 32-bit integers.
- if ((code ^ 0x80000000) < (bound ^ 0x80000000)) {
- range = bound;
- probs[index] = (short)(
- prob + ((BIT_MODEL_TOTAL - prob) >>> MOVE_BITS));
- bit = 0;
- } else {
- range -= bound;
- code -= bound;
- probs[index] = (short)(prob - (prob >>> MOVE_BITS));
- bit = 1;
- }
-
- return bit;
- }
-
- public int decodeBitTree(short[] probs) throws IOException {
- int symbol = 1;
-
- do {
- symbol = (symbol << 1) | decodeBit(probs, symbol);
- } while (symbol < probs.length);
-
- return symbol - probs.length;
- }
-
- public int decodeReverseBitTree(short[] probs) throws IOException {
- int symbol = 1;
- int i = 0;
- int result = 0;
-
- do {
- int bit = decodeBit(probs, symbol);
- symbol = (symbol << 1) | bit;
- result |= bit << i++;
- } while (symbol < probs.length);
-
- return result;
- }
-
- public int decodeDirectBits(int count) throws IOException {
- int result = 0;
-
- do {
- normalize();
-
- range >>>= 1;
- int t = (code - range) >>> 31;
- code -= range & (t - 1);
- result = (result << 1) | (1 - t);
- } while (--count != 0);
-
- return result;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromBuffer.java b/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromBuffer.java
deleted file mode 100644
index cac7a7e..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromBuffer.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * RangeDecoderFromBuffer
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.rangecoder;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import org.tukaani.xz.CorruptedInputException;
-
-public final class RangeDecoderFromBuffer extends RangeDecoder {
- private static final int INIT_SIZE = 5;
-
- private final byte[] buf;
- private int pos = 0;
- private int end = 0;
-
- public RangeDecoderFromBuffer(int inputSizeMax) {
- buf = new byte[inputSizeMax - INIT_SIZE];
- }
-
- public void prepareInputBuffer(DataInputStream in, int len)
- throws IOException {
- if (len < INIT_SIZE)
- throw new CorruptedInputException();
-
- if (in.readUnsignedByte() != 0x00)
- throw new CorruptedInputException();
-
- code = in.readInt();
- range = 0xFFFFFFFF;
-
- pos = 0;
- end = len - INIT_SIZE;
- in.readFully(buf, 0, end);
- }
-
- public boolean isInBufferOK() {
- return pos <= end;
- }
-
- public boolean isFinished() {
- return pos == end && code == 0;
- }
-
- public void normalize() throws IOException {
- if ((range & TOP_MASK) == 0) {
- try {
- // If the input is corrupt, this might throw
- // ArrayIndexOutOfBoundsException.
- code = (code << SHIFT_BITS) | (buf[pos++] & 0xFF);
- range <<= SHIFT_BITS;
- } catch (ArrayIndexOutOfBoundsException e) {
- throw new CorruptedInputException();
- }
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromStream.java b/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromStream.java
deleted file mode 100644
index 142b518..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeDecoderFromStream.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * RangeDecoderFromStream
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.rangecoder;
-
-import java.io.InputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import org.tukaani.xz.CorruptedInputException;
-
-public final class RangeDecoderFromStream extends RangeDecoder {
- private final DataInputStream inData;
-
- public RangeDecoderFromStream(InputStream in) throws IOException {
- inData = new DataInputStream(in);
-
- if (inData.readUnsignedByte() != 0x00)
- throw new CorruptedInputException();
-
- code = inData.readInt();
- range = 0xFFFFFFFF;
- }
-
- public boolean isFinished() {
- return code == 0;
- }
-
- public void normalize() throws IOException {
- if ((range & TOP_MASK) == 0) {
- code = (code << SHIFT_BITS) | inData.readUnsignedByte();
- range <<= SHIFT_BITS;
- }
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeEncoder.java b/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeEncoder.java
deleted file mode 100644
index a06fdcc..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/rangecoder/RangeEncoder.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * RangeEncoder
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.rangecoder;
-
-import java.io.OutputStream;
-import java.io.IOException;
-
-public final class RangeEncoder extends RangeCoder {
- private static final int MOVE_REDUCING_BITS = 4;
- private static final int BIT_PRICE_SHIFT_BITS = 4;
-
- private static final int[] prices
- = new int[BIT_MODEL_TOTAL >>> MOVE_REDUCING_BITS];
-
- private long low;
- private int range;
-
- // NOTE: int is OK for LZMA2 because a compressed chunk
- // is not more than 64 KiB, but with LZMA1 there is no chunking
- // so in theory cacheSize can grow very big. To be very safe,
- // use long instead of int if you adapt this code for LZMA1.
- private int cacheSize;
- private byte cache;
-
- private final byte[] buf;
- private int bufPos;
-
- static {
- for (int i = (1 << MOVE_REDUCING_BITS) / 2; i < BIT_MODEL_TOTAL;
- i += (1 << MOVE_REDUCING_BITS)) {
- int w = i;
- int bitCount = 0;
-
- for (int j = 0; j < BIT_PRICE_SHIFT_BITS; ++j) {
- w *= w;
- bitCount <<= 1;
-
- while ((w & 0xFFFF0000) != 0) {
- w >>>= 1;
- ++bitCount;
- }
- }
-
- prices[i >> MOVE_REDUCING_BITS]
- = (BIT_MODEL_TOTAL_BITS << BIT_PRICE_SHIFT_BITS)
- - 15 - bitCount;
- }
- }
-
- public RangeEncoder(int bufSize) {
- buf = new byte[bufSize];
- reset();
- }
-
- public void reset() {
- low = 0;
- range = 0xFFFFFFFF;
- cache = 0x00;
- cacheSize = 1;
- bufPos = 0;
- }
-
- public int getPendingSize() {
- return bufPos + cacheSize + 5 - 1;
- }
-
- public int finish() {
- for (int i = 0; i < 5; ++i)
- shiftLow();
-
- return bufPos;
- }
-
- public void write(OutputStream out) throws IOException {
- out.write(buf, 0, bufPos);
- }
-
- private void shiftLow() {
- int lowHi = (int)(low >>> 32);
-
- if (lowHi != 0 || low < 0xFF000000L) {
- int temp = cache;
-
- do {
- buf[bufPos++] = (byte)(temp + lowHi);
- temp = 0xFF;
- } while (--cacheSize != 0);
-
- cache = (byte)(low >>> 24);
- }
-
- ++cacheSize;
- low = (low & 0x00FFFFFF) << 8;
- }
-
- public void encodeBit(short[] probs, int index, int bit) {
- int prob = probs[index];
- int bound = (range >>> BIT_MODEL_TOTAL_BITS) * prob;
-
- // NOTE: Any non-zero value for bit is taken as 1.
- if (bit == 0) {
- range = bound;
- probs[index] = (short)(
- prob + ((BIT_MODEL_TOTAL - prob) >>> MOVE_BITS));
- } else {
- low += bound & 0xFFFFFFFFL;
- range -= bound;
- probs[index] = (short)(prob - (prob >>> MOVE_BITS));
- }
-
- if ((range & TOP_MASK) == 0) {
- range <<= SHIFT_BITS;
- shiftLow();
- }
- }
-
- public static int getBitPrice(int prob, int bit) {
- // NOTE: Unlike in encodeBit(), here bit must be 0 or 1.
- assert bit == 0 || bit == 1;
- return prices[(prob ^ ((-bit) & (BIT_MODEL_TOTAL - 1)))
- >>> MOVE_REDUCING_BITS];
- }
-
- public void encodeBitTree(short[] probs, int symbol) {
- int index = 1;
- int mask = probs.length;
-
- do {
- mask >>>= 1;
- int bit = symbol & mask;
- encodeBit(probs, index, bit);
-
- index <<= 1;
- if (bit != 0)
- index |= 1;
-
- } while (mask != 1);
- }
-
- public static int getBitTreePrice(short[] probs, int symbol) {
- int price = 0;
- symbol |= probs.length;
-
- do {
- int bit = symbol & 1;
- symbol >>>= 1;
- price += getBitPrice(probs[symbol], bit);
- } while (symbol != 1);
-
- return price;
- }
-
- public void encodeReverseBitTree(short[] probs, int symbol) {
- int index = 1;
- symbol |= probs.length;
-
- do {
- int bit = symbol & 1;
- symbol >>>= 1;
- encodeBit(probs, index, bit);
- index = (index << 1) | bit;
- } while (symbol != 1);
- }
-
- public static int getReverseBitTreePrice(short[] probs, int symbol) {
- int price = 0;
- int index = 1;
- symbol |= probs.length;
-
- do {
- int bit = symbol & 1;
- symbol >>>= 1;
- price += getBitPrice(probs[index], bit);
- index = (index << 1) | bit;
- } while (symbol != 1);
-
- return price;
- }
-
- public void encodeDirectBits(int value, int count) {
- do {
- range >>>= 1;
- low += range & (0 - ((value >>> --count) & 1));
-
- if ((range & TOP_MASK) == 0) {
- range <<= SHIFT_BITS;
- shiftLow();
- }
- } while (count != 0);
- }
-
- public static int getDirectBitsPrice(int count) {
- return count << BIT_PRICE_SHIFT_BITS;
- }
-}
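
A hedged reading of the price helpers above: with BIT_PRICE_SHIFT_BITS = 4, prices are expressed in sixteenths of a bit, so a bit whose probability is near one half should cost roughly 16 units, and getDirectBitsPrice(n) is exactly n << 4. The demo class below is illustrative only and uses just the two public static methods shown above:

import org.tukaani.xz.rangecoder.RangeEncoder;

public class PriceDemo {
    public static void main(String[] args) {
        // 1024 out of BIT_MODEL_TOTAL == 2048 is a 50/50 probability, so the
        // price should be roughly one bit, i.e. about 16 units.
        System.out.println("50/50 bit:      " + RangeEncoder.getBitPrice(1024, 0));

        // Direct (uncoded) bits are priced at exactly count << 4 units.
        System.out.println("10 direct bits: " + RangeEncoder.getDirectBitsPrice(10));
    }
}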
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/ARM.java b/Java/Tukaani/src/org/tukaani/xz/simple/ARM.java
deleted file mode 100644
index 6febf78..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/ARM.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * BCJ filter for little endian ARM instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class ARM implements SimpleFilter {
- private final boolean isEncoder;
- private int pos;
-
- public ARM(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos + 8;
- }
-
- public int code(byte[] buf, int off, int len) {
- int end = off + len - 4;
- int i;
-
- for (i = off; i <= end; i += 4) {
- if ((buf[i + 3] & 0xFF) == 0xEB) {
- int src = ((buf[i + 2] & 0xFF) << 16)
- | ((buf[i + 1] & 0xFF) << 8)
- | (buf[i] & 0xFF);
- src <<= 2;
-
- int dest;
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- dest >>>= 2;
- buf[i + 2] = (byte)(dest >>> 16);
- buf[i + 1] = (byte)(dest >>> 8);
- buf[i] = (byte)dest;
- }
- }
-
- i -= off;
- pos += i;
- return i;
- }
-}
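
Because the filter only rewrites the 24-bit offsets of BL instructions (top byte 0xEB), a tiny round trip makes the relative-to-absolute conversion above concrete. The buffer below is a single made-up instruction, not real program code:

import org.tukaani.xz.simple.ARM;

public class ArmBcjDemo {
    public static void main(String[] args) {
        // One little endian ARM BL instruction whose 24-bit word offset is 4,
        // i.e. a relative branch 16 bytes forward.
        byte[] buf = { 0x04, 0x00, 0x00, (byte)0xEB };

        // Encoding turns the relative offset into an absolute word address;
        // the +8 ARM pipeline bias is already folded into the filter's pos.
        new ARM(true, 0).code(buf, 0, buf.length);
        System.out.println("encoded word offset: " + (buf[0] & 0xFF)); // 6

        // Decoding from the same start position restores the original value.
        new ARM(false, 0).code(buf, 0, buf.length);
        System.out.println("decoded word offset: " + (buf[0] & 0xFF)); // 4
    }
}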
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/ARMThumb.java b/Java/Tukaani/src/org/tukaani/xz/simple/ARMThumb.java
deleted file mode 100644
index b8e7ca9..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/ARMThumb.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * BCJ filter for little endian ARM-Thumb instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class ARMThumb implements SimpleFilter {
- private final boolean isEncoder;
- private int pos;
-
- public ARMThumb(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos + 4;
- }
-
- public int code(byte[] buf, int off, int len) {
- int end = off + len - 4;
- int i;
-
- for (i = off; i <= end; i += 2) {
- if ((buf[i + 1] & 0xF8) == 0xF0 && (buf[i + 3] & 0xF8) == 0xF8) {
- int src = ((buf[i + 1] & 0x07) << 19)
- | ((buf[i] & 0xFF) << 11)
- | ((buf[i + 3] & 0x07) << 8)
- | (buf[i + 2] & 0xFF);
- src <<= 1;
-
- int dest;
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- dest >>>= 1;
- buf[i + 1] = (byte)(0xF0 | ((dest >>> 19) & 0x07));
- buf[i] = (byte)(dest >>> 11);
- buf[i + 3] = (byte)(0xF8 | ((dest >>> 8) & 0x07));
- buf[i + 2] = (byte)dest;
- i += 2;
- }
- }
-
- i -= off;
- pos += i;
- return i;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/IA64.java b/Java/Tukaani/src/org/tukaani/xz/simple/IA64.java
deleted file mode 100644
index 776a1b7..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/IA64.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * BCJ filter for Itanium (IA-64) instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class IA64 implements SimpleFilter {
- private static final int[] BRANCH_TABLE = {
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 4, 4, 6, 6, 0, 0, 7, 7,
- 4, 4, 0, 0, 4, 4, 0, 0 };
-
- private final boolean isEncoder;
- private int pos;
-
- public IA64(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos;
- }
-
- public int code(byte[] buf, int off, int len) {
- int end = off + len - 16;
- int i;
-
- for (i = off; i <= end; i += 16) {
- int instrTemplate = buf[i] & 0x1F;
- int mask = BRANCH_TABLE[instrTemplate];
-
- for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41) {
- if (((mask >>> slot) & 1) == 0)
- continue;
-
- int bytePos = bitPos >>> 3;
- int bitRes = bitPos & 7;
-
- long instr = 0;
- for (int j = 0; j < 6; ++j)
- instr |= (buf[i + bytePos + j] & 0xFFL) << (8 * j);
-
- long instrNorm = instr >>> bitRes;
-
- if (((instrNorm >>> 37) & 0x0F) != 0x05
- || ((instrNorm >>> 9) & 0x07) != 0x00)
- continue;
-
- int src = (int)((instrNorm >>> 13) & 0x0FFFFF);
- src |= ((int)(instrNorm >>> 36) & 1) << 20;
- src <<= 4;
-
- int dest;
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- dest >>>= 4;
-
- instrNorm &= ~(0x8FFFFFL << 13);
- instrNorm |= (dest & 0x0FFFFFL) << 13;
- instrNorm |= (dest & 0x100000L) << (36 - 20);
-
- instr &= (1 << bitRes) - 1;
- instr |= instrNorm << bitRes;
-
- for (int j = 0; j < 6; ++j)
- buf[i + bytePos + j] = (byte)(instr >>> (8 * j));
- }
- }
-
- i -= off;
- pos += i;
- return i;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/PowerPC.java b/Java/Tukaani/src/org/tukaani/xz/simple/PowerPC.java
deleted file mode 100644
index b7400ab..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/PowerPC.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * BCJ filter for big endian PowerPC instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class PowerPC implements SimpleFilter {
- private final boolean isEncoder;
- private int pos;
-
- public PowerPC(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos;
- }
-
- public int code(byte[] buf, int off, int len) {
- int end = off + len - 4;
- int i;
-
- for (i = off; i <= end; i += 4) {
- if ((buf[i] & 0xFC) == 0x48 && (buf[i + 3] & 0x03) == 0x01) {
- int src = ((buf[i] & 0x03) << 24)
- | ((buf[i + 1] & 0xFF) << 16)
- | ((buf[i + 2] & 0xFF) << 8)
- | (buf[i + 3] & 0xFC);
-
- int dest;
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- buf[i] = (byte)(0x48 | ((dest >>> 24) & 0x03));
- buf[i + 1] = (byte)(dest >>> 16);
- buf[i + 2] = (byte)(dest >>> 8);
- buf[i + 3] = (byte)((buf[i + 3] & 0x03) | dest);
- }
- }
-
- i -= off;
- pos += i;
- return i;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/SPARC.java b/Java/Tukaani/src/org/tukaani/xz/simple/SPARC.java
deleted file mode 100644
index 913c8ac..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/SPARC.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * BCJ filter for SPARC instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class SPARC implements SimpleFilter {
- private final boolean isEncoder;
- private int pos;
-
- public SPARC(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos;
- }
-
- public int code(byte[] buf, int off, int len) {
- int end = off + len - 4;
- int i;
-
- for (i = off; i <= end; i += 4) {
- if ((buf[i] == 0x40 && (buf[i + 1] & 0xC0) == 0x00)
- || (buf[i] == 0x7F && (buf[i + 1] & 0xC0) == 0xC0)) {
- int src = ((buf[i] & 0xFF) << 24)
- | ((buf[i + 1] & 0xFF) << 16)
- | ((buf[i + 2] & 0xFF) << 8)
- | (buf[i + 3] & 0xFF);
- src <<= 2;
-
- int dest;
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- dest >>>= 2;
- dest = (((0 - ((dest >>> 22) & 1)) << 22) & 0x3FFFFFFF)
- | (dest & 0x3FFFFF) | 0x40000000;
-
- buf[i] = (byte)(dest >>> 24);
- buf[i + 1] = (byte)(dest >>> 16);
- buf[i + 2] = (byte)(dest >>> 8);
- buf[i + 3] = (byte)dest;
- }
- }
-
- i -= off;
- pos += i;
- return i;
- }
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/SimpleFilter.java b/Java/Tukaani/src/org/tukaani/xz/simple/SimpleFilter.java
deleted file mode 100644
index 6f72906..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/SimpleFilter.java
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * BCJ filter for little endian ARM instructions
- *
- * Author: Lasse Collin <lasse.collin@tukaani.org>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public interface SimpleFilter {
- int code(byte[] buf, int off, int len);
-}
diff --git a/Java/Tukaani/src/org/tukaani/xz/simple/X86.java b/Java/Tukaani/src/org/tukaani/xz/simple/X86.java
deleted file mode 100644
index a05e08b..0000000
--- a/Java/Tukaani/src/org/tukaani/xz/simple/X86.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * BCJ filter for x86 instructions
- *
- * Authors: Lasse Collin <lasse.collin@tukaani.org>
- * Igor Pavlov <http://7-zip.org/>
- *
- * This file has been put into the public domain.
- * You can do whatever you want with this file.
- */
-
-package org.tukaani.xz.simple;
-
-public final class X86 implements SimpleFilter {
- private static final boolean[] MASK_TO_ALLOWED_STATUS
- = {true, true, true, false, true, false, false, false};
-
- private static final int[] MASK_TO_BIT_NUMBER = {0, 1, 2, 2, 3, 3, 3, 3};
-
- private final boolean isEncoder;
- private int pos;
- private int prevMask = 0;
-
- private static boolean test86MSByte(byte b) {
- int i = b & 0xFF;
- return i == 0x00 || i == 0xFF;
- }
-
- public X86(boolean isEncoder, int startPos) {
- this.isEncoder = isEncoder;
- pos = startPos + 5;
- }
-
- public int code(byte[] buf, int off, int len) {
- int prevPos = off - 1;
- int end = off + len - 5;
- int i;
-
- for (i = off; i <= end; ++i) {
- if ((buf[i] & 0xFE) != 0xE8)
- continue;
-
- prevPos = i - prevPos;
- if ((prevPos & ~3) != 0) { // (unsigned)prevPos > 3
- prevMask = 0;
- } else {
- prevMask = (prevMask << (prevPos - 1)) & 7;
- if (prevMask != 0) {
- if (!MASK_TO_ALLOWED_STATUS[prevMask] || test86MSByte(
- buf[i + 4 - MASK_TO_BIT_NUMBER[prevMask]])) {
- prevPos = i;
- prevMask = (prevMask << 1) | 1;
- continue;
- }
- }
- }
-
- prevPos = i;
-
- if (test86MSByte(buf[i + 4])) {
- int src = (buf[i + 1] & 0xFF)
- | ((buf[i + 2] & 0xFF) << 8)
- | ((buf[i + 3] & 0xFF) << 16)
- | ((buf[i + 4] & 0xFF) << 24);
- int dest;
- while (true) {
- if (isEncoder)
- dest = src + (pos + i - off);
- else
- dest = src - (pos + i - off);
-
- if (prevMask == 0)
- break;
-
- int index = MASK_TO_BIT_NUMBER[prevMask] * 8;
- if (!test86MSByte((byte)(dest >>> (24 - index))))
- break;
-
- src = dest ^ ((1 << (32 - index)) - 1);
- }
-
- buf[i + 1] = (byte)dest;
- buf[i + 2] = (byte)(dest >>> 8);
- buf[i + 3] = (byte)(dest >>> 16);
- buf[i + 4] = (byte)(~(((dest >>> 24) & 1) - 1));
- i += 4;
- } else {
- prevMask = (prevMask << 1) | 1;
- }
- }
-
- prevPos = i - prevPos;
- prevMask = ((prevPos & ~3) != 0) ? 0 : prevMask << (prevPos - 1);
-
- i -= off;
- pos += i;
- return i;
- }
-}