Use Apache Commons Compress for gzip compression
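DataFile.compress() now builds the gzip payload with commons-compress's GzipCompressorOutputStream instead of GZip.compress(), and cross-checks the result against the existing GZip decompressor. A minimal sketch of the new compression step, assuming only that commons-compress is on the classpath (the class and method names here are illustrative, not from the repo):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

class GzipSketch
{
    // Gzip a byte[] in memory; closing the compressor stream finishes the gzip trailer.
    static byte[] gzip(byte[] data) throws IOException
    {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout))
        {
            out.write(data);
        }
        return bout.toByteArray();
    }
}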
@@ -1,16 +1,19 @@
 package net.runelite.cache.fs;
 
+import java.io.ByteArrayOutputStream;
 import java.io.Closeable;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.Objects;
 import net.runelite.cache.fs.io.InputStream;
 import net.runelite.cache.fs.io.OutputStream;
-import net.runelite.cache.fs.util.BZip2Decompressor;
+import net.runelite.cache.fs.util.BZipDecompressor;
 import net.runelite.cache.fs.util.GZip;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -78,7 +81,7 @@ public class DataFile implements Closeable
     {
         if (sector <= 0L || dat.length() / 520L < (long) sector)
         {
-            logger.warn("bad read, dat length {}", dat.length());
+            logger.warn("bad read, dat length {}, requested sector {}", dat.length(), sector);
             return null;
         }
 
@@ -90,6 +93,7 @@ public class DataFile implements Closeable
     {
         if (sector == 0)
         {
+            logger.warn("sector == 0");
             return null;
         }
 
@@ -143,11 +147,16 @@ public class DataFile implements Closeable
 
         if (archiveId != currentArchive || currentPart != part || indexId != currentIndex)
         {
+            logger.warn("data mismatch {} != {}, {} != {}, {} != {}",
+                archiveId, currentArchive,
+                part, currentPart,
+                indexId, currentIndex);
             return null;
         }
 
         if (nextSector < 0 || dat.length() / SECTOR_SIZE < (long) nextSector)
         {
+            logger.warn("Invalid next sector");
             return null;
         }
 
@@ -164,14 +173,6 @@ public class DataFile implements Closeable
         return this.decompress(buffer.array());
     }
 
-    /**
-     *
-     * @param indexId
-     * @param archiveId archive to write to
-     * @param data data to write
-     * @return the sector the data starts at
-     * @throws IOException
-     */
     public synchronized DataFileWriteResult write(int indexId, int archiveId, ByteBuffer data, int compression, int revision) throws IOException
     {
         int sector;
@@ -312,7 +313,7 @@ public class DataFile implements Closeable
         return res;
     }
 
-    private byte[] compress(byte[] data, int compression, int revision)
+    private byte[] compress(byte[] data, int compression, int revision) throws IOException
     {
         OutputStream stream = new OutputStream();
         stream.writeByte(compression);
@@ -324,10 +325,21 @@ public class DataFile implements Closeable
                 stream.writeInt(data.length);
                 break;
             case 1:
-                compressedData = (byte[]) null;
-                break;
+                // bzip1?
+                throw new UnsupportedOperationException();
             default:
-                compressedData = GZip.compress(data);
+                ByteArrayOutputStream bout = new ByteArrayOutputStream();
+                try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout))
+                {
+                    out.write(data);
+                }
+                compressedData = bout.toByteArray();
+
+                // check it with the old compressor
+                byte[] data2 = new byte[data.length];
+                GZip.decompress(new InputStream(compressedData), data2);
+                assert Arrays.equals(data, data2);
+
                 stream.writeInt(compressedData.length);
                 stream.writeInt(data.length);
         }
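The assert in the default case expects the commons-compress output to be readable by the old GZip helper. The same round trip can be sanity-checked standalone with the JDK's GZIPInputStream in place of GZip.decompress — a sketch under that substitution, not code from this commit (run with -ea so the assert fires):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.zip.GZIPInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

class GzipRoundTripCheck
{
    public static void main(String[] args) throws IOException
    {
        byte[] data = "sector payload".getBytes(StandardCharsets.UTF_8);

        // compress with commons-compress, as the new default case does
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout))
        {
            out.write(data);
        }
        byte[] compressed = bout.toByteArray();

        // inflate with the JDK gzip reader and compare against the original bytes
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(compressed)))
        {
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf)) != -1)
            {
                restored.write(buf, 0, n);
            }
        }

        assert Arrays.equals(data, restored.toByteArray());
    }
}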
@@ -101,8 +101,9 @@ public class Store implements Closeable
         for (Index i : indexes)
         {
             int id = i.getIndex().getIndexFileId();
-            //if (id == 3 || id == 7) // XXXXXXXXXXXXX
-                i.load();
+            if (id == 5)
+                break;
+            i.load();
         }
     }
 
@@ -9,24 +9,24 @@ import net.runelite.cache.fs.io.Stream;
 public class GZip {
     private static final Inflater inflaterInstance = new Inflater(true);
 
-    public static final byte[] compress(byte[] data)
-    {
-        ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream();
-
-        try
-        {
-            GZIPOutputStream e = new GZIPOutputStream(compressedBytes);
-            e.write(data);
-            e.finish();
-            e.close();
-            return compressedBytes.toByteArray();
-        }
-        catch (IOException var3)
-        {
-            var3.printStackTrace();
-            return null;
-        }
-    }
+//    public static final byte[] compress(byte[] data)
+//    {
+//        ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream();
+//
+//        try
+//        {
+//            GZIPOutputStream e = new GZIPOutputStream(compressedBytes);
+//            e.write(data);
+//            e.finish();
+//            e.close();
+//            return compressedBytes.toByteArray();
+//        }
+//        catch (IOException var3)
+//        {
+//            var3.printStackTrace();
+//            return null;
+//        }
+//    }
 
     public static final void decompress(Stream stream, byte[] data)
     {
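GZip.decompress(), backed by a raw Inflater, remains the read path; commons-compress also ships the matching reader, GzipCompressorInputStream, if the read side were ever switched over as well. A sketch of that counterpart, not part of this change and with illustrative names:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;

class GunzipSketch
{
    // Inflate a gzip-compressed byte[] back to its original bytes.
    static byte[] gunzip(byte[] compressed) throws IOException
    {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (GzipCompressorInputStream in = new GzipCompressorInputStream(new ByteArrayInputStream(compressed)))
        {
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf)) != -1)
            {
                out.write(buf, 0, n);
            }
        }
        return out.toByteArray();
    }
}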