Use Apache Commons Compress for gzip compression

This commit is contained in:
Adam
2015-10-17 16:07:41 -04:00
parent 9125e65e60
commit 5e28fe9768
6 changed files with 80 additions and 35 deletions

18
pom.xml
View File

@@ -19,6 +19,12 @@
<artifactId>slf4j-api</artifactId> <artifactId>slf4j-api</artifactId>
<version>1.7.12</version> <version>1.7.12</version>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.10</version>
</dependency>
<dependency> <dependency>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId> <artifactId>slf4j-simple</artifactId>
@@ -32,6 +38,18 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
</dependencies> </dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.16</version>
<configuration>
<enableAssertions>true</enableAssertions>
</configuration>
</plugin>
</plugins>
</build>
<properties> <properties>
<maven.compiler.source>1.7</maven.compiler.source> <maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target> <maven.compiler.target>1.7</maven.compiler.target>

View File

@@ -1,16 +1,19 @@
package net.runelite.cache.fs; package net.runelite.cache.fs;
import java.io.ByteArrayOutputStream;
import java.io.Closeable; import java.io.Closeable;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.RandomAccessFile; import java.io.RandomAccessFile;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Objects; import java.util.Objects;
import net.runelite.cache.fs.io.InputStream; import net.runelite.cache.fs.io.InputStream;
import net.runelite.cache.fs.io.OutputStream; import net.runelite.cache.fs.io.OutputStream;
import net.runelite.cache.fs.util.BZip2Decompressor; import net.runelite.cache.fs.util.BZipDecompressor;
import net.runelite.cache.fs.util.GZip; import net.runelite.cache.fs.util.GZip;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -78,7 +81,7 @@ public class DataFile implements Closeable
{ {
if (sector <= 0L || dat.length() / 520L < (long) sector) if (sector <= 0L || dat.length() / 520L < (long) sector)
{ {
logger.warn("bad read, dat length {}", dat.length()); logger.warn("bad read, dat length {}, requested sector {}", dat.length(), sector);
return null; return null;
} }
@@ -90,6 +93,7 @@ public class DataFile implements Closeable
{ {
if (sector == 0) if (sector == 0)
{ {
logger.warn("sector == 0");
return null; return null;
} }
@@ -143,11 +147,16 @@ public class DataFile implements Closeable
if (archiveId != currentArchive || currentPart != part || indexId != currentIndex) if (archiveId != currentArchive || currentPart != part || indexId != currentIndex)
{ {
logger.warn("data mismatch {} != {}, {} != {}, {} != {}",
archiveId, currentArchive,
part, currentPart,
indexId, currentIndex);
return null; return null;
} }
if (nextSector < 0 || dat.length() / SECTOR_SIZE < (long) nextSector) if (nextSector < 0 || dat.length() / SECTOR_SIZE < (long) nextSector)
{ {
logger.warn("Invalid next sector");
return null; return null;
} }
@@ -164,14 +173,6 @@ public class DataFile implements Closeable
return this.decompress(buffer.array()); return this.decompress(buffer.array());
} }
/**
*
* @param indexId
* @param archiveId archive to write to
* @param data data to write
* @return the sector the data starts at
* @throws IOException
*/
public synchronized DataFileWriteResult write(int indexId, int archiveId, ByteBuffer data, int compression, int revision) throws IOException public synchronized DataFileWriteResult write(int indexId, int archiveId, ByteBuffer data, int compression, int revision) throws IOException
{ {
int sector; int sector;
@@ -312,7 +313,7 @@ public class DataFile implements Closeable
return res; return res;
} }
private byte[] compress(byte[] data, int compression, int revision) private byte[] compress(byte[] data, int compression, int revision) throws IOException
{ {
OutputStream stream = new OutputStream(); OutputStream stream = new OutputStream();
stream.writeByte(compression); stream.writeByte(compression);
@@ -324,10 +325,21 @@ public class DataFile implements Closeable
stream.writeInt(data.length); stream.writeInt(data.length);
break; break;
case 1: case 1:
compressedData = (byte[]) null; // bzip1?
break; throw new UnsupportedOperationException();
default: default:
compressedData = GZip.compress(data); ByteArrayOutputStream bout = new ByteArrayOutputStream();
try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout))
{
out.write(data);
}
compressedData = bout.toByteArray();
// check it with the old compressor
byte[] data2 = new byte[data.length];
GZip.decompress(new InputStream(compressedData), data2);
assert Arrays.equals(data, data2);
stream.writeInt(compressedData.length); stream.writeInt(compressedData.length);
stream.writeInt(data.length); stream.writeInt(data.length);
} }

View File

@@ -101,8 +101,9 @@ public class Store implements Closeable
for (Index i : indexes) for (Index i : indexes)
{ {
int id = i.getIndex().getIndexFileId(); int id = i.getIndex().getIndexFileId();
//if (id == 3 || id == 7) // XXXXXXXXXXXXX if (id == 5)
i.load(); break;
i.load();
} }
} }

View File

@@ -9,24 +9,24 @@ import net.runelite.cache.fs.io.Stream;
public class GZip { public class GZip {
private static final Inflater inflaterInstance = new Inflater(true); private static final Inflater inflaterInstance = new Inflater(true);
public static final byte[] compress(byte[] data) // public static final byte[] compress(byte[] data)
{ // {
ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream(); // ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream();
//
try // try
{ // {
GZIPOutputStream e = new GZIPOutputStream(compressedBytes); // GZIPOutputStream e = new GZIPOutputStream(compressedBytes);
e.write(data); // e.write(data);
e.finish(); // e.finish();
e.close(); // e.close();
return compressedBytes.toByteArray(); // return compressedBytes.toByteArray();
} // }
catch (IOException var3) // catch (IOException var3)
{ // {
var3.printStackTrace(); // var3.printStackTrace();
return null; // return null;
} // }
} // }
public static final void decompress(Stream stream, byte[] data) public static final void decompress(Stream stream, byte[] data)
{ {

View File

@@ -64,4 +64,18 @@ public class DataFileTest
Assert.assertEquals("test", str); Assert.assertEquals("test", str);
} }
} }
// @Test
// public void testBZip2Compression() throws IOException
// {
// try (Store store = new Store(folder.getRoot()))
// {
// DataFile df = new DataFile(store, folder.newFile());
// DataFileWriteResult res = df.write(41, 4, ByteBuffer.wrap("test".getBytes()), 1, 0);
// DataFileReadResult res2 = df.read(41, 4, res.sector, res.compressedLength);
// byte[] buf = res2.data;
// String str = new String(buf);
// Assert.assertEquals("test", str);
// }
// }
} }

View File

@@ -5,7 +5,7 @@ import org.junit.Test;
public class StoreLoadTest public class StoreLoadTest
{ {
//@Test @Test
public void test() throws IOException public void test() throws IOException
{ {
Store store = new Store(new java.io.File("d:/rs/07/cache"));//c:/rs/cache")); Store store = new Store(new java.io.File("d:/rs/07/cache"));//c:/rs/cache"));