datafile can handle all of the compression

This commit is contained in:
Adam
2015-10-17 11:47:26 -04:00
parent 62f3fb1671
commit a16bf14e08
5 changed files with 246 additions and 124 deletions

View File

@@ -7,6 +7,10 @@ import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.Objects;
import net.runelite.cache.fs.io.InputStream;
import net.runelite.cache.fs.io.OutputStream;
import net.runelite.cache.fs.util.bzip2.BZip2Decompressor;
import net.runelite.cache.fs.util.gzip.GZipDecompressor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -70,7 +74,7 @@ public class DataFile implements Closeable
* @return
* @throws IOException
*/
public synchronized byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
public synchronized DataFileReadResult read(int indexId, int archiveId, int sector, int size) throws IOException
{
if (sector <= 0L || dat.length() / 520L < (long) sector)
{
@@ -154,7 +158,10 @@ public class DataFile implements Closeable
}
buffer.flip();
return buffer.array();
//XTEA decrypt here?
return this.decompress(buffer.array());
}
/**
@@ -165,11 +172,14 @@ public class DataFile implements Closeable
* @return the sector the data starts at
* @throws IOException
*/
public synchronized int write(int indexId, int archiveId, ByteBuffer data) throws IOException
public synchronized DataFileWriteResult write(int indexId, int archiveId, ByteBuffer data, int compression, int revision) throws IOException
{
int sector;
int startSector;
data = ByteBuffer.wrap(this.compress(data.array(), compression, revision));
int dataLen = data.remaining();
sector = (int) ((dat.length() + (long) (SECTOR_SIZE - 1)) / (long) SECTOR_SIZE);
if (sector == 0)
{
@@ -253,6 +263,98 @@ public class DataFile implements Closeable
sector = nextSector;
}
return startSector;
DataFileWriteResult res = new DataFileWriteResult();
res.sector = startSector;
res.compressedLength = dataLen;
return res;
}
/**
 * Parses a raw container read from the data file: a 1 byte compression
 * type, a 4 byte compressed length, the payload, and (optionally) a
 * trailing 2 byte revision.
 *
 * @param b raw container bytes as stored on disk
 * @return the decompressed payload and its revision (-1 when no trailer)
 */
private DataFileReadResult decompress(byte[] b)
{
	InputStream in = new InputStream(b);

	int compressionType = in.readUnsignedByte();
	int compressedSize = in.readInt();
	if (compressedSize < 0 || compressedSize > 1000000)
	{
		throw new RuntimeException("Invalid data");
	}

	byte[] payload;
	int rev;

	if (compressionType == 0)
	{
		// Type 0: stored uncompressed, payload is exactly compressedSize bytes.
		payload = new byte[compressedSize];
		rev = this.checkRevision(in, compressedSize);
		in.readBytes(payload, 0, compressedSize);
	}
	else if (compressionType == 1)
	{
		// Type 1: bzip2; a 4 byte uncompressed length precedes the payload.
		int uncompressedSize = in.readInt();
		payload = new byte[uncompressedSize];
		rev = this.checkRevision(in, compressedSize);
		BZip2Decompressor.decompress(payload, b, compressedSize, 9);
	}
	else
	{
		// Any other type: gzip; a 4 byte uncompressed length precedes the payload.
		int uncompressedSize = in.readInt();
		payload = new byte[uncompressedSize];
		rev = this.checkRevision(in, compressedSize);
		GZipDecompressor.decompress(in, payload);
	}

	DataFileReadResult result = new DataFileReadResult();
	result.data = payload;
	result.revision = rev;
	return result;
}
/**
 * Serializes data into the on-disk container format: a 1 byte
 * compression type, a 4 byte length, the payload, and a trailing
 * 2 byte revision.
 *
 * @param data        uncompressed payload to store
 * @param compression compression type; only 0 (store) is implemented
 * @param revision    revision written as the 2 byte trailer
 * @return the complete container bytes ready to be written to disk
 * @throws RuntimeException if the compression type is not supported
 */
private byte[] compress(byte[] data, int compression, int revision)
{
	OutputStream stream = new OutputStream();
	stream.writeByte(compression);

	byte[] compressedData;
	switch (compression)
	{
		case 0:
			// Type 0 stores the payload uncompressed; the length field
			// therefore equals the payload length.
			compressedData = data;
			stream.writeInt(data.length);
			break;
		default:
			// TODO: bzip2 (1) and gzip (2) compression are not implemented yet.
			throw new RuntimeException("Unsupported compression type " + compression);
	}

	stream.writeBytes(compressedData);
	stream.writeShort(revision);

	// Snapshot the stream's contents into a right-sized array.
	byte[] compressed = new byte[stream.getOffset()];
	stream.setOffset(0);
	stream.getBytes(compressed, 0, compressed.length);
	return compressed;
}
/**
 * Peeks at the trailing 2 byte revision of a container without
 * disturbing the caller's read position.
 *
 * @param stream           stream positioned just before the payload
 * @param compressedLength length of the compressed payload
 * @return the revision, or -1 when the stream has no revision trailer
 */
private int checkRevision(InputStream stream, int compressedLength)
{
	int savedOffset = stream.getOffset();
	int rev = -1;

	// The trailer exists only when at least 2 bytes remain past the
	// compressed payload.
	boolean hasTrailer = stream.getLength() - (compressedLength + savedOffset) >= 2;
	if (hasTrailer)
	{
		stream.setOffset(stream.getLength() - 2);
		rev = stream.readUnsignedShort();
		stream.setOffset(savedOffset);
	}
	return rev;
}
}

View File

@@ -0,0 +1,7 @@
package net.runelite.cache.fs;

/**
 * Result of reading a container from a {@code DataFile}: the
 * decompressed payload and the revision parsed from the container's
 * trailing 2 bytes (-1 when no trailer was present).
 */
public class DataFileReadResult
{
	// Decompressed payload bytes.
	public byte[] data;
	// Revision from the container trailer, or -1 if absent.
	public int revision;
}

View File

@@ -0,0 +1,6 @@
package net.runelite.cache.fs;

/**
 * Result of writing a container to a {@code DataFile}: where it landed
 * and how large it was after compression, as needed for the index entry.
 */
public class DataFileWriteResult
{
	// sector: first sector the data was written to;
	// compressedLength: on-disk length of the compressed container.
	public int sector, compressedLength;
}

View File

@@ -10,7 +10,6 @@ import java.util.Objects;
import net.runelite.cache.fs.io.InputStream;
import net.runelite.cache.fs.io.OutputStream;
import net.runelite.cache.fs.util.bzip2.BZip2Decompressor;
import net.runelite.cache.fs.util.gzip.GZipCompressor;
import net.runelite.cache.fs.util.gzip.GZipDecompressor;
public class Index implements Closeable
@@ -113,41 +112,43 @@ public class Index implements Closeable
IndexFile index255 = store.getIndex255();
IndexEntry entry = index255.read(id);
byte[] b = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
InputStream stream = new InputStream(b);
//XTEA decrypt here
this.compression = stream.readUnsignedByte();
int compressedLength = stream.readInt();
if (compressedLength < 0 || compressedLength > 1000000)
throw new RuntimeException("Invalid archive header");
byte[] data;
switch (compression)
{
case 0:
data = new byte[compressedLength];
this.checkRevision(stream, compressedLength);
stream.readBytes(data, 0, compressedLength);
break;
case 1:
{
int length = stream.readInt();
data = new byte[length];
this.checkRevision(stream, compressedLength);
BZip2Decompressor.decompress(data, b, compressedLength, 9);
break;
}
default:
{
int length = stream.readInt();
data = new byte[length];
this.checkRevision(stream, compressedLength);
GZipDecompressor.decompress(stream, data);
}
}
DataFileReadResult res = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
byte[] data = res.data;
// byte[] b = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
//
// InputStream stream = new InputStream(b);
//
// //XTEA decrypt here
//
// this.compression = stream.readUnsignedByte();
// int compressedLength = stream.readInt();
// if (compressedLength < 0 || compressedLength > 1000000)
// throw new RuntimeException("Invalid archive header");
//
// byte[] data;
// switch (compression)
// {
// case 0:
// data = new byte[compressedLength];
// this.checkRevision(stream, compressedLength);
// stream.readBytes(data, 0, compressedLength);
// break;
// case 1:
// {
// int length = stream.readInt();
// data = new byte[length];
// this.checkRevision(stream, compressedLength);
// BZip2Decompressor.decompress(data, b, compressedLength, 9);
// break;
// }
// default:
// {
// int length = stream.readInt();
// data = new byte[length];
// this.checkRevision(stream, compressedLength);
// GZipDecompressor.decompress(stream, data);
// }
// }
readIndexData(data);
@@ -160,51 +161,53 @@ public class Index implements Closeable
byte[] data = this.writeIndexData();
OutputStream stream = new OutputStream();
stream.writeByte(this.compression);
byte[] compressedData;
switch (this.compression)
{
case 0:
compressedData = data;
stream.writeInt(data.length);
break;
default:
throw new RuntimeException();
// case 1:
// compressedData = (byte[]) null;
// OutputStream stream = new OutputStream();
// stream.writeByte(this.compression);
// byte[] compressedData;
// switch (this.compression)
// {
// case 0:
// compressedData = data;
// stream.writeInt(data.length);
// break;
// default:
// compressedData = GZipCompressor.compress(data);
// stream.writeInt(compressedData.length);
// stream.writeInt(data.length);
}
stream.writeBytes(compressedData);
stream.writeShort(this.revision);
byte[] compressed = new byte[stream.getOffset()];
stream.setOffset(0);
stream.getBytes(compressed, 0, compressed.length);
//XTEA encrypt here
// throw new RuntimeException();
//// case 1:
//// compressedData = (byte[]) null;
//// break;
//// default:
//// compressedData = GZipCompressor.compress(data);
//// stream.writeInt(compressedData.length);
//// stream.writeInt(data.length);
// }
//
// stream.writeBytes(compressedData);
// stream.writeShort(this.revision);
//
// byte[] compressed = new byte[stream.getOffset()];
// stream.setOffset(0);
// stream.getBytes(compressed, 0, compressed.length);
//
// //XTEA encrypt here
DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();
int sector = dataFile.write(index255.getIndexFileId(), this.id, ByteBuffer.wrap(compressed));
index255.write(new IndexEntry(index255, id, sector, compressed.length));
DataFileWriteResult res = dataFile.write(index255.getIndexFileId(), this.id, ByteBuffer.wrap(data), 0, this.revision);
index255.write(new IndexEntry(index255, id, res.sector, res.compressedLength));
}
private void checkRevision(InputStream stream, int compressedLength)
{
int offset = stream.getOffset();
if (stream.getLength() - (compressedLength + stream.getOffset()) >= 2) {
if (stream.getLength() - (compressedLength + stream.getOffset()) >= 2)
{
stream.setOffset(stream.getLength() - 2);
this.revision = stream.readUnsignedShort();
stream.setOffset(offset);
}
else {
else
{
this.revision = -1;
}
@@ -308,41 +311,42 @@ public class Index implements Closeable
IndexEntry entry = this.index.read(a.getArchiveId());
assert this.index.getIndexFileId() == this.id;
assert entry.getId() == a.getArchiveId();
byte[] b = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength()); // needs decompress etc...
InputStream stream = new InputStream(b);
this.compression = stream.readUnsignedByte();
int compressedLength = stream.readInt();
if (compressedLength < 0 || compressedLength > 1000000)
{
throw new RuntimeException("Invalid archive header");
}
byte[] data;
switch (compression)
{
case 0:
data = new byte[compressedLength];
this.checkRevision(stream, compressedLength);
stream.readBytes(data, 0, compressedLength);
break;
case 1:
{
int length = stream.readInt();
data = new byte[length];
this.checkRevision(stream, compressedLength);
BZip2Decompressor.decompress(data, b, compressedLength, 9);
break;
}
default:
{
int length = stream.readInt();
data = new byte[length];
this.checkRevision(stream, compressedLength);
GZipDecompressor.decompress(stream, data);
}
}
DataFileReadResult res = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength()); // needs decompress etc...
byte[] data = res.data;
//
// InputStream stream = new InputStream(b);
//
// this.compression = stream.readUnsignedByte();
// int compressedLength = stream.readInt();
// if (compressedLength < 0 || compressedLength > 1000000)
// {
// throw new RuntimeException("Invalid archive header");
// }
//
// byte[] data;
// switch (compression)
// {
// case 0:
// data = new byte[compressedLength];
// this.checkRevision(stream, compressedLength);
// stream.readBytes(data, 0, compressedLength);
// break;
// case 1:
// {
// int length = stream.readInt();
// data = new byte[length];
// this.checkRevision(stream, compressedLength);
// BZip2Decompressor.decompress(data, b, compressedLength, 9);
// break;
// }
// default:
// {
// int length = stream.readInt();
// data = new byte[length];
// this.checkRevision(stream, compressedLength);
// GZipDecompressor.decompress(stream, data);
// }
// }
if (a.getFiles().size() == 1)
{
@@ -356,7 +360,7 @@ public class Index implements Closeable
--readPosition;
int amtOfLoops = data[readPosition] & 255;
readPosition -= amtOfLoops * filesCount * 4;
stream = new InputStream(data);
InputStream stream = new InputStream(data);
stream.setOffset(readPosition);
int[] filesSize = new int[filesCount];
@@ -444,24 +448,24 @@ public class Index implements Closeable
stream.setOffset(0);
stream.getBytes(fileData, 0, fileData.length);
stream = new OutputStream();
stream.writeByte(0); // compression
stream.writeInt(fileData.length);
stream.writeBytes(fileData);
stream.writeShort(this.revision);
byte[] finalFileData = new byte[stream.getOffset()];
stream.setOffset(0);
stream.getBytes(finalFileData, 0, finalFileData.length);
// stream = new OutputStream();
//
// stream.writeByte(0); // compression
// stream.writeInt(fileData.length);
//
// stream.writeBytes(fileData);
// stream.writeShort(this.revision);
//
// byte[] finalFileData = new byte[stream.getOffset()];
// stream.setOffset(0);
// stream.getBytes(finalFileData, 0, finalFileData.length);
assert this.index.getIndexFileId() == this.id;
DataFile data = store.getData();
// XXX old data is just left there in the file?
int sector = data.write(this.id, a.getArchiveId(), ByteBuffer.wrap(finalFileData));
this.index.write(new IndexEntry(this.index, a.getArchiveId(), sector, finalFileData.length));
DataFileWriteResult res = data.write(this.id, a.getArchiveId(), ByteBuffer.wrap(fileData), 0, this.revision);
this.index.write(new IndexEntry(this.index, a.getArchiveId(), res.sector, res.compressedLength));
}
}