Add raw map dumper, and support for XTEA encryption. Split datafile reading from decryption/decompression, and allow the cache client to simply save the encrypted+compressed data, too.
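For orientation, here is a minimal sketch (not part of the commit) of how the split API introduced below is meant to be used: DataFile.read() now returns the raw, still compressed and possibly XTEA-encrypted bytes, and DataFile.decompress() performs decryption and decompression as a separate step. The variables dataFile, entry, keyManager, indexId, archiveId and regionId are illustrative placeholders.

    // Step 1: read raw sector data for an archive; no decryption or decompression happens here.
    byte[] raw = dataFile.read(indexId, archiveId, entry.getSector(), entry.getLength());

    // Step 2: decrypt (XTEA, when keys are non-null) and decompress separately.
    int[] keys = keyManager.getKeys(regionId); // null for indexes that are not encrypted
    DataFileReadResult res = DataFile.decompress(raw, keys);
    byte[] contents = res.data; // decompressed contents; res also carries crc, revision, whirlpool, compression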
@@ -37,7 +37,7 @@ public enum IndexType
CONFIGS(2),
INTERFACES(3),
SOUNDEFFECTS(4),
LANDSCAPES(5),
MAPS(5),
TRACK1(6),
MODELS(7),
SPRITES(8),
@@ -52,7 +52,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import net.runelite.cache.downloader.requests.FileRequest;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.DataFileReadResult;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Store;
import org.slf4j.Logger;

@@ -132,6 +131,8 @@ public class CacheClient
public void download() throws InterruptedException, ExecutionException, FileNotFoundException
{
FileResult result = requestFile(255, 255).get();
result.decompress(null);

ByteBuf buffer = Unpooled.wrappedBuffer(result.getContents());

int indexCount = result.getContents().length / 8;

@@ -141,9 +142,6 @@ public class CacheClient
int crc = buffer.readInt();
int revision = buffer.readInt();

if (i == 5)
continue; // XXX maps are xtea encrypted

Index index = store.findIndex(i);

if (index == null)

@@ -163,6 +161,7 @@ public class CacheClient
logger.info("Downloading index {}", i);

FileResult indexFileResult = requestFile(255, i).get();
indexFileResult.decompress(null);

logger.info("Downloaded index {}", i);

@@ -196,11 +195,9 @@ public class CacheClient
logger.info("Archive {} in index {} is out of date, downloading", archive.getArchiveId(), index.getId());

FileResult archiveFileResult = requestFile(index.getId(), archive.getArchiveId()).get();
byte[] compressedContents = archiveFileResult.getCompressedData();

byte[] contents = archiveFileResult.getContents();

archive.loadContents(contents);
archive.setCompression(archiveFileResult.getCompression());
archive.setData(compressedContents);
}
else
{

@@ -258,7 +255,7 @@ public class CacheClient
return null;
}

protected synchronized void onFileFinish(int index, int file, DataFileReadResult dresult)
protected synchronized void onFileFinish(int index, int file, byte[] compressedData)
{
PendingFileRequest pr = findRequest(index, file);

@@ -270,9 +267,9 @@ public class CacheClient

requests.remove(pr);

FileResult result = new FileResult(index, file, dresult.data, dresult.revision, dresult.crc, dresult.whirlpool, dresult.compression);
FileResult result = new FileResult(index, file, compressedData);

logger.debug("File download finished for index {} file {}, length {}", index, file, result.getContents().length);
logger.debug("File download finished for index {} file {}, length {}", index, file, compressedData.length);

pr.getFuture().complete(result);
}
@@ -37,8 +37,6 @@ import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.ReferenceCountUtil;
import net.runelite.cache.downloader.requests.ConnectionInfo;
import net.runelite.cache.downloader.requests.HelloHandshake;
import net.runelite.cache.fs.DataFile;
import net.runelite.cache.fs.DataFileReadResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -166,9 +164,7 @@ public class CacheClientHandler extends ChannelInboundHandlerAdapter
buffer.readableBytes());
buffer.clear();

DataFileReadResult result = DataFile.decompress(compressedData);

client.onFileFinish(index, file, result);
client.onFileFinish(index, file, compressedData);
}

buffer.discardReadBytes();
@@ -30,25 +30,26 @@

package net.runelite.cache.downloader;

import net.runelite.cache.fs.DataFile;
import net.runelite.cache.fs.DataFileReadResult;

public class FileResult
{
private final int index;
private final int fileId;
private final byte[] contents;
private final int revision;
private final int crc;
private final byte[] whirlpool;
private final int compression; // compression method used by archive data
private final byte[] compressedData;

public FileResult(int index, int fileId, byte[] contents, int revision, int crc, byte[] whirlpool, int compression)
private byte[] contents;
private int revision;
private int crc;
private byte[] whirlpool;
private int compression; // compression method used by archive data

public FileResult(int index, int fileId, byte[] compressedData)
{
this.index = index;
this.fileId = fileId;
this.contents = contents;
this.revision = revision;
this.crc = crc;
this.whirlpool = whirlpool;
this.compression = compression;
this.compressedData = compressedData;
}

public int getIndex()

@@ -61,6 +62,22 @@ public class FileResult
return fileId;
}

public byte[] getCompressedData()
{
return compressedData;
}

public void decompress(int[] keys)
{
DataFileReadResult res = DataFile.decompress(compressedData, keys);

contents = res.data;
revision = res.revision;
crc = res.crc;
whirlpool = res.whirlpool;
compression = res.compression;
}

public byte[] getContents()
{
return contents;
@@ -31,20 +31,29 @@
package net.runelite.cache.fs;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.io.InputStream;
import net.runelite.cache.io.OutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Archive
{
private static final Logger logger = LoggerFactory.getLogger(Archive.class);

private Index index; // member of this index

private byte[] data; // raw data from the datafile, compressed/encrypted

private int archiveId;
private int nameHash;
private byte[] whirlpool;
private int crc;
private int revision;
private int compression;

private List<File> files = new ArrayList<>();

public Archive(Index index, int id)

@@ -94,6 +103,16 @@ public class Archive
}
return true;
}

public byte[] getData()
{
return data;
}

public void setData(byte[] data)
{
this.data = data;
}

public File addFile(int id)
{

@@ -114,6 +133,40 @@ public class Archive
this.files.add(file);
}
}

public void decompressAndLoad(int[] keys)
{
byte[] encryptedData = this.getData();

DataFileReadResult res = DataFile.decompress(encryptedData, keys);
if (res == null)
{
logger.warn("Unable to decrypt archive {}", this);
return;
}

byte[] decompressedData = res.data;

if (this.crc != res.crc)
{
logger.warn("crc mismatch for archive {}", this);
}

if (this.getWhirlpool() != null && !Arrays.equals(this.getWhirlpool(), res.whirlpool))
{
logger.warn("whirlpool mismatch for archive {}", this);
}

if (this.getRevision() != res.revision)
{
logger.warn("revision mismatch for archive {}", this);
}

setCompression(res.compression);

loadContents(decompressedData);
this.setData(null); // now that we've loaded it, clean it so it doesn't get written back
}

public void loadContents(byte[] data)
{

@@ -181,6 +234,11 @@ public class Archive

public byte[] saveContents()
{
if (data != null)
{
return data;
}

OutputStream stream = new OutputStream();

int filesCount = this.getFiles().size();
@@ -36,12 +36,18 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import net.runelite.cache.util.BZip2;
import net.runelite.cache.io.InputStream;
import net.runelite.cache.io.OutputStream;
import net.runelite.cache.util.CRC32HGenerator;
import net.runelite.cache.util.Crc32;
import net.runelite.cache.util.GZip;
import net.runelite.cache.util.Whirlpool;
import net.runelite.cache.util.Xtea;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -71,14 +77,14 @@ public class DataFile implements Closeable

/**
*
* @param indexId
* @param archiveId
* @param indexId expected index of archive of contents being read
* @param archiveId expected archive of contents being read
* @param sector sector to start reading at
* @param size expected size of file
* @param size size of file
* @return
* @throws IOException
*/
public synchronized DataFileReadResult read(int indexId, int archiveId, int sector, int size) throws IOException
public synchronized byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
{
if (sector <= 0L || dat.length() / SECTOR_SIZE < (long) sector)
{

@@ -94,7 +100,7 @@ public class DataFile implements Closeable
{
if (sector == 0)
{
logger.warn("sector == 0");
logger.warn("Unexpected end of file");
return null;
}

@@ -105,7 +111,7 @@ public class DataFile implements Closeable
int currentIndex;
int currentPart;
int currentArchive;
if (0xFFFF < archiveId)
if (archiveId > 0xFFFF)
{
headerSize = 10;
if (dataBlockSize > SECTOR_SIZE - headerSize)

@@ -116,7 +122,7 @@ public class DataFile implements Closeable
int i = dat.read(this.readCachedBuffer, 0, headerSize + dataBlockSize);
if (i != headerSize + dataBlockSize)
{
logger.warn("short read");
logger.warn("Short read when reading file data for {}/{}", indexId, archiveId);
return null;
}

@@ -168,21 +174,15 @@ public class DataFile implements Closeable
}

buffer.flip();

//XTEA decrypt here?

return decompress(buffer.array());
return buffer.array();
}

public synchronized DataFileWriteResult write(int indexId, int archiveId, ByteBuffer data, int compression, int revision) throws IOException
public synchronized DataFileWriteResult write(int indexId, int archiveId, byte[] compressedData, int revision) throws IOException
{
int sector;
int startSector;

byte[] compressedData = this.compress(data.array(), compression, revision);
data = ByteBuffer.wrap(compressedData);

//XTEA encrypt here?

ByteBuffer data = ByteBuffer.wrap(compressedData);

sector = (int) ((dat.length() + (long) (SECTOR_SIZE - 1)) / (long) SECTOR_SIZE);
if (sector == 0)
@@ -270,13 +270,17 @@ public class DataFile implements Closeable
DataFileWriteResult res = new DataFileWriteResult();
res.sector = startSector;
res.compressedLength = compressedData.length;

int length = revision != -1 ? compressedData.length - 2 : compressedData.length;
res.crc = CRC32HGenerator.getHash(compressedData, length);
Crc32 crc32 = new Crc32();
crc32.update(compressedData, 0, length);
res.crc = crc32.getHash();

res.whirlpool = Whirlpool.getHash(compressedData, length);
return res;
}

public static DataFileReadResult decompress(byte[] b)
public static DataFileReadResult decompress(byte[] b, int[] keys)
{
InputStream stream = new InputStream(b);

@@ -284,30 +288,86 @@ public class DataFile implements Closeable
int compressedLength = stream.readInt();
if (compressedLength < 0 || compressedLength > 1000000)
throw new RuntimeException("Invalid data");

Crc32 crc32 = new Crc32();
crc32.update(b, 0, 5); // compression + length

byte[] data;
int revision;
int revision = -1;
switch (compression)
{
case CompressionType.NONE:
data = new byte[compressedLength];
revision = checkRevision(stream, compressedLength);
stream.readBytes(data, 0, compressedLength);
{
byte[] encryptedData = new byte[compressedLength];
stream.readBytes(encryptedData, 0, compressedLength);

crc32.update(encryptedData, 0, compressedLength);
byte[] decryptedData = decrypt(encryptedData, encryptedData.length, keys);

if (stream.remaining() >= 2)
{
revision = stream.readUnsignedShort();
assert revision != -1;
}

data = decryptedData;

break;
}
case CompressionType.BZ2:
{
int length = stream.readInt();
revision = checkRevision(stream, compressedLength);
byte[] encryptedData = new byte[compressedLength + 4];
stream.readBytes(encryptedData);

crc32.update(encryptedData, 0, encryptedData.length);
byte[] decryptedData = decrypt(encryptedData, encryptedData.length, keys);

if (stream.remaining() >= 2)
{
revision = stream.readUnsignedShort();
assert revision != -1;
}

stream = new InputStream(decryptedData);

int decompressedLength = stream.readInt();
data = BZip2.decompress(stream.getRemaining(), compressedLength);
assert data.length == length;

if (data == null)
{
return null;
}

assert data.length == decompressedLength;

break;
}
case CompressionType.GZ:
{
int length = stream.readInt();
revision = checkRevision(stream, compressedLength);
byte[] encryptedData = new byte[compressedLength + 4];
stream.readBytes(encryptedData);

crc32.update(encryptedData, 0, encryptedData.length);
byte[] decryptedData = decrypt(encryptedData, encryptedData.length, keys);

if (stream.remaining() >= 2)
{
revision = stream.readUnsignedShort();
assert revision != -1;
}

stream = new InputStream(decryptedData);

int decompressedLength = stream.readInt();
data = GZip.decompress(stream.getRemaining(), compressedLength);
assert data.length == length;

if (data == null)
{
return null;
}

assert data.length == decompressedLength;

break;
}
default:
@@ -317,14 +377,14 @@ public class DataFile implements Closeable
DataFileReadResult res = new DataFileReadResult();
res.data = data;
res.revision = revision;
int length = revision != -1 ? b.length - 2 : b.length;
res.crc = CRC32HGenerator.getHash(b, length);
int length = revision != -1 ? b.length - 2 : b.length;
res.crc = crc32.getHash();
res.whirlpool = Whirlpool.getHash(b, length);
res.compression = compression;
return res;
}

private byte[] compress(byte[] data, int compression, int revision) throws IOException
public static byte[] compress(byte[] data, int compression, int revision, int[] keys) throws IOException
{
OutputStream stream = new OutputStream();
stream.writeByte(compression);

@@ -333,16 +393,19 @@ public class DataFile implements Closeable
{
case CompressionType.NONE:
compressedData = data;
compressedData = encrypt(compressedData, compressedData.length, keys);
stream.writeInt(data.length);
break;
case CompressionType.BZ2:
compressedData = BZip2.compress(data);
compressedData = encrypt(compressedData, compressedData.length, keys);

stream.writeInt(compressedData.length);
stream.writeInt(data.length);
break;
case CompressionType.GZ:
compressedData = GZip.compress(data);
compressedData = encrypt(compressedData, compressedData.length, keys);

stream.writeInt(compressedData.length);
stream.writeInt(data.length);

@@ -375,4 +438,38 @@ public class DataFile implements Closeable
}
return revision;
}

private static byte[] decrypt(byte[] data, int length, int[] keys)
{
if (keys == null)
return data;

try
{
Xtea xtea = new Xtea(keys);
return xtea.decrypt(data, length);
}
catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException ex)
{
logger.warn("unable to xtea decrypt", ex);
return null;
}
}

private static byte[] encrypt(byte[] data, int length, int[] keys)
{
if (keys == null)
return data;

try
{
Xtea xtea = new Xtea(keys);
return xtea.encrypt(data, length);
}
catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException ex)
{
logger.warn("unable to xtea encrypt", ex);
return null;
}
}
}
@@ -32,7 +32,6 @@ package net.runelite.cache.fs;

import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

@@ -40,6 +39,7 @@ import java.util.Objects;
import net.runelite.cache.util.Djb2;
import net.runelite.cache.io.InputStream;
import net.runelite.cache.io.OutputStream;
import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -50,12 +50,16 @@ public class Index implements Closeable
private final Store store;
private final IndexFile index;
private final int id;

private XteaKeyManager xteaManager;

private int protocol = 7;
private boolean named = true, usesWhirpool;
private int revision;
private int crc;
private byte[] whirlpool;
private int compression; // compression method of this index's data in 255

private final List<Archive> archives = new ArrayList<>();

public Index(Store store, IndexFile index, int id)

@@ -108,6 +112,16 @@ public class Index implements Closeable
return true;
}

public XteaKeyManager getXteaManager()
{
return xteaManager;
}

public void setXteaManager(XteaKeyManager xteaManager)
{
this.xteaManager = xteaManager;
}

public int getId()
{
return id;

@@ -178,7 +192,8 @@ public class Index implements Closeable
IndexFile index255 = store.getIndex255();

IndexEntry entry = index255.read(id);
DataFileReadResult res = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
byte[] indexData = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
DataFileReadResult res = DataFile.decompress(indexData, null);
byte[] data = res.data;

archives.clear();

@@ -189,8 +204,8 @@ public class Index implements Closeable
this.whirlpool = res.whirlpool;
this.compression = res.compression;
assert res.revision == -1;

this.loadFiles();

this.loadArchives();
}

public void save() throws IOException

@@ -201,8 +216,10 @@ public class Index implements Closeable

DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();

DataFileWriteResult res = dataFile.write(index255.getIndexFileId(), this.id, ByteBuffer.wrap(data), this.compression, -1); // index data revision is always -1

byte[] compressedData = DataFile.compress(data, this.compression, -1, null); // index data revision is always -1
DataFileWriteResult res = dataFile.write(index255.getIndexFileId(), this.id, compressedData, revision);

index255.write(new IndexEntry(index255, id, res.sector, res.compressedLength));

this.crc = res.crc;
@@ -300,41 +317,31 @@ public class Index implements Closeable
}
}

private void loadFiles() throws IOException
private void loadArchives() throws IOException
{
// get data from index file
for (Archive a : archives)
for (Archive a : new ArrayList<>(archives))
{
IndexEntry entry = this.index.read(a.getArchiveId());
if (entry == null)
{
logger.debug("can't read archive " + a.getArchiveId() + " from index " + this.id);
archives.remove(a); // is this the correct behavior?
continue;
}

assert this.index.getIndexFileId() == this.id;
assert entry.getId() == a.getArchiveId();
DataFileReadResult res = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength());
byte[] data = res.data;

if (a.getCrc() != res.crc)

byte[] archiveData = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength());
a.setData(archiveData);

if (this.xteaManager != null)
{
logger.warn("crc mismatch for archive {}", a);
}

if (a.getWhirlpool() != null && !Arrays.equals(a.getWhirlpool(), res.whirlpool))
{
logger.warn("whirlpool mismatch for archive {}", a);
continue; // can't decrypt this yet
}

if (a.getRevision() != res.revision)
{
logger.warn("revision mismatch for archive {}", a);
}

a.setCompression(res.compression);

a.loadContents(data);
a.decompressAndLoad(null);
}
}

@@ -346,8 +353,10 @@ public class Index implements Closeable

assert this.index.getIndexFileId() == this.id;
DataFile data = store.getData();

DataFileWriteResult res = data.write(this.id, a.getArchiveId(), ByteBuffer.wrap(fileData), a.getCompression(), a.getRevision());

byte[] compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);

DataFileWriteResult res = data.write(this.id, a.getArchiveId(), compressedData, a.getRevision());
this.index.write(new IndexEntry(this.index, a.getArchiveId(), res.sector, res.compressedLength));

a.setCrc(res.crc);
@@ -122,7 +122,7 @@ public class IndexFile implements Closeable
int i = idx.read(buffer);
if (i != INDEX_ENTRY_LEN)
{
logger.warn("short read for id {} on index {}: {}", id, indexFileId, i);
logger.debug("short read for id {} on index {}: {}", id, indexFileId, i);
return null;
}
@@ -38,6 +38,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.IndexType;
import net.runelite.cache.util.XteaKeyManager;

public class Store implements Closeable
{

@@ -60,6 +61,14 @@ public class Store implements Closeable
{
this.addIndex(i);
}

Index maps = this.findIndex(IndexType.MAPS.getNumber());
if (maps != null)
{
XteaKeyManager mapKeys = new XteaKeyManager();
mapKeys.loadKeys();
maps.setXteaManager(mapKeys);
}
}

@Override

@@ -98,7 +107,7 @@ public class Store implements Closeable
return true;
}

public Index addIndex(int id) throws FileNotFoundException
public final Index addIndex(int id) throws FileNotFoundException
{
for (Index i : indexes)
if (i.getIndex().getIndexFileId() == id)

@@ -121,14 +130,7 @@ public class Store implements Closeable
public void load() throws IOException
{
for (Index i : indexes)
{
int id = i.getIndex().getIndexFileId();
if (id == 5) // XXX maps, XTEA encrypted, can't decompress
continue;
if (id == 6 || id == 14)
continue; // XXX I get more Indexes than there is length of the index file for these
i.load();
}
}

public void save() throws IOException

@@ -157,7 +159,7 @@ public class Store implements Closeable
return indexes.get(type.getNumber());
}

public Index findIndex(int id)
public final Index findIndex(int id)
{
for (Index i : indexes)
if (i.getId() == id)
@@ -51,6 +51,12 @@ public class InputStream extends java.io.InputStream
this.buffer = ByteBuffer.wrap(buffer);
}

@Override
public String toString()
{
return "InputStream{" + "buffer=" + buffer + '}';
}

public int read24BitInt()
{
return (this.readUnsignedByte() << 16) + (this.readUnsignedByte() << 8) + this.readUnsignedByte();

@@ -78,6 +84,11 @@ public class InputStream extends java.io.InputStream
return buffer.limit();
}

public int remaining()
{
return buffer.remaining();
}

public byte readByte()
{
return buffer.get();
@@ -32,20 +32,17 @@ package net.runelite.cache.util;

import java.util.zip.CRC32;

public final class CRC32HGenerator
public class Crc32
{
public static final CRC32 CRC32Instance = new CRC32();
private final CRC32 crc32 = new CRC32();

public static synchronized int getHash(byte[] data, int len)
public void update(byte[] data, int offset, int length)
{
CRC32Instance.update(data, 0, len);
try
{
return (int) CRC32Instance.getValue();
}
finally
{
CRC32Instance.reset();
}
crc32.update(data, offset, length);
}

public int getHash()
{
return (int) crc32.getValue();
}
}
cache/src/test/java/net/runelite/cache/MapDumperTest.java (new file, 79 lines)
@@ -0,0 +1,79 @@
package net.runelite.cache;

import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Store;
import net.runelite.cache.util.XteaKeyManager;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MapDumperTest
{
private static final Logger logger = LoggerFactory.getLogger(MapDumperTest.class);

private static final int MAX_REGIONS = 32768;

@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();

@Test
public void dump() throws IOException
{
File base = StoreLocation.LOCATION,
outDir = new java.io.File("d:/rs/07/cache/maps");//folder.newFolder();

try (Store store = new Store(base))
{
store.load();

Index index = store.getIndex(IndexType.MAPS);
XteaKeyManager keyManager = index.getXteaManager();

for (int i = 0; i < MAX_REGIONS; i++)
{
int[] keys = keyManager.getKeys(i);

int x = i >> 8;
int y = i & 0xFF;

Archive map = index.findArchiveByName("m" + x + "_" + y);
Archive land = index.findArchiveByName("l" + x + "_" + y);

assert (map == null) == (land == null);

if (map == null || land == null)
continue;

assert map.getFiles().size() == 1;
assert land.getFiles().size() == 1;

// maps aren't encrypted, but we don't load archive data of any archive in
// the maps index, so load it
map.decompressAndLoad(null);

byte[] data = map.getFiles().get(0).getContents();

Files.write(data, new File(outDir, "m" + x + "_" + y + ".dat"));

if (keys != null)
{
land.decompressAndLoad(keys);

data = land.getFiles().get(0).getContents();

if (data == null)
continue; // key is probably wrong

Files.write(data, new File(outDir, "l" + x + "_" + y + ".dat"));
}
}
}
}

}
@@ -57,7 +57,7 @@ public class ModelDumperTest
@Test
public void test() throws IOException
{
java.io.File modelDir = folder.newFolder("models");
java.io.File modelDir = new java.io.File("d:/rs/07/cache/models");//folder.newFolder("models");
int count = 0;

try (Store store = new Store(StoreLocation.LOCATION))
@@ -42,7 +42,7 @@ public class CacheClientTest
@Before
public void before()
{
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "TRACE");
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "TRACE");
}

@Test
@@ -27,7 +27,6 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package net.runelite.cache.fs;

import java.io.File;

@@ -43,60 +42,86 @@ public class DataFileTest
{
@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();

@Test
public void test1() throws IOException
{
File file = folder.newFile();
Store store = new Store(folder.getRoot());
DataFile df = new DataFile(store, file);
DataFileWriteResult res = df.write(42, 3, ByteBuffer.wrap("test".getBytes()), CompressionType.NONE, 0);
DataFileReadResult res2 = df.read(42, 3, res.sector, res.compressedLength);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
file.delete();

try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);

byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 0, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);

compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);

byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
}

@Test
public void test2() throws IOException
{
byte[] b = new byte[1024];
for (int i = 0; i < 1024; ++i)
{
b[i] = (byte) i;
}

File file = folder.newFile();
Store store = new Store(folder.getRoot());
DataFile df = new DataFile(store, file);
DataFileWriteResult res = df.write(42, 0x1FFFF, ByteBuffer.wrap(b), CompressionType.BZ2, 42);
DataFileReadResult res2 = df.read(42, 0x1FFFF, res.sector, res.compressedLength);
byte[] buf = res2.data;
Assert.assertArrayEquals(b, buf);
file.delete();

try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);

byte[] compressedData = DataFile.compress(b, CompressionType.BZ2, 42, null);
DataFileWriteResult res = df.write(42, 0x1FFFF, compressedData, 42);

compressedData = df.read(42, 0x1FFFF, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);

byte[] buf = res2.data;
Assert.assertArrayEquals(b, buf);
}
}

@Test
public void testGZipCompression() throws IOException
{
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, folder.newFile());
DataFileWriteResult res = df.write(41, 4, ByteBuffer.wrap("test".getBytes()), CompressionType.GZ, 0);
DataFileReadResult res2 = df.read(41, 4, res.sector, res.compressedLength);

byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.GZ, 0, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);

compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);

byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
}

@Test
public void testBZip2Compression() throws IOException
{
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, folder.newFile());
DataFileWriteResult res = df.write(41, 4, ByteBuffer.wrap("test".getBytes()), CompressionType.BZ2, 5);
DataFileReadResult res2 = df.read(41, 4, res.sector, res.compressedLength);

byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.BZ2, 5, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);

compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);

byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
@@ -107,15 +132,22 @@ public class DataFileTest
public void testCrc() throws IOException
{
File file = folder.newFile();
Store store = new Store(folder.getRoot());
DataFile df = new DataFile(store, file);
DataFileWriteResult res = df.write(42, 3, ByteBuffer.wrap("test".getBytes()), CompressionType.NONE, 42);
DataFileReadResult res2 = df.read(42, 3, res.sector, res.compressedLength);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
file.delete();

try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);

byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 42, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);

compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);

byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}
}