cache: split storage apart from Store

Adam
2017-09-05 20:05:41 -04:00
parent 1b5cd87351
commit 78f5ddcd3a
23 changed files with 1134 additions and 882 deletions
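
The Storage-based API introduced here, as used in the Cache.java changes below, can be exercised roughly as follows. This is a minimal sketch only: the class name, package and paths are placeholders, while the Store, DiskStorage and TreeStorage calls are taken from this diff.

package net.runelite.cache.example; // hypothetical package for this sketch

import java.io.File;
import java.io.IOException;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.jagex.DiskStorage;
import net.runelite.cache.fs.tree.TreeStorage;

public class PackExample
{
	public static void main(String[] args) throws IOException
	{
		// Load a store from the directory-tree backend...
		Storage from = new TreeStorage(new File("tree"));        // placeholder path
		// ...and write it back out through the JS5 disk backend.
		try (Storage to = new DiskStorage(new File("cache"));    // placeholder path
			Store store = new Store(from))
		{
			store.load();   // Store.load() delegates to from.load(store)
			to.save(store); // DiskStorage.save(store) rebuilds the .dat2/.idx files
		}
	}
}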

View File

@@ -26,7 +26,10 @@ package net.runelite.cache;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.jagex.DiskStorage;
import net.runelite.cache.fs.tree.TreeStorage;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
@@ -76,9 +79,15 @@ public class Cache
System.out.print("Packing tree from " + tree + " to " + cache + "...");
Store treeBase = new Store(new File(cache));
treeBase.loadTree(new File(tree));
treeBase.save();
File cacheDir = new File(cache),
treeDir = new File(tree);
Storage from = new TreeStorage(treeDir);
Storage to = new DiskStorage(cacheDir);
Store store = new Store(from);
store.load();
to.save(store);
System.out.println(" done!");
return;
@@ -96,7 +105,8 @@ public class Cache
Store treeBase = new Store(new File(cache));
treeBase.load();
treeBase.saveTree(new File(tree));
TreeStorage storage = new TreeStorage(new File(tree));
storage.save(treeBase);
System.out.println(" done!");
return;
@@ -173,8 +183,9 @@ public class Cache
{
if (cache == null)
{
Store store = new Store(new File(tree));
store.loadTree(new File(tree));
Storage storage = new TreeStorage(new File(tree));
Store store = new Store(storage);
store.load();
return store;
}
else

View File

@@ -25,8 +25,8 @@
package net.runelite.cache.client;
import java.io.IOException;
import net.runelite.cache.fs.DataFile;
import net.runelite.cache.fs.DataFileReadResult;
import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.jagex.DataFileReadResult;
public class FileResult
{

View File

@@ -24,13 +24,11 @@
*/
package net.runelite.cache.fs;
import com.google.common.io.Files;
import java.io.File;
import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.jagex.DataFileReadResult;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.io.InputStream;
@@ -42,18 +40,18 @@ public class Archive
{
private static final Logger logger = LoggerFactory.getLogger(Archive.class);
private Index index; // member of this index
private final Index index; // member of this index
private byte[] data; // raw data from the datafile, compressed/encrypted
private int archiveId;
private final int archiveId;
private int nameHash;
private byte[] whirlpool;
private int crc;
private int revision;
private int compression;
private List<FSFile> files = new ArrayList<>();
private final List<FSFile> files = new ArrayList<>();
public Archive(Index index, int id)
{
@@ -103,6 +101,11 @@ public class Archive
return true;
}
public Index getIndex()
{
return index;
}
public byte[] getData()
{
return data;
@@ -168,6 +171,8 @@ public class Archive
{
logger.trace("Loading contents of archive {} ({} files)", archiveId, files.size());
assert !this.getFiles().isEmpty();
if (this.getFiles().size() == 1)
{
this.getFiles().get(0).setContents(data);
@@ -270,151 +275,6 @@ public class Archive
return fileData;
}
public void saveTree(File to) throws IOException
{
if (data != null)
{
assert files.size() == 1; // this is the maps
FSFile file = files.get(0);
File archiveFile = new File(to, this.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".datc");
Files.write(data, archiveFile);
archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
return;
}
if (files.size() == 1)
{
FSFile file = this.getFiles().get(0);
File archiveFile = new File(to, this.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
return;
}
File archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
File archiveFolder = new File(to, "" + this.getArchiveId());
archiveFolder.mkdirs();
for (FSFile file : files)
{
archiveFile = new File(archiveFolder, file.getFileId() + "-"
+ Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
}
}
public void loadTreeData(File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == this.getArchiveId();
data = Files.toByteArray(from);
FSFile file = new FSFile(this, fileId);
file.setNameHash(nameHash);
files.add(file);
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
}
public void loadTreeSingleFile(File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == this.getArchiveId();
FSFile file = new FSFile(this, fileId);
file.setNameHash(nameHash);
byte[] contents = Files.toByteArray(from);
file.setContents(contents);
files.add(file);
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
}
public void loadTree(File parent, File from) throws IOException
{
for (File file : from.listFiles())
{
//fileId-fileName.dat
String[] split = Files.getNameWithoutExtension(file.getName()).split("-");
assert split.length == 2;
int fileId = Integer.parseInt(split[0]);
int fileName = (int) Long.parseLong(split[1], 16);
FSFile f = new FSFile(this, fileId);
f.setNameHash(fileName);
byte[] contents = Files.toByteArray(file);
f.setContents(contents);
files.add(f);
}
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
// the filesystem may order these differently (eg, 1, 10, 2)
Collections.sort(files, (f1, f2) -> Integer.compare(f1.getFileId(), f2.getFileId()));
}
public int getArchiveId()
{
return archiveId;

View File

@@ -24,34 +24,29 @@
*/
package net.runelite.cache.fs;
import com.google.common.io.Files;
import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.jagex.CompressionType;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.index.ArchiveData;
import net.runelite.cache.index.FileData;
import net.runelite.cache.index.IndexData;
import net.runelite.cache.util.Djb2;
import net.runelite.cache.io.OutputStream;
import net.runelite.cache.util.Crc32;
import net.runelite.cache.util.Djb2;
import net.runelite.cache.util.Whirlpool;
import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Index implements Closeable
public class Index
{
private static final Logger logger = LoggerFactory.getLogger(Index.class);
private final Store store;
private final IndexFile index;
private final int id;
private XteaKeyManager xteaManager;
@@ -65,24 +60,12 @@ public class Index implements Closeable
private final List<Archive> archives = new ArrayList<>();
public Index(Store store, IndexFile index, int id)
public Index(Store store, int id)
{
this.store = store;
this.index = index;
this.id = id;
}
@Override
public void close() throws IOException
{
index.close();
}
public void clear() throws IOException
{
index.clear();
}
@Override
public int hashCode()
{
@@ -135,11 +118,46 @@ public class Index implements Closeable
return id;
}
public int getProtocol()
{
return protocol;
}
public void setProtocol(int protocol)
{
this.protocol = protocol;
}
public boolean isNamed()
{
return named;
}
public void setNamed(boolean named)
{
this.named = named;
}
public boolean isUsesWhirpool()
{
return usesWhirpool;
}
public void setUsesWhirpool(boolean usesWhirpool)
{
this.usesWhirpool = usesWhirpool;
}
public int getRevision()
{
return revision;
}
public void setRevision(int revision)
{
this.revision = revision;
}
public int getCrc()
{
return crc;
@@ -160,9 +178,14 @@ public class Index implements Closeable
this.whirlpool = whirlpool;
}
public IndexFile getIndex()
public int getCompression()
{
return index;
return compression;
}
public void setCompression(int compression)
{
this.compression = compression;
}
public List<Archive> getArchives()
@@ -206,8 +229,6 @@ public class Index implements Closeable
{
for (Archive a : archives)
{
assert this.index.getIndexFileId() == this.id;
int rev; // used for determining what part of compressedData to crc
byte[] compressedData;
@@ -235,7 +256,7 @@ public class Index implements Closeable
}
Crc32 crc = new Crc32();
byte[] indexData = this.writeIndexData();
byte[] indexData = toIndexData().writeIndexData();
ByteBuf b = Unpooled.buffer(5, 5);
b.writeByte((byte) CompressionType.NONE);
@@ -248,323 +269,38 @@ public class Index implements Closeable
this.setCrc(hash);
}
public void load() throws IOException
public IndexData toIndexData()
{
logger.trace("Loading index {}", id);
IndexData data = new IndexData();
data.setProtocol(protocol);
data.setRevision(revision);
data.setNamed(named);
data.setUsesWhirpool(usesWhirpool);
DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();
ArchiveData[] archiveDatas = new ArchiveData[archives.size()];
data.setArchives(archiveDatas);
IndexEntry entry = index255.read(id);
byte[] indexData = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
DataFileReadResult res = DataFile.decompress(indexData, null);
byte[] data = res.data;
archives.clear();
readIndexData(data);
this.crc = res.crc;
this.whirlpool = res.whirlpool;
this.compression = res.compression;
assert res.revision == -1;
this.loadArchives();
}
public void save() throws IOException
{
// This updates archive CRCs for writeIndexData
saveArchives();
byte[] data = this.writeIndexData();
DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();
byte[] compressedData = DataFile.compress(data, this.compression, -1, null); // index data revision is always -1
DataFileWriteResult res = dataFile.write(index255.getIndexFileId(), this.id, compressedData, revision);
index255.write(new IndexEntry(index255, id, res.sector, res.compressedLength));
this.crc = res.crc;
this.whirlpool = res.whirlpool;
}
public void saveTree(File to) throws IOException
{
File idx = new File(to, "" + this.getId());
idx.mkdirs();
for (Archive a : archives)
int idx = 0;
for (Archive archive : archives)
{
a.saveTree(idx);
}
ArchiveData ad = archiveDatas[idx++] = new ArchiveData();
ad.setId(archive.getArchiveId());
ad.setNameHash(archive.getNameHash());
ad.setCrc(archive.getCrc());
ad.setWhirlpool(archive.getWhirlpool());
ad.setRevision(archive.getRevision());
File rev = new File(to, this.getId() + ".rev");
Files.write("" + this.getRevision(), rev, Charset.defaultCharset());
}
FileData[] files = new FileData[archive.getFiles().size()];
ad.setFiles(files);
public void loadTree(File parent, File to) throws IOException
{
for (File f : to.listFiles())
{
if (f.isDirectory())
int idx2 = 0;
for (FSFile file : archive.getFiles())
{
int id = Integer.parseInt(f.getName());
Archive archive = new Archive(this, id);
archive.loadTree(to, f);
archives.add(archive);
}
else if (f.getName().endsWith(".dat"))
{
// one file. archiveId-fileId-name
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = new Archive(this, id);
archive.loadTreeSingleFile(to, f);
archives.add(archive);
}
else if (f.getName().endsWith(".datc"))
{
// packed data
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = new Archive(this, id);
archive.loadTreeData(to, f);
archives.add(archive);
FileData fd = files[idx2++] = new FileData();
fd.setId(file.getFileId());
fd.setNameHash(file.getNameHash());
}
}
String str = Files.readFirstLine(new File(parent, this.getId() + ".rev"), Charset.defaultCharset());
revision = Integer.parseInt(str);
Collections.sort(archives, (ar1, ar2) -> Integer.compare(ar1.getArchiveId(), ar2.getArchiveId()));
}
public void readIndexData(byte[] data)
{
IndexData indexData = new IndexData();
indexData.load(data);
protocol = indexData.getProtocol();
revision = indexData.getRevision();
named = indexData.isNamed();
usesWhirpool = indexData.isUsesWhirpool();
for (ArchiveData ad : indexData.getArchives())
{
Archive archive = new Archive(this, ad.getId());
archive.setNameHash(ad.getNameHash());
archive.setWhirlpool(ad.getWhirlpool());
archive.setCrc(ad.getCrc());
archive.setRevision(ad.getRevision());
for (FileData fd : ad.getFiles())
{
FSFile file = archive.addFile(fd.getId());
file.setNameHash(fd.getNameHash());
}
archives.add(archive);
}
}
private void loadArchives() throws IOException
{
// get data from index file
for (Archive a : new ArrayList<>(archives))
{
IndexEntry entry = this.index.read(a.getArchiveId());
if (entry == null)
{
logger.debug("can't read archive " + a.getArchiveId() + " from index " + this.id);
archives.remove(a); // is this the correct behavior?
continue;
}
assert this.index.getIndexFileId() == this.id;
assert entry.getId() == a.getArchiveId();
logger.trace("Loading archive {} for index {} from sector {} length {}", a.getArchiveId(), id, entry.getSector(), entry.getLength());
byte[] archiveData = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength());
a.setData(archiveData);
if (this.xteaManager != null)
{
continue; // can't decrypt this yet
}
a.decompressAndLoad(null);
}
}
public void saveArchives() throws IOException
{
for (Archive a : archives)
{
assert this.index.getIndexFileId() == this.id;
DataFile data = store.getData();
int rev; // used for determining what part of compressedData to crc
byte[] compressedData;
if (a.getData() != null)
{
compressedData = a.getData(); // data was never decompressed or loaded
rev = -1; // assume that this data has no revision?
}
else
{
byte[] fileData = a.saveContents();
rev = a.getRevision();
compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
}
DataFileWriteResult res = data.write(this.id, a.getArchiveId(), compressedData, rev);
this.index.write(new IndexEntry(this.index, a.getArchiveId(), res.sector, res.compressedLength));
logger.trace("Saved archive {}/{} at sector {}, compressed length {}", this.getId(), a.getArchiveId(), res.sector, res.compressedLength);
a.setCrc(res.crc);
a.setWhirlpool(res.whirlpool);
}
}
public byte[] writeIndexData()
{
OutputStream stream = new OutputStream();
stream.writeByte(protocol);
if (protocol >= 6)
{
stream.writeInt(this.revision);
}
stream.writeByte((named ? 1 : 0) | (usesWhirpool ? 2 : 0));
if (protocol >= 7)
{
stream.writeBigSmart(this.archives.size());
}
else
{
stream.writeShort(this.archives.size());
}
int data;
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
int archive = a.getArchiveId();
if (data != 0)
{
Archive prev = this.archives.get(data - 1);
archive -= prev.getArchiveId();
}
if (protocol >= 7)
{
stream.writeBigSmart(archive);
}
else
{
stream.writeShort(archive);
}
}
if (named)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getNameHash());
}
}
if (usesWhirpool)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeBytes(a.getWhirlpool());
}
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getCrc());
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getRevision());
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
int len = a.getFiles().size();
if (protocol >= 7)
{
stream.writeBigSmart(len);
}
else
{
stream.writeShort(len);
}
}
int index2;
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
for (index2 = 0; index2 < a.getFiles().size(); ++index2)
{
FSFile file = a.getFiles().get(index2);
int offset = file.getFileId();
if (index2 != 0)
{
FSFile prev = a.getFiles().get(index2 - 1);
offset -= prev.getFileId();
}
if (protocol >= 7)
{
stream.writeBigSmart(offset);
}
else
{
stream.writeShort(offset);
}
}
}
if (named)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
for (index2 = 0; index2 < a.getFiles().size(); ++index2)
{
FSFile file = a.getFiles().get(index2);
stream.writeInt(file.getNameHash());
}
}
}
return stream.flip();
return data;
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
import java.io.IOException;
public interface Storage extends AutoCloseable
{
void init(Store store) throws IOException;
@Override
void close() throws IOException;
void load(Store store) throws IOException;
void save(Store store) throws IOException;
}
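
A hedged sketch of a custom backend against this interface. The class and package names are hypothetical; the call points in the comments are taken from Store in this commit: init() runs in the Store(Storage) constructor, load()/save() back Store.load()/Store.save(), and close() is chained from Store.close().

package net.runelite.cache.fs.example; // hypothetical package for this sketch

import java.io.IOException;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;

public class NoopStorage implements Storage
{
	@Override
	public void init(Store store) throws IOException
	{
		// called from the Store(Storage) constructor;
		// e.g. create indexes up front via store.addIndex(i), as DiskStorage does
	}

	@Override
	public void close() throws IOException
	{
		// chained from Store.close(); release any handles held by the backend
	}

	@Override
	public void load(Store store) throws IOException
	{
		// backs Store.load(); populate store.getIndexes() with archives and files
	}

	@Override
	public void save(Store store) throws IOException
	{
		// backs Store.save(); persist the indexes, archives and files of the store
	}
}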

View File

@@ -29,37 +29,25 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.IndexType;
import net.runelite.cache.fs.jagex.DiskStorage;
import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Store implements Closeable
public final class Store implements Closeable
{
private static final Logger logger = LoggerFactory.getLogger(Store.class);
private static final String MAIN_FILE_CACHE_DAT = "main_file_cache.dat2";
private static final String MAIN_FILE_CACHE_IDX = "main_file_cache.idx";
private final File folder;
private final DataFile data;
private final IndexFile index255;
private final Storage storage;
private final List<Index> indexes = new ArrayList<>();
public Store(File folder) throws IOException
{
this.folder = folder;
data = new DataFile(this, new File(folder, MAIN_FILE_CACHE_DAT));
index255 = new IndexFile(this, 255, new File(folder, MAIN_FILE_CACHE_IDX + "255"));
for (int i = 0; i < index255.getIndexCount(); ++i)
{
this.addIndex(i);
}
storage = new DiskStorage(folder);
storage.init(this);
Index maps = this.findIndex(IndexType.MAPS.getNumber());
if (maps != null)
@@ -70,15 +58,17 @@ public class Store implements Closeable
}
}
public Store(Storage storage) throws IOException
{
this.storage = storage;
storage.init(this);
}
@Override
public void close() throws IOException
{
data.close();
index255.close();
for (Index i : indexes)
{
i.close();
}
storage.close();
}
@Override
@@ -112,15 +102,13 @@ public class Store implements Closeable
{
for (Index i : indexes)
{
if (i.getIndex().getIndexFileId() == id)
if (i.getId() == id)
{
throw new IllegalArgumentException("index " + id + " already exists");
}
}
IndexFile indexFile = new IndexFile(this, id, new File(folder, MAIN_FILE_CACHE_IDX + id));
Index index = new Index(this, indexFile, id);
Index index = new Index(this, id);
this.indexes.add(index);
return index;
@@ -139,69 +127,19 @@ public class Store implements Closeable
public void rebuildCrc() throws IOException
{
for (Index i : indexes)
{
i.rebuildCrc();
}
}
public void load() throws IOException
{
for (Index i : indexes)
{
i.load();
}
storage.load(this);
}
public void save() throws IOException
{
logger.debug("Clearing data and indexes in preparation for store save");
data.clear();
for (Index i : indexes)
{
i.clear();
}
for (Index i : indexes)
{
i.save();
}
}
public void saveTree(File to) throws IOException
{
for (Index i : indexes)
{
i.saveTree(to);
}
}
public void loadTree(File from) throws IOException
{
for (File idx : from.listFiles())
{
if (!idx.isDirectory())
{
continue;
}
int id = Integer.parseInt(idx.getName());
IndexFile indexFile = new IndexFile(this, id, new File(folder, MAIN_FILE_CACHE_IDX + id));
Index index = new Index(this, indexFile, id);
index.loadTree(from, idx);
indexes.add(index);
}
Collections.sort(indexes, (idx1, idx2) -> Integer.compare(idx1.getId(), idx2.getId()));
}
public DataFile getData()
{
return data;
}
public IndexFile getIndex255()
{
return index255;
storage.save(this);
}
public List<Index> getIndexes()
@@ -214,7 +152,7 @@ public class Store implements Closeable
return indexes.get(type.getNumber());
}
public final Index findIndex(int id)
public Index findIndex(int id)
{
for (Index i : indexes)
{

View File

@@ -22,7 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
public class CompressionType
{

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
import java.io.Closeable;
import java.io.File;
@@ -49,51 +48,47 @@ import org.slf4j.LoggerFactory;
public class DataFile implements Closeable
{
private static final Logger logger = LoggerFactory.getLogger(DataFile.class);
private static final int SECTOR_SIZE = 520;
private final Store store;
private final File file;
private final RandomAccessFile dat;
private final byte[] readCachedBuffer = new byte[SECTOR_SIZE];
public DataFile(Store store, File file) throws FileNotFoundException
public DataFile(File file) throws FileNotFoundException
{
this.file = file;
this.store = store;
dat = new RandomAccessFile(file, "rw");
this.dat = new RandomAccessFile(file, "rw");
}
@Override
public void close() throws IOException
{
dat.close();
}
public void clear() throws IOException
{
dat.setLength(0L);
}
/**
*
*
* @param indexId expected index of archive of contents being read
* @param archiveId expected archive of contents being read
* @param sector sector to start reading at
* @param size size of file
* @return
* @throws IOException
* @throws IOException
*/
public synchronized byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
public byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
{
if (sector <= 0L || dat.length() / SECTOR_SIZE < (long) sector)
{
logger.warn("bad read, dat length {}, requested sector {}", dat.length(), sector);
return null;
}
byte[] readBuffer = new byte[SECTOR_SIZE];
ByteBuffer buffer = ByteBuffer.allocate(size);
for (int part = 0, readBytesCount = 0, nextSector;
size > readBytesCount;
sector = nextSector)
@@ -105,7 +100,7 @@ public class DataFile implements Closeable
}
dat.seek(SECTOR_SIZE * sector);
int dataBlockSize = size - readBytesCount;
byte headerSize;
int currentIndex;
@@ -119,17 +114,22 @@ public class DataFile implements Closeable
dataBlockSize = SECTOR_SIZE - headerSize;
}
int i = dat.read(this.readCachedBuffer, 0, headerSize + dataBlockSize);
int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
if (i != headerSize + dataBlockSize)
{
logger.warn("Short read when reading file data for {}/{}", indexId, archiveId);
return null;
}
currentArchive = ((this.readCachedBuffer[1] & 255) << 16) + ((this.readCachedBuffer[0] & 255) << 24) + (('\uff00' & this.readCachedBuffer[2] << 8) - -(this.readCachedBuffer[3] & 255));
currentPart = ((this.readCachedBuffer[4] & 255) << 8) + (255 & this.readCachedBuffer[5]);
nextSector = (this.readCachedBuffer[8] & 255) + ('\uff00' & this.readCachedBuffer[7] << 8) + ((255 & this.readCachedBuffer[6]) << 16);
currentIndex = this.readCachedBuffer[9] & 255;
currentArchive = ((readBuffer[0] & 0xFF) << 24)
| ((readBuffer[1] & 0xFF) << 16)
| ((readBuffer[2] & 0xFF) << 8)
| (readBuffer[3] & 0xFF);
currentPart = ((readBuffer[4] & 0xFF) << 8) + (readBuffer[5] & 0xFF);
nextSector = ((readBuffer[6] & 0xFF) << 16)
| ((readBuffer[7] & 0xFF) << 8)
| (readBuffer[8] & 0xFF);
currentIndex = readBuffer[9] & 0xFF;
}
else
{
@@ -139,17 +139,21 @@ public class DataFile implements Closeable
dataBlockSize = SECTOR_SIZE - headerSize;
}
int i = dat.read(this.readCachedBuffer, 0, headerSize + dataBlockSize);
int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
if (i != headerSize + dataBlockSize)
{
logger.warn("short read");
return null;
}
currentArchive = (255 & this.readCachedBuffer[1]) + ('\uff00' & this.readCachedBuffer[0] << 8);
currentPart = ((this.readCachedBuffer[2] & 255) << 8) + (255 & this.readCachedBuffer[3]);
nextSector = (this.readCachedBuffer[6] & 255) + ('\uff00' & this.readCachedBuffer[5] << 8) + ((255 & this.readCachedBuffer[4]) << 16);
currentIndex = this.readCachedBuffer[7] & 255;
currentArchive = ((readBuffer[0] & 0xFF) << 8)
| (readBuffer[1] & 0xFF);
currentPart = ((readBuffer[2] & 0xFF) << 8)
| (readBuffer[3] & 0xFF);
nextSector = ((readBuffer[4] & 0xFF) << 16)
| ((readBuffer[5] & 0xFF) << 8)
| (readBuffer[6] & 0xFF);
currentIndex = readBuffer[7] & 0xFF;
}
if (archiveId != currentArchive || currentPart != part || indexId != currentIndex)
@@ -167,7 +171,7 @@ public class DataFile implements Closeable
return null;
}
buffer.put(readCachedBuffer, headerSize, dataBlockSize);
buffer.put(readBuffer, headerSize, dataBlockSize);
readBytesCount += dataBlockSize;
++part;
@@ -176,14 +180,15 @@ public class DataFile implements Closeable
buffer.flip();
return buffer.array();
}
public synchronized DataFileWriteResult write(int indexId, int archiveId, byte[] compressedData, int revision) throws IOException
public DataFileWriteResult write(int indexId, int archiveId, byte[] compressedData, int revision) throws IOException
{
int sector;
int startSector;
byte[] writeBuffer = new byte[SECTOR_SIZE];
ByteBuffer data = ByteBuffer.wrap(compressedData);
sector = (int) ((dat.length() + (long) (SECTOR_SIZE - 1)) / (long) SECTOR_SIZE);
if (sector == 0)
{
@@ -210,27 +215,26 @@ public class DataFile implements Closeable
}
}
if (0xFFFF < archiveId)
{
if (data.remaining() <= 510)
{
nextSector = 0;
}
this.readCachedBuffer[0] = (byte) (archiveId >> 24);
this.readCachedBuffer[1] = (byte) (archiveId >> 16);
this.readCachedBuffer[2] = (byte) (archiveId >> 8);
this.readCachedBuffer[3] = (byte) archiveId;
this.readCachedBuffer[4] = (byte) (part >> 8);
this.readCachedBuffer[5] = (byte) part;
this.readCachedBuffer[6] = (byte) (nextSector >> 16);
this.readCachedBuffer[7] = (byte) (nextSector >> 8);
this.readCachedBuffer[8] = (byte) nextSector;
this.readCachedBuffer[9] = (byte) indexId;
writeBuffer[0] = (byte) (archiveId >> 24);
writeBuffer[1] = (byte) (archiveId >> 16);
writeBuffer[2] = (byte) (archiveId >> 8);
writeBuffer[3] = (byte) archiveId;
writeBuffer[4] = (byte) (part >> 8);
writeBuffer[5] = (byte) part;
writeBuffer[6] = (byte) (nextSector >> 16);
writeBuffer[7] = (byte) (nextSector >> 8);
writeBuffer[8] = (byte) nextSector;
writeBuffer[9] = (byte) indexId;
dat.seek(SECTOR_SIZE * sector);
dat.write(this.readCachedBuffer, 0, 10);
dat.write(writeBuffer, 0, 10);
dataToWrite = data.remaining();
if (dataToWrite > 510)
{
@@ -243,18 +247,18 @@ public class DataFile implements Closeable
{
nextSector = 0;
}
this.readCachedBuffer[0] = (byte) (archiveId >> 8);
this.readCachedBuffer[1] = (byte) archiveId;
this.readCachedBuffer[2] = (byte) (part >> 8);
this.readCachedBuffer[3] = (byte) part;
this.readCachedBuffer[4] = (byte) (nextSector >> 16);
this.readCachedBuffer[5] = (byte) (nextSector >> 8);
this.readCachedBuffer[6] = (byte) nextSector;
this.readCachedBuffer[7] = (byte) indexId;
writeBuffer[0] = (byte) (archiveId >> 8);
writeBuffer[1] = (byte) archiveId;
writeBuffer[2] = (byte) (part >> 8);
writeBuffer[3] = (byte) part;
writeBuffer[4] = (byte) (nextSector >> 16);
writeBuffer[5] = (byte) (nextSector >> 8);
writeBuffer[6] = (byte) nextSector;
writeBuffer[7] = (byte) indexId;
dat.seek(SECTOR_SIZE * sector);
dat.write(this.readCachedBuffer, 0, 8);
dat.write(writeBuffer, 0, 8);
dataToWrite = data.remaining();
if (dataToWrite > 512)
{
@@ -262,11 +266,11 @@ public class DataFile implements Closeable
}
}
data.get(readCachedBuffer, 0, dataToWrite);
dat.write(readCachedBuffer, 0, dataToWrite);
data.get(writeBuffer, 0, dataToWrite);
dat.write(writeBuffer, 0, dataToWrite);
sector = nextSector;
}
DataFileWriteResult res = new DataFileWriteResult();
res.sector = startSector;
res.compressedLength = compressedData.length;
@@ -279,19 +283,21 @@ public class DataFile implements Closeable
res.whirlpool = Whirlpool.getHash(compressedData, length);
return res;
}
public static DataFileReadResult decompress(byte[] b, int[] keys) throws IOException
{
InputStream stream = new InputStream(b);
int compression = stream.readUnsignedByte();
int compressedLength = stream.readInt();
if (compressedLength < 0 || compressedLength > 1000000)
{
throw new RuntimeException("Invalid data");
}
Crc32 crc32 = new Crc32();
crc32.update(b, 0, 5); // compression + length
byte[] data;
int revision = -1;
switch (compression)
@@ -311,7 +317,7 @@ public class DataFile implements Closeable
}
data = decryptedData;
break;
}
case CompressionType.BZ2:
@@ -339,7 +345,7 @@ public class DataFile implements Closeable
}
assert data.length == decompressedLength;
break;
}
case CompressionType.GZ:
@@ -367,13 +373,13 @@ public class DataFile implements Closeable
}
assert data.length == decompressedLength;
break;
}
default:
throw new RuntimeException("Unknown decompression type");
}
DataFileReadResult res = new DataFileReadResult();
res.data = data;
res.revision = revision;
@@ -383,7 +389,7 @@ public class DataFile implements Closeable
res.compression = compression;
return res;
}
public static byte[] compress(byte[] data, int compression, int revision, int[] keys) throws IOException
{
OutputStream stream = new OutputStream();
@@ -406,7 +412,7 @@ public class DataFile implements Closeable
case CompressionType.GZ:
compressedData = GZip.compress(data);
compressedData = encrypt(compressedData, compressedData.length, keys);
stream.writeInt(compressedData.length);
stream.writeInt(data.length);
break;
@@ -416,7 +422,9 @@ public class DataFile implements Closeable
stream.writeBytes(compressedData);
if (revision != -1)
{
stream.writeShort(revision);
}
return stream.flip();
}
@@ -424,7 +432,9 @@ public class DataFile implements Closeable
private static byte[] decrypt(byte[] data, int length, int[] keys)
{
if (keys == null)
{
return data;
}
try
{
@@ -441,7 +451,9 @@ public class DataFile implements Closeable
private static byte[] encrypt(byte[] data, int length, int[] keys)
{
if (keys == null)
{
return data;
}
try
{

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
public class DataFileReadResult
{

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
public class DataFileWriteResult
{

View File

@@ -0,0 +1,256 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.jagex;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.FSFile;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
import net.runelite.cache.index.ArchiveData;
import net.runelite.cache.index.FileData;
import net.runelite.cache.index.IndexData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DiskStorage implements Storage
{
private static final Logger logger = LoggerFactory.getLogger(DiskStorage.class);
private static final String MAIN_FILE_CACHE_DAT = "main_file_cache.dat2";
private static final String MAIN_FILE_CACHE_IDX = "main_file_cache.idx";
private final File folder;
private final DataFile data;
private final IndexFile index255;
private final List<IndexFile> indexFiles = new ArrayList<>();
public DiskStorage(File folder) throws IOException
{
this.folder = folder;
this.data = new DataFile(new File(folder, MAIN_FILE_CACHE_DAT));
this.index255 = new IndexFile(255, new File(folder, MAIN_FILE_CACHE_IDX + "255"));
}
@Override
public void init(Store store) throws IOException
{
for (int i = 0; i < index255.getIndexCount(); ++i)
{
store.addIndex(i);
getIndex(i);
}
assert store.getIndexes().size() == indexFiles.size();
}
@Override
public void close() throws IOException
{
data.close();
index255.close();
for (IndexFile indexFile : indexFiles)
{
indexFile.close();
}
}
private IndexFile getIndex(int i) throws FileNotFoundException
{
for (IndexFile indexFile : indexFiles)
{
if (indexFile.getIndexFileId() == i)
{
return indexFile;
}
}
IndexFile indexFile = new IndexFile(i, new File(folder, MAIN_FILE_CACHE_IDX + i));
indexFiles.add(indexFile);
return indexFile;
}
@Override
public void load(Store store) throws IOException
{
for (Index index : store.getIndexes())
{
loadIndex(index);
}
}
private void loadIndex(Index index) throws IOException
{
logger.trace("Loading index {}", index.getId());
IndexEntry entry = index255.read(index.getId());
byte[] indexData = data.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
DataFileReadResult res = DataFile.decompress(indexData, null);
byte[] data = res.data;
IndexData id = new IndexData();
id.load(data);
index.setProtocol(id.getProtocol());
index.setRevision(id.getRevision());
index.setNamed(id.isNamed());
index.setUsesWhirpool(id.isUsesWhirpool());
for (ArchiveData ad : id.getArchives())
{
Archive archive = index.addArchive(ad.getId());
archive.setNameHash(ad.getNameHash());
archive.setWhirlpool(ad.getWhirlpool());
archive.setCrc(ad.getCrc());
archive.setRevision(ad.getRevision());
assert ad.getFiles().length > 0;
for (FileData fd : ad.getFiles())
{
FSFile file = archive.addFile(fd.getId());
file.setNameHash(fd.getNameHash());
}
}
index.setCrc(res.crc);
index.setWhirlpool(res.whirlpool);
index.setCompression(res.compression);
assert res.revision == -1;
for (Archive archive : new ArrayList<>(index.getArchives()))
{
loadArchive(archive);
}
}
private void loadArchive(Archive archive) throws IOException
{
Index index = archive.getIndex();
IndexFile indexFile = getIndex(index.getId());
assert indexFile.getIndexFileId() == index.getId();
IndexEntry entry = indexFile.read(archive.getArchiveId());
if (entry == null)
{
logger.debug("can't read archive " + archive.getArchiveId() + " from index " + index.getId());
index.getArchives().remove(archive); // is this correct?
return;
}
assert entry.getId() == archive.getArchiveId();
logger.trace("Loading archive {} for index {} from sector {} length {}",
archive.getArchiveId(), index.getId(), entry.getSector(), entry.getLength());
byte[] archiveData = data.read(index.getId(), entry.getId(), entry.getSector(), entry.getLength());
archive.setData(archiveData);
if (index.getXteaManager() != null)
{
return; // can't decrypt this yet
}
archive.decompressAndLoad(null);
}
@Override
public void save(Store store) throws IOException
{
logger.debug("Clearing data and indexes in preparation for store save");
data.clear();
for (IndexFile indexFile : indexFiles)
{
indexFile.clear();
}
logger.debug("Saving store");
for (Index i : store.getIndexes())
{
saveIndex(i);
}
}
private void saveIndex(Index index) throws IOException
{
// This updates archive CRCs for writeIndexData
for (Archive archive : index.getArchives())
{
saveArchive(archive);
}
IndexData indexData = index.toIndexData();
byte[] data = indexData.writeIndexData();
byte[] compressedData = DataFile.compress(data, index.getCompression(), -1, null); // index data revision is always -1
DataFileWriteResult res = this.data.write(index255.getIndexFileId(), index.getId(), compressedData, index.getRevision());
index255.write(new IndexEntry(index255, index.getId(), res.sector, res.compressedLength));
index.setCrc(res.crc);
index.setWhirlpool(res.whirlpool);
}
private void saveArchive(Archive a) throws IOException
{
Index index = a.getIndex();
IndexFile indexFile = getIndex(index.getId());
assert indexFile.getIndexFileId() == index.getId();
int rev; // used for determining what part of compressedData to crc
byte[] compressedData;
if (a.getData() != null)
{
compressedData = a.getData(); // data was never decompressed or loaded
rev = -1; // assume that this data has no revision?
}
else
{
byte[] fileData = a.saveContents();
rev = a.getRevision();
compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
}
DataFileWriteResult res = data.write(index.getId(), a.getArchiveId(), compressedData, rev);
indexFile.write(new IndexEntry(indexFile, a.getArchiveId(), res.sector, res.compressedLength));
logger.trace("Saved archive {}/{} at sector {}, compressed length {}", index.getId(), a.getArchiveId(), res.sector, res.compressedLength);
a.setCrc(res.crc);
a.setWhirlpool(res.whirlpool);
}
}

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
import java.util.Objects;

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
import java.io.Closeable;
import java.io.File;
@@ -37,22 +36,21 @@ import org.slf4j.LoggerFactory;
public class IndexFile implements Closeable
{
private static final Logger logger = LoggerFactory.getLogger(IndexFile.class);
private static final int INDEX_ENTRY_LEN = 6;
private final Store store;
private final int indexFileId;
private final File file;
private final RandomAccessFile idx;
private final byte[] buffer = new byte[INDEX_ENTRY_LEN];
public IndexFile(Store store, int indexFileId, File file) throws FileNotFoundException
public IndexFile(int indexFileId, File file) throws FileNotFoundException
{
this.store = store;
this.indexFileId = indexFileId;
this.file = file;
this.idx = new RandomAccessFile(file, "rw");
}
@Override
public void close() throws IOException
{
@@ -91,31 +89,26 @@ public class IndexFile implements Closeable
return true;
}
public Store getStore()
{
return store;
}
public int getIndexFileId()
{
return indexFileId;
}
public synchronized void write(IndexEntry entry) throws IOException
{
idx.seek(entry.getId() * INDEX_ENTRY_LEN);
buffer[0] = (byte) (entry.getLength() >> 16);
buffer[1] = (byte) (entry.getLength() >> 8);
buffer[2] = (byte) entry.getLength();
buffer[3] = (byte) (entry.getSector() >> 16);
buffer[4] = (byte) (entry.getSector() >> 8);
buffer[5] = (byte) entry.getSector();
idx.write(buffer);
}
public synchronized IndexEntry read(int id) throws IOException
{
idx.seek(id * INDEX_ENTRY_LEN);
@@ -125,21 +118,21 @@ public class IndexFile implements Closeable
logger.debug("short read for id {} on index {}: {}", id, indexFileId, i);
return null;
}
int length = ((buffer[0] & 0xFF) << 16) | ((buffer[1] & 0xFF) << 8) | (buffer[2] & 0xFF);
int sector = ((buffer[3] & 0xFF) << 16) | ((buffer[4] & 0xFF) << 8) | (buffer[5] & 0xFF);
if (length <= 0 || sector <= 0)
{
logger.debug("invalid length or sector {}/{}", length, sector);
return null;
}
return new IndexEntry(this, id, sector, length);
}
public synchronized int getIndexCount() throws IOException
{
return (int)(idx.length() / INDEX_ENTRY_LEN);
return (int) (idx.length() / INDEX_ENTRY_LEN);
}
}

View File

@@ -0,0 +1,282 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.tree;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.FSFile;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
public class TreeStorage implements Storage
{
private final File folder;
public TreeStorage(File folder)
{
this.folder = folder;
}
@Override
public void init(Store store) throws IOException
{
}
@Override
public void close() throws IOException
{
}
@Override
public void load(Store store) throws IOException
{
for (File idx : folder.listFiles())
{
if (!idx.isDirectory())
{
continue;
}
int id = Integer.parseInt(idx.getName());
Index index = store.addIndex(id);
loadIndex(index, folder, idx);
}
Collections.sort(store.getIndexes(), (idx1, idx2) -> Integer.compare(idx1.getId(), idx2.getId()));
}
private void loadIndex(Index index, File parent, File to) throws IOException
{
for (File f : to.listFiles())
{
if (f.isDirectory())
{
int id = Integer.parseInt(f.getName());
Archive archive = index.addArchive(id);
loadTree(archive, to, f);
}
else if (f.getName().endsWith(".dat"))
{
// one file. archiveId-fileId-name
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = index.addArchive(id);
loadTreeSingleFile(archive, to, f);
}
else if (f.getName().endsWith(".datc"))
{
// packed data
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = index.addArchive(id);
loadTreeData(archive, to, f);
}
}
String str = Files.readFirstLine(new File(parent, index.getId() + ".rev"), Charset.defaultCharset());
int revision = Integer.parseInt(str);
index.setRevision(revision);
Collections.sort(index.getArchives(), (ar1, ar2) -> Integer.compare(ar1.getArchiveId(), ar2.getArchiveId()));
}
public void loadTreeData(Archive archive, File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == archive.getArchiveId();
byte[] data = Files.toByteArray(from);
FSFile file = archive.addFile(fileId);
file.setNameHash(nameHash);
file.setContents(data);
File archiveFile = new File(parent, archive.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setRevision(rev);
archiveFile = new File(parent, archive.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setNameHash(name);
}
public void loadTreeSingleFile(Archive archive, File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == archive.getArchiveId();
FSFile file = archive.addFile(fileId);
file.setNameHash(nameHash);
byte[] contents = Files.toByteArray(from);
file.setContents(contents);
File archiveFile = new File(parent, archive.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setRevision(rev);
archiveFile = new File(parent, archive.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setNameHash(name);
}
public void loadTree(Archive archive, File parent, File from) throws IOException
{
for (File file : from.listFiles())
{
//fileId-fileName.dat
String[] split = Files.getNameWithoutExtension(file.getName()).split("-");
assert split.length == 2;
int fileId = Integer.parseInt(split[0]);
int fileName = (int) Long.parseLong(split[1], 16);
FSFile f = archive.addFile(fileId);
f.setNameHash(fileName);
byte[] contents = Files.toByteArray(file);
f.setContents(contents);
}
File archiveFile = new File(parent, archive.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setRevision(rev);
archiveFile = new File(parent, archive.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
archive.setNameHash(name);
// the filesystem may order these differently (eg, 1, 10, 2)
Collections.sort(archive.getFiles(), (f1, f2) -> Integer.compare(f1.getFileId(), f2.getFileId()));
}
@Override
public void save(Store store) throws IOException
{
for (Index i : store.getIndexes())
{
saveIndex(i);
}
}
private void saveIndex(Index i) throws IOException
{
File idx = new File(folder, "" + i.getId());
idx.mkdirs();
for (Archive a : i.getArchives())
{
saveArchive(a, idx);
}
File rev = new File(folder, i.getId() + ".rev");
Files.write(Integer.toString(i.getRevision()), rev, Charset.defaultCharset());
}
private void saveArchive(Archive a, File to) throws IOException
{
byte[] data = a.getData();
List<FSFile> files = a.getFiles();
if (data != null)
{
assert files.size() == 1; // this is the maps
FSFile file = files.get(0);
File archiveFile = new File(to, a.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".datc");
Files.write(data, archiveFile);
archiveFile = new File(to, a.getArchiveId() + ".rev");
Files.write(Integer.toString(a.getRevision()), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, a.getArchiveId() + ".name");
Files.write(Integer.toString(a.getNameHash()), archiveFile, Charset.defaultCharset());
return;
}
if (files.size() == 1)
{
FSFile file = files.get(0);
File archiveFile = new File(to, a.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
archiveFile = new File(to, a.getArchiveId() + ".rev");
Files.write(Integer.toString(a.getRevision()), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, a.getArchiveId() + ".name");
Files.write(Integer.toString(a.getNameHash()), archiveFile, Charset.defaultCharset());
return;
}
File archiveFile = new File(to, a.getArchiveId() + ".rev");
Files.write(Integer.toString(a.getRevision()), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, a.getArchiveId() + ".name");
Files.write(Integer.toString(a.getNameHash()), archiveFile, Charset.defaultCharset());
File archiveFolder = new File(to, Integer.toString(a.getArchiveId()));
archiveFolder.mkdirs();
for (FSFile file : files)
{
archiveFile = new File(archiveFolder, file.getFileId() + "-"
+ Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
}
}
}

View File

@@ -25,6 +25,7 @@
package net.runelite.cache.index;
import net.runelite.cache.io.InputStream;
import net.runelite.cache.io.OutputStream;
public class IndexData
{
@@ -145,6 +146,135 @@ public class IndexData
}
}
public byte[] writeIndexData()
{
OutputStream stream = new OutputStream();
stream.writeByte(protocol);
if (protocol >= 6)
{
stream.writeInt(this.revision);
}
stream.writeByte((named ? 1 : 0) | (usesWhirpool ? 2 : 0));
if (protocol >= 7)
{
stream.writeBigSmart(this.archives.length);
}
else
{
stream.writeShort(this.archives.length);
}
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
int archive = a.getId();
if (i != 0)
{
ArchiveData prev = this.archives[i - 1];
archive -= prev.getId();
}
if (protocol >= 7)
{
stream.writeBigSmart(archive);
}
else
{
stream.writeShort(archive);
}
}
if (named)
{
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
stream.writeInt(a.getNameHash());
}
}
if (usesWhirpool)
{
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
stream.writeBytes(a.getWhirlpool());
}
}
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
stream.writeInt(a.getCrc());
}
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
stream.writeInt(a.getRevision());
}
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
int len = a.getFiles().length;
if (protocol >= 7)
{
stream.writeBigSmart(len);
}
else
{
stream.writeShort(len);
}
}
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
for (int j = 0; j < a.getFiles().length; ++j)
{
FileData file = a.getFiles()[j];
int offset = file.getId();
if (j != 0)
{
FileData prev = a.getFiles()[j - 1];
offset -= prev.getId();
}
if (protocol >= 7)
{
stream.writeBigSmart(offset);
}
else
{
stream.writeShort(offset);
}
}
}
if (named)
{
for (int i = 0; i < this.archives.length; ++i)
{
ArchiveData a = this.archives[i];
for (int j = 0; j < a.getFiles().length; ++j)
{
FileData file = a.getFiles()[j];
stream.writeInt(file.getNameHash());
}
}
}
return stream.flip();
}
public int getProtocol()
{
return protocol;

View File

@@ -34,8 +34,8 @@ import java.util.Arrays;
import net.runelite.cache.client.requests.ConnectionInfo;
import net.runelite.cache.client.requests.HelloHandshake;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.CompressionType;
import net.runelite.cache.fs.DataFile;
import net.runelite.cache.fs.jagex.CompressionType;
import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Store;
import org.slf4j.Logger;
@@ -188,7 +188,7 @@ public class CacheServerHandler extends SimpleChannelInboundHandler<ByteBuf>
Index i = store.findIndex(archiveId);
assert i != null;
byte[] indexData = i.writeIndexData();
byte[] indexData = i.toIndexData().writeIndexData();
byte[] compressed = compress(CompressionType.NONE, indexData);
byte[] packed = addHeader(255, archiveId, compressed);

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.util.concurrent.CompletableFuture;
import net.runelite.cache.CacheProperties;
import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
@@ -50,10 +51,10 @@ public class CacheClientTest
@Ignore
public void test() throws Exception
{
try (Store store = new Store(new File("d:/temp")))
try (Store store = new Store(new File("D:\\rs\\07\\temp\\cache")))
{
store.load();
CacheClient c = new CacheClient(store, CacheProperties.getRsVersion());
c.connect();
CompletableFuture<Integer> handshake = c.handshake();
@@ -62,11 +63,11 @@ public class CacheClientTest
logger.info("Handshake result: {}", result);
Assert.assertEquals(0, (int) result);
c.download();
c.close();
store.save();
}
}
@@ -77,8 +78,9 @@ public class CacheClientTest
{
try (Store store = new Store(new File("C:\\rs\\temp")))
{
store.loadTree(new File("C:\\rs\\runescape-data\\cache"));
TreeStorage storage = new TreeStorage(new File("C:\\rs\\runescape-data\\cache"));
storage.load(store);
CacheClient c = new CacheClient(store, CacheProperties.getRsVersion());
c.connect();
CompletableFuture<Integer> handshake = c.handshake();
@@ -87,12 +89,13 @@ public class CacheClientTest
logger.info("Handshake result: {}", result);
Assert.assertEquals(0, (int) result);
c.download();
c.close();
store.saveTree(new File("C:\\rs\\temp\\t"));
storage = new TreeStorage(new File("C:\\rs\\temp\\t"));
storage.save(store);
}
}
}
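The updated tests above all load a Store through one Storage implementation and save it back out through another. A minimal standalone sketch of that round trip, assuming hypothetical directories (not part of the committed code):
import java.io.File;
import java.io.IOException;
import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.tree.TreeStorage;
public class TreeRoundTripExample
{
	public static void main(String[] args) throws IOException
	{
		File in = new File("/tmp/tree-in");   // hypothetical input tree
		File out = new File("/tmp/tree-out"); // hypothetical output tree
		// Load the store through one storage, then save it through another.
		try (Store store = new Store(new TreeStorage(in)))
		{
			store.load();
			new TreeStorage(out).save(store);
		}
	}
}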

View File

@@ -1,173 +0,0 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.StoreLocation;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public class DataFileTest
{
@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();
@Test
public void test1() throws IOException
{
File file = folder.newFile();
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 0, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
}
}
@Test
public void test2() throws IOException
{
byte[] b = new byte[1024];
for (int i = 0; i < 1024; ++i)
{
b[i] = (byte) i;
}
File file = folder.newFile();
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress(b, CompressionType.BZ2, 42, null);
DataFileWriteResult res = df.write(42, 0x1FFFF, compressedData, 42);
compressedData = df.read(42, 0x1FFFF, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
Assert.assertArrayEquals(b, buf);
}
}
@Test
public void testGZipCompression() throws IOException
{
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, folder.newFile());
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.GZ, 0, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);
compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
}
@Test
public void testBZip2Compression() throws IOException
{
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, folder.newFile());
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.BZ2, 5, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);
compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
}
@Test
public void testCrc() throws IOException
{
File file = folder.newFile();
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 42, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}
@Test
public void testKeys() throws IOException
{
File file = folder.newFile();
int[] keys = new int[] { 4, 8, 15, 16 };
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress("testtesttesttest1".getBytes(), CompressionType.NONE, 42, keys);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, keys);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("testtesttesttest1", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}
}

View File

@@ -28,6 +28,7 @@ import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.StoreLocation;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Rule;
@@ -70,6 +71,10 @@ public class StoreLoadTest
Assert.assertTrue(store.equals(testStore));
testStore.save();
}
try (Store testStore = new Store(testStoreFile))
{
testStore.load();
Assert.assertTrue(store.equals(testStore));
@@ -86,7 +91,8 @@ public class StoreLoadTest
{
store.load();
store.saveTree(folder.newFolder());
TreeStorage storage = new TreeStorage(folder.newFolder());
storage.save(store);
}
}
@@ -96,8 +102,9 @@ public class StoreLoadTest
{
try (Store store = new Store(folder.newFolder()))
{
store.loadTree(new File("C:\\rs\\temp\\tree"));
TreeStorage storage = new TreeStorage(new File("C:\\rs\\temp\\tree"));
storage.load(store);
try (Store store2 = new Store(StoreLocation.LOCATION))
{
store2.load();
@@ -114,7 +121,9 @@ public class StoreLoadTest
try (Store store = new Store(new File("d:/rs/07/temp/cache")))
{
store.load();
store.saveTree(new File("d:/rs/07/temp/tree"));
TreeStorage storage = new TreeStorage(new File("d:/rs/07/temp/tree"));
storage.save(store);
}
}
}

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.io.IOException;
import java.util.Random;
import net.runelite.cache.StoreLocation;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
@@ -150,11 +151,13 @@ public class StoreTest
// Test tree save/load
File tree = folder.newFolder();
store.saveTree(tree);
Storage treeStorage = new TreeStorage(tree);
try (Store store2 = new Store(folder.newFolder()))
treeStorage.save(store);
try (Store store2 = new Store(treeStorage))
{
store2.loadTree(tree);
store2.load();
}
}
}

View File

@@ -0,0 +1,156 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.jagex;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.StoreLocation;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public class DataFileTest
{
@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();
@Test
public void test1() throws IOException
{
File file = folder.newFile();
DataFile df = new DataFile(file);
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 0, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
}
@Test
public void test2() throws IOException
{
byte[] b = new byte[1024];
for (int i = 0; i < 1024; ++i)
{
b[i] = (byte) i;
}
File file = folder.newFile();
DataFile df = new DataFile(file);
byte[] compressedData = DataFile.compress(b, CompressionType.BZ2, 42, null);
DataFileWriteResult res = df.write(42, 0x1FFFF, compressedData, 42);
compressedData = df.read(42, 0x1FFFF, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
Assert.assertArrayEquals(b, buf);
}
@Test
public void testGZipCompression() throws IOException
{
DataFile df = new DataFile(folder.newFile());
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.GZ, 0, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);
compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
@Test
public void testBZip2Compression() throws IOException
{
DataFile df = new DataFile(folder.newFile());
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.BZ2, 5, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);
compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
@Test
public void testCrc() throws IOException
{
File file = folder.newFile();
DataFile df = new DataFile(file);
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 42, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
@Test
public void testKeys() throws IOException
{
File file = folder.newFile();
int[] keys = new int[]
{
4, 8, 15, 16
};
DataFile df = new DataFile(file);
byte[] compressedData = DataFile.compress("testtesttesttest1".getBytes(), CompressionType.NONE, 42, keys);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, keys);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("testtesttesttest1", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
package net.runelite.cache.fs.jagex;
import java.io.File;
import java.io.IOException;
@@ -37,18 +36,15 @@ public class IndexFileTest
{
@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();
@Test
public void test() throws IOException
{
File file = folder.newFile();
try (Store store = new Store(folder.getRoot()))
{
IndexFile index = new IndexFile(store, 5, file);
IndexEntry entry = new IndexEntry(index, 7, 8, 9);
index.write(entry);
IndexEntry entry2 = index.read(7);
Assert.assertEquals(entry, entry2);
}
IndexFile index = new IndexFile(5, file);
IndexEntry entry = new IndexEntry(index, 7, 8, 9);
index.write(entry);
IndexEntry entry2 = index.read(7);
Assert.assertEquals(entry, entry2);
}
}

View File

@@ -94,7 +94,8 @@ public class CacheServerTest
server.start();
try (Store store2 = new Store(folder.newFolder()); CacheClient client = new CacheClient(store2, HOST, REVISION))
try (Store store2 = new Store(folder.newFolder());
CacheClient client = new CacheClient(store2, HOST, REVISION))
{
client.connect();
client.handshake().get();