cache: split storage apart from Store

This commit is contained in:
Adam
2017-09-05 20:05:41 -04:00
parent 1b5cd87351
commit 78f5ddcd3a
23 changed files with 1134 additions and 882 deletions

View File

@@ -26,7 +26,10 @@ package net.runelite.cache;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store; import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.jagex.DiskStorage;
import net.runelite.cache.fs.tree.TreeStorage;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.DefaultParser;
@@ -76,9 +79,15 @@ public class Cache
System.out.print("Packing tree from " + tree + " to " + cache + "..."); System.out.print("Packing tree from " + tree + " to " + cache + "...");
Store treeBase = new Store(new File(cache)); File cacheDir = new File(cache),
treeBase.loadTree(new File(tree)); treeDir = new File(tree);
treeBase.save();
Storage from = new TreeStorage(treeDir);
Storage to = new DiskStorage(cacheDir);
Store store = new Store(from);
store.load();
to.save(store);
System.out.println(" done!"); System.out.println(" done!");
return; return;
@@ -96,7 +105,8 @@ public class Cache
Store treeBase = new Store(new File(cache)); Store treeBase = new Store(new File(cache));
treeBase.load(); treeBase.load();
treeBase.saveTree(new File(tree)); TreeStorage storage = new TreeStorage(new File(tree));
storage.save(treeBase);
System.out.println(" done!"); System.out.println(" done!");
return; return;
@@ -173,8 +183,9 @@ public class Cache
{ {
if (cache == null) if (cache == null)
{ {
Store store = new Store(new File(tree)); Storage storage = new TreeStorage(new File(tree));
store.loadTree(new File(tree)); Store store = new Store(storage);
store.load();
return store; return store;
} }
else else

View File

@@ -25,8 +25,8 @@
package net.runelite.cache.client; package net.runelite.cache.client;
import java.io.IOException; import java.io.IOException;
import net.runelite.cache.fs.DataFile; import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.DataFileReadResult; import net.runelite.cache.fs.jagex.DataFileReadResult;
public class FileResult public class FileResult
{ {

View File

@@ -24,13 +24,11 @@
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs;
import com.google.common.io.Files; import net.runelite.cache.fs.jagex.DataFile;
import java.io.File; import net.runelite.cache.fs.jagex.DataFileReadResult;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import net.runelite.cache.io.InputStream; import net.runelite.cache.io.InputStream;
@@ -42,18 +40,18 @@ public class Archive
{ {
private static final Logger logger = LoggerFactory.getLogger(Archive.class); private static final Logger logger = LoggerFactory.getLogger(Archive.class);
private Index index; // member of this index private final Index index; // member of this index
private byte[] data; // raw data from the datafile, compressed/encrypted private byte[] data; // raw data from the datafile, compressed/encrypted
private int archiveId; private final int archiveId;
private int nameHash; private int nameHash;
private byte[] whirlpool; private byte[] whirlpool;
private int crc; private int crc;
private int revision; private int revision;
private int compression; private int compression;
private List<FSFile> files = new ArrayList<>(); private final List<FSFile> files = new ArrayList<>();
public Archive(Index index, int id) public Archive(Index index, int id)
{ {
@@ -103,6 +101,11 @@ public class Archive
return true; return true;
} }
public Index getIndex()
{
return index;
}
public byte[] getData() public byte[] getData()
{ {
return data; return data;
@@ -168,6 +171,8 @@ public class Archive
{ {
logger.trace("Loading contents of archive {} ({} files)", archiveId, files.size()); logger.trace("Loading contents of archive {} ({} files)", archiveId, files.size());
assert !this.getFiles().isEmpty();
if (this.getFiles().size() == 1) if (this.getFiles().size() == 1)
{ {
this.getFiles().get(0).setContents(data); this.getFiles().get(0).setContents(data);
@@ -270,151 +275,6 @@ public class Archive
return fileData; return fileData;
} }
public void saveTree(File to) throws IOException
{
if (data != null)
{
assert files.size() == 1; // this is the maps
FSFile file = files.get(0);
File archiveFile = new File(to, this.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".datc");
Files.write(data, archiveFile);
archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
return;
}
if (files.size() == 1)
{
FSFile file = this.getFiles().get(0);
File archiveFile = new File(to, this.getArchiveId() + "-"
+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
return;
}
File archiveFile = new File(to, this.getArchiveId() + ".rev");
Files.write("" + this.getRevision(), archiveFile, Charset.defaultCharset());
archiveFile = new File(to, this.getArchiveId() + ".name");
Files.write("" + this.getNameHash(), archiveFile, Charset.defaultCharset());
File archiveFolder = new File(to, "" + this.getArchiveId());
archiveFolder.mkdirs();
for (FSFile file : files)
{
archiveFile = new File(archiveFolder, file.getFileId() + "-"
+ Integer.toHexString(file.getNameHash()) + ".dat");
byte[] contents = file.getContents();
Files.write(contents, archiveFile);
}
}
public void loadTreeData(File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == this.getArchiveId();
data = Files.toByteArray(from);
FSFile file = new FSFile(this, fileId);
file.setNameHash(nameHash);
files.add(file);
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
}
public void loadTreeSingleFile(File parent, File from) throws IOException
{
//archiveId-fileId-fileName
String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
assert parts.length == 3;
int archiveId = Integer.parseInt(parts[0]);
int fileId = Integer.parseInt(parts[1]);
int nameHash = (int) Long.parseLong(parts[2], 16);
assert archiveId == this.getArchiveId();
FSFile file = new FSFile(this, fileId);
file.setNameHash(nameHash);
byte[] contents = Files.toByteArray(from);
file.setContents(contents);
files.add(file);
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
}
public void loadTree(File parent, File from) throws IOException
{
for (File file : from.listFiles())
{
//fileId-fileName.dat
String[] split = Files.getNameWithoutExtension(file.getName()).split("-");
assert split.length == 2;
int fileId = Integer.parseInt(split[0]);
int fileName = (int) Long.parseLong(split[1], 16);
FSFile f = new FSFile(this, fileId);
f.setNameHash(fileName);
byte[] contents = Files.toByteArray(file);
f.setContents(contents);
files.add(f);
}
File archiveFile = new File(parent, this.getArchiveId() + ".rev");
int rev = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setRevision(rev);
archiveFile = new File(parent, this.getArchiveId() + ".name");
int name = Integer.parseInt(Files.readFirstLine(archiveFile, Charset.defaultCharset()));
this.setNameHash(name);
// the filesystem may order these differently (eg, 1, 10, 2)
Collections.sort(files, (f1, f2) -> Integer.compare(f1.getFileId(), f2.getFileId()));
}
public int getArchiveId() public int getArchiveId()
{ {
return archiveId; return archiveId;

View File

@@ -24,34 +24,29 @@
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs;
import com.google.common.io.Files; import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.jagex.CompressionType;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import java.io.Closeable;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import net.runelite.cache.index.ArchiveData; import net.runelite.cache.index.ArchiveData;
import net.runelite.cache.index.FileData; import net.runelite.cache.index.FileData;
import net.runelite.cache.index.IndexData; import net.runelite.cache.index.IndexData;
import net.runelite.cache.util.Djb2;
import net.runelite.cache.io.OutputStream;
import net.runelite.cache.util.Crc32; import net.runelite.cache.util.Crc32;
import net.runelite.cache.util.Djb2;
import net.runelite.cache.util.Whirlpool; import net.runelite.cache.util.Whirlpool;
import net.runelite.cache.util.XteaKeyManager; import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class Index implements Closeable public class Index
{ {
private static final Logger logger = LoggerFactory.getLogger(Index.class); private static final Logger logger = LoggerFactory.getLogger(Index.class);
private final Store store; private final Store store;
private final IndexFile index;
private final int id; private final int id;
private XteaKeyManager xteaManager; private XteaKeyManager xteaManager;
@@ -65,24 +60,12 @@ public class Index implements Closeable
private final List<Archive> archives = new ArrayList<>(); private final List<Archive> archives = new ArrayList<>();
public Index(Store store, IndexFile index, int id) public Index(Store store, int id)
{ {
this.store = store; this.store = store;
this.index = index;
this.id = id; this.id = id;
} }
@Override
public void close() throws IOException
{
index.close();
}
public void clear() throws IOException
{
index.clear();
}
@Override @Override
public int hashCode() public int hashCode()
{ {
@@ -135,11 +118,46 @@ public class Index implements Closeable
return id; return id;
} }
public int getProtocol()
{
return protocol;
}
public void setProtocol(int protocol)
{
this.protocol = protocol;
}
public boolean isNamed()
{
return named;
}
public void setNamed(boolean named)
{
this.named = named;
}
public boolean isUsesWhirpool()
{
return usesWhirpool;
}
public void setUsesWhirpool(boolean usesWhirpool)
{
this.usesWhirpool = usesWhirpool;
}
public int getRevision() public int getRevision()
{ {
return revision; return revision;
} }
public void setRevision(int revision)
{
this.revision = revision;
}
public int getCrc() public int getCrc()
{ {
return crc; return crc;
@@ -160,9 +178,14 @@ public class Index implements Closeable
this.whirlpool = whirlpool; this.whirlpool = whirlpool;
} }
public IndexFile getIndex() public int getCompression()
{ {
return index; return compression;
}
public void setCompression(int compression)
{
this.compression = compression;
} }
public List<Archive> getArchives() public List<Archive> getArchives()
@@ -206,8 +229,6 @@ public class Index implements Closeable
{ {
for (Archive a : archives) for (Archive a : archives)
{ {
assert this.index.getIndexFileId() == this.id;
int rev; // used for determining what part of compressedData to crc int rev; // used for determining what part of compressedData to crc
byte[] compressedData; byte[] compressedData;
@@ -235,7 +256,7 @@ public class Index implements Closeable
} }
Crc32 crc = new Crc32(); Crc32 crc = new Crc32();
byte[] indexData = this.writeIndexData(); byte[] indexData = toIndexData().writeIndexData();
ByteBuf b = Unpooled.buffer(5, 5); ByteBuf b = Unpooled.buffer(5, 5);
b.writeByte((byte) CompressionType.NONE); b.writeByte((byte) CompressionType.NONE);
@@ -248,323 +269,38 @@ public class Index implements Closeable
this.setCrc(hash); this.setCrc(hash);
} }
public void load() throws IOException public IndexData toIndexData()
{ {
logger.trace("Loading index {}", id); IndexData data = new IndexData();
data.setProtocol(protocol);
data.setRevision(revision);
data.setNamed(named);
data.setUsesWhirpool(usesWhirpool);
DataFile dataFile = store.getData(); ArchiveData[] archiveDatas = new ArchiveData[archives.size()];
IndexFile index255 = store.getIndex255(); data.setArchives(archiveDatas);
IndexEntry entry = index255.read(id); int idx = 0;
byte[] indexData = dataFile.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength()); for (Archive archive : archives)
DataFileReadResult res = DataFile.decompress(indexData, null);
byte[] data = res.data;
archives.clear();
readIndexData(data);
this.crc = res.crc;
this.whirlpool = res.whirlpool;
this.compression = res.compression;
assert res.revision == -1;
this.loadArchives();
}
public void save() throws IOException
{
// This updates archive CRCs for writeIndexData
saveArchives();
byte[] data = this.writeIndexData();
DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();
byte[] compressedData = DataFile.compress(data, this.compression, -1, null); // index data revision is always -1
DataFileWriteResult res = dataFile.write(index255.getIndexFileId(), this.id, compressedData, revision);
index255.write(new IndexEntry(index255, id, res.sector, res.compressedLength));
this.crc = res.crc;
this.whirlpool = res.whirlpool;
}
public void saveTree(File to) throws IOException
{
File idx = new File(to, "" + this.getId());
idx.mkdirs();
for (Archive a : archives)
{ {
a.saveTree(idx); ArchiveData ad = archiveDatas[idx++] = new ArchiveData();
} ad.setId(archive.getArchiveId());
ad.setNameHash(archive.getNameHash());
ad.setCrc(archive.getCrc());
ad.setWhirlpool(archive.getWhirlpool());
ad.setRevision(archive.getRevision());
File rev = new File(to, this.getId() + ".rev"); FileData[] files = new FileData[archive.getFiles().size()];
Files.write("" + this.getRevision(), rev, Charset.defaultCharset()); ad.setFiles(files);
}
public void loadTree(File parent, File to) throws IOException int idx2 = 0;
{ for (FSFile file : archive.getFiles())
for (File f : to.listFiles())
{
if (f.isDirectory())
{ {
int id = Integer.parseInt(f.getName()); FileData fd = files[idx2++] = new FileData();
fd.setId(file.getFileId());
Archive archive = new Archive(this, id); fd.setNameHash(file.getNameHash());
archive.loadTree(to, f);
archives.add(archive);
}
else if (f.getName().endsWith(".dat"))
{
// one file. archiveId-fileId-name
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = new Archive(this, id);
archive.loadTreeSingleFile(to, f);
archives.add(archive);
}
else if (f.getName().endsWith(".datc"))
{
// packed data
String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
int id = Integer.parseInt(parts[0]);
Archive archive = new Archive(this, id);
archive.loadTreeData(to, f);
archives.add(archive);
} }
} }
return data;
String str = Files.readFirstLine(new File(parent, this.getId() + ".rev"), Charset.defaultCharset());
revision = Integer.parseInt(str);
Collections.sort(archives, (ar1, ar2) -> Integer.compare(ar1.getArchiveId(), ar2.getArchiveId()));
}
public void readIndexData(byte[] data)
{
IndexData indexData = new IndexData();
indexData.load(data);
protocol = indexData.getProtocol();
revision = indexData.getRevision();
named = indexData.isNamed();
usesWhirpool = indexData.isUsesWhirpool();
for (ArchiveData ad : indexData.getArchives())
{
Archive archive = new Archive(this, ad.getId());
archive.setNameHash(ad.getNameHash());
archive.setWhirlpool(ad.getWhirlpool());
archive.setCrc(ad.getCrc());
archive.setRevision(ad.getRevision());
for (FileData fd : ad.getFiles())
{
FSFile file = archive.addFile(fd.getId());
file.setNameHash(fd.getNameHash());
}
archives.add(archive);
}
}
private void loadArchives() throws IOException
{
// get data from index file
for (Archive a : new ArrayList<>(archives))
{
IndexEntry entry = this.index.read(a.getArchiveId());
if (entry == null)
{
logger.debug("can't read archive " + a.getArchiveId() + " from index " + this.id);
archives.remove(a); // is this the correct behavior?
continue;
}
assert this.index.getIndexFileId() == this.id;
assert entry.getId() == a.getArchiveId();
logger.trace("Loading archive {} for index {} from sector {} length {}", a.getArchiveId(), id, entry.getSector(), entry.getLength());
byte[] archiveData = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength());
a.setData(archiveData);
if (this.xteaManager != null)
{
continue; // can't decrypt this yet
}
a.decompressAndLoad(null);
}
}
public void saveArchives() throws IOException
{
for (Archive a : archives)
{
assert this.index.getIndexFileId() == this.id;
DataFile data = store.getData();
int rev; // used for determining what part of compressedData to crc
byte[] compressedData;
if (a.getData() != null)
{
compressedData = a.getData(); // data was never decompressed or loaded
rev = -1; // assume that this data has no revision?
}
else
{
byte[] fileData = a.saveContents();
rev = a.getRevision();
compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
}
DataFileWriteResult res = data.write(this.id, a.getArchiveId(), compressedData, rev);
this.index.write(new IndexEntry(this.index, a.getArchiveId(), res.sector, res.compressedLength));
logger.trace("Saved archive {}/{} at sector {}, compressed length {}", this.getId(), a.getArchiveId(), res.sector, res.compressedLength);
a.setCrc(res.crc);
a.setWhirlpool(res.whirlpool);
}
}
public byte[] writeIndexData()
{
OutputStream stream = new OutputStream();
stream.writeByte(protocol);
if (protocol >= 6)
{
stream.writeInt(this.revision);
}
stream.writeByte((named ? 1 : 0) | (usesWhirpool ? 2 : 0));
if (protocol >= 7)
{
stream.writeBigSmart(this.archives.size());
}
else
{
stream.writeShort(this.archives.size());
}
int data;
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
int archive = a.getArchiveId();
if (data != 0)
{
Archive prev = this.archives.get(data - 1);
archive -= prev.getArchiveId();
}
if (protocol >= 7)
{
stream.writeBigSmart(archive);
}
else
{
stream.writeShort(archive);
}
}
if (named)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getNameHash());
}
}
if (usesWhirpool)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeBytes(a.getWhirlpool());
}
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getCrc());
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
stream.writeInt(a.getRevision());
}
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
int len = a.getFiles().size();
if (protocol >= 7)
{
stream.writeBigSmart(len);
}
else
{
stream.writeShort(len);
}
}
int index2;
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
for (index2 = 0; index2 < a.getFiles().size(); ++index2)
{
FSFile file = a.getFiles().get(index2);
int offset = file.getFileId();
if (index2 != 0)
{
FSFile prev = a.getFiles().get(index2 - 1);
offset -= prev.getFileId();
}
if (protocol >= 7)
{
stream.writeBigSmart(offset);
}
else
{
stream.writeShort(offset);
}
}
}
if (named)
{
for (data = 0; data < this.archives.size(); ++data)
{
Archive a = this.archives.get(data);
for (index2 = 0; index2 < a.getFiles().size(); ++index2)
{
FSFile file = a.getFiles().get(index2);
stream.writeInt(file.getNameHash());
}
}
}
return stream.flip();
} }
} }

View File

@@ -0,0 +1,39 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
import java.io.IOException;
public interface Storage extends AutoCloseable
{
void init(Store store) throws IOException;
@Override
void close() throws IOException;
void load(Store store) throws IOException;
void save(Store store) throws IOException;
}

View File

@@ -29,37 +29,25 @@ import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import net.runelite.cache.IndexType; import net.runelite.cache.IndexType;
import net.runelite.cache.fs.jagex.DiskStorage;
import net.runelite.cache.util.XteaKeyManager; import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class Store implements Closeable public final class Store implements Closeable
{ {
private static final Logger logger = LoggerFactory.getLogger(Store.class); private static final Logger logger = LoggerFactory.getLogger(Store.class);
private static final String MAIN_FILE_CACHE_DAT = "main_file_cache.dat2"; private final Storage storage;
private static final String MAIN_FILE_CACHE_IDX = "main_file_cache.idx";
private final File folder;
private final DataFile data;
private final IndexFile index255;
private final List<Index> indexes = new ArrayList<>(); private final List<Index> indexes = new ArrayList<>();
public Store(File folder) throws IOException public Store(File folder) throws IOException
{ {
this.folder = folder; storage = new DiskStorage(folder);
storage.init(this);
data = new DataFile(this, new File(folder, MAIN_FILE_CACHE_DAT));
index255 = new IndexFile(this, 255, new File(folder, MAIN_FILE_CACHE_IDX + "255"));
for (int i = 0; i < index255.getIndexCount(); ++i)
{
this.addIndex(i);
}
Index maps = this.findIndex(IndexType.MAPS.getNumber()); Index maps = this.findIndex(IndexType.MAPS.getNumber());
if (maps != null) if (maps != null)
@@ -70,15 +58,17 @@ public class Store implements Closeable
} }
} }
public Store(Storage storage) throws IOException
{
this.storage = storage;
storage.init(this);
}
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {
data.close(); storage.close();
index255.close();
for (Index i : indexes)
{
i.close();
}
} }
@Override @Override
@@ -112,15 +102,13 @@ public class Store implements Closeable
{ {
for (Index i : indexes) for (Index i : indexes)
{ {
if (i.getIndex().getIndexFileId() == id) if (i.getId() == id)
{ {
throw new IllegalArgumentException("index " + id + " already exists"); throw new IllegalArgumentException("index " + id + " already exists");
} }
} }
IndexFile indexFile = new IndexFile(this, id, new File(folder, MAIN_FILE_CACHE_IDX + id)); Index index = new Index(this, id);
Index index = new Index(this, indexFile, id);
this.indexes.add(index); this.indexes.add(index);
return index; return index;
@@ -139,69 +127,19 @@ public class Store implements Closeable
public void rebuildCrc() throws IOException public void rebuildCrc() throws IOException
{ {
for (Index i : indexes) for (Index i : indexes)
{
i.rebuildCrc(); i.rebuildCrc();
}
} }
public void load() throws IOException public void load() throws IOException
{ {
for (Index i : indexes) storage.load(this);
{
i.load();
}
} }
public void save() throws IOException public void save() throws IOException
{ {
logger.debug("Clearing data and indexes in preparation for store save"); storage.save(this);
data.clear();
for (Index i : indexes)
{
i.clear();
}
for (Index i : indexes)
{
i.save();
}
}
public void saveTree(File to) throws IOException
{
for (Index i : indexes)
{
i.saveTree(to);
}
}
public void loadTree(File from) throws IOException
{
for (File idx : from.listFiles())
{
if (!idx.isDirectory())
{
continue;
}
int id = Integer.parseInt(idx.getName());
IndexFile indexFile = new IndexFile(this, id, new File(folder, MAIN_FILE_CACHE_IDX + id));
Index index = new Index(this, indexFile, id);
index.loadTree(from, idx);
indexes.add(index);
}
Collections.sort(indexes, (idx1, idx2) -> Integer.compare(idx1.getId(), idx2.getId()));
}
public DataFile getData()
{
return data;
}
public IndexFile getIndex255()
{
return index255;
} }
public List<Index> getIndexes() public List<Index> getIndexes()
@@ -214,7 +152,7 @@ public class Store implements Closeable
return indexes.get(type.getNumber()); return indexes.get(type.getNumber());
} }
public final Index findIndex(int id) public Index findIndex(int id)
{ {
for (Index i : indexes) for (Index i : indexes)
{ {

View File

@@ -22,7 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs.jagex;
public class CompressionType public class CompressionType
{ {

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs.jagex;
package net.runelite.cache.fs;
import java.io.Closeable; import java.io.Closeable;
import java.io.File; import java.io.File;
@@ -52,16 +51,11 @@ public class DataFile implements Closeable
private static final int SECTOR_SIZE = 520; private static final int SECTOR_SIZE = 520;
private final Store store;
private final File file;
private final RandomAccessFile dat; private final RandomAccessFile dat;
private final byte[] readCachedBuffer = new byte[SECTOR_SIZE];
public DataFile(Store store, File file) throws FileNotFoundException public DataFile(File file) throws FileNotFoundException
{ {
this.file = file; this.dat = new RandomAccessFile(file, "rw");
this.store = store;
dat = new RandomAccessFile(file, "rw");
} }
@Override @Override
@@ -84,7 +78,7 @@ public class DataFile implements Closeable
* @return * @return
* @throws IOException * @throws IOException
*/ */
public synchronized byte[] read(int indexId, int archiveId, int sector, int size) throws IOException public byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
{ {
if (sector <= 0L || dat.length() / SECTOR_SIZE < (long) sector) if (sector <= 0L || dat.length() / SECTOR_SIZE < (long) sector)
{ {
@@ -92,6 +86,7 @@ public class DataFile implements Closeable
return null; return null;
} }
byte[] readBuffer = new byte[SECTOR_SIZE];
ByteBuffer buffer = ByteBuffer.allocate(size); ByteBuffer buffer = ByteBuffer.allocate(size);
for (int part = 0, readBytesCount = 0, nextSector; for (int part = 0, readBytesCount = 0, nextSector;
@@ -119,17 +114,22 @@ public class DataFile implements Closeable
dataBlockSize = SECTOR_SIZE - headerSize; dataBlockSize = SECTOR_SIZE - headerSize;
} }
int i = dat.read(this.readCachedBuffer, 0, headerSize + dataBlockSize); int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
if (i != headerSize + dataBlockSize) if (i != headerSize + dataBlockSize)
{ {
logger.warn("Short read when reading file data for {}/{}", indexId, archiveId); logger.warn("Short read when reading file data for {}/{}", indexId, archiveId);
return null; return null;
} }
currentArchive = ((this.readCachedBuffer[1] & 255) << 16) + ((this.readCachedBuffer[0] & 255) << 24) + (('\uff00' & this.readCachedBuffer[2] << 8) - -(this.readCachedBuffer[3] & 255)); currentArchive = ((readBuffer[0] & 0xFF) << 24)
currentPart = ((this.readCachedBuffer[4] & 255) << 8) + (255 & this.readCachedBuffer[5]); | ((readBuffer[1] & 0xFF) << 16)
nextSector = (this.readCachedBuffer[8] & 255) + ('\uff00' & this.readCachedBuffer[7] << 8) + ((255 & this.readCachedBuffer[6]) << 16); | ((readBuffer[2] & 0xFF) << 8)
currentIndex = this.readCachedBuffer[9] & 255; | (readBuffer[3] & 0xFF);
currentPart = ((readBuffer[4] & 0xFF) << 8) + (readBuffer[5] & 0xFF);
nextSector = ((readBuffer[6] & 0xFF) << 16)
| ((readBuffer[7] & 0xFF) << 8)
| (readBuffer[8] & 0xFF);
currentIndex = readBuffer[9] & 0xFF;
} }
else else
{ {
@@ -139,17 +139,21 @@ public class DataFile implements Closeable
dataBlockSize = SECTOR_SIZE - headerSize; dataBlockSize = SECTOR_SIZE - headerSize;
} }
int i = dat.read(this.readCachedBuffer, 0, headerSize + dataBlockSize); int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
if (i != headerSize + dataBlockSize) if (i != headerSize + dataBlockSize)
{ {
logger.warn("short read"); logger.warn("short read");
return null; return null;
} }
currentArchive = (255 & this.readCachedBuffer[1]) + ('\uff00' & this.readCachedBuffer[0] << 8); currentArchive = ((readBuffer[0] & 0xFF) << 8)
currentPart = ((this.readCachedBuffer[2] & 255) << 8) + (255 & this.readCachedBuffer[3]); | (readBuffer[1] & 0xFF);
nextSector = (this.readCachedBuffer[6] & 255) + ('\uff00' & this.readCachedBuffer[5] << 8) + ((255 & this.readCachedBuffer[4]) << 16); currentPart = ((readBuffer[2] & 0xFF) << 8)
currentIndex = this.readCachedBuffer[7] & 255; | (readBuffer[3] & 0xFF);
nextSector = ((readBuffer[4] & 0xFF) << 16)
| ((readBuffer[5] & 0xFF) << 8)
| (readBuffer[6] & 0xFF);
currentIndex = readBuffer[7] & 0xFF;
} }
if (archiveId != currentArchive || currentPart != part || indexId != currentIndex) if (archiveId != currentArchive || currentPart != part || indexId != currentIndex)
@@ -167,7 +171,7 @@ public class DataFile implements Closeable
return null; return null;
} }
buffer.put(readCachedBuffer, headerSize, dataBlockSize); buffer.put(readBuffer, headerSize, dataBlockSize);
readBytesCount += dataBlockSize; readBytesCount += dataBlockSize;
++part; ++part;
@@ -177,11 +181,12 @@ public class DataFile implements Closeable
return buffer.array(); return buffer.array();
} }
public synchronized DataFileWriteResult write(int indexId, int archiveId, byte[] compressedData, int revision) throws IOException public DataFileWriteResult write(int indexId, int archiveId, byte[] compressedData, int revision) throws IOException
{ {
int sector; int sector;
int startSector; int startSector;
byte[] writeBuffer = new byte[SECTOR_SIZE];
ByteBuffer data = ByteBuffer.wrap(compressedData); ByteBuffer data = ByteBuffer.wrap(compressedData);
sector = (int) ((dat.length() + (long) (SECTOR_SIZE - 1)) / (long) SECTOR_SIZE); sector = (int) ((dat.length() + (long) (SECTOR_SIZE - 1)) / (long) SECTOR_SIZE);
@@ -210,7 +215,6 @@ public class DataFile implements Closeable
} }
} }
if (0xFFFF < archiveId) if (0xFFFF < archiveId)
{ {
if (data.remaining() <= 510) if (data.remaining() <= 510)
@@ -218,18 +222,18 @@ public class DataFile implements Closeable
nextSector = 0; nextSector = 0;
} }
this.readCachedBuffer[0] = (byte) (archiveId >> 24); writeBuffer[0] = (byte) (archiveId >> 24);
this.readCachedBuffer[1] = (byte) (archiveId >> 16); writeBuffer[1] = (byte) (archiveId >> 16);
this.readCachedBuffer[2] = (byte) (archiveId >> 8); writeBuffer[2] = (byte) (archiveId >> 8);
this.readCachedBuffer[3] = (byte) archiveId; writeBuffer[3] = (byte) archiveId;
this.readCachedBuffer[4] = (byte) (part >> 8); writeBuffer[4] = (byte) (part >> 8);
this.readCachedBuffer[5] = (byte) part; writeBuffer[5] = (byte) part;
this.readCachedBuffer[6] = (byte) (nextSector >> 16); writeBuffer[6] = (byte) (nextSector >> 16);
this.readCachedBuffer[7] = (byte) (nextSector >> 8); writeBuffer[7] = (byte) (nextSector >> 8);
this.readCachedBuffer[8] = (byte) nextSector; writeBuffer[8] = (byte) nextSector;
this.readCachedBuffer[9] = (byte) indexId; writeBuffer[9] = (byte) indexId;
dat.seek(SECTOR_SIZE * sector); dat.seek(SECTOR_SIZE * sector);
dat.write(this.readCachedBuffer, 0, 10); dat.write(writeBuffer, 0, 10);
dataToWrite = data.remaining(); dataToWrite = data.remaining();
if (dataToWrite > 510) if (dataToWrite > 510)
@@ -244,16 +248,16 @@ public class DataFile implements Closeable
nextSector = 0; nextSector = 0;
} }
this.readCachedBuffer[0] = (byte) (archiveId >> 8); writeBuffer[0] = (byte) (archiveId >> 8);
this.readCachedBuffer[1] = (byte) archiveId; writeBuffer[1] = (byte) archiveId;
this.readCachedBuffer[2] = (byte) (part >> 8); writeBuffer[2] = (byte) (part >> 8);
this.readCachedBuffer[3] = (byte) part; writeBuffer[3] = (byte) part;
this.readCachedBuffer[4] = (byte) (nextSector >> 16); writeBuffer[4] = (byte) (nextSector >> 16);
this.readCachedBuffer[5] = (byte) (nextSector >> 8); writeBuffer[5] = (byte) (nextSector >> 8);
this.readCachedBuffer[6] = (byte) nextSector; writeBuffer[6] = (byte) nextSector;
this.readCachedBuffer[7] = (byte) indexId; writeBuffer[7] = (byte) indexId;
dat.seek(SECTOR_SIZE * sector); dat.seek(SECTOR_SIZE * sector);
dat.write(this.readCachedBuffer, 0, 8); dat.write(writeBuffer, 0, 8);
dataToWrite = data.remaining(); dataToWrite = data.remaining();
if (dataToWrite > 512) if (dataToWrite > 512)
@@ -262,8 +266,8 @@ public class DataFile implements Closeable
} }
} }
data.get(readCachedBuffer, 0, dataToWrite); data.get(writeBuffer, 0, dataToWrite);
dat.write(readCachedBuffer, 0, dataToWrite); dat.write(writeBuffer, 0, dataToWrite);
sector = nextSector; sector = nextSector;
} }
@@ -287,7 +291,9 @@ public class DataFile implements Closeable
int compression = stream.readUnsignedByte(); int compression = stream.readUnsignedByte();
int compressedLength = stream.readInt(); int compressedLength = stream.readInt();
if (compressedLength < 0 || compressedLength > 1000000) if (compressedLength < 0 || compressedLength > 1000000)
{
throw new RuntimeException("Invalid data"); throw new RuntimeException("Invalid data");
}
Crc32 crc32 = new Crc32(); Crc32 crc32 = new Crc32();
crc32.update(b, 0, 5); // compression + length crc32.update(b, 0, 5); // compression + length
@@ -416,7 +422,9 @@ public class DataFile implements Closeable
stream.writeBytes(compressedData); stream.writeBytes(compressedData);
if (revision != -1) if (revision != -1)
{
stream.writeShort(revision); stream.writeShort(revision);
}
return stream.flip(); return stream.flip();
} }
@@ -424,7 +432,9 @@ public class DataFile implements Closeable
private static byte[] decrypt(byte[] data, int length, int[] keys) private static byte[] decrypt(byte[] data, int length, int[] keys)
{ {
if (keys == null) if (keys == null)
{
return data; return data;
}
try try
{ {
@@ -441,7 +451,9 @@ public class DataFile implements Closeable
private static byte[] encrypt(byte[] data, int length, int[] keys) private static byte[] encrypt(byte[] data, int length, int[] keys)
{ {
if (keys == null) if (keys == null)
{
return data; return data;
}
try try
{ {

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs.jagex;
public class DataFileReadResult public class DataFileReadResult
{ {

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs.jagex;
public class DataFileWriteResult public class DataFileWriteResult
{ {

View File

@@ -0,0 +1,256 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.jagex;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.FSFile;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
import net.runelite.cache.index.ArchiveData;
import net.runelite.cache.index.FileData;
import net.runelite.cache.index.IndexData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DiskStorage implements Storage
{
	private static final Logger logger = LoggerFactory.getLogger(DiskStorage.class);

	// Jagex cache on-disk layout: one shared data file holding all archive
	// sectors, one index file per index, and index 255 which stores the
	// serialized metadata (IndexData) for every other index.
	private static final String MAIN_FILE_CACHE_DAT = "main_file_cache.dat2";
	private static final String MAIN_FILE_CACHE_IDX = "main_file_cache.idx";

	// Directory containing the cache files.
	private final File folder;

	// Shared data file containing the sector data for every index.
	private final DataFile data;
	// The "index of indexes" (main_file_cache.idx255).
	private final IndexFile index255;
	// Per-index files, opened lazily and cached by getIndex().
	private final List<IndexFile> indexFiles = new ArrayList<>();

	public DiskStorage(File folder) throws IOException
	{
		this.folder = folder;

		this.data = new DataFile(new File(folder, MAIN_FILE_CACHE_DAT));
		this.index255 = new IndexFile(255, new File(folder, MAIN_FILE_CACHE_IDX + "255"));
	}

	/**
	 * Registers one index on the store for each entry present in idx255,
	 * opening the backing index file for each as a side effect.
	 */
	@Override
	public void init(Store store) throws IOException
	{
		for (int i = 0; i < index255.getIndexCount(); ++i)
		{
			store.addIndex(i);
			getIndex(i); // open (and cache) the idx file now
		}

		assert store.getIndexes().size() == indexFiles.size();
	}

	@Override
	public void close() throws IOException
	{
		data.close();
		index255.close();
		for (IndexFile indexFile : indexFiles)
		{
			indexFile.close();
		}
	}

	// Returns the open IndexFile for index i, opening and caching it on first use.
	private IndexFile getIndex(int i) throws FileNotFoundException
	{
		for (IndexFile indexFile : indexFiles)
		{
			if (indexFile.getIndexFileId() == i)
			{
				return indexFile;
			}
		}

		IndexFile indexFile = new IndexFile(i, new File(folder, MAIN_FILE_CACHE_IDX + i));
		indexFiles.add(indexFile);
		return indexFile;
	}

	@Override
	public void load(Store store) throws IOException
	{
		for (Index index : store.getIndexes())
		{
			loadIndex(index);
		}
	}

	/**
	 * Loads one index: reads its serialized metadata out of the data file via
	 * idx255, populates the index's archives/files from that metadata, then
	 * loads each archive's sector data from disk.
	 */
	private void loadIndex(Index index) throws IOException
	{
		logger.trace("Loading index {}", index.getId());

		// The index metadata lives in the data file; idx255 maps indexId -> sector/length.
		IndexEntry entry = index255.read(index.getId());
		byte[] indexData = data.read(index255.getIndexFileId(), entry.getId(), entry.getSector(), entry.getLength());
		DataFileReadResult res = DataFile.decompress(indexData, null);
		byte[] data = res.data;

		IndexData id = new IndexData();
		id.load(data);

		index.setProtocol(id.getProtocol());
		index.setRevision(id.getRevision());
		index.setNamed(id.isNamed());
		index.setUsesWhirpool(id.isUsesWhirpool());

		// Mirror the archive/file structure described by the metadata onto the index.
		for (ArchiveData ad : id.getArchives())
		{
			Archive archive = index.addArchive(ad.getId());
			archive.setNameHash(ad.getNameHash());
			archive.setWhirlpool(ad.getWhirlpool());
			archive.setCrc(ad.getCrc());
			archive.setRevision(ad.getRevision());

			assert ad.getFiles().length > 0;

			for (FileData fd : ad.getFiles())
			{
				FSFile file = archive.addFile(fd.getId());
				file.setNameHash(fd.getNameHash());
			}
		}

		index.setCrc(res.crc);
		index.setWhirlpool(res.whirlpool);
		index.setCompression(res.compression);
		assert res.revision == -1; // index metadata carries no trailing revision

		// Copy: loadArchive may remove unreadable archives from the index while we iterate.
		for (Archive archive : new ArrayList<>(index.getArchives()))
		{
			loadArchive(archive);
		}
	}

	/**
	 * Reads one archive's data from the data file. Archives missing from the
	 * idx file are removed from the index rather than treated as fatal.
	 */
	private void loadArchive(Archive archive) throws IOException
	{
		Index index = archive.getIndex();
		IndexFile indexFile = getIndex(index.getId());
		assert indexFile.getIndexFileId() == index.getId();

		IndexEntry entry = indexFile.read(archive.getArchiveId());
		if (entry == null)
		{
			logger.debug("can't read archive " + archive.getArchiveId() + " from index " + index.getId());
			index.getArchives().remove(archive); // is this correct?
			return;
		}

		assert entry.getId() == archive.getArchiveId();

		logger.trace("Loading archive {} for index {} from sector {} length {}",
			archive.getArchiveId(), index.getId(), entry.getSector(), entry.getLength());

		byte[] archiveData = data.read(index.getId(), entry.getId(), entry.getSector(), entry.getLength());
		archive.setData(archiveData);

		if (index.getXteaManager() != null)
		{
			return; // can't decrypt this yet
		}

		archive.decompressAndLoad(null);
	}

	/**
	 * Writes the whole store back to disk: clears the data and index files,
	 * then rewrites every index (archives first, so their CRCs are current
	 * before the index metadata is serialized).
	 */
	@Override
	public void save(Store store) throws IOException
	{
		logger.debug("Clearing data and indexes in preparation for store save");

		data.clear();

		for (IndexFile indexFile : indexFiles)
		{
			indexFile.clear();
		}

		logger.debug("Saving store");

		for (Index i : store.getIndexes())
		{
			saveIndex(i);
		}
	}

	private void saveIndex(Index index) throws IOException
	{
		// This updates archive CRCs for writeIndexData
		for (Archive archive : index.getArchives())
		{
			saveArchive(archive);
		}

		IndexData indexData = index.toIndexData();
		byte[] data = indexData.writeIndexData();

		byte[] compressedData = DataFile.compress(data, index.getCompression(), -1, null); // index data revision is always -1
		// NOTE(review): the data is compressed with revision -1 but written with
		// index.getRevision() here - confirm DataFile.write's revision argument is
		// intentionally independent of the compressed trailer.
		DataFileWriteResult res = this.data.write(index255.getIndexFileId(), index.getId(), compressedData, index.getRevision());
		index255.write(new IndexEntry(index255, index.getId(), res.sector, res.compressedLength));

		index.setCrc(res.crc);
		index.setWhirlpool(res.whirlpool);
	}

	private void saveArchive(Archive a) throws IOException
	{
		Index index = a.getIndex();
		IndexFile indexFile = getIndex(index.getId());
		assert indexFile.getIndexFileId() == index.getId();

		int rev; // used for determining what part of compressedData to crc
		byte[] compressedData;
		if (a.getData() != null)
		{
			compressedData = a.getData(); // data was never decompressed or loaded
			rev = -1; // assume that this data has no revision?
		}
		else
		{
			byte[] fileData = a.saveContents();
			rev = a.getRevision();
			compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
		}

		DataFileWriteResult res = data.write(index.getId(), a.getArchiveId(), compressedData, rev);
		indexFile.write(new IndexEntry(indexFile, a.getArchiveId(), res.sector, res.compressedLength));

		logger.trace("Saved archive {}/{} at sector {}, compressed length {}", index.getId(), a.getArchiveId(), res.sector, res.compressedLength);

		a.setCrc(res.crc);
		a.setWhirlpool(res.whirlpool);
	}
}

View File

@@ -23,7 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs; package net.runelite.cache.fs.jagex;
import java.util.Objects; import java.util.Objects;

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs.jagex;
package net.runelite.cache.fs;
import java.io.Closeable; import java.io.Closeable;
import java.io.File; import java.io.File;
@@ -37,17 +36,16 @@ import org.slf4j.LoggerFactory;
public class IndexFile implements Closeable public class IndexFile implements Closeable
{ {
private static final Logger logger = LoggerFactory.getLogger(IndexFile.class); private static final Logger logger = LoggerFactory.getLogger(IndexFile.class);
private static final int INDEX_ENTRY_LEN = 6; private static final int INDEX_ENTRY_LEN = 6;
private final Store store;
private final int indexFileId; private final int indexFileId;
private final File file; private final File file;
private final RandomAccessFile idx; private final RandomAccessFile idx;
private final byte[] buffer = new byte[INDEX_ENTRY_LEN]; private final byte[] buffer = new byte[INDEX_ENTRY_LEN];
public IndexFile(Store store, int indexFileId, File file) throws FileNotFoundException public IndexFile(int indexFileId, File file) throws FileNotFoundException
{ {
this.store = store;
this.indexFileId = indexFileId; this.indexFileId = indexFileId;
this.file = file; this.file = file;
this.idx = new RandomAccessFile(file, "rw"); this.idx = new RandomAccessFile(file, "rw");
@@ -91,11 +89,6 @@ public class IndexFile implements Closeable
return true; return true;
} }
public Store getStore()
{
return store;
}
public int getIndexFileId() public int getIndexFileId()
{ {
return indexFileId; return indexFileId;
@@ -140,6 +133,6 @@ public class IndexFile implements Closeable
public synchronized int getIndexCount() throws IOException public synchronized int getIndexCount() throws IOException
{ {
return (int)(idx.length() / INDEX_ENTRY_LEN); return (int) (idx.length() / INDEX_ENTRY_LEN);
} }
} }

View File

@@ -0,0 +1,282 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.tree;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.FSFile;
import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Storage;
import net.runelite.cache.fs.Store;
public class TreeStorage implements Storage
{
	// Root directory of the unpacked tree: one directory per index, archives as
	// either single .dat/.datc files or per-archive directories, with .rev and
	// .name sidecar files holding revision and name hash.
	private final File folder;

	public TreeStorage(File folder)
	{
		this.folder = folder;
	}

	@Override
	public void init(Store store) throws IOException
	{
		// Nothing to prepare; the tree is read entirely by load().
	}

	@Override
	public void close() throws IOException
	{
		// No file handles are held open between operations.
	}

	/**
	 * Loads every index directory under the root into the store.
	 */
	@Override
	public void load(Store store) throws IOException
	{
		for (File idx : folder.listFiles())
		{
			if (!idx.isDirectory())
			{
				continue; // skip the .rev sidecars at the top level
			}

			int id = Integer.parseInt(idx.getName());
			Index index = store.addIndex(id);
			loadIndex(index, folder, idx);
		}

		// the filesystem may order these differently (eg, 1, 10, 2)
		Collections.sort(store.getIndexes(), (idx1, idx2) -> Integer.compare(idx1.getId(), idx2.getId()));
	}

	/**
	 * Loads one index directory: each subdirectory is a multi-file archive,
	 * each .dat is a single-file archive, each .datc is packed archive data.
	 */
	private void loadIndex(Index index, File parent, File to) throws IOException
	{
		for (File f : to.listFiles())
		{
			if (f.isDirectory())
			{
				// multi-file archive: directory named after the archive id
				int id = Integer.parseInt(f.getName());
				Archive archive = index.addArchive(id);
				loadTree(archive, to, f);
			}
			else if (f.getName().endsWith(".dat"))
			{
				// one file. archiveId-fileId-name
				String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
				int id = Integer.parseInt(parts[0]);
				Archive archive = index.addArchive(id);
				loadTreeSingleFile(archive, to, f);
			}
			else if (f.getName().endsWith(".datc"))
			{
				// packed data
				String[] parts = Files.getNameWithoutExtension(f.getName()).split("-");
				int id = Integer.parseInt(parts[0]);
				Archive archive = index.addArchive(id);
				loadTreeData(archive, to, f);
			}
		}

		// index revision lives next to the index directory as <indexId>.rev
		index.setRevision(readIntFile(new File(parent, index.getId() + ".rev")));

		// the filesystem may order these differently (eg, 1, 10, 2)
		Collections.sort(index.getArchives(), (ar1, ar2) -> Integer.compare(ar1.getArchiveId(), ar2.getArchiveId()));
	}

	/**
	 * Loads packed archive data (.datc). On disk the layout is identical to a
	 * single-file archive, so this delegates to the shared loader.
	 */
	public void loadTreeData(Archive archive, File parent, File from) throws IOException
	{
		loadSingleFileArchive(archive, parent, from);
	}

	/**
	 * Loads a single-file archive (.dat).
	 */
	public void loadTreeSingleFile(Archive archive, File parent, File from) throws IOException
	{
		loadSingleFileArchive(archive, parent, from);
	}

	// Shared loader for .dat/.datc entries: file name is archiveId-fileId-nameHash(hex).
	private void loadSingleFileArchive(Archive archive, File parent, File from) throws IOException
	{
		String[] parts = Files.getNameWithoutExtension(from.getName()).split("-");
		assert parts.length == 3;

		int archiveId = Integer.parseInt(parts[0]);
		int fileId = Integer.parseInt(parts[1]);
		int nameHash = (int) Long.parseLong(parts[2], 16);

		assert archiveId == archive.getArchiveId();

		FSFile file = archive.addFile(fileId);
		file.setNameHash(nameHash);
		// NOTE(review): for .datc entries this stores the packed bytes as file
		// contents rather than via archive.setData(), whereas save() wrote them
		// from a.getData() - confirm the round trip is intentional.
		file.setContents(Files.toByteArray(from));

		readArchiveMeta(archive, parent);
	}

	/**
	 * Loads a multi-file archive from its directory; each file is fileId-nameHash(hex).dat.
	 */
	public void loadTree(Archive archive, File parent, File from) throws IOException
	{
		for (File file : from.listFiles())
		{
			//fileId-fileName.dat
			String[] split = Files.getNameWithoutExtension(file.getName()).split("-");
			assert split.length == 2;

			int fileId = Integer.parseInt(split[0]);
			int fileName = (int) Long.parseLong(split[1], 16);

			FSFile f = archive.addFile(fileId);
			f.setNameHash(fileName);
			f.setContents(Files.toByteArray(file));
		}

		readArchiveMeta(archive, parent);

		// the filesystem may order these differently (eg, 1, 10, 2)
		Collections.sort(archive.getFiles(), (f1, f2) -> Integer.compare(f1.getFileId(), f2.getFileId()));
	}

	@Override
	public void save(Store store) throws IOException
	{
		for (Index i : store.getIndexes())
		{
			saveIndex(i);
		}
	}

	private void saveIndex(Index i) throws IOException
	{
		File idx = new File(folder, "" + i.getId());
		idx.mkdirs();

		for (Archive a : i.getArchives())
		{
			saveArchive(a, idx);
		}

		File rev = new File(folder, i.getId() + ".rev");
		Files.write(Integer.toString(i.getRevision()), rev, Charset.defaultCharset());
	}

	/**
	 * Saves one archive: packed data as .datc, a lone file as .dat, otherwise
	 * a directory of .dat files. Revision/name sidecars are written in all cases.
	 */
	private void saveArchive(Archive a, File to) throws IOException
	{
		byte[] data = a.getData();
		List<FSFile> files = a.getFiles();

		if (data != null)
		{
			// archive still holds its packed form; dump it as-is
			assert files.size() == 1; // this is the maps
			FSFile file = files.get(0);
			File archiveFile = new File(to, a.getArchiveId() + "-"
				+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".datc");
			Files.write(data, archiveFile);
			writeArchiveMeta(a, to);
			return;
		}

		if (files.size() == 1)
		{
			FSFile file = files.get(0);
			File archiveFile = new File(to, a.getArchiveId() + "-"
				+ file.getFileId() + "-" + Integer.toHexString(file.getNameHash()) + ".dat");
			Files.write(file.getContents(), archiveFile);
			writeArchiveMeta(a, to);
			return;
		}

		writeArchiveMeta(a, to);

		File archiveFolder = new File(to, Integer.toString(a.getArchiveId()));
		archiveFolder.mkdirs();

		for (FSFile file : files)
		{
			File archiveFile = new File(archiveFolder, file.getFileId() + "-"
				+ Integer.toHexString(file.getNameHash()) + ".dat");
			Files.write(file.getContents(), archiveFile);
		}
	}

	// Parses the integer stored on the first line of a sidecar file.
	private static int readIntFile(File file) throws IOException
	{
		return Integer.parseInt(Files.readFirstLine(file, Charset.defaultCharset()));
	}

	// Reads the archive's <id>.rev and <id>.name sidecars from its parent directory.
	private static void readArchiveMeta(Archive archive, File parent) throws IOException
	{
		archive.setRevision(readIntFile(new File(parent, archive.getArchiveId() + ".rev")));
		archive.setNameHash(readIntFile(new File(parent, archive.getArchiveId() + ".name")));
	}

	// Writes the archive's <id>.rev and <id>.name sidecars next to its data.
	private static void writeArchiveMeta(Archive a, File to) throws IOException
	{
		Files.write(Integer.toString(a.getRevision()), new File(to, a.getArchiveId() + ".rev"), Charset.defaultCharset());
		Files.write(Integer.toString(a.getNameHash()), new File(to, a.getArchiveId() + ".name"), Charset.defaultCharset());
	}
}

View File

@@ -25,6 +25,7 @@
package net.runelite.cache.index; package net.runelite.cache.index;
import net.runelite.cache.io.InputStream; import net.runelite.cache.io.InputStream;
import net.runelite.cache.io.OutputStream;
public class IndexData public class IndexData
{ {
@@ -145,6 +146,135 @@ public class IndexData
} }
} }
/**
 * Serializes this index's metadata to the cache wire format: header,
 * delta-encoded archive ids, optional name hashes/whirlpools, CRCs,
 * revisions, per-archive file counts, delta-encoded file ids, and
 * optional per-file name hashes.
 */
public byte[] writeIndexData()
{
	OutputStream out = new OutputStream();

	out.writeByte(protocol);
	if (protocol >= 6)
	{
		out.writeInt(this.revision);
	}

	// flag bits: 1 = named, 2 = whirlpool digests present
	out.writeByte((named ? 1 : 0) | (usesWhirpool ? 2 : 0));

	// archive count; protocol 7+ uses the variable-width "big smart" encoding
	if (protocol >= 7)
	{
		out.writeBigSmart(this.archives.length);
	}
	else
	{
		out.writeShort(this.archives.length);
	}

	// archive ids, delta encoded against the previous id
	int lastArchiveId = 0;
	for (ArchiveData a : this.archives)
	{
		int delta = a.getId() - lastArchiveId;
		lastArchiveId = a.getId();

		if (protocol >= 7)
		{
			out.writeBigSmart(delta);
		}
		else
		{
			out.writeShort(delta);
		}
	}

	if (named)
	{
		for (ArchiveData a : this.archives)
		{
			out.writeInt(a.getNameHash());
		}
	}

	if (usesWhirpool)
	{
		for (ArchiveData a : this.archives)
		{
			out.writeBytes(a.getWhirlpool());
		}
	}

	for (ArchiveData a : this.archives)
	{
		out.writeInt(a.getCrc());
	}

	for (ArchiveData a : this.archives)
	{
		out.writeInt(a.getRevision());
	}

	// per-archive file counts
	for (ArchiveData a : this.archives)
	{
		int count = a.getFiles().length;
		if (protocol >= 7)
		{
			out.writeBigSmart(count);
		}
		else
		{
			out.writeShort(count);
		}
	}

	// file ids, delta encoded within each archive
	for (ArchiveData a : this.archives)
	{
		int lastFileId = 0;
		for (FileData file : a.getFiles())
		{
			int delta = file.getId() - lastFileId;
			lastFileId = file.getId();

			if (protocol >= 7)
			{
				out.writeBigSmart(delta);
			}
			else
			{
				out.writeShort(delta);
			}
		}
	}

	if (named)
	{
		for (ArchiveData a : this.archives)
		{
			for (FileData file : a.getFiles())
			{
				out.writeInt(file.getNameHash());
			}
		}
	}

	return out.flip();
}
public int getProtocol() public int getProtocol()
{ {
return protocol; return protocol;

View File

@@ -34,8 +34,8 @@ import java.util.Arrays;
import net.runelite.cache.client.requests.ConnectionInfo; import net.runelite.cache.client.requests.ConnectionInfo;
import net.runelite.cache.client.requests.HelloHandshake; import net.runelite.cache.client.requests.HelloHandshake;
import net.runelite.cache.fs.Archive; import net.runelite.cache.fs.Archive;
import net.runelite.cache.fs.CompressionType; import net.runelite.cache.fs.jagex.CompressionType;
import net.runelite.cache.fs.DataFile; import net.runelite.cache.fs.jagex.DataFile;
import net.runelite.cache.fs.Index; import net.runelite.cache.fs.Index;
import net.runelite.cache.fs.Store; import net.runelite.cache.fs.Store;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -188,7 +188,7 @@ public class CacheServerHandler extends SimpleChannelInboundHandler<ByteBuf>
Index i = store.findIndex(archiveId); Index i = store.findIndex(archiveId);
assert i != null; assert i != null;
byte[] indexData = i.writeIndexData(); byte[] indexData = i.toIndexData().writeIndexData();
byte[] compressed = compress(CompressionType.NONE, indexData); byte[] compressed = compress(CompressionType.NONE, indexData);
byte[] packed = addHeader(255, archiveId, compressed); byte[] packed = addHeader(255, archiveId, compressed);

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import net.runelite.cache.CacheProperties; import net.runelite.cache.CacheProperties;
import net.runelite.cache.fs.Store; import net.runelite.cache.fs.Store;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Ignore; import org.junit.Ignore;
@@ -50,7 +51,7 @@ public class CacheClientTest
@Ignore @Ignore
public void test() throws Exception public void test() throws Exception
{ {
try (Store store = new Store(new File("d:/temp"))) try (Store store = new Store(new File("D:\\rs\\07\\temp\\cache")))
{ {
store.load(); store.load();
@@ -77,7 +78,8 @@ public class CacheClientTest
{ {
try (Store store = new Store(new File("C:\\rs\\temp"))) try (Store store = new Store(new File("C:\\rs\\temp")))
{ {
store.loadTree(new File("C:\\rs\\runescape-data\\cache")); TreeStorage storage = new TreeStorage(new File("C:\\rs\\runescape-data\\cache"));
storage.load(store);
CacheClient c = new CacheClient(store, CacheProperties.getRsVersion()); CacheClient c = new CacheClient(store, CacheProperties.getRsVersion());
c.connect(); c.connect();
@@ -92,7 +94,8 @@ public class CacheClientTest
c.close(); c.close();
store.saveTree(new File("C:\\rs\\temp\\t")); storage = new TreeStorage(new File("C:\\rs\\temp\\t"));
storage.save(store);
} }
} }
} }

View File

@@ -1,173 +0,0 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.StoreLocation;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public class DataFileTest
{
@Rule
public TemporaryFolder folder = StoreLocation.getTemporaryFolder();
@Test
public void test1() throws IOException
{
	// Round-trip a small uncompressed payload through the data file and verify
	// both the contents and the CRC survive the write/read cycle.
	File file = folder.newFile();
	try (Store store = new Store(folder.getRoot()))
	{
		DataFile df = new DataFile(store, file);
		byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 0, null);
		// index 42, archive 3
		DataFileWriteResult res = df.write(42, 3, compressedData, 0);

		compressedData = df.read(42, 3, res.sector, res.compressedLength);
		DataFileReadResult res2 = DataFile.decompress(compressedData, null);
		byte[] buf = res2.data;

		String str = new String(buf);
		Assert.assertEquals("test", str);
		// CRC computed on write must match the CRC recomputed on read
		Assert.assertEquals(res.crc, res2.crc);
	}
}
@Test
public void test2() throws IOException
{
	// 1 KB payload spanning multiple 512-byte sectors.
	byte[] b = new byte[1024];
	for (int i = 0; i < 1024; ++i)
	{
		b[i] = (byte) i;
	}

	File file = folder.newFile();
	try (Store store = new Store(folder.getRoot()))
	{
		DataFile df = new DataFile(store, file);
		byte[] compressedData = DataFile.compress(b, CompressionType.BZ2, 42, null);
		// archive id 0x1FFFF > 0xFFFF exercises the extended sector header format
		DataFileWriteResult res = df.write(42, 0x1FFFF, compressedData, 42);

		compressedData = df.read(42, 0x1FFFF, res.sector, res.compressedLength);
		DataFileReadResult res2 = DataFile.decompress(compressedData, null);
		byte[] buf = res2.data;

		Assert.assertArrayEquals(b, buf);
	}
}
@Test
public void testGZipCompression() throws IOException
{
	// Round-trip a payload compressed with GZ and verify the decompressed contents.
	try (Store store = new Store(folder.getRoot()))
	{
		DataFile df = new DataFile(store, folder.newFile());
		byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.GZ, 0, null);
		DataFileWriteResult res = df.write(41, 4, compressedData, 0);

		compressedData = df.read(41, 4, res.sector, res.compressedLength);
		DataFileReadResult res2 = DataFile.decompress(compressedData, null);
		byte[] buf = res2.data;

		String str = new String(buf);
		Assert.assertEquals("test", str);
	}
}
@Test
public void testBZip2Compression() throws IOException
{
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, folder.newFile());
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.BZ2, 5, null);
DataFileWriteResult res = df.write(41, 4, compressedData, 0);
compressedData = df.read(41, 4, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
}
}
@Test
public void testCrc() throws IOException
{
File file = folder.newFile();
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress("test".getBytes(), CompressionType.NONE, 42, null);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, null);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}
@Test
public void testKeys() throws IOException
{
File file = folder.newFile();
int[] keys = new int[] { 4, 8, 15, 16 };
try (Store store = new Store(folder.getRoot()))
{
DataFile df = new DataFile(store, file);
byte[] compressedData = DataFile.compress("testtesttesttest1".getBytes(), CompressionType.NONE, 42, keys);
DataFileWriteResult res = df.write(42, 3, compressedData, 0);
compressedData = df.read(42, 3, res.sector, res.compressedLength);
DataFileReadResult res2 = DataFile.decompress(compressedData, keys);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("testtesttesttest1", str);
Assert.assertEquals(res.crc, res2.crc);
Assert.assertEquals(42, res2.revision);
}
}
}

View File

@@ -28,6 +28,7 @@ import com.google.common.io.Files;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import net.runelite.cache.StoreLocation; import net.runelite.cache.StoreLocation;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Rule; import org.junit.Rule;
@@ -70,6 +71,10 @@ public class StoreLoadTest
Assert.assertTrue(store.equals(testStore)); Assert.assertTrue(store.equals(testStore));
testStore.save(); testStore.save();
}
try (Store testStore = new Store(testStoreFile))
{
testStore.load(); testStore.load();
Assert.assertTrue(store.equals(testStore)); Assert.assertTrue(store.equals(testStore));
@@ -86,7 +91,8 @@ public class StoreLoadTest
{ {
store.load(); store.load();
store.saveTree(folder.newFolder()); TreeStorage storage = new TreeStorage(folder.newFolder());
storage.save(store);
} }
} }
@@ -96,7 +102,8 @@ public class StoreLoadTest
{ {
try (Store store = new Store(folder.newFolder())) try (Store store = new Store(folder.newFolder()))
{ {
store.loadTree(new File("C:\\rs\\temp\\tree")); TreeStorage storage = new TreeStorage(new File("C:\\rs\\temp\\tree"));
storage.load(store);
try (Store store2 = new Store(StoreLocation.LOCATION)) try (Store store2 = new Store(StoreLocation.LOCATION))
{ {
@@ -114,7 +121,9 @@ public class StoreLoadTest
try (Store store = new Store(new File("d:/rs/07/temp/cache"))) try (Store store = new Store(new File("d:/rs/07/temp/cache")))
{ {
store.load(); store.load();
store.saveTree(new File("d:/rs/07/temp/tree"));
TreeStorage storage = new TreeStorage(new File("d:/rs/07/temp/tree"));
storage.save(store);
} }
} }
} }

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.Random; import java.util.Random;
import net.runelite.cache.StoreLocation; import net.runelite.cache.StoreLocation;
import net.runelite.cache.fs.tree.TreeStorage;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
@@ -150,11 +151,13 @@ public class StoreTest
// Test tree save/load // Test tree save/load
File tree = folder.newFolder(); File tree = folder.newFolder();
store.saveTree(tree); Storage treeStorage = new TreeStorage(tree);
try (Store store2 = new Store(folder.newFolder())) treeStorage.save(store);
try (Store store2 = new Store(treeStorage))
{ {
store2.loadTree(tree); store2.load();
} }
} }
} }

View File

@@ -0,0 +1,156 @@
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.cache.fs.jagex;
import java.io.File;
import java.io.IOException;
import net.runelite.cache.StoreLocation;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Exercises the jagex-format {@code DataFile} directly (no enclosing Store):
 * compress, write, read the sectors back, decompress, and verify
 * payload, crc, and revision survive the round-trip.
 */
public class DataFileTest
{
	@Rule
	public TemporaryFolder folder = StoreLocation.getTemporaryFolder();

	@Test
	public void test1() throws IOException
	{
		DataFile dataFile = new DataFile(folder.newFile());

		// Uncompressed payload round-trip; crc must survive the trip
		byte[] compressed = DataFile.compress("test".getBytes(), CompressionType.NONE, 0, null);
		DataFileWriteResult writeResult = dataFile.write(42, 3, compressed, 0);

		compressed = dataFile.read(42, 3, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, null);

		String decoded = new String(readResult.data);
		Assert.assertEquals("test", decoded);
		Assert.assertEquals(writeResult.crc, readResult.crc);
	}

	@Test
	public void test2() throws IOException
	{
		// 1 KiB of wrapping byte values, stored under a large archive id
		byte[] payload = new byte[1024];
		for (int i = 0; i < payload.length; i++)
		{
			payload[i] = (byte) i;
		}

		DataFile dataFile = new DataFile(folder.newFile());

		byte[] compressed = DataFile.compress(payload, CompressionType.BZ2, 42, null);
		DataFileWriteResult writeResult = dataFile.write(42, 0x1FFFF, compressed, 42);

		compressed = dataFile.read(42, 0x1FFFF, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, null);

		Assert.assertArrayEquals(payload, readResult.data);
	}

	@Test
	public void testGZipCompression() throws IOException
	{
		DataFile dataFile = new DataFile(folder.newFile());

		byte[] compressed = DataFile.compress("test".getBytes(), CompressionType.GZ, 0, null);
		DataFileWriteResult writeResult = dataFile.write(41, 4, compressed, 0);

		compressed = dataFile.read(41, 4, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, null);

		Assert.assertEquals("test", new String(readResult.data));
	}

	@Test
	public void testBZip2Compression() throws IOException
	{
		DataFile dataFile = new DataFile(folder.newFile());

		byte[] compressed = DataFile.compress("test".getBytes(), CompressionType.BZ2, 5, null);
		DataFileWriteResult writeResult = dataFile.write(41, 4, compressed, 0);

		compressed = dataFile.read(41, 4, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, null);

		Assert.assertEquals("test", new String(readResult.data));
	}

	@Test
	public void testCrc() throws IOException
	{
		DataFile dataFile = new DataFile(folder.newFile());

		// Revision 42 is encoded at compress time and must be read back out
		byte[] compressed = DataFile.compress("test".getBytes(), CompressionType.NONE, 42, null);
		DataFileWriteResult writeResult = dataFile.write(42, 3, compressed, 0);

		compressed = dataFile.read(42, 3, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, null);

		Assert.assertEquals("test", new String(readResult.data));
		Assert.assertEquals(writeResult.crc, readResult.crc);
		Assert.assertEquals(42, readResult.revision);
	}

	@Test
	public void testKeys() throws IOException
	{
		// XTEA keys; decompress must be given the same keys used to compress
		int[] keys = new int[]
		{
			4, 8, 15, 16
		};

		DataFile dataFile = new DataFile(folder.newFile());

		byte[] compressed = DataFile.compress("testtesttesttest1".getBytes(), CompressionType.NONE, 42, keys);
		DataFileWriteResult writeResult = dataFile.write(42, 3, compressed, 0);

		compressed = dataFile.read(42, 3, writeResult.sector, writeResult.compressedLength);
		DataFileReadResult readResult = DataFile.decompress(compressed, keys);

		Assert.assertEquals("testtesttesttest1", new String(readResult.data));
		Assert.assertEquals(writeResult.crc, readResult.crc);
		Assert.assertEquals(42, readResult.revision);
	}
}

View File

@@ -22,8 +22,7 @@
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package net.runelite.cache.fs.jagex;
package net.runelite.cache.fs;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@@ -42,13 +41,10 @@ public class IndexFileTest
public void test() throws IOException public void test() throws IOException
{ {
File file = folder.newFile(); File file = folder.newFile();
try (Store store = new Store(folder.getRoot())) IndexFile index = new IndexFile(5, file);
{ IndexEntry entry = new IndexEntry(index, 7, 8, 9);
IndexFile index = new IndexFile(store, 5, file); index.write(entry);
IndexEntry entry = new IndexEntry(index, 7, 8, 9); IndexEntry entry2 = index.read(7);
index.write(entry); Assert.assertEquals(entry, entry2);
IndexEntry entry2 = index.read(7);
Assert.assertEquals(entry, entry2);
}
} }
} }

View File

@@ -94,7 +94,8 @@ public class CacheServerTest
server.start(); server.start();
try (Store store2 = new Store(folder.newFolder()); CacheClient client = new CacheClient(store2, HOST, REVISION)) try (Store store2 = new Store(folder.newFolder());
CacheClient client = new CacheClient(store2, HOST, REVISION))
{ {
client.connect(); client.connect();
client.handshake().get(); client.handshake().get();