cache: various fixes to downloader and logging improvements

Adam
2017-02-27 21:28:31 -05:00
parent 47541ffe05
commit 56448b4e31
7 changed files with 76 additions and 40 deletions

CacheClient.java

@@ -25,6 +25,7 @@
package net.runelite.cache.downloader;
import com.google.common.base.Stopwatch;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@@ -37,10 +38,8 @@ import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.proxy.HttpProxyHandler;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
@@ -59,7 +58,7 @@ public class CacheClient
private static final String HOST = "oldschool1.runescape.com";
private static final int PORT = 43594;
private static final int CLIENT_REVISION = 115;
private static final int CLIENT_REVISION = 135;
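// revision announced to the update server on connect; presumably rejected if it falls behind the live game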
private Store store; // store cache will be written to
private int clientRevision;
@@ -104,18 +103,8 @@ public class CacheClient
public void stop()
{
try
{
channel.closeFuture().sync();
}
catch (InterruptedException e)
{
logger.warn(null, e);
}
finally
{
group.shutdownGracefully();
}
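// close the channel explicitly rather than waiting on closeFuture(), which only completes once the server drops the connection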
channel.close().syncUninterruptibly();
group.shutdownGracefully();
}
public int getClientRevision()
@@ -125,6 +114,8 @@ public class CacheClient
public void download() throws InterruptedException, ExecutionException, FileNotFoundException, IOException
{
Stopwatch stopwatch = Stopwatch.createStarted();
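// timed so the total download duration can be reported at the end of download()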
FileResult result = requestFile(255, 255).get();
result.decompress(null);
@@ -187,7 +178,7 @@ public class CacheClient
if (oldArchive == null || oldArchive.getRevision() != archive.getRevision())
{
logger.info("Archive {} in index {} is out of date, downloading", archive.getArchiveId(), index.getId());
logger.info("Archive {}/{} in index {} is out of date, downloading", archive.getArchiveId(), index.getArchives().size(), index.getId());
FileResult archiveFileResult = requestFile(index.getId(), archive.getArchiveId()).get();
byte[] compressedContents = archiveFileResult.getCompressedData();
@@ -196,26 +187,26 @@ public class CacheClient
}
else
{
logger.info("Active {} in index {} is up to date", archive.getArchiveId(), index.getId());
logger.info("Active {}/{} in index {} is up to date", archive.getArchiveId(), index.getArchives().size(), index.getId());
// copy existing contents, this is sort of hackish.
byte[] contents = oldArchive.saveContents();
archive.loadContents(contents);
archive.setCompression(oldArchive.getCompression());
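// reuse the raw data if the old archive still carries it; otherwise re-serialize its contents as before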
byte[] contents = oldArchive.getData();
if (contents != null)
{
archive.setData(contents);
}
else
{
contents = oldArchive.saveContents();
archive.loadContents(contents);
archive.setCompression(oldArchive.getCompression());
}
}
}
try
{
store.save(); // save up to this point to disk
// XXX if this save takes too long, server closes the connection
}
catch (IOException ex)
{
logger.warn("unable to save cache", ex);
}
}
stopwatch.stop();
logger.info("Download completed in {}", stopwatch);
}
public synchronized CompletableFuture<FileResult> requestFile(int index, int fileId)
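// Rough usage sketch (assumed caller, not part of this diff):
//   client.download();                   // walks each index, fetching only out-of-date archives and saving as it goes
//   client.requestFile(255, 255).get();  // or request individual files directly
//   client.stop();                       // closes the channel and shuts down the event loop group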

CacheClientHandler.java

@@ -108,8 +108,7 @@ public class CacheClientHandler extends ChannelInboundHandlerAdapter
int size = compressedFileSize
+ 5 // 1 byte compression type, 4 byte compressed size
+ (compression != 0 ? 4 : 0) // compression has leading 4 byte decompressed length
;//+ (index != 255 ? 2 : 0); // for the revision
+ (compression != 0 ? 4 : 0); // compression has leading 4 byte decompressed length
int breaks = calculateBreaks(size);
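// calculateBreaks presumably accounts for the delimiter bytes the server inserts between chunks of large responses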

Archive.java

@@ -166,6 +166,8 @@ public class Archive
public void loadContents(byte[] data)
{
logger.trace("Loading contents of archive {} ({} files)", archiveId, files.size());
if (this.getFiles().size() == 1)
{
this.getFiles().get(0).setContents(data);
@@ -232,6 +234,7 @@ public class Archive
{
if (data != null)
{
logger.trace("Saving contents of archive {}/{} using cached data", index.getId(), archiveId);
return data;
}
@@ -248,7 +251,8 @@ public class Archive
{
for (File file : this.getFiles())
{
stream.writeBytes(file.getContents());
byte[] contents = file.getContents();
stream.writeBytes(contents);
}
int offset = 0;
@@ -266,6 +270,9 @@ public class Archive
}
byte[] fileData = stream.flip();
logger.trace("Saved contents of archive {}/{} ({} files), {} bytes", index.getId(), archiveId, files.size(), fileData.length);
return fileData;
}

DataFile.java

@@ -70,6 +70,11 @@ public class DataFile implements Closeable
dat.close();
}
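// truncates the backing data file; Store.save() uses this to rewrite the store from scratch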
public void clear() throws IOException
{
dat.setLength(0L);
}
/**
*
* @param indexId expected index of archive of contents being read

Index.java

@@ -28,7 +28,6 @@ package net.runelite.cache.fs;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import net.runelite.cache.util.Djb2;
@@ -70,6 +69,11 @@ public class Index implements Closeable
index.close();
}
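// truncates the underlying index file so Store.save() can rewrite it from scratch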
public void clear() throws IOException
{
index.clear();
}
@Override
public int hashCode()
{
@@ -182,7 +186,9 @@ public class Index implements Closeable
}
public void load() throws IOException
{
logger.trace("Loading index {}", id);
DataFile dataFile = store.getData();
IndexFile index255 = store.getIndex255();
@@ -324,10 +330,12 @@ public class Index implements Closeable
archives.remove(a); // is this the correct behavior?
continue;
}
assert this.index.getIndexFileId() == this.id;
assert entry.getId() == a.getArchiveId();
logger.trace("Loading archive {} for index {} from sector {} length {}", a.getArchiveId(), id, entry.getSector(), entry.getLength());
byte[] archiveData = store.getData().read(this.id, entry.getId(), entry.getSector(), entry.getLength());
a.setData(archiveData);
@@ -344,16 +352,26 @@ public class Index implements Closeable
{
for (Archive a : archives)
{
byte[] fileData = a.saveContents();
assert this.index.getIndexFileId() == this.id;
DataFile data = store.getData();
byte[] compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
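// write the original compressed bytes back verbatim when the archive was never unpacked; only recompress when its contents were actually loaded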
byte[] compressedData;
if (a.getData() != null)
{
compressedData = a.getData(); // data was never decompressed or loaded
}
else
{
byte[] fileData = a.saveContents();
compressedData = DataFile.compress(fileData, a.getCompression(), a.getRevision(), null);
}
DataFileWriteResult res = data.write(this.id, a.getArchiveId(), compressedData, a.getRevision());
this.index.write(new IndexEntry(this.index, a.getArchiveId(), res.sector, res.compressedLength));
logger.trace("Saved archive {}/{} at sector {}, compressed length {}", this.getId(), a.getArchiveId(), res.sector, res.compressedLength);
a.setCrc(res.crc);
a.setWhirlpool(res.whirlpool);
}

IndexFile.java

@@ -59,6 +59,11 @@ public class IndexFile implements Closeable
idx.close();
}
public void clear() throws IOException
{
idx.setLength(0L);
}
@Override
public int hashCode()
{

Store.java

@@ -34,9 +34,13 @@ import java.util.List;
import java.util.Objects;
import net.runelite.cache.IndexType;
import net.runelite.cache.util.XteaKeyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Store implements Closeable
{
private static final Logger logger = LoggerFactory.getLogger(Store.class);
private static final String MAIN_FILE_CACHE_DAT = "main_file_cache.dat2";
private static final String MAIN_FILE_CACHE_IDX = "main_file_cache.idx";
@@ -130,6 +134,13 @@ public class Store implements Closeable
public void save() throws IOException
{
logger.debug("Clearing data and indexes in preparation for store save");
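// two passes: truncate the data file and every index file, then rewrite all archives through each index's save()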
data.clear();
for (Index i : indexes)
i.clear();
for (Index i : indexes)
i.save();
}