* Blocklist: Buffer input to speed lookup

* PersistentDataStore: Buffer file writes and reads
Author: zzz
Date: 2011-12-18 13:28:26 +00:00
parent 937d2c54c8
commit fb4d85ff8b
2 changed files with 40 additions and 31 deletions
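
The Blocklist side of this commit replaces line reading via DataHelper.readLine() on a raw FileInputStream with a BufferedReader, so per-line reads are served from an in-memory buffer instead of hitting the file repeatedly. Below is a minimal, self-contained sketch of that pattern; the class name, file handling, and the counting loop standing in for Blocklist.parse() are illustrative only, not the actual Blocklist code.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

public class BufferedLineReadSketch {

    /**
     * Count non-blank, non-comment lines in a text file, reading through a
     * BufferedReader the way the new Blocklist code does instead of calling
     * DataHelper.readLine() against the raw FileInputStream.
     */
    public static int countEntries(File file) throws IOException {
        FileInputStream in = null;
        try {
            in = new FileInputStream(file);
            // readLine() now pulls from an in-memory buffer rather than
            // issuing many tiny reads against the file.
            BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
            int count = 0;
            String line;
            while ((line = br.readLine()) != null) {
                if (line.length() > 0 && !line.startsWith("#"))
                    count++;                // stand-in for parse(line, ...)
            }
            return count;
        } finally {
            if (in != null) try { in.close(); } catch (IOException ioe) {}
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(countEntries(new File(args[0])) + " entries");
    }
}

One side effect visible in the diff: BufferedReader.readLine() already strips the line terminator, including a trailing '\r', which is why the manual '\r' handling in parse() is commented out below.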

net/i2p/router/Blocklist.java

@@ -5,9 +5,11 @@ package net.i2p.router;
* zzz 2008-06
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.net.InetAddress;
import java.net.UnknownHostException;
@@ -23,11 +25,11 @@ import java.util.Set;
import java.util.TreeSet;
import net.i2p.data.Base64;
import net.i2p.data.DataHelper;
import net.i2p.data.Hash;
import net.i2p.data.RouterAddress;
import net.i2p.data.RouterInfo;
import net.i2p.router.networkdb.kademlia.FloodfillNetworkDatabaseFacade;
import net.i2p.util.Addresses;
import net.i2p.util.ConcurrentHashSet;
import net.i2p.util.Log;
import net.i2p.util.Translate;
@@ -47,9 +49,11 @@ import net.i2p.util.Translate;
* And the on-disk blocklist can also contain router hashes to be shitlisted.
*
* So, this class maintains three separate lists:
*<pre>
* 1) The list of IP ranges, read in from a file at startup
* 2) The list of hashes, read in from the same file
* 3) A list of single IPs, initially empty, added to as needed
*</pre>
*
* Read in the IP blocklist from a file, store it in-memory as efficiently
* as we can, and perform tests against it as requested.
@@ -197,10 +201,10 @@ public class Blocklist {
FileInputStream in = null;
try {
in = new FileInputStream(BLFile);
StringBuilder buf = new StringBuilder(128);
while (DataHelper.readLine(in, buf) && count < maxSize) {
BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
String buf = null;
while ((buf = br.readLine()) != null && count < maxSize) {
Entry e = parse(buf, true);
buf.setLength(0);
if (e == null) {
badcount++;
continue;
@@ -276,17 +280,17 @@ public class Blocklist {
}
}
private Entry parse(StringBuilder buf, boolean bitch) {
private Entry parse(String buf, boolean bitch) {
byte[] ip1;
byte[] ip2;
int start1 = 0;
int end1 = buf.length();
if (end1 <= 0)
return null; // blank
if (buf.charAt(end1 - 1) == '\r') { // DataHelper.readLine leaves the \r on there
buf.deleteCharAt(end1 - 1);
end1--;
}
//if (buf.charAt(end1 - 1) == '\r') { // DataHelper.readLine leaves the \r on there
// buf.deleteCharAt(end1 - 1);
// end1--;
//}
if (end1 <= 0)
return null; // blank
int start2 = -1;
@@ -387,10 +391,9 @@ public class Blocklist {
FileInputStream in = null;
try {
in = new FileInputStream(BLFile);
StringBuilder buf = new StringBuilder(128);
while (DataHelper.readLine(in, buf)) {
BufferedReader br = new BufferedReader(new InputStreamReader(in, "ISO-8859-1"));
while (br.readLine() != null) {
lines++;
buf.setLength(0);
}
} catch (IOException ioe) {
if (_log.shouldLog(Log.WARN))
@@ -457,7 +460,7 @@ public class Blocklist {
return;
if (add(toInt(ip)))
if (_log.shouldLog(Log.WARN))
_log.warn("Adding IP to blocklist: " + (ip[0]&0xff) + '.' + (ip[1]&0xff) + '.' + (ip[2]&0xff) + '.' + (ip[3]&0xff));
_log.warn("Adding IP to blocklist: " + Addresses.toString(ip));
}
private boolean add(int ip) {
@@ -643,11 +646,6 @@ public class Blocklist {
return rv;
}
/** IP to string */
public static String toStr(byte[] ip) {
return toStr(toInt(ip));
}
private static String toStr(long entry) {
StringBuilder buf = new StringBuilder(32);
for (int i = 7; i >= 0; i--) {
@@ -681,7 +679,7 @@ public class Blocklist {
private void shitlist(Hash peer, byte[] ip) {
// Temporary reason, until the job finishes
String reason = _x("IP banned by blocklist.txt entry {0}");
_context.shitlist().shitlistRouterForever(peer, reason, toStr(ip));
_context.shitlist().shitlistRouterForever(peer, reason, Addresses.toString(ip));
if (! _context.getBooleanPropertyDefaultTrue(PROP_BLOCKLIST_DETAIL))
return;
boolean shouldRunJob;
@@ -743,12 +741,12 @@ public class Blocklist {
FileInputStream in = null;
try {
in = new FileInputStream(BLFile);
StringBuilder buf = new StringBuilder(128);
BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
String buf = null;
// assume the file is unsorted, so go through the whole thing
while (DataHelper.readLine(in, buf)) {
while ((buf = br.readLine()) != null) {
Entry e = parse(buf, false);
if (e == null || e.peer != null) {
buf.setLength(0);
continue;
}
if (match(ipint, toEntry(e.ip1, e.ip2))) {
@@ -766,7 +764,6 @@ public class Blocklist {
_context.shitlist().shitlistRouterForever(peer, reason, buf.toString());
return;
}
buf.setLength(0);
}
} catch (IOException ioe) {
if (_log.shouldLog(Log.WARN))
@@ -783,6 +780,8 @@ public class Blocklist {
/**
* Write directly to the stream so we don't OOM on a huge list.
* Go through each list twice since we store out-of-order.
*
* TODO move to routerconsole, but that would require exposing the _blocklist array.
*/
public void renderStatusHTML(Writer out) throws IOException {
// move to the jsp
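
The PersistentDataStore side of the commit (the second file, below) applies the same idea to the netDb entry files: the file streams used for RouterInfo serialization are wrapped in BufferedOutputStream / BufferedInputStream, so the many small field writes and reads done by writeBytes() and readBytes() are coalesced. A rough sketch of the write side, using a plain FileOutputStream in place of I2P's SecureFileOutputStream and a byte array in place of a real RouterInfo:

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class BufferedWriteSketch {

    /**
     * Write a serialized entry through a BufferedOutputStream, mirroring the
     * "fos = new BufferedOutputStream(fos)" line added to write().
     * FileOutputStream stands in for SecureFileOutputStream here.
     */
    public static void writeEntry(File dbFile, byte[] serializedEntry) throws IOException {
        OutputStream fos = null;
        try {
            fos = new FileOutputStream(dbFile);
            // Small writes (individual fields, signatures, ...) are collected
            // in memory and flushed to the file in larger chunks.
            fos = new BufferedOutputStream(fos);
            fos.write(serializedEntry);     // stand-in for data.writeBytes(fos)
        } finally {
            // Closing the outer stream flushes the buffer and closes the file.
            if (fos != null) try { fos.close(); } catch (IOException ioe) {}
        }
    }
}

Because the buffered stream is assigned back to the same variable, closing that one reference flushes the buffer and closes the underlying file stream, which matches how the diff simply reassigns fos before calling data.writeBytes(fos).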

net/i2p/router/networkdb/kademlia/PersistentDataStore.java

@@ -8,11 +8,15 @@ package net.i2p.router.networkdb.kademlia;
*
*/
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
@@ -63,6 +67,7 @@ class PersistentDataStore extends TransientDataStore {
ctx.statManager().createRateStat("netDb.writePending", "How many pending writes are there", "NetworkDatabase", new long[] { 60*1000 });
ctx.statManager().createRateStat("netDb.writeOut", "How many we wrote", "NetworkDatabase", new long[] { 20*60*1000 });
ctx.statManager().createRateStat("netDb.writeTime", "How long it took", "NetworkDatabase", new long[] { 20*60*1000 });
//ctx.statManager().createRateStat("netDb.readTime", "How long one took", "NetworkDatabase", new long[] { 20*60*1000 });
_writer = new Writer();
I2PThread writer = new I2PThread(_writer, "DBWriter");
// stop() must be called to flush data to disk
@@ -198,8 +203,8 @@ class PersistentDataStore extends TransientDataStore {
int pending = _keys.size();
boolean exists = (null != _keys.put(key, data));
if (exists)
_context.statManager().addRateData("netDb.writeClobber", pending, 0);
_context.statManager().addRateData("netDb.writePending", pending, 0);
_context.statManager().addRateData("netDb.writeClobber", pending);
_context.statManager().addRateData("netDb.writePending", pending);
}
/** check to see if it's in the write queue */
@@ -253,8 +258,8 @@ class PersistentDataStore extends TransientDataStore {
long time = _context.clock().now() - startTime;
if (_log.shouldLog(Log.INFO))
_log.info("Wrote " + lastCount + " entries to disk in " + time);
_context.statManager().addRateData("netDb.writeOut", lastCount, 0);
_context.statManager().addRateData("netDb.writeTime", time, 0);
_context.statManager().addRateData("netDb.writeOut", lastCount);
_context.statManager().addRateData("netDb.writeTime", time);
}
if (_quit)
break;
@@ -279,7 +284,7 @@ class PersistentDataStore extends TransientDataStore {
private void write(Hash key, DatabaseEntry data) {
if (_log.shouldLog(Log.INFO))
_log.info("Writing key " + key);
FileOutputStream fos = null;
OutputStream fos = null;
File dbFile = null;
try {
String filename = null;
@@ -296,6 +301,7 @@ class PersistentDataStore extends TransientDataStore {
if (dbFile.lastModified() < dataPublishDate) {
// our filesystem is out of date, lets replace it
fos = new SecureFileOutputStream(dbFile);
fos = new BufferedOutputStream(fos);
try {
data.writeBytes(fos);
fos.close();
@@ -368,7 +374,9 @@ class PersistentDataStore extends TransientDataStore {
// Run it inline so we don't clog up the job queue, esp. at startup
// Also this allows us to wait until it is really done to call checkReseed() and set _initialized
//PersistentDataStore.this._context.jobQueue().addJob(new ReadRouterJob(routerInfoFiles[i], key));
//long start = System.currentTimeMillis();
(new ReadRouterJob(routerInfoFiles[i], key)).runJob();
//_context.statManager().addRateData("netDb.readTime", System.currentTimeMillis() - start);
}
}
}
@@ -382,8 +390,9 @@ class PersistentDataStore extends TransientDataStore {
}
private class ReadRouterJob extends JobImpl {
private File _routerFile;
private Hash _key;
private final File _routerFile;
private final Hash _key;
public ReadRouterJob(File routerFile, Hash key) {
super(PersistentDataStore.this._context);
_routerFile = routerFile;
@@ -410,10 +419,11 @@ class PersistentDataStore extends TransientDataStore {
public void runJob() {
if (!shouldRead()) return;
try {
FileInputStream fis = null;
InputStream fis = null;
boolean corrupt = false;
try {
fis = new FileInputStream(_routerFile);
fis = new BufferedInputStream(fis);
RouterInfo ri = new RouterInfo();
ri.readBytes(fis);
if (ri.getNetworkId() != Router.NETWORK_ID) {
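
The read path in ReadRouterJob, shown in the last hunk above, uses the same reassignment trick before RouterInfo.readBytes() parses the file. A simplified, self-contained sketch, with a byte-counting loop standing in for the real RouterInfo parsing:

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class BufferedReadSketch {

    /**
     * Open a stored entry through a BufferedInputStream before parsing it,
     * mirroring "fis = new BufferedInputStream(fis)" in ReadRouterJob.
     * The byte-counting loop stands in for RouterInfo.readBytes(fis).
     */
    public static long readEntry(File routerFile) throws IOException {
        InputStream fis = null;
        try {
            fis = new FileInputStream(routerFile);
            // A parser that pulls single bytes or small fields now hits the
            // in-memory buffer instead of the file on every call.
            fis = new BufferedInputStream(fis);
            long size = 0;
            while (fis.read() != -1)
                size++;
            return size;
        } finally {
            if (fis != null) try { fis.close(); } catch (IOException ioe) {}
        }
    }
}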