
xxxx

git-svn-id: svn://forre.st/undns@1216 470744a7-cac9-478e-843e-5ec1b25c69e8
1 parent 2a4185c commit 9143c44d8c48d984080fbe211027d2a2df94881b forrest committed Apr 3, 2011
Showing with 142 additions and 39 deletions.
  1. +96 −0 db.py
  2. +46 −39 server.py
96 db.py
@@ -0,0 +1,96 @@
+import random
+
+class CachingDictWrapper(object):
+    def __init__(self, inner, cache_size=100):
+        self._inner = inner
+        self._cache = {}
+        self._cache_size = cache_size
+
+    def _add_to_cache(self, key, value):
+        self._cache[key] = value
+        if len(self._cache) > self._cache_size:
+            del self._cache[random.choice(self._cache.keys())] # evict a random entry to bound cache size
+
+    def __len__(self):
+        return len(self._inner)
+
+    def __getitem__(self, key):
+        try:
+            return self._cache[key]
+        except KeyError:
+            value = self._inner[key]
+            self._add_to_cache(key, value)
+            return value
+
+    def __setitem__(self, key, value):
+        self._inner[key] = value
+        self._add_to_cache(key, value)
+
+    def __delitem__(self, key):
+        del self._inner[key]
+        if key in self._cache:
+            del self._cache[key]
+
+    def __contains__(self, key):
+        if key in self._cache:
+            return True
+        return key in self._inner
+
+    def __iter__(self):
+        return iter(self._inner)
+
+    def keys(self):
+        return self._inner.keys()
+    def iterkeys(self):
+        return self._inner.iterkeys()
+    def values(self):
+        return self._inner.values()
+    def itervalues(self):
+        return self._inner.itervalues()
+    def items(self):
+        return self._inner.items()
+    def iteritems(self):
+        return self._inner.iteritems()
+
+class SetDictWrapper(object):
+    def __init__(self, inner):
+        self._inner = inner
+    def add(self, item):
+        self._inner[item] = ""
+    def remove(self, item):
+        self._inner.pop(item) # raises KeyError if not present
+    def __contains__(self, item):
+        return item in self._inner
+    def __iter__(self):
+        return iter(self._inner)
+
+class CachingSetWrapper(object):
+    def __init__(self, inner, cache_size=1000):
+        self._inner = inner
+        self._cache = {}
+        self._cache_size = cache_size
+
+    def _add_to_cache(self, key, value):
+        self._cache[key] = value
+        if len(self._cache) > self._cache_size:
+            del self._cache[random.choice(self._cache.keys())] # evict a random entry to bound cache size
+
+    def __len__(self):
+        return len(self._inner)
+
+    def add(self, item):
+        self._inner.add(item)
+        self._add_to_cache(item, True)
+
+    def remove(self, item):
+        self._inner.remove(item)
+        self._add_to_cache(item, False)
+
+    def __contains__(self, item):
+        try:
+            return self._cache[item]
+        except KeyError:
+            return item in self._inner
+
+    def __iter__(self):
+        return iter(self._inner)
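
These wrappers are meant to stack: a bsddb hash file supplies durable str -> str storage, SetDictWrapper gives it a set interface, and the two caching wrappers keep a bounded number of recently used entries in memory, evicting a random entry once the cache fills (simple and cheap, though not LRU). A minimal sketch of the intended composition, similar to the usage added to server.py below (the file names here are illustrative):

    import bsddb
    import db

    # persistent dict with an in-memory cache of up to 100 entries
    blocks = db.CachingDictWrapper(bsddb.hashopen('example.blocks'))
    blocks['abc123'] = 'serialized block data'
    assert 'abc123' in blocks

    # persistent set built from the same kind of store
    verified = db.CachingSetWrapper(db.SetDictWrapper(bsddb.hashopen('example.verified')))
    verified.add('abc123')
    assert 'abc123' in verified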
85 server.py
@@ -13,6 +13,7 @@
import itertools
import sqlite3
import time
+import bsddb
import twisted.names.common, twisted.names.client, twisted.names.dns, twisted.names.server, twisted.names.error, twisted.names.authority
del twisted
@@ -24,6 +25,7 @@
import packet
import util
+import db
try:
    __version__ = subprocess.Popen(["svnversion", os.path.dirname(sys.argv[0])], stdout=subprocess.PIPE).stdout.read().strip()
@@ -61,8 +63,7 @@
port = args.dht_port
print "PORT:", port
-db = sqlite3.connect(args.config)
-db.execute("create table if not exists blocks (hash blob primary key, data blob)")
+db_prefix = args.config
def parse(x):
    if ':' not in x:
@@ -127,42 +128,26 @@ def f(result):
    def __init__(self, data):
        self.data = data
        self.hash_difficulty = int(util.hash_difficulty(data).hexdigest(), 16)
-        self.hash_id = int(hashlib.sha1(self.data).hexdigest(), 16)
+        self.hash_id = hashlib.sha1(self.data).hexdigest()
        (self.previous_hash, self.pos, self.timestamp, self.total_difficulty, self.message, self.difficulty), self.nonce = json.loads(data)
-class BlockDB(object):
-    def __init__(self, db):
-        self._db = db
+class BlockDictWrapper(object):
+    # presents a {hash string: Block} interface on top of a plain {str: str} store (e.g. a bsddb hash file)
+    def __init__(self, inner):
+        self._inner = inner
    def __getitem__(self, key):
-        key = util.natural_to_string(key)
-        value, = self._db.execute("select data from blocks where hash == ?", [buffer(key)]).fetchone()
-        value = str(value)
-        if value == "":
-            return None
-        return Block(value)
+        block = Block(self._inner[key])
+        if block.hash_id != key:
+            print "warning: invalid block in db!"
+            del self._inner[key] # drop the corrupt entry before reporting it missing
+            raise KeyError()
+        return block
    def __setitem__(self, key, value):
-        try:
-            self.pop(key)
-        except:
-            pass
-        #assert key == value.hash_id
-        key = util.natural_to_string(key)
-        if value is None:
-            value = ""
-        else:
-            value = value.data
-        self._db.execute("insert into blocks (hash, data) values (?, ?)", [buffer(key), buffer(value)])
+        if value.hash_id != key:
+            raise ValueError("invalid block insertion")
+        self._inner[key] = value.data
    def __contains__(self, key):
-        key = util.natural_to_string(key)
-        return bool(self._db.execute("select hash from blocks where hash == ?", [buffer(key)]).fetchall())
-    def pop(self, key):
-        key = util.natural_to_string(key)
-        value, = self._db.execute("select data from blocks where hash == ?", [buffer(key)]).fetchone()
-        value = str(value)
-        self._db.execute("delete from blocks where hash == ?", [buffer(key)])
-        if value == "":
-            return None
-        return Block(value)
+        return key in self._inner
+    def __iter__(self):
+        return iter(self._inner) # bsddb hash files iterate over their string keys, so this yields the stored hashes
class UnDNSNode(node.Node):
    @property
@@ -175,12 +160,12 @@ def peers(self):
    def __init__(self, *args, **kwargs):
        node.Node.__init__(self, *args, **kwargs)
-        self.blocks = {} # hash -> data
-        self.blocks = BlockDB(db)
-        self.verified = set() # hashes of blocks that can be tracked to a genesis block
+        self.blocks = db.CachingDictWrapper(BlockDictWrapper(bsddb.hashopen(db_prefix + '.blocks')))
+        self.verified = db.CachingSetWrapper(db.SetDictWrapper(bsddb.hashopen(db_prefix + '.verified')))
        self.referrers = {} # hash -> list of hashes
        self.best_block = None
        self.best_block_callbacks = []
+        self.requests = {} # pending Request objects received over RPC, keyed by hash_id
+        verified_blocks = [self.blocks[h] for h in self.blocks if h in self.verified]
+        if verified_blocks: # the store is empty on first run, so best_block stays None
+            self.best_block = max(verified_blocks, key=lambda block: block.total_difficulty)
    def joinNetwork(self, *args, **kwargs):
        node.Node.joinNetwork(self, *args, **kwargs)
@@ -215,6 +200,20 @@ def handle_new_block(self, block_data, _rpcNodeID, _rpcNodeContact):
            traceback.print_exc()
    @node.rpcmethod
+    def handle_new_request(self, request_data):
+        request = Request(request_data)
+        if request.hash_id in self.requests:
+            return
+
+        if not self.check_request(request): # reject requests that fail the difficulty check
+            return
+
+        self.requests[request.hash_id] = request
+
+    def check_request(self, request):
+        # a request is valid only when its hash difficulty is a multiple of its claimed difficulty
+        return request.hash_difficulty % request.difficulty == 0
+
+    @node.rpcmethod
    def get_block(self, block_hash):
        block_data = None
        if block_hash in self.blocks:
@@ -263,14 +262,14 @@ def try_to_do_something(self):
            previous_hash = None
            pos = 0
            timestamp = int(time.time())
-            message = {self.port: 1}
+            message = {self.port: 1} # XXX insert self.requests
            difficulty = GENESIS_DIFFICULTY
            total_difficulty = 0 + difficulty
        else:
            previous_hash = previous_block.hash_id
            pos = previous_block.pos + 1
            timestamp = max(previous_block.timestamp, int(time.time()))
-            message = dict((int(k), int(v)) for k, v in previous_block.message.iteritems())
+            message = dict((int(k), int(v)) for k, v in previous_block.message.iteritems()) # XXX insert self.requests
            message[self.port] = message.get(self.port, 0) + 1
        if pos < 25:
@@ -480,7 +479,7 @@ def callback(result):
# RPC
-class RPCProtocol(basic.LineReceiver):
+class RPCProtocol(basic.LineOnlyReceiver):
    def lineReceived(self, line):
        method, args = json.loads(line)
        try:
@@ -503,6 +502,14 @@ def rpc_get_graph(self):
                break
            cur = self.blocks[cur.previous_hash]
        return "{" + ', '.join("{%i, %i}" % x for x in res[::-1]) + "}"
+
+    def rpc_register(self, name, contents):
+        raise NotImplementedError # stub: name registration not implemented yet
+
+    def rpc_update(self, name, contents):
+        raise NotImplementedError # stub: record updates not implemented yet
+    # transfer, drop
+    # all need TTL
rpc_factory = protocol.ServerFactory()
rpc_factory.protocol = RPCProtocol
for port in args.rpc_ports:
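
The RPC side speaks one JSON-encoded [method, args] pair per line over a LineOnlyReceiver connection. A rough client sketch; the port number is illustrative, and whether the wire name is 'get_graph' or the full 'rpc_get_graph' depends on dispatch code not shown in this diff:

    import json
    import socket

    s = socket.create_connection(('127.0.0.1', 5000))
    # ask the node to render its current best chain (handled by rpc_get_graph above)
    s.sendall(json.dumps(['get_graph', []]) + '\r\n')
    print s.makefile().readline()
    s.close()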
