This repository has been archived by the owner on Feb 15, 2024. It is now read-only.

Commit

ChainDb, node, rpc: convert spaces to tabs
Jeff Garzik authored and committed Oct 1, 2012
1 parent 8dd3fcf commit 6626832
Showing 3 changed files with 90 additions and 90 deletions.
122 changes: 61 additions & 61 deletions ChainDb.py
@@ -91,31 +91,31 @@ def __init__(self, settings, datadir, log, mempool, netmagic,
		self.orphans = {}
		self.orphan_deps = {}

		# LevelDB to hold:
		#    tx:*      transaction outputs
		#    misc:*    state
		#    height:*  list of blocks at height h
		#    blkmeta:* block metadata
		#    blocks:*  block seek point in stream
		self.blk_write = io.BufferedWriter(io.FileIO(datadir + '/blocks.dat','ab'))
		self.blk_read = io.BufferedReader(io.FileIO(datadir + '/blocks.dat','rb'))
		self.db = leveldb.LevelDB(datadir + '/leveldb')

		try:
			self.db.Get('misc:height')
		except KeyError:
			self.log.write("INITIALIZING EMPTY BLOCKCHAIN DATABASE")
			batch = leveldb.WriteBatch()
			batch.Put('misc:height', str(-1))
			batch.Put('misc:msg_start', self.netmagic.msg_start)
			batch.Put('misc:tophash', ser_uint256(0L))
			batch.Put('misc:total_work', hex(0L))
			self.db.Write(batch)

		try:
			start = self.db.Get('misc:msg_start')
			if start != self.netmagic.msg_start: raise KeyError
		except KeyError:
			self.log.write("Database magic number mismatch. Data corruption or incorrect network?")
			raise RuntimeError

@@ -124,21 +124,21 @@ def puttxidx(self, txhash, txidx):


		try:
			self.db.Get('tx:'+ser_txhash)
			old_txidx = self.gettxidx(txhash)
			self.log.write("WARNING: overwriting duplicate TX %064x, height %d, oldblk %064x, oldspent %x, newblk %064x" % (txhash, self.getheight(), old_txidx.blkhash, old_txidx.spentmask, txidx.blkhash))
		except KeyError:
			pass
		self.db.Put('tx:'+ser_txhash, hex(txidx.blkhash) + ' ' +
				hex(txidx.spentmask))

		return True

	def gettxidx(self, txhash):
		ser_txhash = ser_uint256(txhash)
		try:
			ser_value = self.db.Get('tx:'+ser_txhash)
		except KeyError:
			return None

		pos = string.find(ser_value, ' ')
@@ -169,11 +169,11 @@ def haveblock(self, blkhash, checkorphans):
		if checkorphans and blkhash in self.orphans:
			return True
		ser_hash = ser_uint256(blkhash)
		try:
			self.db.Get('blocks:'+ser_hash)
			return True
		except KeyError:
			return False

	def have_prevblock(self, block):
		if self.getheight() < 0 and block.sha256 == self.netmagic.block0:
@@ -188,14 +188,14 @@ def getblock(self, blkhash):
			return block

		ser_hash = ser_uint256(blkhash)
		try:
			# Lookup the block index, seek in the file
			fpos = long(self.db.Get('blocks:'+ser_hash))
			self.blk_read.seek(fpos)
			block = CBlock()
			block.deserialize(self.blk_read)
		except KeyError:
			return None

		self.blk_cache.put(blkhash, block)

@@ -360,11 +360,11 @@ def connect_block(self, ser_hash, block, blkmeta):
			return False

		# update database pointers for best chain
		batch = leveldb.WriteBatch()
		batch.Put('misc:total_work', hex(blkmeta.work))
		batch.Put('misc:height', str(blkmeta.height))
		batch.Put('misc:tophash', ser_hash)
		self.db.Write(batch)

		self.log.write("ChainDb: height %d, block %064x" % (
			blkmeta.height, block.sha256))
@@ -408,10 +408,10 @@ def disconnect_block(self, block):
		for tx in block.vtx:
			tx.calc_sha256()
			ser_hash = ser_uint256(tx.sha256)
			try:
				batch.Delete('tx:'+ser_hash)
			except KeyError:
				pass

			if not tx.is_coinbase():
				self.mempool.add(tx)
@@ -420,7 +420,7 @@ def disconnect_block(self, block):
		batch.Put('misc:total_work', hex(prevmeta.work))
		batch.Put('misc:height', str(prevmeta.height))
		batch.Put('misc:tophash', ser_prevhash)
		self.db.Write(batch)

		self.log.write("ChainDb(disconn): height %d, block %064x" % (
			prevmeta.height, block.hashPrevBlock))
@@ -429,10 +429,10 @@ def disconnect_block(self, block):

	def getblockmeta(self, blkhash):
		ser_hash = ser_uint256(blkhash)
		try:
			meta = BlkMeta()
			meta.deserialize(self.db.Get('blkmeta:'+ser_hash))
		except KeyError:
			return None

		return meta
@@ -525,13 +525,13 @@ def putoneblock(self, block):
		else:
			ser_prevhash = ''

		batch = leveldb.WriteBatch()

		# store raw block data
		ser_hash = ser_uint256(block.sha256)
		fpos = self.blk_write.tell()
		self.blk_write.write(block.serialize())
		self.blk_write.flush()
		batch.Put('blocks:'+ser_hash, str(fpos))

		# store metadata related to this block
@@ -544,14 +544,14 @@ def putoneblock(self, block):
		# store list of blocks at this height
		heightidx = HeightIdx()
		heightstr = str(blkmeta.height)
		try:
			heightidx.deserialize(self.db.Get('height:'+heightstr))
		except KeyError:
			pass
		heightidx.blocks.append(block.sha256)

		batch.Put('height:'+heightstr, heightidx.serialize())
		self.db.Write(batch)

		# if chain is not best chain, proceed no further
		if (blkmeta.work <= top_work):
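
For context on the keys this file reads and writes: below is a minimal sketch (Python 2, py-leveldb bindings, not part of this commit) of the key layout documented in ChainDb.__init__, where misc:* holds chain-wide state and tx:*, height:*, blkmeta:* and blocks:* hold per-object indexes. The database path and the placeholder values are illustrative assumptions, not pynode's actual datadir.

import leveldb

db = leveldb.LevelDB('/tmp/example-leveldb')	# hypothetical path, not the pynode datadir

# Initialize chain state the way ChainDb.__init__ does for an empty database.
batch = leveldb.WriteBatch()
batch.Put('misc:height', str(-1))		# no blocks connected yet
batch.Put('misc:tophash', '\x00' * 32)		# stand-in for ser_uint256(0L)
batch.Put('misc:total_work', hex(0L))
db.Write(batch)					# batched writes are applied atomically

# py-leveldb raises KeyError for missing keys, which is how ChainDb detects
# an uninitialized database.
try:
	height = int(db.Get('misc:height'))
except KeyError:
	height = -1
print "current height:", height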
44 changes: 22 additions & 22 deletions node.py
@@ -212,7 +212,7 @@ def send_getblocks(self, timecheck=True):
		self.send_message(gb)

	def got_message(self, message):
		gevent.sleep()

		if self.last_sent + 30 * 60 < time.time():
			self.send_message(msg_ping(self.ver_send))
@@ -228,7 +228,7 @@ def got_message(self, message):
				return

			if (self.ver_send >= NOBLKS_VERSION_START and
			    self.ver_send <= NOBLKS_VERSION_END):
				self.getblocks_ok = False

			self.remote_height = message.nStartingHeight
@@ -254,8 +254,8 @@ def got_message(self, message):

			# special message sent to kick getblocks
			if (len(message.inv) == 1 and
			    message.inv[0].type == MSG_BLOCK and
			    self.chaindb.haveblock(message.inv[0].hash, True)):
				self.send_getblocks(False)
				return

@@ -422,10 +422,10 @@ def __init__(self, log, mempool, chaindb, netmagic):

	def add(self, host, port):
		self.log.write("PeerManager: connecting to %s:%d" %
			       (host, port))
		self.tried[host] = True
		c = NodeConn(host, port, self.log, self, self.mempool,
			     self.chaindb, self.netmagic)
		self.peers.append(c)
		return c

@@ -485,7 +485,7 @@ def closeall(self):
	settings['log'] = None

if ('rpcuser' not in settings or
    'rpcpass' not in settings):
	print "You must set the following in config: rpcuser, rpcpass"
	sys.exit(1)

@@ -523,19 +523,19 @@ def closeall(self):
c = peermgr.add(settings['host'], settings['port'])
threads.append(c)

# program main loop
def start(timeout=None):
	for t in threads: t.start()
	try:
		gevent.joinall(threads,timeout=timeout,
			raise_error=True)
	finally:
		for t in threads: t.kill()
		gevent.joinall(threads)
		log.write('Flushing database...')
		del chaindb.db
		chaindb.blk_write.close()
		log.write('OK')

start()
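
The start() function reindented above also shows node.py's shutdown pattern: join the worker greenlets, and on exit kill them and join again so cleanup finishes before the database is flushed. A small self-contained sketch of that pattern follows; the worker function and timings are hypothetical, while the gevent calls (spawn, sleep, joinall, kill) are the same ones the file uses.

import gevent

def worker(n):
	while True:
		gevent.sleep(n)

threads = [gevent.spawn(worker, n) for n in (1, 2, 3)]

try:
	# raise_error=True re-raises an exception from any greenlet instead of
	# silently swallowing it; timeout bounds how long the join blocks.
	gevent.joinall(threads, timeout=5, raise_error=True)
finally:
	for t in threads:
		t.kill()		# ask each greenlet to exit
	gevent.joinall(threads)		# wait for them all before final cleanup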

14 changes: 7 additions & 7 deletions rpc.py
@@ -287,16 +287,16 @@ def handle_request(self, environ, start_response):
			raise RPCException('400', "Unable to decode JSON data")

		if isinstance(rpcreq, dict):
			start_response('200 OK', [('Content-Type', 'application/json')])
			resp = self.handle_rpc(rpcreq)
			respstr = json.dumps(resp) + "\n"
			yield respstr

		elif isinstance(rpcreq, list):
			start_response('200 OK', [('Content-Type', 'application/json')])
			for resp in itertools.imap(self.handle_rpc, repcreq_list):
				respstr = json.dumps(resp) + "\n"
				yield respstr
		else:
			raise RPCException('400', "Not a valid JSON-RPC request")
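
The branch reindented above distinguishes a single JSON-RPC call (a dict) from a batch (a list). A rough sketch of that dispatch outside the WSGI handler is below; the handle_rpc stub and the sample request are assumptions, not pynode code, and itertools.imap keeps it Python 2 like the repository.

import itertools
import json

def handle_rpc(req):
	# Stub: echo the method name; a real handler dispatches on "method".
	return {'id': req.get('id'), 'result': req.get('method'), 'error': None}

def respond(body):
	rpcreq = json.loads(body)
	if isinstance(rpcreq, dict):			# single request object
		yield json.dumps(handle_rpc(rpcreq)) + "\n"
	elif isinstance(rpcreq, list):			# batch: one response per entry
		for resp in itertools.imap(handle_rpc, rpcreq):
			yield json.dumps(resp) + "\n"
	else:
		raise ValueError("Not a valid JSON-RPC request")

for line in respond('[{"method": "getinfo", "id": 1}, {"method": "getheight", "id": 2}]'):
	print line,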
