diff --git a/lbrynet/wallet/account.py b/lbrynet/wallet/account.py index 437ec358bf..a2db395992 100644 --- a/lbrynet/wallet/account.py +++ b/lbrynet/wallet/account.py @@ -13,10 +13,6 @@ def get_key_chain_from_xpub(xpub): return key, chain -def derive_key(parent_key, chain, sequence): - return CKD_pub(parent_key, chain, sequence)[0] - - class AddressSequence: def __init__(self, derived_keys, gap, age_checker, pub_key, chain_key): @@ -31,7 +27,7 @@ def __init__(self, derived_keys, gap, age_checker, pub_key, chain_key): ] def generate_next_address(self): - new_key, _ = derive_key(self.pub_key, self.chain_key, len(self.derived_keys)) + new_key, _ = CKD_pub(self.pub_key, self.chain_key, len(self.derived_keys)) address = public_key_to_address(new_key) self.derived_keys.append(new_key.encode('hex')) self.addresses.append(address) @@ -59,11 +55,11 @@ def __init__(self, data, receiving_gap, change_gap, age_checker): master_key, master_chain = get_key_chain_from_xpub(data['xpub']) self.receiving = AddressSequence( data.get('receiving', []), receiving_gap, age_checker, - *derive_key(master_key, master_chain, 0) + *CKD_pub(master_key, master_chain, 0) ) self.change = AddressSequence( data.get('change', []), change_gap, age_checker, - *derive_key(master_key, master_chain, 1) + *CKD_pub(master_key, master_chain, 1) ) self.is_old = age_checker @@ -74,10 +70,6 @@ def as_dict(self): 'xpub': self.xpub } - def ensure_enough_addresses(self): - return self.receiving.ensure_enough_addresses() + \ - self.change.ensure_enough_addresses() - @property def sequences(self): return self.receiving, self.change diff --git a/lbrynet/wallet/blockchain.py b/lbrynet/wallet/blockchain.py index f6eaf5b1fa..c08e0f44ca 100644 --- a/lbrynet/wallet/blockchain.py +++ b/lbrynet/wallet/blockchain.py @@ -1,16 +1,63 @@ import os import logging +import hashlib from twisted.internet import threads, defer from lbryum.util import hex_to_int, int_to_hex, rev_hex from lbryum.hashing import hash_encode, Hash, PoWHash -from .stream import StreamController +from .stream import StreamController, execute_serially from .constants import blockchain_params, HEADER_SIZE log = logging.getLogger(__name__) +class Transaction: + + def __init__(self, tx_hash, raw, height): + self.hash = tx_hash + self.raw = raw + self.height = height + + +class BlockchainTransactions: + + def __init__(self, history): + self.addresses = {} + self.transactions = {} + for address, transactions in history.items(): + self.addresses[address] = [] + for txid, raw, height in transactions: + tx = Transaction(txid, raw, height) + self.addresses[address].append(tx) + self.transactions[txid] = tx + + def has_address(self, address): + return address in self.addresses + + def get_transaction(self, tx_hash, *args): + return self.transactions.get(tx_hash, *args) + + def get_transactions(self, address, *args): + return self.addresses.get(address, *args) + + def get_status(self, address): + hashes = [ + '{}:{}:'.format(tx.hash, tx.height) + for tx in self.get_transactions(address, []) + ] + if hashes: + return hashlib.sha256(''.join(hashes)).digest().encode('hex') + + def has_transaction(self, tx_hash): + return tx_hash in self.transactions + + def add_transaction(self, address, transaction): + self.transactions.setdefault(transaction.hash, transaction) + self.addresses.setdefault(address, []) + self.addresses[address].append(transaction) + + class BlockchainHeaders: def __init__(self, path, chain='lbrycrd_main'): @@ -24,39 +71,39 @@ def __init__(self, path, chain='lbrycrd_main'): 
self.on_changed = self._on_change_controller.stream self._size = None - self._write_lock = defer.DeferredLock() if not os.path.exists(path): with open(path, 'wb'): pass + @property + def height(self): + return len(self) - 1 + def sync_read_length(self): return os.path.getsize(self.path) / HEADER_SIZE - def __len__(self): - if self._size is None: - self._size = self.sync_read_length() - return self._size - def sync_read_header(self, height): if 0 <= height < len(self): with open(self.path, 'rb') as f: f.seek(height * HEADER_SIZE) return f.read(HEADER_SIZE) + def __len__(self): + if self._size is None: + self._size = self.sync_read_length() + return self._size + def __getitem__(self, height): assert not isinstance(height, slice),\ "Slicing of header chain has not been implemented yet." header = self.sync_read_header(height) return self._deserialize(height, header) + @execute_serially @defer.inlineCallbacks def connect(self, start, headers): - yield self._write_lock.acquire() - try: - yield threads.deferToThread(self._sync_connect, start, headers) - finally: - self._write_lock.release() + yield threads.deferToThread(self._sync_connect, start, headers) def _sync_connect(self, start, headers): previous_header = None diff --git a/lbrynet/wallet/manager.py b/lbrynet/wallet/manager.py index 66682b3b7a..6f52fbfb81 100644 --- a/lbrynet/wallet/manager.py +++ b/lbrynet/wallet/manager.py @@ -1,22 +1,19 @@ import os import logging +from operator import itemgetter from twisted.internet import defer import lbryschema from .protocol import Network -from .blockchain import BlockchainHeaders +from .blockchain import BlockchainHeaders, Transaction from .wallet import Wallet +from .stream import execute_serially log = logging.getLogger(__name__) -def chunks(l, n): - for i in range(0, len(l), n): - yield l[i:i+n] - - class WalletManager: def __init__(self, storage, config): @@ -24,11 +21,10 @@ def __init__(self, storage, config): self.config = config lbryschema.BLOCKCHAIN_NAME = config['chain'] self.headers = BlockchainHeaders(self.headers_path, config['chain']) - self.wallet = Wallet(self.wallet_path) + self.wallet = Wallet(self.wallet_path, self.headers) self.network = Network(config) self.network.on_header.listen(self.process_header) - self.network.on_transaction.listen(self.process_transaction) - self._downloading_headers = False + self.network.on_status.listen(self.process_status) @property def headers_path(self): @@ -41,48 +37,117 @@ def headers_path(self): def wallet_path(self): return os.path.join(self.config['wallet_path'], 'wallets', 'default_wallet') + def get_least_used_receiving_address(self, max_transactions=1000): + return self._get_least_used_address( + self.wallet.receiving_addresses, + self.wallet.default_account.receiving, + max_transactions + ) + + def get_least_used_change_address(self, max_transactions=100): + return self._get_least_used_address( + self.wallet.change_addresses, + self.wallet.default_account.change, + max_transactions + ) + + def _get_least_used_address(self, addresses, sequence, max_transactions): + transaction_counts = [] + for address in addresses: + transactions = self.wallet.history.get_transactions(address, []) + tx_count = len(transactions) + if tx_count == 0: + return address + elif tx_count >= max_transactions: + continue + else: + transaction_counts.append((address, tx_count)) + + if transaction_counts: + transaction_counts.sort(key=itemgetter(1)) + return transaction_counts[0][0] + + address = sequence.generate_next_address() + self.subscribe_history(address) +
return address + @defer.inlineCallbacks def start(self): - self.wallet.load() self.network.start() yield self.network.on_connected.first - yield self.download_headers() - yield self.network.headers_subscribe() - yield self.download_transactions() + yield self.update_headers() + yield self.network.subscribe_headers() + yield self.update_wallet() def stop(self): return self.network.stop() + @execute_serially @defer.inlineCallbacks - def download_headers(self): - self._downloading_headers = True + def update_headers(self): while True: - sought_height = len(self.headers) - headers = yield self.network.block_headers(sought_height) - log.info("received {} headers starting at {} height".format(headers['count'], sought_height)) + height_sought = len(self.headers) + headers = yield self.network.get_headers(height_sought) + log.info("received {} headers starting at {} height".format(headers['count'], height_sought)) if headers['count'] <= 0: break - yield self.headers.connect(sought_height, headers['hex'].decode('hex')) - self._downloading_headers = False + yield self.headers.connect(height_sought, headers['hex'].decode('hex')) @defer.inlineCallbacks - def process_header(self, header): - if self._downloading_headers: + def process_header(self, response): + header = response[0] + if self.update_headers.is_running: return - if header['block_height'] == len(self.headers): + if header['height'] == len(self.headers): # New header from network directly connects after the last local header. yield self.headers.connect(len(self.headers), header['hex'].decode('hex')) - elif header['block_height'] > len(self.headers): + elif header['height'] > len(self.headers): # New header is several heights ahead of local, do download instead. - yield self.download_headers() + yield self.update_headers() + @execute_serially @defer.inlineCallbacks - def download_transactions(self): - for addresses in chunks(self.wallet.addresses, 500): - self.network.rpc([ - ('blockchain.address.subscribe', [address]) - for address in addresses + def update_wallet(self): + + if not self.wallet.exists: + self.wallet.create() + + # Before subscribing, download history for any addresses that don't have any, + # this avoids situation where we're getting status updates to addresses we know + # need to update anyways. Continue to get history and create more addresses until + # all missing addresses are created and history for them is fully restored. + self.wallet.ensure_enough_addresses() + addresses = list(self.wallet.addresses_without_history) + while addresses: + yield defer.gatherResults([ + self.update_history(a) for a in addresses ]) + addresses = self.wallet.ensure_enough_addresses() + + # By this point all of the addresses should be restored and we + # can now subscribe all of them to receive updates. 
+ yield defer.gatherResults([ + self.subscribe_history(address) + for address in self.wallet.addresses + ]) - def process_transaction(self, tx): - pass + @defer.inlineCallbacks + def update_history(self, address): + history = yield self.network.get_history(address) + for hash in map(itemgetter('tx_hash'), history): + transaction = self.wallet.history.get_transaction(hash) + if not transaction: + raw = yield self.network.get_transaction(hash) + transaction = Transaction(hash, raw, None) + self.wallet.history.add_transaction(address, transaction) + + @defer.inlineCallbacks + def subscribe_history(self, address): + status = yield self.network.subscribe_address(address) + if status != self.wallet.history.get_status(address): + self.update_history(address) + + def process_status(self, response): + address, status = response + if status != self.wallet.history.get_status(address): + self.update_history(address) diff --git a/lbrynet/wallet/protocol.py b/lbrynet/wallet/protocol.py index a59c76ad08..dc8cda58c7 100644 --- a/lbrynet/wallet/protocol.py +++ b/lbrynet/wallet/protocol.py @@ -63,57 +63,37 @@ def connectionLost(self, reason=None): self.on_disconnected_controller.add(True) def lineReceived(self, line): + try: message = json.loads(line) except (ValueError, TypeError): - raise ProtocolException("Cannot decode message '%s'" % line.strip()) - msg_id = message.get('id', 0) - msg_result = message.get('result') - msg_error = message.get('error') - msg_method = message.get('method') - msg_params = message.get('params') - if msg_id: - # It's a RPC response - # Perform lookup to the table of waiting requests. + raise ProtocolException("Cannot decode message '{}'".format(line.strip())) + + if message.get('id'): try: - meta = self.lookup_table[msg_id] - del self.lookup_table[msg_id] + d = self.lookup_table.pop(message['id']) + if message.get('error'): + d.errback(RemoteServiceException(*message['error'])) + else: + d.callback(message.get('result')) except KeyError: - # When deferred object for given message ID isn't found, it's an error raise ProtocolException( - "Lookup for deferred object for message ID '%s' failed." 
% msg_id) - # If there's an error, handle it as errback - # If both result and error are null, handle it as a success with blank result - if msg_error != None: - meta['defer'].errback( - RemoteServiceException(msg_error[0], msg_error[1], msg_error[2]) - ) - else: - meta['defer'].callback(msg_result) - elif msg_method: - if msg_method == 'blockchain.headers.subscribe': - self.network._on_header_controller.add(msg_params[0]) - elif msg_method == 'blockchain.address.subscribe': - self.network._on_address_controller.add(msg_params) + "Lookup for deferred object for message ID '{}' failed.".format(message['id'])) + elif message.get('method') in self.network.subscription_controllers: + controller = self.network.subscription_controllers[message['method']] + controller.add(message.get('params')) else: log.warning("Cannot handle message '%s'" % line) - def write_request(self, method, params, is_notification=False): - request_id = None if is_notification else self._get_id() - serialized = json.dumps({'id': request_id, 'method': method, 'params': params}) - self.sendLine(serialized) - return request_id - - def rpc(self, method, params, is_notification=False): - request_id = self.write_request(method, params, is_notification) - if is_notification: - return - d = defer.Deferred() - self.lookup_table[request_id] = { + def rpc(self, method, *args): + message_id = self._get_id() + message = json.dumps({ + 'id': message_id, 'method': method, - 'params': params, - 'defer': d, - } + 'params': args + }) + self.sendLine(message) + d = self.lookup_table[message_id] = defer.Deferred() return d @@ -147,8 +127,13 @@ def __init__(self, config): self._on_header_controller = StreamController() self.on_header = self._on_header_controller.stream - self._on_transaction_controller = StreamController() - self.on_transaction = self._on_transaction_controller.stream + self._on_status_controller = StreamController() + self.on_status = self._on_status_controller.stream + + self.subscription_controllers = { + 'blockchain.headers.subscribe': self._on_header_controller, + 'blockchain.address.subscribe': self._on_status_controller, + } @defer.inlineCallbacks def start(self): @@ -182,101 +167,29 @@ def stop(self): def is_connected(self): return self.client is not None and self.client.connected - def rpc(self, method, params, *args, **kwargs): + def rpc(self, list_or_method, *args): if self.is_connected: - return self.client.rpc(method, params, *args, **kwargs) + return self.client.rpc(list_or_method, *args) else: raise TransportException("Attempting to send rpc request when connection is not available.") - def claimtrie_getvaluesforuris(self, block_hash, *uris): - return self.rpc( - 'blockchain.claimtrie.getvaluesforuris', [block_hash] + list(uris) - ) - - def claimtrie_getvaluesforuri(self, block_hash, uri): - return self.rpc('blockchain.claimtrie.getvaluesforuri', [block_hash, uri]) - - def claimtrie_getclaimssignedbynthtoname(self, name, n): - return self.rpc('blockchain.claimtrie.getclaimssignedbynthtoname', [name, n]) - - def claimtrie_getclaimssignedbyid(self, certificate_id): - return self.rpc('blockchain.claimtrie.getclaimssignedbyid', [certificate_id]) - - def claimtrie_getclaimssignedby(self, name): - return self.rpc('blockchain.claimtrie.getclaimssignedby', [name]) - - def claimtrie_getnthclaimforname(self, name, n): - return self.rpc('blockchain.claimtrie.getnthclaimforname', [name, n]) - - def claimtrie_getclaimsbyids(self, *claim_ids): - return self.rpc('blockchain.claimtrie.getclaimsbyids', list(claim_ids)) - - def 
claimtrie_getclaimbyid(self, claim_id): - return self.rpc('blockchain.claimtrie.getclaimbyid', [claim_id]) - - def claimtrie_get(self): - return self.rpc('blockchain.claimtrie.get', []) - - def block_get_block(self, block_hash): - return self.rpc('blockchain.block.get_block', [block_hash]) + def broadcast(self, raw_transaction): + return self.rpc('blockchain.transaction.broadcast', raw_transaction) - def claimtrie_getclaimsforname(self, name): - return self.rpc('blockchain.claimtrie.getclaimsforname', [name]) + def get_history(self, address): + return self.rpc('blockchain.address.get_history', address) - def claimtrie_getclaimsintx(self, txid): - return self.rpc('blockchain.claimtrie.getclaimsintx', [txid]) + def get_transaction(self, tx_hash): + return self.rpc('blockchain.transaction.get', tx_hash) - def claimtrie_getvalue(self, name, block_hash=None): - return self.rpc('blockchain.claimtrie.getvalue', [name, block_hash]) + def get_merkle(self, tx_hash, height): + return self.rpc('blockchain.transaction.get_merkle', tx_hash, height) - def relayfee(self): - return self.rpc('blockchain.relayfee', []) + def get_headers(self, height, count=10000): + return self.rpc('blockchain.block.headers', height, count) - def estimatefee(self): - return self.rpc('blockchain.estimatefee', []) - - def transaction_get(self, txid): - return self.rpc('blockchain.transaction.get', [txid]) - - def transaction_get_merkle(self, tx_hash, height, cache_only=False): - return self.rpc('blockchain.transaction.get_merkle', [tx_hash, height, cache_only]) - - def transaction_broadcast(self, raw_transaction): - return self.rpc('blockchain.transaction.broadcast', [raw_transaction]) - - def block_get_chunk(self, index, cache_only=False): - return self.rpc('blockchain.block.get_chunk', [index, cache_only]) - - def block_get_header(self, height, cache_only=False): - return self.rpc('blockchain.block.get_header', [height, cache_only]) - - def block_headers(self, height, count=10000): - return self.rpc('blockchain.block.headers', [height, count]) - - def utxo_get_address(self, txid, pos): - return self.rpc('blockchain.utxo.get_address', [txid, pos]) - - def address_listunspent(self, address): - return self.rpc('blockchain.address.listunspent', [address]) - - def address_get_proof(self, address): - return self.rpc('blockchain.address.get_proof', [address]) - - def address_get_balance(self, address): - return self.rpc('blockchain.address.get_balance', [address]) - - def address_get_mempool(self, address): - return self.rpc('blockchain.address.get_mempool', [address]) - - def address_get_history(self, address): - return self.rpc('blockchain.address.get_history', [address]) - - def address_subscribe(self, addresses): - if isinstance(addresses, str): - return self.rpc('blockchain.address.subscribe', [addresses]) - else: - msgs = map(lambda addr: ('blockchain.address.subscribe', [addr]), addresses) - self.network.send(msgs, self.addr_subscription_response) + def subscribe_headers(self): + return self.rpc('blockchain.headers.subscribe') - def headers_subscribe(self): - return self.rpc('blockchain.headers.subscribe', [], True) + def subscribe_address(self, address): + return self.rpc('blockchain.address.subscribe', address) diff --git a/lbrynet/wallet/stream.py b/lbrynet/wallet/stream.py index fcc86d2dfb..0f089dc5f2 100644 --- a/lbrynet/wallet/stream.py +++ b/lbrynet/wallet/stream.py @@ -1,7 +1,24 @@ -from twisted.internet.defer import Deferred +from twisted.internet.defer import Deferred, DeferredLock, maybeDeferred, inlineCallbacks 
from twisted.python.failure import Failure +def execute_serially(f): + _lock = DeferredLock() + + @inlineCallbacks + def allow_only_one_at_a_time(*args, **kwargs): + yield _lock.acquire() + allow_only_one_at_a_time.is_running = True + try: + yield maybeDeferred(f, *args, **kwargs) + finally: + allow_only_one_at_a_time.is_running = False + _lock.release() + + allow_only_one_at_a_time.is_running = False + return allow_only_one_at_a_time + + class BroadcastSubscription: def __init__(self, controller, on_data, on_error, on_done): diff --git a/lbrynet/wallet/transaction.py b/lbrynet/wallet/transaction.py index 3c15e78393..92b59f45d4 100644 --- a/lbrynet/wallet/transaction.py +++ b/lbrynet/wallet/transaction.py @@ -16,7 +16,7 @@ from .lbrycrd import point_to_ser, MyVerifyingKey, MySigningKey from .lbrycrd import regenerate_key, public_key_from_private_key from .lbrycrd import encode_claim_id_hex, claim_id_hash -from .util import profiler, var_int, int_to_hex, parse_sig, rev_hex +from .util import var_int, int_to_hex, parse_sig, rev_hex log = logging.getLogger() @@ -559,7 +559,6 @@ def fee_for_size(cls, relay_fee, fee_per_kb, size): fee = relay_fee return fee - @profiler def estimated_size(self): '''Return an estimated tx size in bytes.''' return len(self.serialize(-1)) / 2 # ASCII hex string diff --git a/lbrynet/wallet/wallet.py b/lbrynet/wallet/wallet.py index 699af6a460..ea230007f3 100644 --- a/lbrynet/wallet/wallet.py +++ b/lbrynet/wallet/wallet.py @@ -1,48 +1,28 @@ -import ast import copy import stat import json import os -import random -import threading -import time -import hashlib import logging -from decimal import Decimal -from functools import partial - -from lbryschema.address import hash_160_bytes_to_address, public_key_to_address, is_address +from .constants import NEW_SEED_VERSION from .account import Account -from .constants import TYPE_ADDRESS, TYPE_CLAIM, TYPE_SUPPORT, TYPE_UPDATE, TYPE_PUBKEY -from .constants import EXPIRATION_BLOCKS, COINBASE_MATURITY, RECOMMENDED_FEE -from .coinchooser import COIN_CHOOSERS -from .transaction import Transaction from .mnemonic import Mnemonic -from .util import rev_hex -from .errors import NotEnoughFunds, InvalidPassword -from .constants import NEW_SEED_VERSION -from .lbrycrd import regenerate_key, is_compressed, pw_encode, pw_decode -from .lbrycrd import bip32_private_key -from .lbrycrd import encode_claim_id_hex, deserialize_xkey, claim_id_hash -from .lbrycrd import bip32_private_derivation, bip32_root +from .lbrycrd import pw_encode, bip32_private_derivation, bip32_root +from .blockchain import BlockchainTransactions log = logging.getLogger(__name__) class WalletStorage: + def __init__(self, path): - self.lock = threading.RLock() self.data = {} self.path = path self.file_exists = False self.modified = False - log.info("wallet path: %s", self.path) - if self.path: - self.read(self.path) + self.path and self.read() - def read(self, path): - """Read the contents of the wallet file.""" + def read(self): try: with open(self.path, "r") as f: data = f.read() @@ -50,36 +30,17 @@ def read(self, path): return try: self.data = json.loads(data) - except: - try: - d = ast.literal_eval(data) # parse raw data from reading wallet file - labels = d.get('labels', {}) - except Exception as e: - raise IOError("Cannot read wallet file '%s'" % self.path) + except Exception: self.data = {} - # In old versions of Electrum labels were latin1 encoded, this fixes breakage. 
- for i, label in labels.items(): - try: - unicode(label) - except UnicodeDecodeError: - d['labels'][i] = unicode(label.decode('latin1')) - for key, value in d.items(): - try: - json.dumps(key) - json.dumps(value) - except: - log.error('Failed to convert label to json format: {}'.format(key)) - continue - self.data[key] = value + raise IOError("Cannot read wallet file '%s'" % self.path) self.file_exists = True def get(self, key, default=None): - with self.lock: - v = self.data.get(key) - if v is None: - v = default - else: - v = copy.deepcopy(v) + v = self.data.get(key) + if v is None: + v = default + else: + v = copy.deepcopy(v) return v def put(self, key, value): @@ -87,25 +48,19 @@ def put(self, key, value): json.dumps(key) json.dumps(value) except: - self.print_error("json error: cannot save", key) return - with self.lock: - if value is not None: - if self.data.get(key) != value: - self.modified = True - self.data[key] = copy.deepcopy(value) - elif key in self.data: + if value is not None: + if self.data.get(key) != value: self.modified = True - self.data.pop(key) + self.data[key] = copy.deepcopy(value) + elif key in self.data: + self.modified = True + self.data.pop(key) def write(self): - with self.lock: - self._write() + self._write() def _write(self): - if threading.currentThread().isDaemon(): - log.warning('daemon thread cannot write wallet') - return if not self.modified: return s = json.dumps(self.data, indent=4, sort_keys=True) @@ -128,1372 +83,160 @@ def _write(self): os.chmod(self.path, mode) self.modified = False + def upgrade(self): + + def _rename_property(old, new): + if old in self.data: + old_value = self.data[old] + del self.data[old] + if new not in self.data: + self.data[new] = old_value + + _rename_property('addr_history', 'history') + _rename_property('use_encryption', 'encrypted') + class Wallet: root_name = 'x/' - root_derivation = "m/" - wallet_type = 'standard' - max_change_outputs = 3 + root_derivation = 'm/' + gap_limit_for_change = 6 - def __init__(self, path): + def __init__(self, path, headers): self.storage = storage = WalletStorage(path) - + storage.upgrade() + self.headers = headers + self.accounts = self._instantiate_accounts(storage.get('accounts', {})) + self.history = BlockchainTransactions(storage.get('history', {})) + self.master_public_keys = storage.get('master_public_keys', {}) + self.master_private_keys = storage.get('master_private_keys', {}) self.gap_limit = storage.get('gap_limit', 20) - self.gap_limit_for_change = 6 - - self.accounts = {} + self.seed = storage.get('seed', '') self.seed_version = storage.get('seed_version', NEW_SEED_VERSION) - self.use_change = storage.get('use_change', True) - self.multiple_change = storage.get('multiple_change', False) - - self.use_encryption = storage.get('use_encryption', False) - self.seed = storage.get('seed', '') # encrypted - self.labels = storage.get('labels', {}) - self.frozen_addresses = set(storage.get('frozen_addresses', [])) - self.stored_height = storage.get('stored_height', 0) # last known height (for offline mode) - self.history = storage.get('addr_history', {}) # address -> list(txid, height) - - # Transactions pending verification. A map from tx hash to transaction - # height. Access is not contended so no lock is needed. - self.unverified_tx = {} - # Verified transactions. Each value is a (height, timestamp, block_pos) tuple. - # Access with self.lock. 
- self.verified_tx = storage.get('verified_tx3', {}) - - # there is a difference between wallet.up_to_date and interface.is_up_to_date() - # interface.is_up_to_date() returns true when all requests have been answered and processed - # wallet.up_to_date is true when the wallet is synchronized (stronger requirement) - self.up_to_date = False - + self.encrypted = storage.get('encrypted', storage.get('use_encryption', False)) self.claim_certificates = storage.get('claim_certificates', {}) self.default_certificate_claim = storage.get('default_certificate_claim', None) - # save wallet type the first time - if self.storage.get('wallet_type') is None: - self.storage.put('wallet_type', self.wallet_type) - - self.master_public_keys = storage.get('master_public_keys', {}) - self.master_private_keys = storage.get('master_private_keys', {}) - self.mnemonic = Mnemonic(storage.get('lang', 'eng')) - - @property - def addresses(self): - for account in self.accounts.values(): - for sequence in account.sequences: - for address in sequence.addresses: - yield address - - def create(self): - seed = self.mnemonic.make_seed() - self.add_seed(seed, None) - self.add_xprv_from_seed(seed, self.root_name, None) - self.add_account('0', Account({ - 'xpub': self.master_public_keys.get("x/") - }, - self.gap_limit, - self.gap_limit_for_change, - self.address_is_old - )) - self.ensure_enough_addresses() - - def ensure_enough_addresses(self): - for account in self.accounts.values(): - account.ensure_enough_addresses() - - def load(self): - self.load_accounts() - self.load_transactions() - - def load_accounts(self): - for index, details in self.storage.get('accounts', {}).items(): + def _instantiate_accounts(self, accounts): + instances = {} + for index, details in accounts.items(): if 'xpub' in details: - self.accounts[index] = Account( - details, self.gap_limit, self.gap_limit_for_change, self.address_is_old + instances[index] = Account( + details, self.gap_limit, self.gap_limit_for_change, self.is_address_old ) else: log.error("cannot load account: {}".format(details)) + return instances - def load_transactions(self): - self.txi = self.storage.get('txi', {}) - self.txo = self.storage.get('txo', {}) - self.pruned_txo = self.storage.get('pruned_txo', {}) - tx_list = self.storage.get('transactions', {}) - self.claimtrie_transactions = self.storage.get('claimtrie_transactions', {}) - self.transactions = {} - for tx_hash, raw in tx_list.items(): - tx = Transaction(raw) - self.transactions[tx_hash] = tx - if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None and \ - (tx_hash not in self.pruned_txo.values()): - log.info("removing unreferenced tx: %s", tx_hash) - self.transactions.pop(tx_hash) - - # add to claimtrie transactions if its a claimtrie transaction - tx.deserialize() - for n, txout in enumerate(tx.outputs()): - if txout[0] & (TYPE_CLAIM | TYPE_UPDATE | TYPE_SUPPORT): - self.claimtrie_transactions[tx_hash + ':' + str(n)] = txout[0] - - def set_use_encryption(self, use_encryption): - self.use_encryption = use_encryption - self.storage.put('use_encryption', use_encryption) - - def save_transactions(self, write=False): - tx = {} - for k, v in self.transactions.items(): - tx[k] = str(v) - self.storage.put('transactions', tx) - self.storage.put('txi', self.txi) - self.storage.put('txo', self.txo) - self.storage.put('pruned_txo', self.pruned_txo) - self.storage.put('addr_history', self.history) - self.storage.put('claimtrie_transactions', self.claimtrie_transactions) - if write: - self.storage.write() - - def 
save_certificate(self, claim_id, private_key, write=False): - certificate_keys = self.storage.get('claim_certificates') or {} - certificate_keys[claim_id] = private_key - self.storage.put('claim_certificates', certificate_keys) - if write: - self.storage.write() - - def set_default_certificate(self, claim_id, overwrite_existing=True, write=False): - if self.default_certificate_claim is not None and overwrite_existing or not \ - self.default_certificate_claim: - self.storage.put('default_certificate_claim', claim_id) - if write: - self.storage.write() - self.default_certificate_claim = claim_id - - def get_certificate_signing_key(self, claim_id): - certificates = self.storage.get('claim_certificates', {}) - return certificates.get(claim_id, None) - - def get_certificate_claim_ids_for_signing(self): - certificates = self.storage.get('claim_certificates', {}) - return certificates.keys() - - def clear_history(self): - with self.transaction_lock: - self.txi = {} - self.txo = {} - self.pruned_txo = {} - self.save_transactions() - with self.lock: - self.history = {} - self.tx_addr_hist = {} - - def build_reverse_history(self): - self.tx_addr_hist = {} - for addr, hist in self.history.items(): - for tx_hash, h in hist: - s = self.tx_addr_hist.get(tx_hash, set()) - s.add(addr) - self.tx_addr_hist[tx_hash] = s - - def check_history(self): - save = False - for addr, hist in self.history.items(): - if not self.is_mine(addr): - self.history.pop(addr) - save = True - continue - - for tx_hash, tx_height in hist: - if tx_hash in self.pruned_txo.values() or self.txi.get(tx_hash) or self.txo.get( - tx_hash): - continue - tx = self.transactions.get(tx_hash) - if tx is not None: - self.add_transaction(tx_hash, tx) - save = True - if save: - self.save_transactions() - - def set_up_to_date(self, up_to_date): - with self.lock: - self.up_to_date = up_to_date - if up_to_date: - self.save_transactions(write=True) - - def is_up_to_date(self): - with self.lock: - return self.up_to_date - - def set_label(self, name, text=None): - changed = False - old_text = self.labels.get(name) - if text: - if old_text != text: - self.labels[name] = text - changed = True - else: - if old_text: - self.labels.pop(name) - changed = True - - if changed: - self.storage.put('labels', self.labels) - - return changed - - def is_mine(self, address): - return address in self.addresses - - def is_change(self, address): - if not self.is_mine(address): - return False - acct, s = self.get_address_index(address) - if s is None: - return False - return s[0] == 1 - - def get_address_index(self, address): - for acc_id in self.accounts: - for for_change in [0, 1]: - addresses = self.accounts[acc_id].get_addresses(for_change) - if address in addresses: - return acc_id, (for_change, addresses.index(address)) - raise Exception("Address not found", address) - - def get_private_key(self, address, password): - if self.is_watching_only(): - return [] - account_id, sequence = self.get_address_index(address) - return self.accounts[account_id].get_private_key(sequence, self, password) - - def get_public_keys(self, address): - account_id, sequence = self.get_address_index(address) - return self.accounts[account_id].get_pubkeys(*sequence) - - def sign_message(self, address, message, password): - keys = self.get_private_key(address, password) - assert len(keys) == 1 - sec = keys[0] - key = regenerate_key(sec) - compressed = is_compressed(sec) - return key.sign_message(message, compressed, address) - - def decrypt_message(self, pubkey, message, password): - 
address = public_key_to_address(pubkey.decode('hex')) - keys = self.get_private_key(address, password) - secret = keys[0] - ec = regenerate_key(secret) - decrypted = ec.decrypt_message(message) - return decrypted - - def add_unverified_tx(self, tx_hash, tx_height): - # Only add if confirmed and not verified - if tx_height > 0 and tx_hash not in self.verified_tx: - self.unverified_tx[tx_hash] = tx_height - - def add_verified_tx(self, tx_hash, info): - # Remove from the unverified map and add to the verified map and - self.unverified_tx.pop(tx_hash, None) - with self.lock: - self.verified_tx[tx_hash] = info # (tx_height, timestamp, pos) - self.storage.put('verified_tx3', self.verified_tx) - - conf, timestamp = self.get_confirmations(tx_hash) - self.network.trigger_callback('verified', tx_hash, conf, timestamp) - - def get_unverified_txs(self): - """Returns a map from tx hash to transaction height""" - return self.unverified_tx - - def undo_verifications(self, height): - """Used by the verifier when a reorg has happened""" - txs = [] - with self.lock: - for tx_hash, item in self.verified_tx: - tx_height, timestamp, pos = item - if tx_height >= height: - self.verified_tx.pop(tx_hash, None) - txs.append(tx_hash) - return txs - - def get_local_height(self): - """ return last known height if we are offline """ - return self.network.get_local_height() if self.network else self.stored_height - - def get_confirmations(self, tx): - """ return the number of confirmations of a monitored transaction. """ - with self.lock: - if tx in self.verified_tx: - height, timestamp, pos = self.verified_tx[tx] - conf = (self.get_local_height() - height + 1) - if conf <= 0: - timestamp = None - elif tx in self.unverified_tx: - conf = -1 - timestamp = None - else: - conf = 0 - timestamp = None - - return conf, timestamp - - def get_txpos(self, tx_hash): - "return position, even if the tx is unverified" - with self.lock: - x = self.verified_tx.get(tx_hash) - y = self.unverified_tx.get(tx_hash) - if x: - height, timestamp, pos = x - return height, pos - elif y: - return y, 0 - else: - return 1e12, 0 - - def is_found(self): - return self.history.values() != [[]] * len(self.history) - - def get_num_tx(self, address): - """ return number of transactions where address is involved """ - return len(self.history.get(address, [])) - - def get_tx_delta(self, tx_hash, address): - "effect of tx on address" - # pruned - if tx_hash in self.pruned_txo.values(): - return None - delta = 0 - # substract the value of coins sent from address - d = self.txi.get(tx_hash, {}).get(address, []) - for n, v in d: - delta -= v - # add the value of the coins received at address - d = self.txo.get(tx_hash, {}).get(address, []) - for n, v, cb in d: - delta += v - return delta - - def get_wallet_delta(self, tx): - """ effect of tx on wallet """ - addresses = self.addresses - is_relevant = False - is_send = False - is_pruned = False - is_partial = False - v_in = v_out = v_out_mine = 0 - for item in tx.inputs(): - addr = item.get('address') - if addr in addresses: - is_send = True - is_relevant = True - d = self.txo.get(item['prevout_hash'], {}).get(addr, []) - for n, v, cb in d: - if n == item['prevout_n']: - value = v - break - else: - value = None - if value is None: - is_pruned = True - else: - v_in += value - else: - is_partial = True - if not is_send: - is_partial = False - for addr, value in tx.get_outputs(): - v_out += value - if addr in addresses: - v_out_mine += value - is_relevant = True - if is_pruned: - # some inputs are mine: - fee = None 
- if is_send: - v = v_out_mine - v_out - else: - # no input is mine - v = v_out_mine - else: - v = v_out_mine - v_in - if is_partial: - # some inputs are mine, but not all - fee = None - is_send = v < 0 - else: - # all inputs are mine - fee = v_out - v_in - return is_relevant, is_send, v, fee - - def get_addr_io(self, address): - h = self.history.get(address, []) - received = {} - sent = {} - for tx_hash, height in h: - l = self.txo.get(tx_hash, {}).get(address, []) - for n, v, is_cb in l: - received[tx_hash + ':%d' % n] = (height, v, is_cb) - for tx_hash, height in h: - l = self.txi.get(tx_hash, {}).get(address, []) - for txi, v in l: - sent[txi] = height - return received, sent - - def get_addr_utxo(self, address): - coins, spent = self.get_addr_io(address) - for txi in spent: - coins.pop(txi) - return coins - - # return the total amount ever received by an address - def get_addr_received(self, address): - received, sent = self.get_addr_io(address) - return sum([v for height, v, is_cb in received.values()]) - - # return the balance of a bitcoin address: confirmed and matured, unconfirmed, unmatured - def get_addr_balance(self, address, exclude_claimtrietx=False): - received, sent = self.get_addr_io(address) - c = u = x = 0 - for txo, (tx_height, v, is_cb) in received.items(): - exclude_tx = False - # check if received transaction is a claimtrie tx to ourself - if exclude_claimtrietx: - prevout_hash, prevout_n = txo.split(':') - tx_type = self.claimtrie_transactions.get(txo) - if tx_type is not None: - exclude_tx = True - - if not exclude_tx: - if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height(): - x += v - elif tx_height > 0: - c += v - else: - u += v - if txo in sent: - if sent[txo] > 0: - c -= v - else: - u -= v - return c, u, x - - # get coin object in order to abandon calimtrie transactions - # equivalent of get_spendable_coins but for claimtrie utxos - def get_spendable_claimtrietx_coin(self, txid, nOut): - tx = self.transactions.get(txid) - if tx is None: - raise BaseException('txid was not found in wallet') - tx.deserialize() - txouts = tx.outputs() - if len(txouts) < nOut + 1: - raise BaseException('nOut is too large') - txout = txouts[nOut] - txout_type, txout_dest, txout_value = txout - if not txout_type & (TYPE_CLAIM | TYPE_UPDATE | TYPE_SUPPORT): - raise BaseException('txid and nOut does not refer to a claimtrie transaction') - - address = txout_dest[1] - utxos = self.get_addr_utxo(address) - try: - utxo = utxos[txid + ':' + str(nOut)] - except KeyError: - raise BaseException('this claimtrie transaction has already been spent') - - # create inputs - is_update = txout_type & TYPE_UPDATE - is_claim = txout_type & TYPE_CLAIM - is_support = txout_type & TYPE_SUPPORT - - i = {'prevout_hash': txid, 'prevout_n': nOut, 'address': address, 'value': txout_value, - 'is_update': is_update, 'is_claim': is_claim, 'is_support': is_support, 'height': utxo[0]} - if is_claim: - i['claim_name'] = txout_dest[0][0] - i['claim_value'] = txout_dest[0][1] - elif is_support: - i['claim_name'] = txout_dest[0][0] - i['claim_id'] = txout_dest[0][1] - elif is_update: - i['claim_name'] = txout_dest[0][0] - i['claim_id'] = txout_dest[0][1] - i['claim_value'] = txout_dest[0][2] - else: - # should not reach here - raise ZeroDivisionError() - - self.add_input_info(i) - return i - - def get_spendable_coins(self, domain=None, exclude_frozen=True, abandon_txid=None): - coins = [] - found_abandon_txid = False - if domain is None: - domain = list(self.addresses) - if exclude_frozen: - domain = 
set(domain) - self.frozen_addresses - for addr in domain: - c = self.get_addr_utxo(addr) - for txo, v in c.items(): - tx_height, value, is_cb = v - if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height(): - continue - prevout_hash, prevout_n = txo.split(':') - tx = self.transactions.get(prevout_hash) - tx.deserialize() - txout = tx.outputs()[int(prevout_n)] - if txout[0] & (TYPE_CLAIM | TYPE_SUPPORT | TYPE_UPDATE) == 0 or ( - abandon_txid is not None and prevout_hash == abandon_txid): - output = { - 'address': addr, - 'value': value, - 'prevout_n': int(prevout_n), - 'prevout_hash': prevout_hash, - 'height': tx_height, - 'coinbase': is_cb, - 'is_claim': bool(txout[0] & TYPE_CLAIM), - 'is_support': bool(txout[0] & TYPE_SUPPORT), - 'is_update': bool(txout[0] & TYPE_UPDATE), - } - if txout[0] & TYPE_CLAIM: - output['claim_name'] = txout[1][0][0] - output['claim_value'] = txout[1][0][1] - elif txout[0] & TYPE_SUPPORT: - output['claim_name'] = txout[1][0][0] - output['claim_id'] = txout[1][0][1] - elif txout[0] & TYPE_UPDATE: - output['claim_name'] = txout[1][0][0] - output['claim_id'] = txout[1][0][1] - output['claim_value'] = txout[1][0][2] - coins.append(output) - if abandon_txid is not None and prevout_hash == abandon_txid: - found_abandon_txid = True - continue - if abandon_txid is not None and not found_abandon_txid: - raise ValueError("Can't spend from the given txid") - return coins - - def get_account_addresses(self, acc_id, include_change=True): - '''acc_id of None means all user-visible accounts''' - addr_list = [] - acc_ids = self.accounts_to_show() if acc_id is None else [acc_id] - for _acc_id in acc_ids: - if _acc_id in self.accounts: - acc = self.accounts[_acc_id] - addr_list += acc.get_addresses(0) - if include_change: - addr_list += acc.get_addresses(1) - return addr_list - - def get_account_from_address(self, addr): - """Returns the account that contains this address, or None""" - for acc_id in self.accounts: # similar to get_address_index but simpler - if addr in self.get_account_addresses(acc_id): - return acc_id - return None - - def get_account_balance(self, account, exclude_claimtrietx=False): - return self.get_balance(self.get_account_addresses(account, exclude_claimtrietx)) - - def get_frozen_balance(self): - return self.get_balance(self.frozen_addresses) - - def get_balance(self, domain=None, exclude_claimtrietx=False): - if domain is None: - domain = self.addresses(True) - cc = uu = xx = 0 - for addr in domain: - c, u, x = self.get_addr_balance(addr, exclude_claimtrietx) - cc += c - uu += u - xx += x - return cc, uu, xx - - def get_address_history(self, address): - with self.lock: - return self.history.get(address, []) - - def get_status(self, h): - if not h: - return None - status = '' - for tx_hash, height in h: - status += tx_hash + ':%d:' % height - return hashlib.sha256(status).digest().encode('hex') - - def find_pay_to_pubkey_address(self, prevout_hash, prevout_n): - dd = self.txo.get(prevout_hash, {}) - for addr, l in dd.items(): - for n, v, is_cb in l: - if n == prevout_n: - self.print_error("found pay-to-pubkey address:", addr) - return addr - - def add_transaction(self, tx_hash, tx): - log.info("Adding tx: %s", tx_hash) - is_coinbase = True if tx.inputs()[0].get('is_coinbase') else False - with self.transaction_lock: - # add inputs - self.txi[tx_hash] = d = {} - for txi in tx.inputs(): - addr = txi.get('address') - if not txi.get('is_coinbase'): - prevout_hash = txi['prevout_hash'] - prevout_n = txi['prevout_n'] - ser = prevout_hash + ':%d' % 
prevout_n - if addr == "(pubkey)": - addr = self.find_pay_to_pubkey_address(prevout_hash, prevout_n) - # find value from prev output - if addr and self.is_mine(addr): - dd = self.txo.get(prevout_hash, {}) - for n, v, is_cb in dd.get(addr, []): - if n == prevout_n: - if d.get(addr) is None: - d[addr] = [] - d[addr].append((ser, v)) - break - else: - self.pruned_txo[ser] = tx_hash - - # add outputs - self.txo[tx_hash] = d = {} - for n, txo in enumerate(tx.outputs()): - ser = tx_hash + ':%d' % n - _type, x, v = txo - if _type & (TYPE_CLAIM | TYPE_UPDATE | TYPE_SUPPORT): - x = x[1] - self.claimtrie_transactions[ser] = _type - if _type & TYPE_ADDRESS: - addr = x - elif _type & TYPE_PUBKEY: - addr = public_key_to_address(x.decode('hex')) - else: - addr = None - if addr and self.is_mine(addr): - if d.get(addr) is None: - d[addr] = [] - d[addr].append((n, v, is_coinbase)) - # give v to txi that spends me - next_tx = self.pruned_txo.get(ser) - if next_tx is not None: - self.pruned_txo.pop(ser) - dd = self.txi.get(next_tx, {}) - if dd.get(addr) is None: - dd[addr] = [] - dd[addr].append((ser, v)) - # save - self.transactions[tx_hash] = tx - log.info("Saved") - - def remove_transaction(self, tx_hash): - with self.transaction_lock: - self.print_error("removing tx from history", tx_hash) - # tx = self.transactions.pop(tx_hash) - for ser, hh in self.pruned_txo.items(): - if hh == tx_hash: - self.pruned_txo.pop(ser) - # add tx to pruned_txo, and undo the txi addition - for next_tx, dd in self.txi.items(): - for addr, l in dd.items(): - ll = l[:] - for item in ll: - ser, v = item - prev_hash, prev_n = ser.split(':') - if prev_hash == tx_hash: - l.remove(item) - self.pruned_txo[ser] = next_tx - if not l: - dd.pop(addr) - else: - dd[addr] = l - try: - self.txi.pop(tx_hash) - self.txo.pop(tx_hash) - except KeyError: - self.print_error("tx was not in history", tx_hash) - - def receive_tx_callback(self, tx_hash, tx, tx_height): - self.add_transaction(tx_hash, tx) - self.save_transactions() - self.add_unverified_tx(tx_hash, tx_height) - - def receive_history_callback(self, addr, hist): - with self.lock: - old_hist = self.history.get(addr, []) - for tx_hash, height in old_hist: - if (tx_hash, height) not in hist: - # remove tx if it's not referenced in histories - self.tx_addr_hist[tx_hash].remove(addr) - if not self.tx_addr_hist[tx_hash]: - self.remove_transaction(tx_hash) - - self.history[addr] = hist - - for tx_hash, tx_height in hist: - # add it in case it was previously unconfirmed - self.add_unverified_tx(tx_hash, tx_height) - # add reference in tx_addr_hist - s = self.tx_addr_hist.get(tx_hash, set()) - s.add(addr) - self.tx_addr_hist[tx_hash] = s - # if addr is new, we have to recompute txi and txo - tx = self.transactions.get(tx_hash) - if tx is not None and self.txi.get(tx_hash, {}).get(addr) is None and self.txo.get( - tx_hash, {}).get(addr) is None: - self.add_transaction(tx_hash, tx) - - # Write updated TXI, TXO etc. - self.save_transactions() - - def get_history(self, domain=None): - from collections import defaultdict - # get domain - if domain is None: - domain = self.get_account_addresses(None) - - # 1. 
Get the history of each address in the domain, maintain the - # delta of a tx as the sum of its deltas on domain addresses - tx_deltas = defaultdict(int) - for addr in domain: - h = self.get_address_history(addr) - for tx_hash, height in h: - delta = self.get_tx_delta(tx_hash, addr) - if delta is None or tx_deltas[tx_hash] is None: - tx_deltas[tx_hash] = None - else: - tx_deltas[tx_hash] += delta - - # 2. create sorted history - history = [] - for tx_hash, delta in tx_deltas.items(): - conf, timestamp = self.get_confirmations(tx_hash) - history.append((tx_hash, conf, delta, timestamp)) - history.sort(key=lambda x: self.get_txpos(x[0])) - history.reverse() - - # 3. add balance - c, u, x = self.get_balance(domain) - balance = c + u + x - h2 = [] - for item in history: - tx_hash, conf, delta, timestamp = item - h2.append((tx_hash, conf, delta, timestamp, balance)) - if balance is None or delta is None: - balance = None - else: - balance -= delta - h2.reverse() - - # fixme: this may happen if history is incomplete - if balance not in [None, 0]: - self.print_error("Error: history not synchronized") - return [] - - return h2 - - def get_name_claims(self, domain=None, include_abandoned=True, include_supports=True, - exclude_expired=True): - claims = [] - if domain is None: - domain = self.get_account_addresses(None) - - for addr in domain: - txos, txis = self.get_addr_io(addr) - for txo, v in txos.items(): - tx_height, value, is_cb = v - prevout_hash, prevout_n = txo.split(':') - - tx = self.transactions.get(prevout_hash) - tx.deserialize() - txout = tx.outputs()[int(prevout_n)] - if not include_abandoned and txo in txis: - continue - if not include_supports and txout[0] & TYPE_SUPPORT: - continue - if txout[0] & (TYPE_CLAIM | TYPE_UPDATE | TYPE_SUPPORT): - local_height = self.get_local_height() - expired = tx_height + EXPIRATION_BLOCKS <= local_height - if expired and exclude_expired: - continue - output = { - 'txid': prevout_hash, - 'nout': int(prevout_n), - 'address': addr, - 'amount': Decimal(value), - 'height': tx_height, - 'expiration_height': tx_height + EXPIRATION_BLOCKS, - 'expired': expired, - 'confirmations': local_height - tx_height, - 'is_spent': txo in txis, - } - if tx_height: - output['height'] = tx_height - output['expiration_height'] = tx_height + EXPIRATION_BLOCKS - output['expired'] = expired - output['confirmations'] = local_height - tx_height - output['is_pending'] = False - else: - output['height'] = None - output['expiration_height'] = None - output['expired'] = expired - output['confirmations'] = None - output['is_pending'] = True - - if txout[0] & TYPE_CLAIM: - output['category'] = 'claim' - claim_name, claim_value = txout[1][0] - output['name'] = claim_name - output['value'] = claim_value.encode('hex') - claim_id = claim_id_hash(rev_hex(output['txid']).decode('hex'), - output['nout']) - claim_id = encode_claim_id_hex(claim_id) - output['claim_id'] = claim_id - elif txout[0] & TYPE_SUPPORT: - output['category'] = 'support' - claim_name, claim_id = txout[1][0] - output['name'] = claim_name - output['claim_id'] = encode_claim_id_hex(claim_id) - elif txout[0] & TYPE_UPDATE: - output['category'] = 'update' - claim_name, claim_id, claim_value = txout[1][0] - output['name'] = claim_name - output['value'] = claim_value.encode('hex') - output['claim_id'] = encode_claim_id_hex(claim_id) - if not expired: - output[ - 'blocks_to_expiration'] = tx_height + EXPIRATION_BLOCKS - local_height - claims.append(output) - return claims - - def get_label(self, tx_hash): - label = 
self.labels.get(tx_hash, '') - if label == '': - label = self.get_default_label(tx_hash) - return label - - def get_default_label(self, tx_hash): - if self.txi.get(tx_hash) == {}: - d = self.txo.get(tx_hash, {}) - labels = [] - for addr in d.keys(): - label = self.labels.get(addr) - if label: - labels.append(label) - return ', '.join(labels) - return '' - - def fee_per_kb(self, config): - b = config.get('dynamic_fees') - f = config.get('fee_factor', 50) - F = config.get('fee_per_kb', RECOMMENDED_FEE) - if b and self.network and self.network.fee: - result = min(RECOMMENDED_FEE, self.network.fee * (50 + f) / 100) - else: - result = F - return result - - def relayfee(self): - RELAY_FEE = 5000 - MAX_RELAY_FEE = 50000 - f = self.network.relay_fee if self.network and self.network.relay_fee else RELAY_FEE - return min(f, MAX_RELAY_FEE) - - def get_tx_fee(self, tx): - # this method can be overloaded - return tx.get_fee() - - def coin_chooser_name(self, config): - kind = config.get('coin_chooser') - if kind not in COIN_CHOOSERS: - kind = 'Priority' - return kind - - def coin_chooser(self, config): - klass = COIN_CHOOSERS[self.coin_chooser_name(config)] - return klass() - - def make_unsigned_transaction(self, coins, outputs, config, fixed_fee=None, change_addr=None, - abandon_txid=None): - # check outputs - for type, data, value in outputs: - if type & (TYPE_CLAIM | TYPE_UPDATE | TYPE_SUPPORT): - data = data[1] - if type & TYPE_ADDRESS: - assert is_address(data), "Address " + data + " is invalid!" - - # Avoid index-out-of-range with coins[0] below - if not coins: - raise NotEnoughFunds() - - for item in coins: - self.add_input_info(item) - - # change address - if change_addr: - change_addrs = [change_addr] - else: - # send change to one of the accounts involved in the tx - address = coins[0].get('address') - account, _ = self.get_address_index(address) - if self.use_change and self.accounts[account].has_change(): - # New change addresses are created only after a few - # confirmations. 
Select the unused addresses within the - # gap limit; if none take one at random - addrs = self.accounts[account].get_addresses(1)[-self.gap_limit_for_change:] - change_addrs = [addr for addr in addrs if - self.get_num_tx(addr) == 0] - if not change_addrs: - change_addrs = [random.choice(addrs)] - else: - change_addrs = [address] - - # Fee estimator - if fixed_fee is None: - fee_estimator = partial(Transaction.fee_for_size, - self.relayfee(), - self.fee_per_kb(config)) - else: - fee_estimator = lambda size: fixed_fee - - # Change <= dust threshold is added to the tx fee - dust_threshold = 182 * 3 * self.relayfee() / 1000 - - # Let the coin chooser select the coins to spend - max_change = self.max_change_outputs if self.multiple_change else 1 - coin_chooser = self.coin_chooser(config) - tx = coin_chooser.make_tx(coins, outputs, change_addrs[:max_change], - fee_estimator, dust_threshold, abandon_txid=abandon_txid) - - # Sort the inputs and outputs deterministically - tx.BIP_LI01_sort() - - return tx - - def mktx(self, outputs, password, config, fee=None, change_addr=None, domain=None): - coins = self.get_spendable_coins(domain) - tx = self.make_unsigned_transaction(coins, outputs, config, fee, change_addr) - self.sign_transaction(tx, password) - return tx - - def add_input_info(self, txin): - address = txin['address'] - account_id, sequence = self.get_address_index(address) - account = self.accounts[account_id] - redeemScript = account.redeem_script(*sequence) - pubkeys = account.get_pubkeys(*sequence) - x_pubkeys = account.get_xpubkeys(*sequence) - # sort pubkeys and x_pubkeys, using the order of pubkeys - pubkeys, x_pubkeys = zip(*sorted(zip(pubkeys, x_pubkeys))) - txin['pubkeys'] = list(pubkeys) - txin['x_pubkeys'] = list(x_pubkeys) - txin['signatures'] = [None] * len(pubkeys) - - if redeemScript: - txin['redeemScript'] = redeemScript - txin['num_sig'] = account.m - else: - txin['redeemPubkey'] = account.get_pubkey(*sequence) - txin['num_sig'] = 1 - - def sign_transaction(self, tx, password): - if self.is_watching_only(): - return - # Raise if password is not correct. 
- self.check_password(password) - # Add derivation for utxo in wallets - for i, addr in self.utxo_can_sign(tx): - txin = tx.inputs()[i] - txin['address'] = addr - self.add_input_info(txin) - # Add private keys - keypairs = {} - for x in self.xkeys_can_sign(tx): - sec = self.get_private_key_from_xpubkey(x, password) - if sec: - keypairs[x] = sec - # Sign - if keypairs: - tx.sign(keypairs) - - def send_tx(self, tx, timeout=300): - # fixme: this does not handle the case where server does not answer - if not self.network.interface: - raise Exception("Not connected.") - - txid = tx.hash() - - with self.send_tx_lock: - self.network.send([('blockchain.transaction.broadcast', [str(tx)])], self.on_broadcast) - self.tx_event.wait() - success, result = self.receive_tx(txid, tx) - self.tx_event.clear() - - if not success: - log.error("send tx failed: %s", result) - return success, result - - log.debug("waiting for %s to be added to the wallet", txid) - now = time.time() - while txid not in self.transactions and time.time() < now + timeout: - time.sleep(0.2) - - if txid not in self.transactions: - #TODO: detect if the txid is not known because it changed - log.error("timed out while waiting to receive back a broadcast transaction, " - "expected txid: %s", txid) - return False, "timed out while waiting to receive back a broadcast transaction, " \ - "expected txid: %s" % txid - - log.info("successfully sent %s", txid) - return success, result - - def on_broadcast(self, r): - self.tx_result = r.get('result') - self.tx_event.set() + @property + def exists(self): + return self.storage.file_exists - def receive_tx(self, tx_hash, tx): - out = self.tx_result - if out != tx_hash: - return False, "error: " + out - return True, out + @property + def default_account(self): + return self.accounts['0'] - def update_password(self, old_password, new_password): - if new_password == '': - new_password = None + @property + def sequences(self): + for account in self.accounts.values(): + for sequence in account.sequences: + yield sequence - if self.has_seed(): - decoded = self.get_seed(old_password) - self.seed = pw_encode(decoded, new_password) - self.storage.put('seed', self.seed) + @property + def addresses(self): + for sequence in self.sequences: + for address in sequence.addresses: + yield address - if hasattr(self, 'master_private_keys'): - for k, v in self.master_private_keys.items(): - b = pw_decode(v, old_password) - c = pw_encode(b, new_password) - self.master_private_keys[k] = c - self.storage.put('master_private_keys', self.master_private_keys) + @property + def receiving_addresses(self): + for account in self.accounts.values(): + for address in account.receiving.addresses: + yield address - self.set_use_encryption(new_password is not None) + @property + def change_addresses(self): + for account in self.accounts.values(): + for address in account.change.addresses: + yield address - def is_frozen(self, addr): - return addr in self.frozen_addresses + @property + def addresses_without_history(self): + for address in self.addresses: + if not self.history.has_address(address): + yield address - def set_frozen_state(self, addrs, freeze): - '''Set frozen state of the addresses to FREEZE, True or False''' - if all(self.is_mine(addr) for addr in addrs): - if freeze: - self.frozen_addresses |= set(addrs) - else: - self.frozen_addresses -= set(addrs) - self.storage.put('frozen_addresses', list(self.frozen_addresses)) - return True - return False + def ensure_enough_addresses(self): + return [ + address + for sequence
+    def ensure_enough_addresses(self):
+        return [
+            address
+            for sequence in self.sequences
+            for address in sequence.ensure_enough_addresses()
+        ]

-    def prepare_for_verifier(self):
-        # review transactions that are in the history
-        for addr, hist in self.history.items():
-            for tx_hash, tx_height in hist:
-                # add it in case it was previously unconfirmed
-                self.add_unverified_tx(tx_hash, tx_height)
+    def create(self):
+        mnemonic = Mnemonic(self.storage.get('lang', 'eng'))
+        seed = mnemonic.make_seed()
+        self.add_seed(seed, None)
+        self.add_xprv_from_seed(seed, self.root_name, None)
+        account = Account(
+            {'xpub': self.master_public_keys.get("x/")},
+            self.gap_limit,
+            self.gap_limit_for_change,
+            self.is_address_old
+        )
+        self.add_account('0', account)

-        # if we are on a pruning server, remove unverified transactions
-        vr = self.verified_tx.keys() + self.unverified_tx.keys()
-        for tx_hash in self.transactions.keys():
-            if tx_hash not in vr:
-                log.info("removing transaction %s", tx_hash)
-                self.transactions.pop(tx_hash)
+    def add_seed(self, seed, password):
+        if self.seed:
+            raise Exception("a seed exists")
+        self.seed_version, self.seed = self.format_seed(seed)
+        if password:
+            self.seed = pw_encode(self.seed, password)
+        self.storage.put('seed', self.seed)
+        self.storage.put('seed_version', self.seed_version)
+        self.set_use_encryption(password is not None)

-    def accounts_to_show(self):
-        return self.accounts.keys()
+    @staticmethod
+    def format_seed(seed):
+        return NEW_SEED_VERSION, ' '.join(seed.split())

-    def get_accounts(self):
-        return {a_id: a for a_id, a in self.accounts.items()
-                if a_id in self.accounts_to_show()}
+    def add_xprv_from_seed(self, seed, name, password, passphrase=''):
+        xprv, xpub = bip32_root(Mnemonic.mnemonic_to_seed(seed, passphrase))
+        xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
+        self.add_master_public_key(name, xpub)
+        self.add_master_private_key(name, xprv, password)

-    def get_account_name(self, k):
-        default_name = "Main account" if k == '0' else "Account " + k
-        return self.labels.get(k, default_name)
+    def add_master_public_key(self, name, xpub):
+        if xpub in self.master_public_keys.values():
+            raise BaseException('Duplicate master public key')
+        self.master_public_keys[name] = xpub
+        self.storage.put('master_public_keys', self.master_public_keys)

-    def get_account_names(self):
-        ids = self.accounts_to_show()
-        return dict(zip(ids, map(self.get_account_name, ids)))
+    def add_master_private_key(self, name, xpriv, password):
+        self.master_private_keys[name] = pw_encode(xpriv, password)
+        self.storage.put('master_private_keys', self.master_private_keys)

     def add_account(self, account_id, account):
         self.accounts[account_id] = account
         self.save_accounts()

+    def set_use_encryption(self, use_encryption):
+        self.use_encryption = use_encryption
+        self.storage.put('use_encryption', use_encryption)
+
     def save_accounts(self):
         d = {}
         for k, v in self.accounts.items():
-            d[k] = v.dump()
+            d[k] = v.as_dict()
         self.storage.put('accounts', d)

-    def is_used(self, address):
-        h = self.history.get(address, [])
-        c, u, x = self.get_addr_balance(address)
-        return len(h) > 0 and c + u + x == 0
-
-    def is_empty(self, address):
-        c, u, x = self.get_addr_balance(address)
-        return c + u + x == 0
-
-    def address_is_old(self, address, age_limit=2):
+    def is_address_old(self, address, age_limit=2):
         age = -1
-        h = self.history.get(address, [])
-        for tx_hash, tx_height in h:
-            if tx_height == 0:
+        for tx in self.history.get_transactions(address, []):
+            if tx.height == 0:
                 tx_age = 0
             else:
-                tx_age = self.get_local_height() - tx_height + 1
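+                # headers.height is the height of the most recent locally stored header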
+                tx_age = self.headers.height - tx.height + 1
             if tx_age > age:
                 age = tx_age
         return age > age_limit
-
-    def can_sign(self, tx):
-        if self.is_watching_only():
-            return False
-        if tx.is_complete():
-            return False
-        if self.xkeys_can_sign(tx):
-            return True
-        if self.utxo_can_sign(tx):
-            return True
-        return False
-
-    def utxo_can_sign(self, tx):
-        out = set()
-        coins = self.get_spendable_coins()
-        for i in tx.inputs_without_script():
-            txin = tx.inputs[i]
-            for item in coins:
-                if txin.get('prevout_hash') == item.get('prevout_hash') and txin.get(
-                        'prevout_n') == item.get('prevout_n'):
-                    out.add((i, item.get('address')))
-        return out
-
-    def xkeys_can_sign(self, tx):
-        out = set()
-        for x in tx.inputs_to_sign():
-            if self.can_sign_xpubkey(x):
-                out.add(x)
-        return out
-
-    def get_private_key_from_xpubkey(self, x_pubkey, password):
-        if x_pubkey[0:2] in ['02', '03', '04']:
-            addr = public_key_to_address(x_pubkey.decode('hex'))
-            if self.is_mine(addr):
-                return self.get_private_key(addr, password)[0]
-        elif x_pubkey[0:2] == 'ff':
-            xpub, sequence = Account.parse_xpubkey(x_pubkey)
-            for k, v in self.master_public_keys.items():
-                if v == xpub:
-                    xprv = self.get_master_private_key(k, password)
-                    if xprv:
-                        _, _, _, c, k = deserialize_xkey(xprv)
-                        return bip32_private_key(sequence, k, c)
-        elif x_pubkey[0:2] == 'fd':
-            addrtype = ord(x_pubkey[2:4].decode('hex'))
-            addr = hash_160_bytes_to_address(x_pubkey[4:].decode('hex'), addrtype)
-            if self.is_mine(addr):
-                return self.get_private_key(addr, password)[0]
-        else:
-            raise BaseException("z")
-
-    def can_sign_xpubkey(self, x_pubkey):
-        if x_pubkey[0:2] in ['02', '03', '04']:
-            addr = public_key_to_address(x_pubkey.decode('hex'))
-            return self.is_mine(addr)
-        elif x_pubkey[0:2] == 'ff':
-            if not isinstance(self, Wallet):
-                return False
-            xpub, sequence = Account.parse_xpubkey(x_pubkey)
-            return xpub in [self.master_public_keys[k] for k in self.master_private_keys.keys()]
-        elif x_pubkey[0:2] == 'fd':
-            addrtype = ord(x_pubkey[2:4].decode('hex'))
-            addr = hash_160_bytes_to_address(x_pubkey[4:].decode('hex'), addrtype)
-            return self.is_mine(addr)
-        else:
-            raise BaseException("z")
-
-    def can_change_password(self):
-        return not self.is_watching_only()
-
-    def get_unused_addresses(self, account):
-        # fixme: use slots from expired requests
-        domain = self.get_account_addresses(account, include_change=False)
-        return [addr for addr in domain if not self.history.get(addr)]
-
-    def get_unused_address(self, account):
-        domain = self.get_account_addresses(account, include_change=False)
-        for addr in domain:
-            if not self.history.get(addr):
-                return addr
-
-    def is_watching_only(self):
-        return not bool(self.master_private_keys)
-
-    def get_master_public_key(self):
-        return self.master_public_keys.get(self.root_name)
-
-    def get_master_private_key(self, account, password):
-        k = self.master_private_keys.get(account)
-        if not k:
-            return
-        xprv = pw_decode(k, password)
-        try:
-            deserialize_xkey(xprv)
-        except:
-            raise InvalidPassword()
-        return xprv
-
-    def check_password(self, password):
-        xpriv = self.get_master_private_key(self.root_name, password)
-        xpub = self.master_public_keys[self.root_name]
-        if deserialize_xkey(xpriv)[3] != deserialize_xkey(xpub)[3]:
-            raise InvalidPassword()
-
-    def add_master_public_key(self, name, xpub):
-        if xpub in self.master_public_keys.values():
-            raise BaseException('Duplicate master public key')
-        self.master_public_keys[name] = xpub
-        self.storage.put('master_public_keys', self.master_public_keys)
-
-    def add_master_private_key(self, name, xpriv, password):
-        self.master_private_keys[name] = pw_encode(xpriv, password)
-        self.storage.put('master_private_keys', self.master_private_keys)
-
-    def derive_xkeys(self, root, derivation, password):
-        x = self.master_private_keys[root]
-        root_xprv = pw_decode(x, password)
-        xprv, xpub = bip32_private_derivation(root_xprv, root, derivation)
-        return xpub, xprv
-
-    def mnemonic_to_seed(self, seed, password):
-        return Mnemonic.mnemonic_to_seed(seed, password)
-
-    def format_seed(self, seed):
-        return NEW_SEED_VERSION, ' '.join(seed.split())
-
-    @classmethod
-    def account_derivation(cls, account_id):
-        return cls.root_derivation + account_id
-
-    @classmethod
-    def address_derivation(cls, account_id, change, address_index):
-        account_derivation = cls.account_derivation(account_id)
-        return "%s/%d/%d" % (account_derivation, change, address_index)
-
-    def address_id(self, address):
-        acc_id, (change, address_index) = self.get_address_index(address)
-        return self.address_derivation(acc_id, change, address_index)
-
-    def add_xprv_from_seed(self, seed, name, password, passphrase=''):
-        # we don't store the seed, only the master xpriv
-        xprv, _ = bip32_root(self.mnemonic_to_seed(seed, passphrase))
-        xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
-        self.add_master_public_key(name, xpub)
-        self.add_master_private_key(name, xprv, password)
-
-    def add_xpub_from_seed(self, seed, name):
-        # store only master xpub
-        xprv, _ = bip32_root(self.mnemonic_to_seed(seed, ''))
-        _, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
-        self.add_master_public_key(name, xpub)
-
-    def has_seed(self):
-        return self.seed != ''
-
-    def add_seed(self, seed, password):
-        if self.seed:
-            raise Exception("a seed exists")
-        self.seed_version, self.seed = self.format_seed(seed)
-        if password:
-            self.seed = pw_encode(self.seed, password)
-        self.storage.put('seed', self.seed)
-        self.storage.put('seed_version', self.seed_version)
-        self.set_use_encryption(password is not None)
-
-    def get_seed(self, password):
-        return pw_decode(self.seed, password)
-
-    def get_mnemonic(self, password):
-        return self.get_seed(password)
-
-    def num_unused_trailing_addresses(self, addresses):
-        k = 0
-        for a in addresses[::-1]:
-            if self.history.get(a):
-                break
-            k = k + 1
-        return k
-
-    def min_acceptable_gap(self):
-        # fixme: this assumes wallet is synchronized
-        n = 0
-        nmax = 0
-
-        for account in self.accounts.values():
-            addresses = account.get_addresses(0)
-            k = self.num_unused_trailing_addresses(addresses)
-            for a in addresses[0:-k]:
-                if self.history.get(a):
-                    n = 0
-                else:
-                    n += 1
-                if n > nmax:
-                    nmax = n
-        return nmax + 1
-
-    def default_account(self):
-        return self.accounts['0']
-
-    def create_new_address(self, account=None, for_change=0):
-        with self.lock:
-            if account is None:
-                account = self.default_account()
-            address = account.create_new_address(for_change)
-            self.add_address(address)
-            log.info("created address %s", address)
-            return address
-
-    def add_address(self, address):
-        if address not in self.history:
-            self.history[address] = []
-        if self.synchronizer:
-            self.synchronizer.add(address)
-        self.save_accounts()
-
-    def get_least_used_address(self, account=None, for_change=False, max_count=100):
-        domain = self.get_account_addresses(account, include_change=for_change)
-        hist = {}
-        for addr in domain:
-            if for_change != self.is_change(addr):
-                continue
-            else:
-                h = self.history.get(addr)
-                if h and len(h) >= max_count:
-                    continue
-                elif h:
-                    hist[addr] = h
-                else:
-                    hist[addr] = []
-        if hist:
-            return sorted(hist.keys(), key=lambda x: len(hist[x]))[0]
-        return self.create_new_address(account, for_change=for_change)
-
-    def is_beyond_limit(self, address, account, is_change):
-        addr_list = account.get_addresses(is_change)
-        i = addr_list.index(address)
-        prev_addresses = addr_list[:max(0, i)]
-        limit = self.gap_limit_for_change if is_change else self.gap_limit
-        if len(prev_addresses) < limit:
-            return False
-        prev_addresses = prev_addresses[max(0, i - limit):]
-        for addr in prev_addresses:
-            if self.history.get(addr):
-                return False
-        return True
-
-    def get_master_public_keys(self):
-        out = {}
-        for k, account in self.accounts.items():
-            name = self.get_account_name(k)
-            mpk_text = '\n\n'.join(account.get_master_pubkeys())
-            out[name] = mpk_text
-        return out