diff --git a/.gitignore b/.gitignore
index cfa2d3d..76c5381 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,15 +9,24 @@ gui.build/
 node.build/
 workspace/
 *.log
+*.log.*
+node.log.*
+wallet.log.*
 privkey.der
 pubkey.der
 wallet.der
 address.txt
 mempool.db
 ledger.db
+index.db
 static/regmode.
 static/regmode.*
 static/index_reg.*
 heavy3a.bin
 /theme
 config_custom.txt
+sequencing_last
+# ignore polysign, so devs can have a symlink or local dev version.
+polysign
+# ignore ledger queries, for the time being.
+ledger_queries.py
diff --git a/README.md b/README.md
index 529c540..64055b5 100644
--- a/README.md
+++ b/README.md
@@ -2,53 +2,50 @@ Bismuth Readme
 =======
 ##### Warning: For production purposes, please only use code from the "releases" page, which is not in pre-release state.
-[![Build Status](https://travis-ci.org/hclivess/Bismuth.svg?branch=master)](https://travis-ci.org/hclivess/Bismuth)
-
-Official website:
+### Official website:
 * http://bismuth.cz
 
-Hypernodes website:
-* https://hypernodes.bismuth.live
+### Explorers:
+* https://bismuth.online
 
-DApps, miscellaneous:
-* https://github.com/hclivess/BismuthProjects
+### Wallets:
+* [Tornado Wallet](https://github.com/bismuthfoundation/TornadoWallet)
+* [tk-wallet](https://github.com/bismuthfoundation/tk-wallet)
 
-API, RPC, Bisafe, HowTo:
-* https://github.com/EggPool
+### Hypernodes website:
+* https://hypernodes.bismuth.live
 
-Explorer, pool, tools:
+### Social:
+* [Discord](https://discord.gg/dKVZd4z)
+* [Blog](https://hypernodes.bismuth.live/?page_id=20)
+* [Reddit](https://www.reddit.com/r/cryptobismuth)
+* [Facebook](https://web.facebook.com/cryptobismuth)
+* [Telegram](https://t.me/cryptobismuth)
+* [Egg's Twitter](https://twitter.com/EggPoolNet)
+* [Jan's Twitter](https://twitter.com/bismuthdev)
+* [Gawlea's Twitter](https://twitter.com/BismuthPlatform)
+
+### Related repositories:
 * https://github.com/maccaspacca
+* https://github.com/EggPool
+* https://github.com/hclivess
+
+### Links:
 Bismuth Foundation:
 * https://github.com/bismuthfoundation
-
 Twitter:
 * https://twitter.com/bismuthplatform
 * https://twitter.com/bismuthdev
 * https://twitter.com/EggPoolNet
+Discord (main support and community place):
+* https://discord.gg/dKVZd4z
+
 Facebook:
 * https://web.facebook.com/cryptobismuth
 
 Reddit:
 * https://www.reddit.com/r/cryptobismuth/
-Discord:
-* https://discord.gg/dKVZd4z
-
-License:
-
-This program is free software; you can redistribute it and/or
-modify it under the terms of the GNU General Public License
-as published by the Free Software Foundation; either version 2
-of the License, or (at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program; if not, write to the Free Software
-Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
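Note on the aliases.py / aliasesv2.py diffs below: both variants resume indexing from the highest block already present in the aliases index table (the "anchor block"), so a restart re-scans only the tail of the chain. The legacy format carries the alias inside openfield as alias=name, which replace_regex strips, while the v2 indexer matches operation = "alias:register" and takes openfield verbatim. A minimal sketch of the legacy prefix handling, reusing the replace_regex helper the diff keeps (sample values are made up, not from the chain):

import re

def replace_regex(string, replace):
    # strip `replace` only where it is anchored at the start of the string
    return re.sub(r'^{}'.format(replace), "", string)

assert replace_regex("alias=myname", "alias=") == "myname"
# a non-prefix occurrence is left untouched
assert replace_regex("not_an_alias=x", "alias=") == "not_an_alias=x"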
diff --git a/aliases.py b/aliases.py index c8b5804..0468465 100644 --- a/aliases.py +++ b/aliases.py @@ -1,81 +1,36 @@ -import sqlite3 -import re import log -import functools +import re def replace_regex(string,replace): replaced_string = re.sub(r'^{}'.format(replace), "", string) return replaced_string -def sql_trace_callback(log, id, statement): - line = f"SQL[{id}] {statement}" - log.warning(line) - -def aliases_update(f, ledger ,mode, app_log, trace_db_calls=False): - """Where f is the aliases database file""" - # index aliases - if mode not in ("normal", "reindex"): - raise ValueError ("Wrong value for aliases_update function") - - # removed `conn.text_factory = str` because sqlites default `text_factory` is `str` - with sqlite3.connect(ledger) as conn: - if trace_db_calls: - conn.set_trace_callback(functools.partial(sql_trace_callback,app_log,"ALIASES-LEDGER")) - try: - c = conn.cursor() - except: - app_log.error('Failed to create cursor for ledger') +def aliases_update(node, db_handler_instance): + db_handler_instance.index_cursor.execute("SELECT block_height FROM aliases ORDER BY block_height DESC LIMIT 1;") + try: + alias_last_block = int(db_handler_instance.index_cursor.fetchone()[0]) + except: + alias_last_block = 0 - with sqlite3.connect(f) as ali: - if trace_db_calls: - ali.set_trace_callback(functools.partial(sql_trace_callback,app_log,"ALIASES-F")) - try: - a = ali.cursor() - except: - app_log.error('Failed to create cursor for aliases') - return + node.logger.app_log.warning("Alias anchor block: {}".format(alias_last_block)) - try: - a.execute("CREATE TABLE IF NOT EXISTS aliases (block_height INTEGER, address, alias)") - ali.commit() - except: - app_log.error('Failed to create aliases table') - return - - if mode == 'reindex': - app_log.warning("Alias database will be reindexed") - try: - a.execute("DELETE FROM aliases") - ali.commit() - except: - app_log.error('Failed to delete content from aliases table') - return - - a.execute("SELECT block_height FROM aliases ORDER BY block_height DESC LIMIT 1;") - try: - alias_last_block = int(a.fetchone()[0]) - except: - alias_last_block = 0 + db_handler_instance.h.execute("SELECT block_height, address, openfield FROM transactions WHERE openfield LIKE ? AND block_height >= ? ORDER BY block_height ASC, timestamp ASC;", ("alias=" + '%',) + (alias_last_block,)) + # include the anchor block in case indexation stopped there + result = db_handler_instance.h.fetchall() - app_log.warning("Alias anchor block: {}".format(alias_last_block)) - - c.execute("SELECT block_height, address, openfield FROM transactions WHERE openfield LIKE ? AND block_height >= ? 
ORDER BY block_height ASC, timestamp ASC;", ("alias=" + '%',)+(alias_last_block,)) - #include the anchor block in case indexation stopped there - result = c.fetchall() - - for openfield in result: - alias = (replace_regex(openfield[2], "alias=")) - app_log.warning("Processing alias registration: {}".format(alias)) - try: - a.execute("SELECT * from aliases WHERE alias = ?", (alias,)) - dummy = a.fetchall()[0] #check for uniqueness - app_log.warning("Alias already registered: {}".format(alias)) - except: - a.execute("INSERT INTO aliases VALUES (?,?,?)", (openfield[0],openfield[1],alias)) - ali.commit() - app_log.warning("Added alias to the database: {} from block {}".format (alias,openfield[0])) + for openfield in result: + alias = (replace_regex(openfield[2], "alias=")) + node.logger.app_log.warning(f"Processing alias registration: {alias}") + try: + db_handler_instance.index_cursor.execute("SELECT * from aliases WHERE alias = ?", (alias,)) + dummy = db_handler_instance.index_cursor.fetchall()[0] # check for uniqueness + node.logger.app_log.warning(f"Alias already registered: {alias}") + except: + db_handler_instance.index_cursor.execute("INSERT INTO aliases VALUES (?,?,?)", (openfield[0], openfield[1], alias)) + db_handler_instance.index.commit() + node.logger.app_log.warning(f"Added alias to the database: {alias} from block {openfield[0]}") if __name__ == "__main__": app_log = log.log("aliases.log", "WARNING", True) - aliases_update("static/index.db","static/ledger.db","normal",app_log) + diff --git a/aliasesv2.py b/aliasesv2.py new file mode 100644 index 0000000..f7dd739 --- /dev/null +++ b/aliasesv2.py @@ -0,0 +1,33 @@ +import sqlite3 +import re +import log +import functools + +def aliases_update(node, db_handler_instance): + + db_handler_instance.index_cursor.execute("SELECT block_height FROM aliases ORDER BY block_height DESC LIMIT 1;") + try: + alias_last_block = int(db_handler_instance.index_cursor.fetchone()[0]) + except: + alias_last_block = 0 + + node.logger.app_log.warning("Alias anchor block: {}".format(alias_last_block)) + + db_handler_instance.h.execute("SELECT block_height, address, openfield FROM transactions WHERE operation = ? AND block_height >= ? 
AND reward = 0 ORDER BY block_height ASC, timestamp ASC;", ("alias:register", alias_last_block,)) + #include the anchor block in case indexation stopped there + result = db_handler_instance.h.fetchall() + + for openfield in result: + node.logger.app_log.warning(f"Processing alias registration: {openfield[2]}") + try: + db_handler_instance.index_cursor.execute("SELECT * from aliases WHERE alias = ?", (openfield[2],)) + dummy = db_handler_instance.index_cursor.fetchall()[0] #check for uniqueness + node.logger.app_log.warning(f"Alias already registered: {openfield[2]}") + except: + db_handler_instance.index_cursor.execute("INSERT INTO aliases VALUES (?,?,?)", (openfield[0],openfield[1],openfield[2])) + db_handler_instance.index.commit() + node.logger.app_log.warning(f"Added alias to the database: {openfield[2]} from block {openfield[0]}") + + +if __name__ == "__main__": + app_log = log.log("aliases.log", "WARNING", True) diff --git a/apihandler.py b/apihandler.py index fbee988..1643b77 100644 --- a/apihandler.py +++ b/apihandler.py @@ -4,19 +4,20 @@ Needed for Json-RPC server or other third party interaction """ -import re -import sqlite3 import base64 +import json +import os +import sys +import threading +from essentials import format_raw_tx + # modular handlers will need access to the database methods under some form, so it needs to be modular too. # Here, I just duplicated the minimum needed code from node, further refactoring with classes will follow. -import connections, peershandler -import threading -import os, sys -#import math +import connections import mempool as mp -import json +from polysign.signerfactory import SignerFactory -__version__ = "0.0.6" +__version__ = "0.0.8" class ApiHandler: @@ -52,11 +53,78 @@ def dispatch(self, method, socket_handler, db_handler, peers): result = getattr(self, method)(socket_handler, db_handler, peers) return result except AttributeError: - raise - print('KO') - self.app_log.warning("API Method <{}> does not exist.".format(method)) + # raise + self.app_log.warning(f"API Method <{method}> does not exist.") return False + def blockstojson(self, raw_blocks: list): + tx_list = [] + block = {} + blocks = {} + + old = None + for transaction_raw in raw_blocks: + transaction = format_raw_tx(transaction_raw) + height = transaction['block_height'] + hash = transaction['block_hash'] + + del transaction['block_height'] + del transaction['block_hash'] + + if old != height: # if same block + del tx_list[:] + block.clear() + + tx_list.append(transaction) + + block['block_height'] = height + block['block_hash'] = hash + block['transactions'] = list(tx_list) + blocks[height] = dict(block) + + old = height # update + + return blocks + + def blocktojsondiffs(self, list_of_txs:list, list_of_diffs:list): + i = 0 + blocks_dict = {} + block_dict = {} + normal_transactions = [] + + old = None + for transaction in list_of_txs: + transaction_formatted = format_raw_tx(transaction) + height = transaction_formatted["block_height"] + + del transaction_formatted["block_height"] + + # del transaction_formatted["signature"] # optional + # del transaction_formatted["pubkey"] # optional + + if old != height: + block_dict.clear() + del normal_transactions[:] + + if transaction_formatted["reward"] == 0: # if normal tx + del transaction_formatted["block_hash"] + del transaction_formatted["reward"] + normal_transactions.append(transaction_formatted) + + else: + del transaction_formatted["address"] + del transaction_formatted["amount"] + transaction_formatted['difficulty'] = 
list_of_diffs[i][0] + block_dict['mining_tx'] = transaction_formatted + + block_dict['transactions'] = list(normal_transactions) + + blocks_dict[height] = dict(block_dict) + i += 1 + old = height + + return blocks_dict + def api_mempool(self, socket_handler, db_handler, peers): """ Returns all the TX from mempool @@ -68,6 +136,16 @@ def api_mempool(self, socket_handler, db_handler, peers): txs = mp.MEMPOOL.fetchall(mp.SQL_SELECT_TX_TO_SEND) connections.send(socket_handler, txs) + def api_getconfig(self, socket_handler, db_handler, peers): + """ + Returns configuration + :param socket_handler: + :param db_handler: + :param peers: + :return: list of node configuration options + """ + connections.send(socket_handler, self.config.__dict__) + def api_clearmempool(self, socket_handler, db_handler, peers): """ Empty the current mempool @@ -77,8 +155,8 @@ def api_clearmempool(self, socket_handler, db_handler, peers): :return: 'ok' """ mp.MEMPOOL.clear() - connections.send(socket_handler, 'ok') - + connections.send(socket_handler, 'ok') + def api_ping(self, socket_handler, db_handler, peers): """ Void, just to allow the client to keep the socket open (avoids timeout) @@ -103,7 +181,7 @@ def api_getaddressinfo(self, socket_handler, db_handler, peers): # print('api_getaddressinfo', address) try: # format check - if not re.match('[abcdef0123456789]{56}', address): + if not SignerFactory.address_is_valid(address): self.app_log.info("Bad address format <{}>".format(address)) connections.send(socket_handler, info) return @@ -119,21 +197,182 @@ def api_getaddressinfo(self, socket_handler, db_handler, peers): info['pubkey'] = db_handler.h.fetchone()[0] info['pubkey'] = base64.b64decode(info['pubkey']).decode('utf-8') except Exception as e: - print(e) - pass + self.app_log.warning(e) + except Exception as e: - pass + self.app_log.warning(e) + # returns info # print("info", info) connections.send(socket_handler, info) except Exception as e: - pass + self.app_log.warning(e) + + def api_getblockfromhash(self, socket_handler, db_handler, peers): + """ + Returns a specific block based on the provided hash. + Warning: format is strange: we provide a hash, so there should be at most one result. + Or we send back a dict, with height as key, and block (including height again) as value. + Should be enough to only send the block. + **BUT** do not change, this would break current implementations using the current format (json rpc server for instance). + + :param socket_handler: + :param db_handler: + :param peers: + :return: + """ + + block_hash = connections.receive(socket_handler) + + db_handler.execute_param(db_handler.h, + "SELECT * FROM transactions " + "WHERE block_hash = ?", + (block_hash,)) + + result = db_handler.h.fetchall() + blocks = self.blockstojson(result) + connections.send(socket_handler, blocks) + + def api_getblockfromhashextra(self, socket_handler, db_handler, peers): + """ + Returns a specific block based on the provided hash. + similar to api_getblockfromhash, but sends block dict, not a dict of a dict. + Also embeds last and next block hash, as well as block difficulty + Needed for json-rpc server and btc like data. + + :param socket_handler: + :param db_handler: + :param peers: + :return: + """ + try: + block_hash = connections.receive(socket_handler) + + result = db_handler.fetchall(db_handler.h, + "SELECT * FROM transactions " + "WHERE block_hash = ? 
", + (block_hash,)) + blocks = self.blockstojson(result) + block = list(blocks.values())[0] + + block["previous_block_hash"] = db_handler.fetchone(db_handler.h, + "SELECT block_hash FROM transactions WHERE block_height = ?", + (block['block_height'] - 1,)) + block["next_block_hash"] = db_handler.fetchone(db_handler.h, + "SELECT block_hash FROM transactions WHERE block_height = ?", + (block['block_height'] + 1,)) + block["difficulty"] = int(float(db_handler.fetchone(db_handler.h, + "SELECT difficulty FROM misc WHERE block_height = ?", + (block['block_height'],)))) + # print(block) + connections.send(socket_handler, block) + except Exception as e: + self.app_log.warning("api_getblockfromhashextra {}".format(e)) + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + self.app_log.warning("{} {} {}".format(exc_type, fname, exc_tb.tb_lineno)) + raise + + def api_getblockfromheight(self, socket_handler, db_handler, peers): + """ + Returns a specific block based on the provided hash. + + :param socket_handler: + :param db_handler: + :param peers: + :return: + """ + + height = connections.receive(socket_handler) + + db_handler.execute_param(db_handler.h, ("SELECT * FROM transactions " + "WHERE block_height = ? "), + (height,)) + + result = db_handler.h.fetchall() + blocks = self.blockstojson(result) + connections.send(socket_handler, blocks) + + def api_getaddressrange(self, socket_handler, db_handler, peers): + """ + Returns a given number of transactions, maximum of 500 entries. Ignores blocks where no transactions of a given address happened. + Reorganizes parameters to a quickly accessible json. + Unnecessary data are removed. + + :param socket_handler: + :param db_handler: (UNUSED) + :param peers: (UNUSED) + :return: + """ + + address = connections.receive(socket_handler) + starting_block = connections.receive(socket_handler) + limit = connections.receive(socket_handler) + + if limit > 500: + limit = 500 + + db_handler.execute_param(db_handler.h, ("SELECT * FROM transactions " + "WHERE ? IN (address, recipient) " + "AND block_height >= ? " + "ORDER BY block_height " + "ASC LIMIT ?"), + (address, starting_block, limit,)) + + result = db_handler.h.fetchall() + blocks = self.blockstojson(result) + connections.send(socket_handler, blocks) + + def api_getblockrange(self, socket_handler, db_handler, peers): + """ + Returns full blocks and transactions from a block range, maximum of 50 entries. + Includes function format_raw_txs_diffs for formatting. Useful for big data / nosql storage. + :param socket_handler: + :param db_handler: (UNUSED) + :param peers: (UNUSED) + :return: + """ + + start_block = connections.receive(socket_handler) + limit = connections.receive(socket_handler) + + if limit > 50: + limit = 50 + + try: + db_handler.execute_param(db_handler.h, + ('SELECT * FROM transactions ' + 'WHERE block_height >= ? ' + 'AND block_height < ?;'), + (start_block, start_block+limit,)) + raw_txs = db_handler.h.fetchall() + + db_handler.execute_param(db_handler.h, + ('SELECT difficulty FROM misc ' + 'WHERE block_height >= ? 
' + 'AND block_height < ?;'), + (start_block, start_block+limit,)) + raw_diffs = db_handler.h.fetchall() + + reply = json.dumps(self.blocktojsondiffs(raw_txs, raw_diffs)) + + except Exception as e: + self.app_log.warning(e) + raise + connections.send(socket_handler, reply) def api_getblocksince(self, socket_handler, db_handler, peers): """ Returns the full blocks and transactions following a given block_height Returns at most transactions from 10 blocks (the most recent ones if it truncates) Used by the json-rpc server to poll and be notified of tx and new blocks. + + Returns full blocks and transactions following a given block_height. + Given block_height should not be lower than the last 10 blocks. + If given block_height is lower than the most recent block -10, + last 10 blocks will be returned. + + **Used by the json-rpc server to poll and be notified of tx and new blocks** DO NOT REMOVE!!!. :param socket_handler: :param db_handler: :param peers: @@ -142,13 +381,12 @@ def api_getblocksince(self, socket_handler, db_handler, peers): info = [] # get the last known block since_height = connections.receive(socket_handler) - #print('api_getblocksince', since_height) + # print('api_getblocksince', since_height) try: try: db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") # what is the min block height to consider ? block_height = max(db_handler.h.fetchone()[0]-11, since_height) - #print("block_height",block_height) db_handler.execute_param(db_handler.h, ('SELECT * FROM transactions WHERE block_height > ?;'), (block_height, )) @@ -192,19 +430,19 @@ def api_getblockswhereoflike(self, socket_handler, db_handler, peers): # it's a list of tuples, send as is. #print("info", info) except Exception as e: - print("error", e) + self.app_log.warning(e) raise # Add the last fetched block so the client will be able to fetch the next block info.append([block_height]) connections.send(socket_handler, info) except Exception as e: - print(e) + self.app_log.warning(e) exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - print(exc_type, fname, exc_tb.tb_lineno) + self.app_log.warning("{} {} {}".format(exc_type, fname, exc_tb.tb_lineno)) raise - def api_getblocksincewhere(self, socket_handler, db_handler, peers): + def api_getblocksafterwhere(self, socket_handler, db_handler, peers): """ Returns the full transactions following a given block_height and with specific conditions Returns at most transactions from 720 blocks at a time (the most *older* ones if it truncates) so about 12 hours worth of data. @@ -218,7 +456,7 @@ def api_getblocksincewhere(self, socket_handler, db_handler, peers): # get the last known block since_height = connections.receive(socket_handler) where_conditions = connections.receive(socket_handler) - print('api_getblocksincewhere', since_height, where_conditions) + self.app_log.warning('api_getblocksafterwhere', since_height, where_conditions) # TODO: feed as array to have a real control and avoid sql injection !important # Do *NOT* use in production until it's done. raise ValueError("Unsafe, do not use yet") @@ -241,25 +479,25 @@ def api_getblocksincewhere(self, socket_handler, db_handler, peers): db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") # what is the max block height to consider ? 
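# The min() below caps the reply at 720 blocks past since_height (about
# 12 hours of chain data, per the docstring above), so a client that is
# far behind pages through successive calls instead of pulling the whole
# ledger at once. Illustrative numbers (made up): a tip at 900000 with
# since_height 100000 gives min(900000, 100720) = 100720, i.e. only
# blocks 100001..100720 are served.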
block_height = min(db_handler.h.fetchone()[0], since_height+720) - #print("block_height",block_height) + # print("block_height",block_height) db_handler.execute_param(db_handler.h, ('SELECT * FROM transactions WHERE block_height > ? and block_height <= ? and ( '+where_assembled+')'), (since_height, block_height)+conditions_assembled) info = db_handler.h.fetchall() # it's a list of tuples, send as is. - #print(all) + # print(all) except Exception as e: - print(e) + self.app_log.warning(e) raise # print("info", info) connections.send(socket_handler, info) except Exception as e: - print(e) + # self.app_log.warning(e) raise - + def api_getaddresssince(self, socket_handler, db_handler, peers): """ - Returns the full transactions following a given block_height (will not include the given height) for the given address, with at least minirmations confirmations, + Returns the full transactions following a given block_height (will not include the given height) for the given address, with at least min_confirmations confirmations, as well as last considered block. Returns at most transactions from 720 blocks at a time (the most *older* ones if it truncates) so about 12 hours worth of data. @@ -271,14 +509,14 @@ def api_getaddresssince(self, socket_handler, db_handler, peers): info = [] # get the last known block since_height = int(connections.receive(socket_handler)) - minirmations = int(connections.receive(socket_handler)) + min_confirmations = int(connections.receive(socket_handler)) address = str(connections.receive(socket_handler)) - print('api_getaddresssince', since_height, minirmations, address) + print('api_getaddresssince', since_height, min_confirmations, address) try: try: db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") # what is the max block height to consider ? - block_height = min(db_handler.h.fetchone()[0] - minirmations, since_height+720) + block_height = min(db_handler.h.fetchone()[0] - min_confirmations, since_height+720) db_handler.execute_param(db_handler.h, ('SELECT * FROM transactions WHERE block_height > ? AND block_height <= ? ' 'AND ((address = ?) OR (recipient = ?)) ORDER BY block_height ASC'), @@ -287,10 +525,10 @@ def api_getaddresssince(self, socket_handler, db_handler, peers): except Exception as e: print("Exception api_getaddresssince:".format(e)) raise - connections.send(socket_handler, {'last': block_height, 'minconf': minirmations, 'transactions': info}) + connections.send(socket_handler, {'last': block_height, 'minconf': min_confirmations, 'transactions': info}) except Exception as e: - print(e) - raise + # self.app_log.warning(e) + raise def _get_balance(self, db_handler, address, minconf=1): """ @@ -314,10 +552,10 @@ def _get_balance(self, db_handler, address, minconf=1): if not debit: debit = 0 # keep as float - #balance = '{:.8f}'.format(credit - debit) + # balance = '{:.8f}'.format(credit - debit) balance = credit - debit except Exception as e: - print(e) + # self.app_log.warning(e) raise return balance @@ -340,7 +578,7 @@ def api_getbalance(self, socket_handler, db_handler, peers): # TODO: Better to use a single sql query with all addresses listed? 
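# Hedged sketch of the single-query variant this TODO hints at (untested;
# names assumed, not part of the codebase): build one IN (...) clause,
#   placeholders = ", ".join("?" * len(addresses))
#   sql = "... WHERE recipient IN ({}) ...".format(placeholders)
# which trades len(addresses) round trips for one query, but the
# per-address credit/debit/minconf arithmetic done inside _get_balance()
# would have to move into the SQL as well.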
for address in addresses: balance += self._get_balance(db_handler, address, minconf) - #print('api_getbalance', addresses, minconf,':', balance) + # print('api_getbalance', addresses, minconf,':', balance) connections.send(socket_handler, balance) except Exception as e: raise @@ -363,7 +601,7 @@ def _get_received(self, db_handler, address, minconf=1): if not credit: credit = 0 except Exception as e: - print(e) + # self.app_log.warning(e) raise return credit @@ -388,6 +626,7 @@ def api_getreceived(self, socket_handler, db_handler, peers): print('api_getreceived', addresses, minconf,':', received) connections.send(socket_handler, received) except Exception as e: + # self.app_log.warning(e) raise def api_listreceived(self, socket_handler, db_handler, peers): @@ -416,6 +655,7 @@ def api_listreceived(self, socket_handler, db_handler, peers): print('api_listreceived', addresses, minconf,':', received) connections.send(socket_handler, received) except Exception as e: + # self.app_log.warning(e) raise def api_listbalance(self, socket_handler, db_handler, peers): @@ -442,11 +682,12 @@ def api_listbalance(self, socket_handler, db_handler, peers): print('api_listbalance', addresses, minconf,':', balances) connections.send(socket_handler, balances) except Exception as e: + # self.app_log.warning(e) raise def api_gettransaction(self, socket_handler, db_handler, peers): """ - Returns the full transaction matching a tx id. Takes txid anf format as params (json output if format is True) + Returns the full transaction matching a tx id. Takes txid anf format as params (json output if format is True) :param socket_handler: :param db_handler: :param peers: @@ -456,11 +697,11 @@ def api_gettransaction(self, socket_handler, db_handler, peers): try: # get the txid transaction_id = connections.receive(socket_handler) - # and format + # and format format = connections.receive(socket_handler) # raw tx details db_handler.execute_param(db_handler.h, - "SELECT * FROM transactions WHERE signature like ?", + "SELECT * FROM transactions WHERE substr(signature,1,4)=substr(?1,1,4) and signature like ?1", (transaction_id+'%',)) raw = db_handler.h.fetchone() if not format: @@ -468,7 +709,7 @@ def api_gettransaction(self, socket_handler, db_handler, peers): print('api_gettransaction', format, raw) return - # current block height, needed for confirmations # + # current block height, needed for confirmations # db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") block_height = db_handler.h.fetchone()[0] transaction['txid'] = transaction_id @@ -481,7 +722,10 @@ def api_gettransaction(self, socket_handler, db_handler, peers): transaction['reward'] = raw[9] transaction['operation']= raw[10] transaction['openfield'] = raw[11] - transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + try: + transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + except: + transaction['pubkey'] = raw[6] # support new pubkey schemes transaction['blockhash'] = raw[7] transaction['blockheight'] = raw[0] transaction['confirmations'] = block_height - raw[0] @@ -495,6 +739,7 @@ def api_gettransaction(self, socket_handler, db_handler, peers): print('api_gettransaction', format, transaction) connections.send(socket_handler, transaction) except Exception as e: + # self.app_log.warning(e) raise def api_gettransactionbysignature(self, socket_handler, db_handler, peers): @@ -513,7 +758,7 @@ def api_gettransactionbysignature(self, socket_handler, db_handler, peers): format = connections.receive(socket_handler) # raw 
tx details db_handler.execute_param(db_handler.h, - "SELECT * FROM transactions WHERE signature = ?", + "SELECT * FROM transactions WHERE substr(signature,1,4)=substr(?1,1,4) and signature = ?1", (signature,)) raw = db_handler.h.fetchone() if not format: @@ -521,7 +766,7 @@ def api_gettransactionbysignature(self, socket_handler, db_handler, peers): print('api_gettransactionbysignature', format, raw) return - # current block height, needed for confirmations # + # current block height, needed for confirmations db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") block_height = db_handler.h.fetchone()[0] transaction['signature'] = signature @@ -534,7 +779,10 @@ def api_gettransactionbysignature(self, socket_handler, db_handler, peers): transaction['reward'] = raw[9] transaction['operation'] = raw[10] transaction['openfield'] = raw[11] - transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + try: + transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + except: + transaction['pubkey'] = raw[6] # support new pubkey schemes transaction['blockhash'] = raw[7] transaction['blockheight'] = raw[0] transaction['confirmations'] = block_height - raw[0] @@ -548,6 +796,7 @@ def api_gettransactionbysignature(self, socket_handler, db_handler, peers): print('api_gettransactionbysignature', format, transaction) connections.send(socket_handler, transaction) except Exception as e: + # self.app_log.warning(e) raise def api_getpeerinfo(self, socket_handler, db_handler, peers): @@ -565,62 +814,68 @@ def api_getpeerinfo(self, socket_handler, db_handler, peers): # TODO: add outbound connection connections.send(socket_handler, info) except Exception as e: - pass + self.app_log.warning(e) -def api_gettransaction_for_recipients(self, socket_handler, db_handler, peers): - """ - Warning: this is currently very slow - Returns the full transaction matching a tx id for a list of recipient addresses. - Takes txid anf format as params (json output if format is True) - :param socket_handler: - :param db_handler: - :param peers: - :return: - """ - transaction = {} - try: - # get the txid - transaction_id = connections.receive(socket_handler) - # then the recipient list - addresses = connections.receive(socket_handler) - # and format - format = connections.receive(socket_handler) - recipients = json.dumps(addresses).replace("[", "(").replace(']', ')') # format as sql - # raw tx details - db_handler.execute_param(db_handler.h, - "SELECT * FROM transactions WHERE recipient IN {} AND signature LIKE ?".format(recipients), - (transaction_id + '%', )) - raw = db_handler.h.fetchone() - if not format: - connections.send(socket_handler, raw) - print('api_gettransaction_for_recipients', format, raw) - return + def api_gettransaction_for_recipients(self, socket_handler, db_handler, peers): + """ + Warning: this is currently very slow + Returns the full transaction matching a tx id for a list of recipient addresses. 
+ Takes txid and format as params (json output if format is True) + :param socket_handler: + :param db_handler: + :param peers: + :return: + """ + transaction = {} + try: + # get the txid + transaction_id = connections.receive(socket_handler) + # then the recipient list + addresses = connections.receive(socket_handler) + # and format + format = connections.receive(socket_handler) + recipients = json.dumps(addresses).replace("[", "(").replace(']', ')') # format as sql + # raw tx details + db_handler.execute_param(db_handler.h, + "SELECT * FROM transactions WHERE recipient IN {} AND substr(signature,1,4)=substr(?1,1,4) and signature LIKE ?1".format(recipients), + (transaction_id + '%', )) + raw = db_handler.h.fetchone() + if not format: + connections.send(socket_handler, raw) + print('api_gettransaction_for_recipients', format, raw) + return - # current block height, needed for confirmations # - db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") - block_height = db_handler.h.fetchone()[0] - transaction['txid'] = transaction_id - transaction['time'] = raw[1] - transaction['hash'] = raw[5] - transaction['address'] = raw[2] - transaction['recipient'] = raw[3] - transaction['amount'] = raw[4] - transaction['fee'] = raw[8] - transaction['reward'] = raw[9] - transaction['operation']= raw[10] - transaction['openfield'] = raw[11] - transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') - transaction['blockhash'] = raw[7] - transaction['blockheight'] = raw[0] - transaction['confirmations'] = block_height - raw[0] - # Get more info on the block the tx is in. - db_handler.execute_param(db_handler.h, - "SELECT timestamp, recipient FROM transactions WHERE block_height= ? AND reward > 0", - (raw[0],)) - block_data = db_handler.h.fetchone() - transaction['blocktime'] = block_data[0] - transaction['blockminer'] = block_data[1] - print('api_gettransaction_for_recipients', format, transaction) - connections.send(socket_handler, transaction) - except Exception as e: - raise + # current block height, needed for confirmations # + db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions") + block_height = db_handler.h.fetchone()[0] + transaction['txid'] = transaction_id + transaction['time'] = raw[1] + transaction['hash'] = raw[5] + transaction['address'] = raw[2] + transaction['recipient'] = raw[3] + transaction['amount'] = raw[4] + transaction['fee'] = raw[8] + transaction['reward'] = raw[9] + transaction['operation']= raw[10] + transaction['openfield'] = raw[11] + + try: + transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + except: + transaction['pubkey'] = raw[6] # support new pubkey schemes + + transaction['blockhash'] = raw[7] + transaction['blockheight'] = raw[0] + transaction['confirmations'] = block_height - raw[0] + # Get more info on the block the tx is in. + db_handler.execute_param(db_handler.h, + "SELECT timestamp, recipient FROM transactions WHERE block_height= ? 
AND reward > 0", + (raw[0],)) + block_data = db_handler.h.fetchone() + transaction['blocktime'] = block_data[0] + transaction['blockminer'] = block_data[1] + print('api_gettransaction_for_recipients', format, transaction) + connections.send(socket_handler, transaction) + except Exception as e: + # self.app_log.warning(e) + raise diff --git a/balance_nogui.py b/balance_nogui.py index fe56323..9110399 100644 --- a/balance_nogui.py +++ b/balance_nogui.py @@ -5,7 +5,7 @@ node_ip = config.node_ip port = config.port -key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, address = essentials.keys_load("privkey.der", "pubkey.der") +key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_b64encoded, address = essentials.keys_load("privkey.der", "pubkey.der") s = socks.socksocket() s.settimeout(10) diff --git a/bisurl.py b/bisurl.py deleted file mode 100644 index c3bff0e..0000000 --- a/bisurl.py +++ /dev/null @@ -1,43 +0,0 @@ -import hashlib, base64 - -def checksum(string): - #return base64.urlsafe_b85encode(string.encode("utf-8")).decode("utf-8")[:8] - m = hashlib.md5() - m.update(string.encode("utf-8")) - return base64.b85encode(m.digest()).decode("utf-8") - - -def create_url(app_log, command, recipient, amount, operation, openfield): - if command == "pay": - openfield_b85_encode = base64.b85encode(openfield.encode("utf-8")).decode("utf-8") - operation_b85_encode = base64.b85encode(operation.encode("utf-8")).decode("utf-8") - - url_partial = "bis://{}/{}/{}/{}/{}/".format(command,recipient,amount,operation_b85_encode,openfield_b85_encode) - - url_constructed = url_partial+checksum(url_partial) - app_log.warning(url_constructed) - return url_constructed - -def read_url(app_log, url): - url_split = url.split("/") - app_log.warning(url_split) - reconstruct = "bis://{}/{}/{}/{}/{}/".format(url_split[2],url_split[3],url_split[4],url_split[5],url_split[6],url_split[7]) - operation_b85_decode = base64.b85decode(url_split[5]).decode("utf-8") - openfield_b85_decode = base64.b85decode(url_split[6]).decode("utf-8") - - if checksum(reconstruct) == url_split[7]: - url_deconstructed = url_split[2],url_split[3],url_split[4],operation_b85_decode,openfield_b85_decode - app_log.warning("Checksum match") - return url_deconstructed - else: - app_log.warning("Checksum mismatch",checksum(reconstruct),url_split[7]) - return - - -if __name__ == "__main__": - #test - import log - app_log = log.log("node.log", "WARNING", True) - - print ("create_url", create_url (app_log, "pay", "recipient", "10", "test", "test1")) - print ("read_url", read_url(app_log, "bis://pay/recipient/10/bY*jN/bY*jNF#/w8YD>)O(N?Z)!FWkoCFX")) \ No newline at end of file diff --git a/cmd_addpeers.py b/cmd_addpeers.py new file mode 100644 index 0000000..87bf91a --- /dev/null +++ b/cmd_addpeers.py @@ -0,0 +1,51 @@ +""" + +cmd for the addpeers command. + +Adds new peers to a running node + +Usage: + +python3 cmd_addpeers.py ip:port + +python3 cmd_addpeers.py ip:port,ip:port,ip:port + +if port is omitted, 5658 will be used. +""" + + +import connections +import json +import socks +import sys + + +__version__ = "0.0.1" + + +def add_peers(peers: dict): + s = socks.socksocket() + s.settimeout(10) + s.connect(("127.0.0.1", 5658)) + # Command first + connections.send(s, "addpeers") + # addpeers expects a string, that is a json encoded dict. 
+ connections.send(s, json.dumps(peers)) + res = connections.receive(s) + return res + + +if __name__ == "__main__": + _, peers_string = sys.argv + peers = peers_string.split(',') + peers_dict = {} + for peer in peers: + if ':' in peer: + ip, port = peer.split(':') + else: + ip = peer + port = '5658' + peers_dict[ip] = port + res_as_dict = add_peers(peers_dict) + print("Answer (-1 means busy testing peers):") + print(json.dumps(res_as_dict, indent=2)) diff --git a/cmd_hn_last_block_ts.py b/cmd_hn_last_block_ts.py new file mode 100644 index 0000000..73f0234 --- /dev/null +++ b/cmd_hn_last_block_ts.py @@ -0,0 +1,44 @@ +""" + +test cmd for the hn_last_block_ts command. + + +Usage: + +python3 cmd_hn_last_block_ts + +""" + + +import connections +import json +import socks +import sys + + +__version__ = "0.0.1" + + +ORIGIN_OF_TIME = 1534716000 # Real Origin: August 20 +POS_SLOT_TIME_MIN = 3 # Real world setting? +POS_SLOT_TIME_SEC = POS_SLOT_TIME_MIN * 60 +MAX_ROUND_SLOTS = 19 # Real world. 19+1 = 20 , 3x20 = 60 (round time) +END_ROUND_SLOTS = 1 +# Round time in seconds +ROUND_TIME_SEC = POS_SLOT_TIME_SEC * (MAX_ROUND_SLOTS + END_ROUND_SLOTS) + + +def hn_last_block_ts(): + s = socks.socksocket() + s.settimeout(10) + s.connect(("127.0.0.1", 5658)) + # Last param is ip, to get feed of a specific ip, False for all. + connections.send(s, "HN_last_block_ts") + res = connections.receive(s) + return res + + +if __name__ == "__main__": + res_as_dict = hn_last_block_ts() + print("Answer (<=0 means fail):") + print(json.dumps(res_as_dict, indent=2)) diff --git a/cmd_hn_reg_round.py b/cmd_hn_reg_round.py new file mode 100644 index 0000000..55aab80 --- /dev/null +++ b/cmd_hn_reg_round.py @@ -0,0 +1,46 @@ +""" + +cmd for the hn_reg_round command. + + +Usage: + +python3 cmd_hn_reg_round.py round + +""" + + +import connections +import json +import socks +import sys + + +__version__ = "0.0.1" + + +ORIGIN_OF_TIME = 1534716000 # Real Origin: August 20 +POS_SLOT_TIME_MIN = 3 # Real world setting? +POS_SLOT_TIME_SEC = POS_SLOT_TIME_MIN * 60 +MAX_ROUND_SLOTS = 19 # Real world. 19+1 = 20 , 3x20 = 60 (round time) +END_ROUND_SLOTS = 1 +# Round time in seconds +ROUND_TIME_SEC = POS_SLOT_TIME_SEC * (MAX_ROUND_SLOTS + END_ROUND_SLOTS) + + +def hn_reg_round(round: int, pow_height: int=0): + s = socks.socksocket() + s.settimeout(10) + s.connect(("127.0.0.1", 5658)) + timestamp = ORIGIN_OF_TIME + round * ROUND_TIME_SEC + # Last param is ip, to get feed of a specific ip, False for all. 
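# With the constants above, ROUND_TIME_SEC = 180 * (19 + 1) = 3600, so one
# PoS round spans exactly one hour and the timestamp computed above is
# ORIGIN_OF_TIME + round * 3600, e.g. round 1000 gives
# 1534716000 + 3600000 = 1538316000.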
+ connections.send(s, "HN_reg_round {} {} {} False".format(round, timestamp, pow_height)) + res = connections.receive(s) + return res + + +if __name__ == "__main__": + _, round_string = sys.argv + res_as_dict = hn_reg_round(int(round_string)) + print("Answer (<=0 means fail):") + print(json.dumps(res_as_dict, indent=2)) diff --git a/commands.py b/commands.py index b85e12d..1add392 100644 --- a/commands.py +++ b/commands.py @@ -1,11 +1,13 @@ +#!/usr/bin/env python3 + import socks, connections, time, sys, json import options config = options.Get() config.read() version = config.version -print ('Number of arguments:', len(sys.argv), 'arguments.') -print ('Argument List:', str(sys.argv)) +# print ('Number of arguments:', len(sys.argv), 'arguments.') +# print ('Argument List:', str(sys.argv)) try: command = sys.argv[1] @@ -85,17 +87,8 @@ #s.connect(("34.192.6.105", 5658)) #s.connect(("bismuth.live", 5658)) -def stop(socket): - connections.send(s, "stop") - - -def annverget(socket): - connections.send(s, "annverget") - result = connections.receive(s) - print (result) - -def annget(socket): - connections.send(s, "annget") +def api_getconfig(socket): + connections.send(s, "api_getconfig") result = connections.receive(s) print (result) @@ -210,6 +203,14 @@ def blocklastjson(socket): print(json.dumps(response)) #get last block +def api_getblocksince(socket, arg1=None): + #get last block + connections.send(s, "api_getblocksince") + if arg1: + connections.send(s, arg1) + response = connections.receive(s) + print(json.dumps(response)) + #get last block def keygen(socket): #generate address @@ -309,6 +310,12 @@ def listlim(socket, arg1): for row in tx_list: print (row) +def api_getblockfromhash(socket, arg1): + connections.send(s, "api_getblockfromhash") + connections.send(s, arg1) + reply = connections.receive(s) + print(reply) + def listlimjson(socket, arg1): #get x last txs connections.send(s, "listlimjson") @@ -364,6 +371,11 @@ def statusget(socket): response = connections.receive(s) print(json.dumps(response)) +def portget(socket): + connections.send(s, "portget") + response = connections.receive(s) + print(json.dumps(response)) + def addvalidate(socket, arg1): connections.send(s, "addvalidate") connections.send(s, arg1) @@ -391,7 +403,6 @@ def api_getaddresssince(socket, arg1, arg2, arg3): connections.send(s, "getversion") print(connections.receive(s)) - if command == "generate": if not is_regnet: print("Only available on regnet") @@ -408,7 +419,6 @@ def api_getaddresssince(socket, arg1, arg2, arg3): connections.send(s, arg1) print(connections.receive(s)) - if command == "mpinsert": #arg1 = '1520788207.69', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '0.00000000', 'e0piKXvc636t0fYmxdOti3fJZ+G1vQYAJ2IZv4inPGQYgG4nS0lU+61LDQQVqeGvmsDOsxFhM6VVLpYExPmc5HF6e1ZAr5IXQ69s88sJBx/XVl1YavAdo0katGDyvZpQf609F8PVbtD0zzBinQjfkoXU/NXo00CEyniyYPxAXuI=', 'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlHZk1BMEdDU3FHU0liM0RRRUJBUVVBQTRHTkFEQ0JpUUtCZ1FES3ZMVGJEeDg1YTF1Z2IvNnhNTWhWT3E2VQoyR2VZVDgrSXEyejlGd0lNUjQwbDJ0dEdxTks3dmFyTmNjRkxJdThLbjRvZ0RRczNXU1dRQ3hOa2haaC9GcXpGCllZYTMvSXRQUGZ6clhxZ2Fqd0Q4cTRadDRZbWp0OCsyQmtJbVBqakZOa3VUUUl6Mkl1M3lGcU9JeExkak13N24KVVZ1OXRGUGlVa0QwVm5EUExRSURBUUFCCi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==', '0', '' mpinsert(s, arg1) @@ -449,11 +459,8 @@ def api_getaddresssince(socket, arg1, arg2, arg3): elif command == "balancegethyperjson": balancegethyperjson(s, arg1) -elif command == "annget": - annget(s) - -elif command == 
"annverget": - annverget(s) +elif command == "api_getconfig": + api_getconfig(s) elif command == "mpget": mpget(s) @@ -464,6 +471,9 @@ def api_getaddresssince(socket, arg1, arg2, arg3): elif command == "statusget": statusget(s) +elif command == "portget": + portget(s) + elif command == "peersget": peersget(s) @@ -503,11 +513,18 @@ def api_getaddresssince(socket, arg1, arg2, arg3): elif command == "listlim": listlim(s, arg1) -elif command == "stop": - stop(s) +elif command == "api_getblockfromhash": + api_getblockfromhash(s, arg1) + +elif command == "api_getblocksince": + try: + api_getblocksince(s, int(arg1)) + except: + api_getblocksince(s) elif command == "stop": connections.send(s, "stop") + print("Asked to Stop") elif command == "addfromalias": addfromalias(s, arg1) diff --git a/compile_nuitka.cmd b/compile_nuitka.cmd index 48e5389..7091919 100644 --- a/compile_nuitka.cmd +++ b/compile_nuitka.cmd @@ -4,32 +4,19 @@ mkdir dist python -m nuitka --follow-imports commands.py --windows-icon=graphics\icon.ico --standalone --show-progress -j 8 --recurse-all python -m nuitka --follow-imports node.py --windows-icon=graphics\icon.ico --standalone --show-progress -j 8 --recurse-all -python -m nuitka --follow-imports wallet.py --windows-icon=graphics\icon.ico --standalone --show-progress -j 8 --recurse-all python -m nuitka --follow-imports node_stop.py --windows-icon=graphics\icon.ico --standalone --show-progress -j 8 --recurse-all -robocopy node.dist dist\files /MOVE /E -robocopy wallet.dist dist\files /MOVE /E -robocopy commands.dist dist\files /MOVE /E -robocopy node_stop.dist dist\files /MOVE /E +robocopy "C:\Program Files\Python37\Lib\site-packages\Cryptodome" dist\Cryptodome /MIR +robocopy "C:\Program Files\Python37\Lib\site-packages\coincurve" dist\coincurve /MIR -robocopy C:\Program Files\Python37\Lib\site-packages\Cryptodome dist\files\Cryptodome /MIR -robocopy C:\Program Files\Python37\tcl dist\lib /MIR -robocopy C:\Program Files\Python37\Lib\site-packages\coincurve dist\files\coincurve /MIR +robocopy node.dist dist /MOVE /E +robocopy commands.dist dist /MOVE /E +robocopy node_stop.dist dist /MOVE /E -mkdir dist\files\static -copy static\backup.py dist\files\static\backup.py -copy static\bg.jpg dist\files\static\bg.jpg -copy static\Chart.js dist\files\static\Chart.js -copy static\explorer.ico dist\files\static\explorer.ico -copy static\explorer_bg.png dist\files\static\explorer_bg.png -copy static\style.css dist\files\static\style.css -copy static\style_zircodice.css dist\files\static\style_zircodice.css -copy static\zircodice.ico dist\files\static\zircodice.ico +copy peers.txt dist\peers.txt +copy peers.txt dist\suggested_peers.txt +copy config.txt dist\config.txt -copy peers.txt dist\files\peers.txt -copy peers.txt dist\files\suggested_peers.txt -copy config.txt dist\files\config.txt - -"C:\Program Files (x86)\Inno Setup 5\iscc" /q "setup_nuitka.iss" +"C:\Program Files (x86)\Inno Setup 5\iscc" /q "setup.iss" pause diff --git a/compile_pyinstaller.cmd b/compile_pyinstaller.cmd deleted file mode 100644 index 1e19e01..0000000 --- a/compile_pyinstaller.cmd +++ /dev/null @@ -1,34 +0,0 @@ -del /f /s /q dist 1>nul -rmdir /s /q dist -del /f /s /q build 1>nul -rmdir /s /q build - -pyinstaller --uac-admin --log-level=INFO commands.py --icon=graphics\icon.ico --hidden-import=pycryptodomex --hidden-import=PySocks -pyinstaller --uac-admin --log-level=INFO wallet.py --icon=graphics\icon.ico --hidden-import=pycryptodomex --hidden-import=PySocks -pyinstaller --uac-admin --log-level=INFO node.py 
--icon=graphics\icon.ico --hidden-import=pycryptodomex --hidden-import=PySocks - -robocopy graphics dist\graphics -robocopy themes dist\themes -robocopy dist\wallet dist\ /move /E -rmdir /s /q dist\wallet -robocopy dist\node dist\ /move /E -rmdir /s /q dist\node -robocopy dist\commands dist\ /move /E -rmdir /s /q dist\commands - -mkdir dist\static -copy static\backup.py dist\static\backup.py -copy static\bg.jpg dist\static\bg.jpg -copy static\Chart.js dist\static\Chart.js -copy static\explorer.ico dist\static\explorer.ico -copy static\explorer_bg.png dist\static\explorer_bg.png -copy static\style.css dist\static\style.css -copy static\style_zircodice.css dist\static\style_zircodice.css -copy static\zircodice.ico dist\static\zircodice.ico - -copy peers.txt dist\peers.txt -copy config.txt dist\config.txt - -"C:\Program Files (x86)\Inno Setup 5\iscc" /q "setup.iss" -pause - diff --git a/config.txt b/config.txt index e5e9428..708b6fb 100644 --- a/config.txt +++ b/config.txt @@ -1,7 +1,7 @@ port=5658 verify=False -version=mainnet0020 -version_allow=mainnet0020,mainnet0021 +version=mainnet0021 +version_allow=mainnet0020,mainnet0021,mainnet0022 thread_limit=64 rebuild_db=True debug=False @@ -17,7 +17,7 @@ tor=False allowed=127.0.0.1,192.168.0.1,any ram=False node_ip=127.0.0.1 -light_ip=127.0.0.1,bismuth.live:8150,wallet.bismuth.online:8150,wallet1.bismuth.online:8150,wallet2.bismuth.online:8150,wallet.bismuthplatform.de:8150 +light_ip={"127.0.0.1": "5658", "bismuth.live":"5658", "51.15.226.30" : "8150","wallet.bismuth.online":"8150","wallet1.bismuth.online":"8150","wallet2.bismuth.online":"8150","wallet.bismuthplatform.de":"8150"} reveal_address=True accept_peers=True banlist=127.1.2.3 @@ -29,3 +29,4 @@ gui_scaling=adapt mempool_ram=True egress=True trace_db_calls=False +heavy3_path=./heavy3a.bin diff --git a/connectionmanager.py b/connectionmanager.py index 548b2ce..71bd885 100644 --- a/connectionmanager.py +++ b/connectionmanager.py @@ -2,6 +2,7 @@ import time from worker import worker + class ConnectionManager (threading.Thread): def __init__(self, node, mp): threading.Thread.__init__(self, name="ConnectionManagerThread") @@ -18,23 +19,20 @@ def connection_manager(self): self.logger.app_log.warning("Status: Starting connection manager") until_purge = 0 - while not self.node.IS_STOPPING or self.db_lock.locked(): + while not self.node.IS_STOPPING: try: # dict_keys = peer_dict.keys() # random.shuffle(peer_dict.items()) - if until_purge == 0: + if until_purge <= 0 and not self.db_lock.locked: # will purge once at start, then about every hour (120 * 30 sec) self.mp.MEMPOOL.purge() until_purge = 120 - until_purge -= 1 # peer management - if not self.node.is_regnet: # regnet never tries to connect - self.node.peers.client_loop(self.node, this_target = worker) - + self.node.peers.client_loop(self.node, this_target=worker) self.logger.app_log.warning(f"Status: Threads at {threading.active_count()} / {self.node.thread_limit}") self.logger.app_log.info(f"Status: Syncing nodes: {self.node.syncing}") self.logger.app_log.info(f"Status: Syncing nodes: {len(self.node.syncing)}/3") @@ -42,14 +40,13 @@ def connection_manager(self): # Status display for Peers related info self.node.peers.status_log() self.mp.MEMPOOL.status() - + # last block if self.node.last_block_ago: self.node.last_block_ago = time.time() - int(self.node.last_block_timestamp) - self.logger.app_log.warning(f"Status: Last block {self.node.last_block} was generated {'%.2f' % (self.node.last_block_ago / 60) } minutes ago") - # last block + 
self.logger.app_log.warning(f"Status: Last block {self.node.last_block} was generated " + f"{'%.2f' % (self.node.last_block_ago / 60) } minutes ago") # status Hook uptime = int(time.time() - self.node.startup_time) - status = {"protocolversion": self.node.version, "walletversion": self.node.app_version, "testnet": self.node.is_testnet, @@ -68,21 +65,11 @@ def connection_manager(self): self.node.plugin_manager.execute_action_hook('status', status) # end status hook - if self.node.peerfile_suggested: # if it is not empty - try: - self.node.peers.peers_dump(self.node.peerfile_suggested, self.node.peers.peer_dict) - except Exception as e: - self.logger.app_log.warning(f"There was an issue saving peers ({e}), skipped") - pass - # logger.app_log.info(threading.enumerate() all threads) - time.sleep(30) - """ + # time.sleep(30) for i in range(30): # faster stop - if not node.IS_STOPPING: + if not self.node.IS_STOPPING: time.sleep(1) - """ except Exception as e: self.logger.app_log.warning(f"Error in connection manger ({e})") - diff --git a/connections.py b/connections.py index 3489998..587425e 100644 --- a/connections.py +++ b/connections.py @@ -9,7 +9,7 @@ def send(sdef, data, slen=SLEN): sdef.setblocking(1) # Make sure the packet is sent in one call - sdef.sendall(str(len(str(json.dumps(data)))).encode("utf-8").zfill(slen) + str(json.dumps(data)).encode("utf-8")) + sdef.sendall(str(len(json.dumps(data))).encode("utf-8").zfill(slen) + json.dumps(data).encode("utf-8")) if "Linux" in platform.system(): READ_OR_ERROR = select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR | select.POLLNVAL diff --git a/db_looper.py b/db_looper.py deleted file mode 100644 index 814b213..0000000 --- a/db_looper.py +++ /dev/null @@ -1,40 +0,0 @@ -import threading -import time -import queue - -class DbManager (threading.Thread): - - def __init__(self,app_log): - threading.Thread.__init__(self) - self.app_log = app_log - self.q = queue.Queue() - - def run(self): - self.db_manager() - - def db_manager(self): - self.app_log.warning("db_manager initiated") - while True: - self.app_log.warning("db_manager running") - - self.app_log.warning("getting queue") - - if self.q: - queue_item = self.q.get() - self.app_log.warning("sending queue") - - - time.sleep(5) - -if __name__ == "__main__": - import options - import log - - config = options.Get() - config.read() - app_log = log.log("db_manager.log", "WARNING", True) - - db_manager = DbManager(app_log) - db_manager.start() - - print("we can continue without being blocked") \ No newline at end of file diff --git a/dbhandler.py b/dbhandler.py index 29c19d5..4dbd22c 100644 --- a/dbhandler.py +++ b/dbhandler.py @@ -5,9 +5,10 @@ import time import sqlite3 import essentials -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten import functools from fork import Fork +import sys def sql_trace_callback(log, id, statement): @@ -30,18 +31,21 @@ def __init__(self, index_db, ledger_path, hyper_path, ram, ledger_ram_file, logg if self.trace_db_calls: self.index.set_trace_callback(functools.partial(sql_trace_callback,self.logger.app_log,"INDEX")) self.index.text_factory = str + self.index.execute('PRAGMA case_sensitive_like = 1;') self.index_cursor = self.index.cursor() self.hdd = sqlite3.connect(self.ledger_path, timeout=1) if self.trace_db_calls: self.hdd.set_trace_callback(functools.partial(sql_trace_callback,self.logger.app_log,"HDD")) self.hdd.text_factory = str + self.hdd.execute('PRAGMA case_sensitive_like = 1;') self.h = self.hdd.cursor() 
self.hdd2 = sqlite3.connect(self.hyper_path, timeout=1) if self.trace_db_calls: self.hdd2.set_trace_callback(functools.partial(sql_trace_callback,self.logger.app_log,"HDD2")) self.hdd2.text_factory = str + self.hdd2.execute('PRAGMA case_sensitive_like = 1;') self.h2 = self.hdd2.cursor() if self.ram: @@ -52,13 +56,18 @@ def __init__(self, index_db, ledger_path, hyper_path, ram, ledger_ram_file, logg if self.trace_db_calls: self.conn.set_trace_callback(functools.partial(sql_trace_callback,self.logger.app_log,"CONN")) self.conn.execute('PRAGMA journal_mode = WAL;') + self.conn.execute('PRAGMA case_sensitive_like = 1;') self.conn.text_factory = str self.c = self.conn.cursor() - self.SQL_TO_TRANSACTIONS = "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)" self.SQL_TO_MISC = "INSERT INTO misc VALUES (?,?)" + def last_block_hash(self): + self.execute(self.c, "SELECT block_hash FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;") + result = self.c.fetchone()[0] + return result + def pubkeyget(self, address): self.execute_param(self.c, "SELECT public_key FROM transactions WHERE address = ? and reward = 0 LIMIT 1", (address,)) result = self.c.fetchone()[0] @@ -125,6 +134,30 @@ def aliasesget(self, aliases_request): results.append(result) return results + def block_height_from_hash(self, data): + try: + self.execute_param(self.h, "SELECT block_height FROM transactions WHERE block_hash = ?;",(data,)) + result = self.h.fetchone()[0] + except: + result = None + + return result + + def blocksync(self, block): + blocks_fetched = [] + while sys.getsizeof( + str(blocks_fetched)) < 500000: # limited size based on txs in blocks + # db_handler.execute_param(db_handler.h, ("SELECT block_height, timestamp,address,recipient,amount,signature,public_key,keep,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"),(str(int(client_block)),) + (str(int(client_block + 1)),)) + self.execute_param(self.h, ( + "SELECT timestamp,address,recipient,amount,signature,public_key,operation,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"), + (str(int(block)), str(int(block + 1)),)) + result = self.h.fetchall() + if not result: + break + blocks_fetched.extend([result]) + block = int(block) + 1 + return blocks_fetched + def block_height_max(self): self.h.execute("SELECT max(block_height) FROM transactions") return self.h.fetchone()[0] @@ -146,15 +179,15 @@ def backup_higher(self, block_height): self.execute_param(self.c, "SELECT * FROM transactions WHERE block_height >= ?;", (block_height,)) backup_data = self.c.fetchall() - self.execute_param(self.c, "DELETE FROM transactions WHERE block_height >= ? OR block_height <= ?", (block_height, -block_height)) #this belongs to rollback_to - self.commit(self.conn) #this belongs to rollback_to + self.execute_param(self.c, "DELETE FROM transactions WHERE block_height >= ? 
OR block_height <= ?", (block_height, -block_height)) #this belongs to rollback_under + self.commit(self.conn) #this belongs to rollback_under - self.execute_param(self.c, "DELETE FROM misc WHERE block_height >= ?;", (block_height,)) #this belongs to rollback_to - self.commit(self.conn) #this belongs to rollback_to + self.execute_param(self.c, "DELETE FROM misc WHERE block_height >= ?;", (block_height,)) #this belongs to rollback_under + self.commit(self.conn) #this belongs to rollback_under return backup_data - def rollback_to(self, block_height): + def rollback_under(self, block_height): self.h.execute("DELETE FROM transactions WHERE block_height >= ? OR block_height <= ?", (block_height, -block_height,)) self.commit(self.hdd) @@ -167,6 +200,11 @@ def rollback_to(self, block_height): self.h2.execute("DELETE FROM misc WHERE block_height >= ?", (block_height,)) self.commit(self.hdd2) + def rollback_to(self, block_height): + # We don'tt need node to have the logger + self.logger.app_log.error("rollback_to is deprecated, use rollback_under") + self.rollback_under(block_height) + def tokens_rollback(self, node, height): """Rollback Token index @@ -185,24 +223,6 @@ def tokens_rollback(self, node, height): except Exception as e: node.logger.app_log.warning(f"Failed to roll back the token index below {(height)} due to {e}") - def staking_rollback(self, node, height): - """Rollback staking index - - :param height: height index of token in chain - - Simply deletes from the `staking` table where the block_height is - greater than or equal to the :param height: and logs the new height - - returns None - """ - try: - self.execute_param(self.index_cursor, "DELETE FROM staking WHERE block_height >= ?;", (height,)) - self.commit(self.index) - - node.logger.app_log.warning(f"Rolled back the staking index below {(height)}") - except Exception as e: - node.logger.app_log.warning(f"Failed to roll back the staking index below {(height)} due to {e}") - def aliases_rollback(self, node, height): """Rollback Alias index @@ -230,10 +250,18 @@ def dev_reward(self,node,block_array,miner_tx,mining_reward,mirror_hash): def hn_reward(self,node,block_array,miner_tx,mirror_hash): fork = Fork() - if node.last_block >= fork.POW_FORK or (node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET): - self.reward_sum = "24" + if node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET: + self.reward_sum = 24 - 10 * (node.last_block + 5 - fork.POW_FORK_TESTNET) / 3000000 + + elif node.is_mainnet and node.last_block >= fork.POW_FORK: + self.reward_sum = 24 - 10*(node.last_block + 5 - fork.POW_FORK)/3000000 else: - self.reward_sum = "8" + self.reward_sum = 24 + + if self.reward_sum < 0.5: + self.reward_sum = 0.5 + + self.reward_sum = '{:.8f}'.format(self.reward_sum) self.execute_param(self.c, self.SQL_TO_TRANSACTIONS, (-block_array.block_height_new, str(miner_tx.q_block_timestamp), "Hypernode Payouts", @@ -283,8 +311,6 @@ def misc_to_h2(data): try: - self.execute(self.c, "SELECT max(block_height) FROM transactions") - node.last_block = self.c.fetchone()[0] node.logger.app_log.warning(f"Chain: Moving new data to HDD, {node.hdd_block + 1} to {node.last_block} ") @@ -294,23 +320,20 @@ def misc_to_h2(data): result1 = self.c.fetchall() - if node.is_mainnet or node.ram: #testnet does not use hyperblocks, change this in the future - transactions_to_h(result1) - - if node.is_mainnet and node.ram: # we want to save to hyper.db from RAM/hyper.db depending on ram conf + transactions_to_h(result1) + if node.ram: # we want to save to 
hyper.db from RAM/hyper.db depending on ram conf transactions_to_h2(result1) self.execute_param(self.c, "SELECT * FROM misc WHERE block_height > ? ORDER BY block_height ASC", (node.hdd_block,)) result2 = self.c.fetchall() - if not node.is_testnet: #testnet does not use hyperblocks, change this in the future - misc_to_h(result2) - if node.is_mainnet and node.ram: # we want to save to hyper.db from RAM + misc_to_h(result2) + if node.ram: # we want to save to hyper.db from RAM misc_to_h2(result2) - self.execute(self.h, "SELECT max(block_height) FROM transactions") - node.hdd_block = self.h.fetchone()[0] + node.hdd_block = node.last_block + node.hdd_hash = node.last_block_hash node.logger.app_log.warning(f"Chain: {len(result1)} txs moved to HDD") except Exception as e: @@ -367,8 +390,27 @@ def execute_param(self, cursor, query, param): self.logger.app_log.warning(f"Database retry reason: {e}") time.sleep(1) + def fetchall(self, cursor, query, param=None): + """Helper to simplify calling code, execute and fetch in a single line instead of 2""" + if param is None: + self.execute(cursor, query) + else: + self.execute_param(cursor, query, param) + return cursor.fetchall() + + def fetchone(self, cursor, query, param=None): + """Helper to simplify calling code, execute and fetch in a single line instead of 2""" + if param is None: + self.execute(cursor, query) + else: + self.execute_param(cursor, query, param) + res = cursor.fetchone() + if res: + return res[0] + return None + def close(self): self.index.close() self.hdd.close() self.hdd2.close() - #self.conn.close() disabled for troubleshooting + self.conn.close() diff --git a/demo_getaddresssince.py b/demo_getaddresssince.py index 669bab6..5c7d48c 100644 --- a/demo_getaddresssince.py +++ b/demo_getaddresssince.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ Demo script for the api_getaddresssince api command. @@ -30,7 +31,7 @@ import sys -__version__ = "0.0.1" +__version__ = "0.0.2" def get_address_since(since, min_conf, address): diff --git a/difficulty.py b/difficulty.py index 5432794..7b89ba4 100644 --- a/difficulty.py +++ b/difficulty.py @@ -1,9 +1,9 @@ -from decimal import * +from decimal import Decimal import regnet import math import time from fork import * -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten from fork import Fork def difficulty(node, db_handler): @@ -27,7 +27,7 @@ def difficulty(node, db_handler): timestamp_before_last = timestamp_last if previous is None else Decimal(previous[1]) db_handler.execute_param(db_handler.c, ( - "SELECT timestamp FROM transactions WHERE block_height > ? AND reward != 0 ORDER BY timestamp ASC LIMIT 2"), + "SELECT timestamp FROM transactions WHERE block_height > ? 
AND reward != 0 ORDER BY block_height ASC LIMIT 2"), (block_height - 1441,)) timestamp_1441 = Decimal(db_handler.c.fetchone()[0]) block_time_prev = (timestamp_before_last - timestamp_1441) / 1440 diff --git a/digest.py b/digest.py index fff86e9..cac828a 100644 --- a/digest.py +++ b/digest.py @@ -4,13 +4,13 @@ import essentials import mempool as mp -import mining import mining_heavy3 -import staking from difficulty import * from essentials import address_is_rsa, checkpoint_set, ledger_balance3 from polysign.signerfactory import SignerFactory from fork import Fork +import tokensv2 as tokens +from decimal import Decimal fork = Fork() @@ -26,7 +26,7 @@ def __init__(self): self.received_recipient = None self.received_amount = 0 self.received_signature_enc = None - self.received_public_key_hashed = None + self.received_public_key_b64encoded = None self.received_operation = None self.received_openfield = None @@ -36,35 +36,33 @@ def __init__(self): self.nonce = None self.miner_address = None - class PreviousBlock: - def __init__(self): - db_handler.execute(db_handler.c, "SELECT block_hash, block_height, timestamp FROM transactions " - "WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;") - result = db_handler.c.fetchone() - self.block_hash = result[0] - self.block_height = result[1] - self.q_timestamp_last = quantize_two(result[2]) - - class BlockArray: + class Block: + """array of transactions within a block""" def __init__(self): self.tx_count = 0 - self.block_height_new = node.last_block + 1 # for logging purposes. + self.block_height_new = node.last_block + 1 self.block_hash = 'N/A' self.failed_cause = '' self.block_count = 0 + self.transaction_list_converted = [] + + self.mining_reward = None + self.mirror_hash = None + self.start_time_block = quantize_two(time.time()) + self.tokens_operation_present = False def fork_reward_check(): # fork handling if node.is_testnet: if node.last_block > fork.POW_FORK_TESTNET: if not fork.check_postfork_reward_testnet(db_handler): - db_handler.rollback_to(fork.POW_FORK_TESTNET - 1) + db_handler.rollback_under(fork.POW_FORK_TESTNET - 1) raise ValueError("Rolling back chain due to old fork data") else: if node.last_block > fork.POW_FORK: if not fork.check_postfork_reward(db_handler): print("Rolling back") - db_handler.rollback_to(fork.POW_FORK - 1) + db_handler.rollback_under(fork.POW_FORK - 1) raise ValueError("Rolling back chain due to old fork data") # fork handling @@ -75,7 +73,7 @@ def transaction_validate(): if tx.start_time_tx < tx.q_received_timestamp: raise ValueError(f"Future transaction not allowed, timestamp " f"{quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)} minutes in the future") - if previous_block.q_timestamp_last - 86400 > tx.q_received_timestamp: + if node.last_block_timestamp - 86400 > tx.q_received_timestamp: raise ValueError("Transaction older than 24h not allowed.") # Amount if float(tx.received_amount) < 0: @@ -90,25 +88,28 @@ def transaction_validate(): buffer = str((tx.received_timestamp, tx.received_address, tx.received_recipient, tx.received_amount, tx.received_operation, tx.received_openfield)).encode("utf-8") # Will raise if error - also includes reconstruction of address from pubkey to make sure it matches - SignerFactory.verify_bis_signature(tx.received_signature_enc, tx.received_public_key_hashed, buffer, + SignerFactory.verify_bis_signature(tx.received_signature_enc, tx.received_public_key_b64encoded, buffer, tx.received_address) node.logger.app_log.info(f"Valid signature from {tx.received_address} " f"to 
{tx.received_recipient} amount {tx.received_amount}") def rewards(): - if int(block_array.block_height_new) % 10 == 0: # every 10 blocks - db_handler.dev_reward(node, block_array, miner_tx, mining_reward, mirror_hash) - db_handler.hn_reward(node,block_array,miner_tx,mirror_hash) + if int(block_instance.block_height_new) % 10 == 0: # every 10 blocks + db_handler.dev_reward(node, block_instance, miner_tx, block_instance.mining_reward, block_instance.mirror_hash) + db_handler.hn_reward(node,block_instance,miner_tx,block_instance.mirror_hash) def check_signature(block): + # TODO EGG: benchmark this loop vs a single "WHERE IN" SQL + signature_list = [] + for entry in block: # sig 4 - block_array.tx_count += 1 + entry_signature = entry[4] if entry_signature: # prevent empty signature database retry hack signature_list.append(entry_signature) # reject block with transactions which are already in the ledger ram - db_handler.execute_param(db_handler.h, "SELECT block_height FROM transactions WHERE signature = ?;", + db_handler.execute_param(db_handler.h, "SELECT block_height FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) and signature = ?1;", (entry_signature,)) tx_presence_check = db_handler.h.fetchone() if tx_presence_check: @@ -116,7 +117,7 @@ def check_signature(block): raise ValueError(f"That transaction {entry_signature[:10]} is already in our ledger, " f"block_height {tx_presence_check[0]}") - db_handler.execute_param(db_handler.c, "SELECT block_height FROM transactions WHERE signature = ?;", + db_handler.execute_param(db_handler.c, "SELECT block_height FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) and signature = ?1;", (entry_signature,)) tx_presence_check = db_handler.c.fetchone() if tx_presence_check: @@ -126,308 +127,280 @@ def check_signature(block): else: raise ValueError(f"Empty signature from {peer_ip}") - if node.peers.is_banned(peer_ip): - # no need to loose any time with banned peers - raise ValueError("Cannot accept blocks from a banned peer") - # since we raise, it will also drop the connection, it's fine since he's banned. 
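The rewritten duplicate-transaction lookups above (`substr(signature,1,4) = substr(?1,1,4) and signature = ?1`) look redundant but are deliberate: they let SQLite narrow candidates through a small index on the first four characters of the signature before comparing the full ~684-character value. That companion index is not part of this hunk, so the sketch below assumes an expression index exists; schema and values are toy stand-ins:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE transactions (block_height INTEGER, signature TEXT)")
# Assumed companion index on the 4-char signature prefix (not shown in this diff):
conn.execute("CREATE INDEX tx_sig_prefix ON transactions (substr(signature,1,4))")
sig = "SIG:" + "A" * 680  # dummy full-length signature
conn.execute("INSERT INTO transactions VALUES (?, ?)", (808, sig))
# Prefix equality is resolved from the small index; the full comparison
# then only runs on the few candidates sharing that prefix.
row = conn.execute("SELECT block_height FROM transactions "
                   "WHERE substr(signature,1,4) = substr(?1,1,4) AND signature = ?1",
                   (sig,)).fetchone()
print(row)  # (808,)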
- if not node.db_lock.locked(): - block_array = BlockArray() - node.db_lock.acquire() - node.logger.app_log.warning(f"Database lock acquired") - while mp.MEMPOOL.lock.locked(): - time.sleep(0.1) - node.logger.app_log.info(f"Chain: Waiting for mempool to unlock {peer_ip}") + if block_instance.tx_count != len(set(signature_list)): + raise ValueError("There are duplicate transactions in this block, rejected") + + + + def sort_transactions(block): + # print("sort_transactions") + # print("block_instance.tx_count", block_instance.tx_count) + for tx_index, transaction in enumerate(block): + # print("tx_index", tx_index) + tx.start_time_tx = quantize_two(time.time()) + tx.q_received_timestamp = quantize_two(transaction[0]) + tx.received_timestamp = '%.2f' % tx.q_received_timestamp + tx.received_address = str(transaction[1])[:56] + tx.received_recipient = str(transaction[2])[:56] + tx.received_amount = '%.8f' % (quantize_eight(transaction[3])) + tx.received_signature_enc = str(transaction[4])[:684] + tx.received_public_key_b64encoded = str(transaction[5])[:1068] + tx.received_operation = str(transaction[6])[:30] + tx.received_openfield = str(transaction[7])[:100000] + + if tx.received_operation in ["token:issue","token:transfer"]: + block_instance.tokens_operation_present = True # update on change + + # if transaction == block[-1]: + if tx_index == block_instance.tx_count - 1: # faster than comparing the whole tx + if float(tx.received_amount) != 0: + raise ValueError("Coinbase (Mining) transaction must have zero amount") + if not address_is_rsa(tx.received_recipient): + # Compare address rather than sig, as sig could be made up + raise ValueError("Coinbase (Mining) transaction only supports legacy RSA Bismuth addresses") + + # recognize the last transaction as the mining reward transaction + miner_tx.q_block_timestamp = tx.q_received_timestamp + miner_tx.nonce = tx.received_openfield[:128] + miner_tx.miner_address = tx.received_address + # print("miner_tx1", miner_tx) + + block_instance.transaction_list_converted.append((tx.received_timestamp, + tx.received_address, + tx.received_recipient, + tx.received_amount, + tx.received_signature_enc, + tx.received_public_key_b64encoded, + tx.received_operation, + tx.received_openfield)) + transaction_validate() + + def process_transactions(block): + fees_block = [] + block_instance.mining_reward = 0 # avoid warning + + # Cache for multiple tx from same address + balances = {} + + for tx_index, transaction in enumerate(block): + db_timestamp = '%.2f' % quantize_two(transaction[0]) + db_address = str(transaction[1])[:56] + db_recipient = str(transaction[2])[:56] + db_amount = '%.8f' % quantize_eight(transaction[3]) + db_signature = str(transaction[4])[:684] + db_public_key_b64encoded = str(transaction[5])[:1068] + db_operation = str(transaction[6])[:30] + db_openfield = str(transaction[7])[:100000] + + block_debit_address = 0 + block_fees_address = 0 + + # this also is redundant on many tx per address block + for x in block: + if x[1] == db_address: # make calculation relevant to a particular address in the block + block_debit_address = quantize_eight(Decimal(block_debit_address) + Decimal(x[3])) + + if x != block[-1]: + block_fees_address = quantize_eight(Decimal(block_fees_address) + Decimal( + essentials.fee_calculate(db_openfield, db_operation, + node.last_block))) # exclude the mining tx from fees + + # node.logger.app_log.info("Fee: " + str(fee)) + + # decide reward + if tx_index == block_instance.tx_count - 1: + db_amount = 0 # prevent spending from 
another address, because mining txs allow delegation + + if node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET: + block_instance.mining_reward = 15 - (block_instance.block_height_new - fork.POW_FORK_TESTNET) / 1100000 - 9.5 + elif node.is_mainnet and node.last_block >= fork.POW_FORK: + block_instance.mining_reward = 15 - (block_instance.block_height_new - fork.POW_FORK) / 1100000 - 9.5 + else: + block_instance.mining_reward = 15 - (quantize_eight(block_instance.block_height_new) / quantize_eight(1000000 / 2)) - Decimal("2.4") - node.logger.app_log.warning(f"Chain: Digesting started from {peer_ip}") - # variables that have been quantized are prefixed by q_ So we can avoid any unnecessary quantize again later. - # Takes time. Variables that are only used as quantized decimal are quantized once and for all. + if block_instance.mining_reward < 0.5: + block_instance.mining_reward = 0.5 - block_size = Decimal(sys.getsizeof(str(data))) / Decimal(1000000) - node.logger.app_log.warning(f"Chain: Block size: {block_size} MB") + reward = '{:.8f}'.format(block_instance.mining_reward + sum(fees_block)) + # don't request a fee for mined block so new accounts can mine + fee = 0 + else: + reward = 0 + fee = essentials.fee_calculate(db_openfield, db_operation, node.last_block) + fees_block.append(quantize_eight(fee)) + balance_pre = ledger_balance3(db_address, balances, db_handler) # keep this as c (ram hyperblock access) + balance = quantize_eight(balance_pre - block_debit_address) + + if quantize_eight(balance_pre) < quantize_eight(db_amount): + raise ValueError(f"{db_address} sending more than owned: {db_amount}/{balance_pre}") + + if quantize_eight(balance) - quantize_eight(block_fees_address) < 0: + # exclude fee check for the mining/header tx + raise ValueError(f"{db_address} Cannot afford to pay fees (balance: {balance}, " + f"block fees: {block_fees_address})") + + # append, but do not insert to ledger before whole block is validated, + # note that it takes already validated values (decimals, length) + node.logger.app_log.info(f"Chain: Appending transaction back to block with " + f"{len(block_transactions)} transactions in it") + block_transactions.append((str(block_instance.block_height_new), str(db_timestamp), str(db_address), + str(db_recipient), str(db_amount), str(db_signature), + str(db_public_key_b64encoded), str(block_instance.block_hash), str(fee), + str(reward), str(db_operation), str(db_openfield))) + try: + mp.MEMPOOL.delete_transaction(db_signature) + node.logger.app_log.info(f"Chain: Removed processed transaction {db_signature[:56]}" + f" from the mempool while digesting") + except: + # tx was not or is no more in the local mempool + pass + + def process_blocks(block_data): + # TODO: block_data shadows block_data from outer scope. Very error prone. + # here, functions in functions use both local vars or parent variables, it's a call for nasty bugs. + # take care of pycharms hints, do not define func in funcs. try: - block_array_data = data - # reject block with duplicate transactions - signature_list = [] - block_transactions = [] - - for block in block_array_data: + block_instance.block_count = len(block_data) - block_array.block_count += 1 + for block in block_data: + if node.IS_STOPPING: + node.logger.app_log.warning("Process_blocks aborted, node is stopping") + return # Reworked process: we exit as soon as we find an error, no need to process further tests. # Then the exception handler takes place. + # EGG: Reminder: quick test first, **always**. 
Heavy tests only thereafter. - # TODO EGG: benchmark this loop vs a single "WHERE IN" SQL - # move down, so bad format tx do not require sql query - check_signature(block) - - block_array.tx_count = len(signature_list) - if block_array.tx_count != len(set(signature_list)): - raise ValueError("There are duplicate transactions in this block, rejected") - - del signature_list[:] - - previous_block = PreviousBlock() - - block_array.block_height_new = previous_block.block_height + 1 + block_instance.tx_count = len(block) - db_handler.execute(db_handler.c, "SELECT max(block_height) FROM transactions") - node.last_block = db_handler.c.fetchone()[0] - - start_time_block = quantize_two(time.time()) - transaction_list_converted = [] # makes sure all the data are properly converted + block_instance.block_height_new = node.last_block + 1 + block_instance.start_time_block = quantize_two(time.time()) fork_reward_check() - for tx_index, transaction in enumerate(block): - tx = Transaction() - - tx.start_time_tx = quantize_two(time.time()) - tx.q_received_timestamp = quantize_two(transaction[0]) - tx.received_timestamp = '%.2f' % tx.q_received_timestamp - tx.received_address = str(transaction[1])[:56] - tx.received_recipient = str(transaction[2])[:56] - tx.received_amount = '%.8f' % (quantize_eight(transaction[3])) - tx.received_signature_enc = str(transaction[4])[:684] - tx.received_public_key_hashed = str(transaction[5])[:1068] - tx.received_operation = str(transaction[6])[:30] - tx.received_openfield = str(transaction[7])[:100000] - - # if transaction == block[-1]: - if tx_index == block_array.tx_count - 1: # faster than comparing the whole tx - if not address_is_rsa(tx.received_recipient): - # Compare address rather than sig, as sig could be made up - raise ValueError("Coinbase (Mining) transaction only supports legacy RSA Bismuth addresses") - miner_tx = MinerTransaction() - - # recognize the last transaction as the mining reward transaction - miner_tx.q_block_timestamp = tx.q_received_timestamp - miner_tx.nonce = tx.received_openfield[:128] - miner_tx.miner_address = tx.received_address - - transaction_list_converted.append((tx.received_timestamp, - tx.received_address, - tx.received_recipient, - tx.received_amount, - tx.received_signature_enc, - tx.received_public_key_hashed, - tx.received_operation, - tx.received_openfield)) - transaction_validate() - + # sort_transactions also computes several hidden variables, like miner_tx.q_block_timestamp + # So it has to be run before the check + # TODO: rework to avoid hidden variables and make the sequence clear. + # sort_transactions also validates all transactions and sigs, and this is a waste of time if the block timestamp is wrong. 
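One behaviour of `process_transactions` above that is worth a worked example: before any single transaction is accepted, the sender is debited for all of its outgoing amounts in the block (`block_debit_address`), which is what stops one balance being spent twice inside the same block. A condensed, self-contained sketch of that guard with made-up numbers (the real code works on `quantize_eight` Decimals and the `balances` cache):

from decimal import Decimal

balances = {"addr_a": Decimal("10")}  # hypothetical pre-block ledger state
block = [("addr_a", "addr_b", Decimal("6")),
         ("addr_a", "addr_c", Decimal("6"))]  # coinbase omitted

for sender, _recipient, _amount in block:
    # total what the sender spends anywhere in this block, not just in this tx
    block_debit = sum((a for s, r, a in block if s == sender), Decimal("0"))
    if balances.get(sender, Decimal("0")) - block_debit < 0:
        raise ValueError(f"{sender} sending more than owned: {block_debit}/{balances[sender]}")

Each 6-coin spend is affordable on its own; summing per-address debits across the whole block is what exposes the 12-versus-10 overdraft.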
+ sort_transactions(block) # reject blocks older than latest block - if miner_tx.q_block_timestamp <= previous_block.q_timestamp_last: - raise ValueError("Block is older than the previous one, will be rejected") + if miner_tx.q_block_timestamp <= node.last_block_timestamp: + # print("miner_tx2", miner_tx) + raise ValueError(f"Block is older {miner_tx.q_block_timestamp} " + f"than the previous one {node.last_block_timestamp}, will be rejected") + + check_signature(block) # calculate current difficulty (is done for each block in block array, not super easy to isolate) diff = difficulty(node, db_handler) node.difficulty = diff - node.logger.app_log.warning(f"Time to generate block {previous_block.block_height + 1}: {'%.2f' % diff[2]}") + node.logger.app_log.warning(f"Time to generate block {node.last_block + 1}: {'%.2f' % diff[2]}") node.logger.app_log.warning(f"Current difficulty: {diff[3]}") node.logger.app_log.warning(f"Current blocktime: {diff[4]}") node.logger.app_log.warning(f"Current hashrate: {diff[5]}") node.logger.app_log.warning(f"Difficulty adjustment: {diff[6]}") node.logger.app_log.warning(f"Difficulty: {diff[0]} {diff[1]}") - # node.logger.app_log.info("Transaction list: {}".format(transaction_list_converted)) - - - block_array.block_hash = hashlib.sha224((str(transaction_list_converted) + previous_block.block_hash).encode("utf-8")).hexdigest() + block_instance.block_hash = hashlib.sha224((str(block_instance.transaction_list_converted) + node.last_block_hash).encode("utf-8")).hexdigest() + del block_instance.transaction_list_converted[:] # node.logger.app_log.info("Last block sha_hash: {}".format(block_hash)) - node.logger.app_log.info(f"Calculated block sha_hash: {block_array.block_hash}") + node.logger.app_log.info(f"Calculated block sha_hash: {block_instance.block_hash}") # node.logger.app_log.info("Nonce: {}".format(nonce)) # check if we already have the sha_hash db_handler.execute_param(db_handler.h, "SELECT block_height FROM transactions WHERE block_hash = ?", - (block_array.block_hash,)) + (block_instance.block_hash,)) dummy = db_handler.h.fetchone() if dummy: raise ValueError( "Skipping digestion of block {} from {}, because we already have it on block_height {}". - format(block_array.block_hash[:10], peer_ip, dummy[0])) + format(block_instance.block_hash[:10], peer_ip, dummy[0])) if node.is_mainnet: - diff_save = mining_heavy3.check_block(block_array.block_height_new, + diff_save = mining_heavy3.check_block(block_instance.block_height_new, miner_tx.miner_address, miner_tx.nonce, - previous_block.block_hash, + node.last_block_hash, diff[0], tx.received_timestamp, tx.q_received_timestamp, - previous_block.q_timestamp_last, + node.last_block_timestamp, peer_ip=peer_ip, app_log=node.logger.app_log) elif node.is_testnet: - diff_save = mining_heavy3.check_block(block_array.block_height_new, + diff_save = mining_heavy3.check_block(block_instance.block_height_new, miner_tx.miner_address, miner_tx.nonce, - previous_block.block_hash, + node.last_block_hash, diff[0], tx.received_timestamp, tx.q_received_timestamp, - previous_block.q_timestamp_last, + node.last_block_timestamp, peer_ip=peer_ip, app_log=node.logger.app_log) else: # it's regnet then, will use a specific fake method here.
- diff_save = mining_heavy3.check_block(block_array.block_height_new, + diff_save = mining_heavy3.check_block(block_instance.block_height_new, miner_tx.miner_address, miner_tx.nonce, - previous_block.block_hash, + node.last_block_hash, regnet.REGNET_DIFF, tx.received_timestamp, tx.q_received_timestamp, - previous_block.q_timestamp_last, + node.last_block_timestamp, peer_ip=peer_ip, app_log=node.logger.app_log) - fees_block = [] - mining_reward = 0 # avoid warning - - # Cache for multiple tx from same address - balances = {} - for tx_index, transaction in enumerate(block): - db_timestamp = '%.2f' % quantize_two(transaction[0]) - db_address = str(transaction[1])[:56] - db_recipient = str(transaction[2])[:56] - db_amount = '%.8f' % quantize_eight(transaction[3]) - db_signature = str(transaction[4])[:684] - db_public_key_hashed = str(transaction[5])[:1068] - db_operation = str(transaction[6])[:30] - db_openfield = str(transaction[7])[:100000] - - block_debit_address = 0 - block_fees_address = 0 - - # this also is redundant on many tx per address block - for x in block: - if x[1] == db_address: # make calculation relevant to a particular address in the block - block_debit_address = quantize_eight(Decimal(block_debit_address) + Decimal(x[3])) - - if x != block[-1]: - block_fees_address = quantize_eight(Decimal(block_fees_address) + Decimal( - essentials.fee_calculate(db_openfield, db_operation, - node.last_block))) # exclude the mining tx from fees - - # print("block_fees_address", block_fees_address, "for", db_address) - # node.logger.app_log.info("Digest: Inbound block credit: " + str(block_credit)) - # node.logger.app_log.info("Digest: Inbound block debit: " + str(block_debit)) - # include the new block - - # if (start_time_tx < q_received_timestamp + 432000) and not quicksync: - # balance_pre = quantize_eight(credit_ledger - debit_ledger - fees + rewards) # without projection - balance_pre = ledger_balance3(db_address, balances, db_handler) # keep this as c (ram hyperblock access) - - # balance = quantize_eight(credit - debit - fees + rewards) - balance = quantize_eight(balance_pre - block_debit_address) - # node.logger.app_log.info("Digest: Projected transaction address balance: " + str(balance)) - # else: - # print("hyp2") - - fee = essentials.fee_calculate(db_openfield, db_operation, node.last_block) - - fees_block.append(quantize_eight(fee)) - # node.logger.app_log.info("Fee: " + str(fee)) - - # decide reward - if tx_index == block_array.tx_count - 1: - db_amount = 0 # prevent spending from another address, because mining txs allow delegation - if previous_block.block_height <= 10000000: - - if node.last_block >= fork.POW_FORK or (node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET): - mining_reward = 15 - (quantize_eight(block_array.block_height_new) / quantize_eight(1000000 / 2)) - Decimal("2.4") - else: - mining_reward = 15 - (quantize_eight(block_array.block_height_new) / quantize_eight(1000000 / 2)) - Decimal("0.8") - - if mining_reward < 0: - mining_reward = 0 - else: - mining_reward = 0 - - reward = quantize_eight(mining_reward + sum(fees_block[:-1])) - # don't request a fee for mined block so new accounts can mine - fee = 0 - else: - reward = 0 - - if quantize_eight(balance_pre) < quantize_eight(db_amount): - raise ValueError(f"{db_address} sending more than owned: {db_amount}/{balance_pre}") - - if quantize_eight(balance) - quantize_eight(block_fees_address) < 0: - # exclude fee check for the mining/header tx - raise ValueError(f"{db_address} Cannot afford to pay fees 
(balance: {balance}, " - f"block fees: {block_fees_address})") - - # append, but do not insert to ledger before whole block is validated, - # note that it takes already validated values (decimals, length) - node.logger.app_log.info(f"Chain: Appending transaction back to block with " - f"{len(block_transactions)} transactions in it") - block_transactions.append((str(block_array.block_height_new), str(db_timestamp), str(db_address), - str(db_recipient), str(db_amount), str(db_signature), - str(db_public_key_hashed), str(block_array.block_hash), str(fee), - str(reward), str(db_operation), str(db_openfield))) - try: - mp.MEMPOOL.delete_transaction(db_signature) - node.logger.app_log.info(f"Chain: Removed processed transaction {db_signature[:56]}" - f" from the mempool while digesting") - except: - # tx was not or is no more in the local mempool - pass + + process_transactions(block) + + node.last_block = block_instance.block_height_new + node.last_block_hash = block_instance.block_hash # end for block # save current diff (before the new block) # quantized vars have to be converted, since Decimal is not json serializable... node.plugin_manager.execute_action_hook('block', - {'height': block_array.block_height_new, 'diff': diff_save, - 'hash': block_array.block_hash, + {'height': block_instance.block_height_new, 'diff': diff_save, + 'hash': block_instance.block_hash, 'timestamp': float(miner_tx.q_block_timestamp), 'miner': miner_tx.miner_address, 'ip': peer_ip}) node.plugin_manager.execute_action_hook('fullblock', - {'height': block_array.block_height_new, 'diff': diff_save, - 'hash': block_array.block_hash, + {'height': block_instance.block_height_new, 'diff': diff_save, + 'hash': block_instance.block_hash, 'timestamp': float(miner_tx.q_block_timestamp), 'miner': miner_tx.miner_address, 'ip': peer_ip, 'transactions': block_transactions}) - db_handler.to_db(block_array, diff_save, block_transactions) - - # savings - if node.is_testnet or block_array.block_height_new >= 843000: - # no savings for regnet - if int(block_array.block_height_new) % 10000 == 0: # every x blocks - - staking.staking_update(db_handler.conn, db_handler.c, db_handler.index, db_handler.index_cursor, - "normal", block_array.block_height_new, node.logger.app_log) - staking.staking_payout(db_handler.conn, db_handler.c, db_handler.index, db_handler.index_cursor, - block_array.block_height_new, float(miner_tx.q_block_timestamp), - node.logger.app_log) - staking.staking_revalidate(db_handler.conn, db_handler.c, db_handler.index, - db_handler.index_cursor, block_array.block_height_new, - node.logger.app_log) + db_handler.to_db(block_instance, diff_save, block_transactions) # new sha_hash db_handler.execute(db_handler.c, "SELECT * FROM transactions " "WHERE block_height = (SELECT max(block_height) FROM transactions)") # Was trying to simplify, but it's the latest mirror sha_hash. # not the latest block, nor the mirror of the latest block. 
- # c.execute("SELECT * FROM transactions WHERE block_height = ?", (block_array.block_height_new -1,)) + # c.execute("SELECT * FROM transactions WHERE block_height = ?", (block_instance.block_height_new -1,)) tx_list_to_hash = db_handler.c.fetchall() - mirror_hash = hashlib.blake2b(str(tx_list_to_hash).encode(), digest_size=20).hexdigest() + block_instance.mirror_hash = hashlib.blake2b(str(tx_list_to_hash).encode(), digest_size=20).hexdigest() # /new sha_hash rewards() # node.logger.app_log.warning("Block: {}: {} valid and saved from {}" - # .format(block_array.block_height_new, block_hash[:10], peer_ip)) - node.logger.app_log.warning(f"Valid block: {block_array.block_height_new}: " - f"{block_array.block_hash[:10]} with {len(block)} txs, " + # .format(block_instance.block_height_new, block_hash[:10], peer_ip)) + node.logger.app_log.warning(f"Valid block: {block_instance.block_height_new}: " + f"{block_instance.block_hash[:10]} with {len(block)} txs, " f"digestion from {peer_ip} completed in " - f"{str(time.time() - float(start_time_block))[:5]}s.") + f"{str(time.time() - float(block_instance.start_time_block))[:5]}s.") + + if block_instance.tokens_operation_present: + tokens.tokens_update(node, db_handler) + del block_transactions[:] node.peers.unban(peer_ip) @@ -440,14 +413,62 @@ def check_signature(block): # /whole block validation # NEW: returns new block sha_hash + except Exception as e: + # Left for edge cases debug + """ + print(e) + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + print(exc_type, fname, exc_tb.tb_lineno) + """ + raise + + + # TODO: no def in def, unreadable. we are 10 screens down the prototype of that function. + # digestion begins here + if node.peers.is_banned(peer_ip): + # no need to loose any time with banned peers + raise ValueError("Cannot accept blocks from a banned peer") + # since we raise, it will also drop the connection, it's fine since he's banned. + + tx = Transaction() + miner_tx = MinerTransaction() + block_instance = Block() + + if not node.db_lock.locked(): + + node.db_lock.acquire() + node.logger.app_log.warning(f"Database lock acquired") - checkpoint_set(node, block_array.block_height_new) - return block_array.block_hash + while mp.MEMPOOL.lock.locked(): + time.sleep(0.1) + node.logger.app_log.info(f"Chain: Waiting for mempool to unlock {peer_ip}") + + node.logger.app_log.warning(f"Chain: Digesting started from {peer_ip}") + # variables that have been quantized are prefixed by q_ So we can avoid any unnecessary quantize again later. + # Takes time. Variables that are only used as quantized decimal are quantized once and for all. 
+ + block_size = Decimal(sys.getsizeof(str(data))) / Decimal(1000000) + node.logger.app_log.warning(f"Chain: Block size: {block_size} MB") + + try: + block_data = data + # reject block with duplicate transactions + block_transactions = [] + + process_blocks(block_data) + + checkpoint_set(node) + return node.last_block_hash except Exception as e: node.logger.app_log.warning(f"Chain processing failed: {e}") node.logger.app_log.info(f"Received data dump: {data}") - block_array.failed_cause = str(e) + block_instance.failed_cause = str(e) + + node.last_block = db_handler.block_max_ram()['block_height'] #get actual data from database on exception + node.last_block_hash = db_handler.last_block_hash() #get actual data from database on exception + # Temp exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] @@ -459,19 +480,20 @@ def check_signature(block): finally: - db_handler.db_to_drive(node) node.db_lock.release() node.logger.app_log.warning(f"Database lock released") - delta_t = time.time() - float(start_time_block) + delta_t = time.time() - float(block_instance.start_time_block) # node.logger.app_log.warning("Block: {}: {} digestion completed in {}s." - # .format(block_array.block_height_new, block_hash[:10], delta_t)) + # .format(block_instance.block_height_new, block_hash[:10], delta_t)) node.plugin_manager.execute_action_hook('digestblock', - {'failed': block_array.failed_cause, 'ip': peer_ip, - 'deltat': delta_t, "blocks": block_array.block_count, - "txs": block_array.tx_count}) + {'failed': block_instance.failed_cause, + 'ip': peer_ip, + 'deltat': delta_t, + "blocks": block_instance.block_count, + "txs": block_instance.tx_count}) else: node.logger.app_log.warning(f"Chain: Skipping processing from {peer_ip}, someone delivered data faster") diff --git a/essentials.py b/essentials.py index 9b67399..711520f 100644 --- a/essentials.py +++ b/essentials.py @@ -14,17 +14,31 @@ # from Crypto import Random from Cryptodome.PublicKey import RSA -from quantizer import * -from simplecrypt import * +from quantizer import quantize_two, quantize_eight, quantize_ten +from decimal import Decimal +from simplecrypt import encrypt, decrypt from typing import Union from polysign.signer import SignerType from polysign.signerfactory import SignerFactory -__version__ = "0.0.5" +__version__ = "0.0.6" -RE_RSA_ADDRESS = re.compile(r"[abcdef0123456789]{56}") -# TODO: improve that ECDSA one -RE_ECDSA_ADDRESS = re.compile(r"^Bis") +""" +For temp. 
code compatibility, dup code moved to polysign module +""" + + +def address_validate(address:str) -> bool: + return SignerFactory.address_is_valid(address) + + +def address_is_rsa(address: str) -> bool: + return SignerFactory.address_is_rsa(address) + + +""" +End compatibility +""" def format_raw_tx(raw: list) -> dict: @@ -35,7 +49,11 @@ def format_raw_tx(raw: list) -> dict: transaction['recipient'] = raw[3] transaction['amount'] = raw[4] transaction['signature'] = raw[5] - transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + transaction['txid'] = raw[5][:56] + try: + transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8') + except: + transaction['pubkey'] = raw[6] #support new pubkey schemes transaction['block_hash'] = raw[7] transaction['fee'] = raw[8] transaction['reward'] = raw[9] @@ -99,10 +117,9 @@ def round_down(number, order): return int(math.floor(number / order)) * order -def checkpoint_set(node, block_reference): - if block_reference > 2000: - node.checkpoint = round_down(block_reference, 1000) - 1000 - node.logger.app_log.warning(f"Checkpoint set to {node.checkpoint}") +def checkpoint_set(node): + node.checkpoint = round_down(node.last_block, 1000) - 1000 + node.logger.app_log.warning(f"Checkpoint set to {node.checkpoint}") def ledger_balance3(address, cache, db_handler): @@ -129,7 +146,8 @@ def ledger_balance3(address, cache, db_handler): return cache[address] -def sign_rsa(timestamp, address, recipient, amount, operation, openfield, key, public_key_hashed) -> Union[bool, tuple]: +def sign_rsa(timestamp, address, recipient, amount, operation, openfield, key, public_key_b64encoded) -> Union[bool, tuple]: + # TODO: move, make use of polysign module if not key: raise BaseException("The wallet is locked, you need to provide a decrypted key") try: @@ -139,9 +157,9 @@ def sign_rsa(timestamp, address, recipient, amount, operation, openfield, key, p signer = SignerFactory.from_private_key(key.exportKey().decode("utf-8"), SignerType.RSA) signature_enc = signer.sign_buffer_for_bis(buffer) # Extra: recheck - Raises if Error - SignerFactory.verify_bis_signature(signature_enc, public_key_hashed, buffer, address) + SignerFactory.verify_bis_signature(signature_enc, public_key_b64encoded, buffer, address) full_tx = str(timestamp), str(address), str(recipient), '%.8f' % float(amount), \ - str(signature_enc.decode("utf-8")), str(public_key_hashed.decode("utf-8")), \ + str(signature_enc.decode("utf-8")), str(public_key_b64encoded.decode("utf-8")), \ str(operation), str(openfield) return full_tx except: @@ -149,6 +167,7 @@ def sign_rsa(timestamp, address, recipient, amount, operation, openfield, key, p def keys_check(app_log, keyfile_name: str) -> None: + # TODO: move, make use of polysign module # key maintenance if os.path.isfile("privkey.der") is True: app_log.warning("privkey.der found") @@ -197,7 +216,8 @@ def keys_load(privkey_filename: str= "privkey.der", pubkey_filename: str= "pubke # print("loaded",privkey, pubkey) # import keys try: # unencrypted - key = RSA.importKey(open(privkey_filename).read()) + with open(privkey_filename) as fp: + key = RSA.importKey(fp.read()) private_key_readable = key.exportKey().decode("utf-8") # public_key = key.publickey() encrypted = False @@ -206,21 +226,23 @@ def keys_load(privkey_filename: str= "privkey.der", pubkey_filename: str= "pubke encrypted = True unlocked = False key = None - private_key_readable = open(privkey_filename).read() + with open(privkey_filename) as fp: + private_key_readable = fp.read() # public_key_readable = 
str(key.publickey().exportKey()) - public_key_readable = open(pubkey_filename.encode('utf-8')).read() + with open(pubkey_filename.encode('utf-8')) as fp: + public_key_readable = fp.read() if len(public_key_readable) not in (271, 799): raise ValueError("Invalid public key length: {}".format(len(public_key_readable))) - public_key_hashed = base64.b64encode(public_key_readable.encode('utf-8')) + public_key_b64encoded = base64.b64encode(public_key_readable.encode('utf-8')) address = hashlib.sha224(public_key_readable.encode('utf-8')).hexdigest() print("Upgrading wallet") keys_save(private_key_readable, public_key_readable, address, keyfile) - return key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, address, keyfile + return key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_b64encoded, address, keyfile def keys_unlock(private_key_encrypted: str) -> tuple: @@ -256,42 +278,9 @@ def keys_load_new(keyfile="wallet.der"): if len(public_key_readable) not in (271, 799): raise ValueError("Invalid public key length: {}".format(len(public_key_readable))) - public_key_hashed = base64.b64encode(public_key_readable.encode('utf-8')) - - return key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, address, keyfile - - -# Dup code, not pretty, but would need address module to avoid dup - Belongs to polysign module. - -def address_validate(address:str) -> bool: - if RE_RSA_ADDRESS.match(address): - return True # RSA - elif RE_ECDSA_ADDRESS.match(address): - if 100 > len(address) > 50: - return True # SignerED25519 - elif len(address) > 30: - return True # SignerECDSA - return False - - -def address_is_rsa(address: str) -> bool: - """Returns wether the given address is a legacy RSA one""" - return RE_RSA_ADDRESS.match(address) - + public_key_b64encoded = base64.b64encode(public_key_readable.encode('utf-8')) -# Dup code, not pretty - belong to polysign -def validate_pem(public_key: str) -> None: - # verify pem as cryptodome does - pem_data = base64.b64decode(public_key).decode("utf-8") - regex = re.compile("\s*-----BEGIN (.*)-----\s+") - match = regex.match(pem_data) - if not match: - raise ValueError("Not a valid PEM pre boundary") - marker = match.group(1) - regex = re.compile("-----END (.*)-----\s*$") - match = regex.search(pem_data) - if not match or match.group(1) != marker: - raise ValueError("Not a valid PEM post boundary") + return key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_b64encoded, address, keyfile def fee_calculate(openfield: str, operation: str='', block: int=0) -> Decimal: @@ -301,6 +290,8 @@ def fee_calculate(openfield: str, operation: str='', block: int=0) -> Decimal: fee = Decimal(fee) + Decimal("10") if openfield.startswith("alias="): fee = Decimal(fee) + Decimal("1") + #if operation == "alias:register": #add in the future, careful about forking + # fee = Decimal(fee) + Decimal("1") return quantize_eight(fee) diff --git a/fork.py b/fork.py index a4b19d6..26dc0e8 100644 --- a/fork.py +++ b/fork.py @@ -1,14 +1,14 @@ class Fork(): def __init__(self): - self.POW_FORK = 1200000 + self.POW_FORK = 1450000 self.POW_FORK_TESTNET = 894170 self.FORK_AHEAD = 5 - self.versions_remove = ['mainnet0019', 'mainnet0018', 'mainnet0017'] + self.versions_remove = ['mainnet0020', 'mainnet0019', 'mainnet0018', 'mainnet0017'] self.FORK_REWARD = None self.FORK_REWARD_TESTNET = None self.PASSED = False self.PASSED_TESTNET = False - self.REWARD_MAX = 11 + self.REWARD_MAX = 6 
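The raised fork height interacts with the decaying rewards introduced in dbhandler.py (`hn_reward`) and digest.py (`process_transactions`) above, so a quick sanity check of the two curves, written straight from the formulas as they appear in those hunks, is useful; this is illustrative arithmetic, not node code:

POW_FORK = 1450000  # the new fork.py value

def miner_reward(height: int) -> float:
    # digest.py mainnet branch: 15 - (height - POW_FORK) / 1100000 - 9.5, floored at 0.5
    return max(15 - (height - POW_FORK) / 1100000 - 9.5, 0.5)

def hn_reward_sum(last_block: int) -> float:
    # dbhandler.py mainnet branch: 24 - 10 * (last_block + 5 - POW_FORK) / 3000000, floored at 0.5
    return max(24 - 10 * (last_block + 5 - POW_FORK) / 3000000, 0.5)

print(miner_reward(POW_FORK))            # 5.5 right at the fork
print(hn_reward_sum(POW_FORK))           # ~24.0 right at the fork
print(miner_reward(POW_FORK + 5500000))  # 0.5: miner curve bottoms out 5.5M blocks past the fork
print(hn_reward_sum(POW_FORK + 7050000)) # 0.5: hypernode curve bottoms out ~7.05M blocks past the fork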
#self.POW_FORK = 1168860 #HACK #self.versions_remove = [] #HACK diff --git a/genesis.py b/genesis.py index 59caf78..8c338ad 100644 --- a/genesis.py +++ b/genesis.py @@ -33,13 +33,13 @@ print("Your address: {}".format(address)) print("Your private key:\n {}".format(private_key_readable)) print("Your public key:\n {}".format(public_key_readable)) - + with open("privkey.der", "a") as f: f.write(str(private_key_readable)) with open("pubkey.der", "a") as f: f.write(str(public_key_readable)) - + with open("address.txt", "a") as f: f.write("{}\n".format(address)) @@ -53,7 +53,7 @@ print("Your address: {}".format(address)) print("Your private key:\n {}".format(private_key_readable)) print("Your public key:\n {}".format(public_key_readable)) -public_key_hashed = base64.b64encode(public_key_readable) +public_key_b64encoded = base64.b64encode(public_key_readable) # import keys timestamp = str(time.time()) @@ -77,7 +77,7 @@ conn = sqlite3.connect('static/ledger.db') cursor = conn.cursor() cursor.execute("CREATE TABLE transactions (block_height INTEGER, timestamp, address, recipient, amount, signature, public_key, block_hash, fee, reward, operation, openfield)") - cursor.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", ("1", timestamp, 'genesis', address, '0', str(signature_enc), public_key_hashed, block_hash, 0, 1, 1, 'genesis')) # Insert a row of data + cursor.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", ("1", timestamp, 'genesis', address, '0', str(signature_enc), public_key_b64encoded, block_hash, 0, 1, 1, 'genesis')) # Insert a row of data conn.commit() # Save (commit) the changes mempool = sqlite3.connect('mempool.db') diff --git a/graphics/bishold.png b/graphics/bishold.png deleted file mode 100644 index 5621178..0000000 Binary files a/graphics/bishold.png and /dev/null differ diff --git a/graphics/bislover.PNG b/graphics/bislover.PNG deleted file mode 100644 index e7b038f..0000000 Binary files a/graphics/bislover.PNG and /dev/null differ diff --git a/graphics/book.png b/graphics/book.png deleted file mode 100644 index d2fcf75..0000000 Binary files a/graphics/book.png and /dev/null differ diff --git a/graphics/bsmall.png b/graphics/bsmall.png deleted file mode 100644 index 7f065eb..0000000 Binary files a/graphics/bsmall.png and /dev/null differ diff --git a/graphics/icon.jpg b/graphics/icon.jpg deleted file mode 100644 index 991009f..0000000 Binary files a/graphics/icon.jpg and /dev/null differ diff --git a/graphics/logo.png b/graphics/logo.png deleted file mode 100644 index 9791dd1..0000000 Binary files a/graphics/logo.png and /dev/null differ diff --git a/graphics/main_.jpg b/graphics/main_.jpg deleted file mode 100644 index 850439d..0000000 Binary files a/graphics/main_.jpg and /dev/null differ diff --git a/graphics/main__.jpg b/graphics/main__.jpg deleted file mode 100644 index 73b138e..0000000 Binary files a/graphics/main__.jpg and /dev/null differ diff --git a/graphics/old/b3.png b/graphics/old/b3.png deleted file mode 100644 index 184bd02..0000000 Binary files a/graphics/old/b3.png and /dev/null differ diff --git a/graphics/old/icon.gif b/graphics/old/icon.gif deleted file mode 100644 index ca520e8..0000000 Binary files a/graphics/old/icon.gif and /dev/null differ diff --git a/graphics/old/icon.ico b/graphics/old/icon.ico deleted file mode 100644 index bc68c9e..0000000 Binary files a/graphics/old/icon.ico and /dev/null differ diff --git a/graphics/old/icon.jpg b/graphics/old/icon.jpg deleted file mode 100644 index 9915186..0000000 Binary files 
a/graphics/old/icon.jpg and /dev/null differ diff --git a/graphics/old/left.bmp b/graphics/old/left.bmp deleted file mode 100644 index 69ef4c2..0000000 Binary files a/graphics/old/left.bmp and /dev/null differ diff --git a/graphics/old/logo.gif b/graphics/old/logo.gif deleted file mode 100644 index 206ea75..0000000 Binary files a/graphics/old/logo.gif and /dev/null differ diff --git a/graphics/old/logo.jpg b/graphics/old/logo.jpg deleted file mode 100644 index e036597..0000000 Binary files a/graphics/old/logo.jpg and /dev/null differ diff --git a/graphics/old/logo.png b/graphics/old/logo.png deleted file mode 100644 index 3d29eff..0000000 Binary files a/graphics/old/logo.png and /dev/null differ diff --git a/graphics/old/logo_256.png b/graphics/old/logo_256.png deleted file mode 100644 index 9385895..0000000 Binary files a/graphics/old/logo_256.png and /dev/null differ diff --git a/graphics/old/logo_80.gif b/graphics/old/logo_80.gif deleted file mode 100644 index 7e991f5..0000000 Binary files a/graphics/old/logo_80.gif and /dev/null differ diff --git a/graphics/old/mini.bmp b/graphics/old/mini.bmp deleted file mode 100644 index 5c6f262..0000000 Binary files a/graphics/old/mini.bmp and /dev/null differ diff --git a/graphics/riot.png b/graphics/riot.png deleted file mode 100644 index e0f6790..0000000 Binary files a/graphics/riot.png and /dev/null differ diff --git a/ledger_explorer.py b/ledger_explorer.py index 4d8ec9a..a70d1db 100644 --- a/ledger_explorer.py +++ b/ledger_explorer.py @@ -11,7 +11,7 @@ if "testnet" in version: port = 2829 - hyper_path = "static/test.db" + hyper_path = "static/ledger_test.db" def execute(cursor, query): """Secure execute for slow nodes""" @@ -270,4 +270,4 @@ def make_app(): if __name__ == "__main__": app = make_app() app.listen(5492) - tornado.ioloop.IOLoop.current().start() \ No newline at end of file + tornado.ioloop.IOLoop.current().start() diff --git a/libs/client.py b/libs/client.py index d4650c1..5a87197 100644 --- a/libs/client.py +++ b/libs/client.py @@ -1,4 +1,3 @@ class Client: def __init__(self): - self.banned = False self.connected = False \ No newline at end of file diff --git a/libs/keys.py b/libs/keys.py index d454fa4..3f383ab 100644 --- a/libs/keys.py +++ b/libs/keys.py @@ -1,6 +1,6 @@ class Keys(): def __init__(self): self.public_key_readable = None - self.public_key_hashed = None + self.public_key_b64encoded = None self.address = None - self.keyfile = None \ No newline at end of file + self.keyfile = None diff --git a/libs/node.py b/libs/node.py index e89de71..2c9c36b 100644 --- a/libs/node.py +++ b/libs/node.py @@ -3,7 +3,8 @@ import sys import platform -class Node(): + +class Node: def platform(self): if "Linux" in platform.system(): return True @@ -14,8 +15,10 @@ def __init__(self): self.app_version = None self.startup_time = None self.version_allow = None - self.hdd_block = None #interferes with block flows, careful - self.last_block = None #interferes with block flows, careful + self.hdd_block = None # in ram mode, this differs from node.last_block + self.hdd_hash = None # in ram mode, this differs from node.last_block_hash + self.last_block_hash = None # in ram mode, this differs from node.hdd_hash + self.last_block = None # in ram mode, this differs from node.hdd_block self.plugin_manager = None self.peers = None self.IS_STOPPING = False @@ -27,7 +30,6 @@ def __init__(self): self.is_regnet = False self.is_mainnet = False - self.port = None self.hyper_recompress = True self.hyper_path = None self.ledger_path = None @@ -55,8 +57,12 @@ 
def __init__(self): self.terminal_output = None self.egress = None self.genesis = None + + self.last_block_timestamp = None self.last_block_ago = None - self.last_block_timestamp = 0 + + self.recompress = None + self.accept_peers = True self.difficulty = None self.ledger_temp = None @@ -65,4 +71,4 @@ def __init__(self): self.py_version= int(str(sys.version_info.major) + str(sys.version_info.minor) + str(sys.version_info.micro)) self.keys = None - self.linux = self.platform() \ No newline at end of file + self.linux = self.platform() diff --git a/lwbench.py b/lwbench.py deleted file mode 100644 index db9b99f..0000000 --- a/lwbench.py +++ /dev/null @@ -1,91 +0,0 @@ -import connections -import collections -import socks -import time - -DEFAULT_PORT = 5658 - - -def convert_ip_port(ip, some_port): - """ - Get ip and port, but extract port from ip if ip was as ip:port - :param ip: - :param some_port: default port - :return: (ip, port) - """ - if ':' in ip: - ip, some_port = ip.split(':') - return ip, some_port - - -def connectible(ipport): - """return True if the ip:port can be connected to, without sending any command""" - try: - s = socks.socksocket() - s.settimeout(3) - ip, local_port = convert_ip_port(ipport, DEFAULT_PORT) - s.connect((ip, int(local_port))) - return True - except: - return False - - -def time_measure(light_ip, app_log): - port = DEFAULT_PORT - result_collection = {ip: [0, 0] for ip in light_ip} - - for address in result_collection: - try: - ip, local_port = convert_ip_port(address, port) - print("Attempting to benchmark {}:{}".format(ip, local_port)) - s = socks.socksocket() - s.settimeout(3) - - if local_port == DEFAULT_PORT: #doesn't work if a node uses non standard port, bench in else-path - will fail - #start benchmark - timer_start = time.time() - s.connect((ip, int(local_port))) - connections.send(s, "statusget", 10) - result = connections.receive(s, 10, 5) - timer_result = (time.time() - timer_start) * 5 #penalty to prio Wallet-Servers before nodes. local node should be so fast, to be still fastest, else it is better that a wallet-server is chosen! 
- result_collection[address] = timer_result, result[8][7] - app_log.warning("Result for {}:{}, a normal node, penalty-factor *5 (real result time/5): {}".format(ip, local_port, timer_result)) - #finish benchmark - else: - #start benchmark - timer_start = time.time() - s.connect((ip, int(local_port))) - connections.send(s, "statusget", 10) - result = connections.receive(s, 10, 5) - connections.send(s, "wstatusget", 10) - result_ws = connections.receive(s, 10, 5) - timer_result = time.time() - timer_start - #finish benchmark and load balance if too many clients - ws_clients = result_ws.get('clients') - if ws_clients > 300: - timer_result = timer_result + ws_clients/1000 - app_log.warning("Result for {}:{}, modified due to high client load: {}".format(ip, local_port, timer_result)) - elif ws_clients > 150: - timer_result = timer_result + ws_clients/10000 - app_log.warning("Result for {}:{}, modified due to client load: {}".format(ip, local_port, timer_result)) - else: - app_log.warning("Result for {}:{}, low load - unmodified: {}".format(ip, local_port, timer_result)) - result_collection[address] = timer_result, result[8][7] - - except Exception as e: - print("Cannot benchmark {}:{}".format(ip, local_port)) - - # sort IPs for measured Time - bench_result = collections.OrderedDict(sorted((value[0], key) for (key, value) in result_collection.items())) - light_ip = list(bench_result.values()) - - max_height_temp = list(result_collection.values()) - max_height = max(list(zip(*max_height_temp))[1]) - for key, value in result_collection.items(): - if int(value[1]) < (max_height - 5): - try: - light_ip.remove(key) - light_ip.append(key) - except Exception as e: - pass - return light_ip diff --git a/mempool.py b/mempool.py index 7c7eb05..67c7ced 100644 --- a/mempool.py +++ b/mempool.py @@ -2,30 +2,34 @@ Mempool module for Bismuth nodes """ -import base64 -import hashlib +import functools +# import base64 +# import hashlib import os import sqlite3 import sys import threading import time -import functools -from Cryptodome.Hash import SHA -#from Cryptodome.PublicKey import RSA -#from Cryptodome.Signature import PKCS1_v1_5 +from polysign.signerfactory import SignerFactory import essentials -from quantizer import * -from polysign.signerfactory import SignerFactory +from quantizer import quantize_two, quantize_eight, quantize_ten + +# from Cryptodome.Hash import SHA +# from Cryptodome.PublicKey import RSA +# from Cryptodome.Signature import PKCS1_v1_5 -__version__ = "0.0.5f" +__version__ = "0.0.6c" """ +0.0.5g - Add default param to mergedts for compatibility 0.0.5f - Using polysign 0.0.5e - add mergedts timestamp to tx for better handling of late txs quicker unfreeze less strict freezing +0.0.6b - Raise freeze tolerance to > 15 minutes old txs. +0.0.6c - Return last exception to client in all cases """ MEMPOOL = None @@ -47,7 +51,7 @@ # Create mempool table SQL_CREATE = "CREATE TABLE IF NOT EXISTS transactions (" \ "timestamp TEXT, address TEXT, recipient TEXT, amount TEXT, signature TEXT, " \ - "public_key TEXT, operation TEXT, openfield TEXT, mergedts INTEGER)" + "public_key TEXT, operation TEXT, openfield TEXT, mergedts INTEGER(4) not null default (strftime('%s','now')) )" # Purge old txs that may be stuck SQL_PURGE = "DELETE FROM transactions WHERE timestamp <= strftime('%s', 'now', '-1 day')" @@ -56,10 +60,10 @@ SQL_CLEAR = "DELETE FROM transactions" # Check for presence of a given tx signature -SQL_SIG_CHECK = 'SELECT timestamp FROM transactions WHERE signature = ?' 
+SQL_SIG_CHECK = 'SELECT timestamp FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) and signature = ?1' # delete a single tx -SQL_DELETE_TX = 'DELETE FROM transactions WHERE signature = ?' +SQL_DELETE_TX = 'DELETE FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) and signature = ?1' # Selects all tx from mempool - list fields so we don't send mergedts and keep compatibility SQL_SELECT_ALL_TXS = 'SELECT timestamp, address, recipient, amount, signature, public_key, operation, openfield FROM transactions' @@ -81,10 +85,12 @@ SQL_MEMPOOL_GET = "SELECT amount, openfield, operation FROM transactions WHERE address = ?;" + def sql_trace_callback(log, id, statement): line = f"SQL[{id}] {statement}" log.warning(line) + class Mempool: """The mempool manager. Thread safe""" @@ -107,11 +113,14 @@ def __init__(self, app_log, config=None, db_lock=None, testnet=False, trace_db_c self.trace_db_calls = trace_db_calls self.testnet = testnet - if not self.testnet: - self.mempool_ram_file = "file:mempool?mode=memory&cache=shared" - else: + + if self.testnet: app_log.warning("Starting mempool in testnet mode") + self.mempool_path = "mempool_testnet.db" self.mempool_ram_file = "file:mempool_testnet?mode=memory&cache=shared" + else: + self.mempool_ram_file = "file:mempool?mode=memory&cache=shared" + self.mempool_path = self.config.mempool_path #default self.check() @@ -147,7 +156,7 @@ def check(self): self.db.commit() self.app_log.warning("Status: In memory mempool file created") else: - self.db = sqlite3.connect('mempool.db', timeout=1, + self.db = sqlite3.connect(self.mempool_path, timeout=1, check_same_thread=False) if self.trace_db_calls: self.db.set_trace_callback(functools.partial(sql_trace_callback,self.app_log,"MEMPOOL")) @@ -160,8 +169,8 @@ def check(self): # print(res) if len(res) != 9: self.db.close() - os.remove("mempool.db") - self.db = sqlite3.connect('mempool.db', timeout=1, + os.remove(self.mempool_path) + self.db = sqlite3.connect(self.mempool_path, timeout=1, check_same_thread=False) if self.trace_db_calls: self.db.set_trace_callback(functools.partial(sql_trace_callback,self.app_log,"MEMPOOL")) @@ -425,7 +434,7 @@ def space_left_for_tx(self, transaction, mempool_size): # Sorry, no space left for this tx type. 
return False - def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): + def merge(self, data: list, peer_ip: str, c, size_bypass: bool=False, wait: bool=False, revert: bool =False) -> list: """ Checks and merge the tx list in out mempool :param data: @@ -439,7 +448,7 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): global REFUSE_OLDER_THAN # Easy cases of empty or invalid data if not data: - return "Mempool from {} was empty".format(peer_ip) + return ["Mempool from {} was empty".format(peer_ip)] mempool_result = [] if data == '*': raise ValueError("Connection lost") @@ -493,29 +502,50 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): if size_bypass or self.space_left_for_tx(transaction, mempool_size): # all transactions in the mempool need to be cycled to check for special cases, # therefore no while/break loop here - mempool_timestamp = '%.2f' % (quantize_two(transaction[0])) - mempool_timestamp_float = float(transaction[0]) # limit Decimal where not needed + try: + mempool_timestamp = '%.2f' % (quantize_two(transaction[0])) + mempool_timestamp_float = float(transaction[0]) # limit Decimal where not needed + except Exception as e: + mempool_result.append("Mempool: Invalid timestamp {}".format(transaction[0])) + if not essentials.address_validate(transaction[1]): + mempool_result.append("Mempool: Invalid address {}".format(transaction[1])) + continue + # We could now ignore the truncates here, I left them for explicit reminder of the various fields max lengths. mempool_address = str(transaction[1])[:56] + if not essentials.address_validate(transaction[2]): + mempool_result.append("Mempool: Invalid recipient {}".format(transaction[2])) + continue mempool_recipient = str(transaction[2])[:56] - mempool_amount = '%.8f' % (quantize_eight(transaction[3])) # convert scientific notation - mempool_amount_float = float(transaction[3]) + try: + mempool_amount = '%.8f' % (quantize_eight(transaction[3])) # convert scientific notation + mempool_amount_float = float(transaction[3]) + except Exception as e: + mempool_result.append("Mempool: Invalid amount {}".format(transaction[3])) + continue + if len(transaction[4]) > 684: + mempool_result.append("Mempool: Invalid signature len{}".format(len(transaction[4]))) + continue mempool_signature_enc = str(transaction[4])[:684] - mempool_public_key_hashed = str(transaction[5])[:1068] - if "b'" == mempool_public_key_hashed[:2]: - mempool_public_key_hashed = transaction[5][2:1070] + if len(transaction[5]) > 1068: + mempool_result.append("Mempool: Invalid pubkey len{}".format(len(transaction[5]))) + continue + mempool_public_key_b64encoded = str(transaction[5])[:1068] + if "b'" == mempool_public_key_b64encoded[:2]: + # Binary content instead of str - leftover from legacy code? 
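The merge() rework above replaces silent truncation with explicit per-field validation: each oversized or malformed field appends a reason to mempool_result and skips the transaction with continue. A condensed stand-alone illustration of those length checks (the dict layout and helper name are mine; merge() itself works on positional tuples):

    # Mempool field caps taken from the checks above
    LIMITS = {"signature": 684, "public_key": 1068, "operation": 30, "openfield": 100000}

    def oversized_fields(tx: dict) -> list:
        """Return one reason string per field exceeding its mempool cap."""
        return ["Mempool: Invalid {} len {}".format(name, len(tx[name]))
                for name, cap in LIMITS.items() if len(tx[name]) > cap]

    tx = {"signature": "x" * 700, "public_key": "y" * 100,
          "operation": "token:issue", "openfield": "some data"}
    print(oversized_fields(tx))  # ['Mempool: Invalid signature len 700']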
+ mempool_public_key_b64encoded = transaction[5][2:1070] + if len(transaction[6]) > 30: + mempool_result.append("Mempool: Invalid operation len{}".format(len(transaction[6]))) + continue mempool_operation = str(transaction[6])[:30] + if len(transaction[7]) > 100000: + mempool_result.append("Mempool: Invalid openfield len{}".format(len(transaction[7]))) + continue mempool_openfield = str(transaction[7])[:100000] # Begin with the easy tests that do not require cpu or disk access if mempool_amount_float < 0: mempool_result.append("Mempool: Negative balance spend attempt") continue - if not essentials.address_validate(mempool_address): - mempool_result.append("Mempool: Invalid address {}".format(mempool_address)) - continue - if not essentials.address_validate(mempool_recipient): - mempool_result.append("Mempool: Invalid recipient {}".format(mempool_recipient)) - continue if mempool_timestamp_float > time_now: mempool_result.append("Mempool: Future transaction rejected {}s". format(mempool_timestamp_float - time_now)) @@ -528,10 +558,15 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): # Then more cpu heavy tests buffer = str((mempool_timestamp, mempool_address, mempool_recipient, mempool_amount, mempool_operation, mempool_openfield)).encode("utf-8") + #  Will raise if error - SignerFactory.verify_bis_signature(mempool_signature_enc, mempool_public_key_hashed, - buffer, - mempool_address) + try: + SignerFactory.verify_bis_signature(mempool_signature_enc, mempool_public_key_b64encoded, + buffer, + mempool_address) + except Exception as e: + mempool_result.append(f"Mempool: Signature did not match for address ({e})") + continue # Only now, process the tests requiring db access mempool_in = self.sig_check(mempool_signature_enc) @@ -543,7 +578,7 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): # reject transactions which are already in the ledger # TODO: not clean, will need to have ledger as a module too. # TODO: need better txid index, this is very sloooooooow - essentials.execute_param_c(c, "SELECT timestamp FROM transactions WHERE signature = ?", + essentials.execute_param_c(c, "SELECT timestamp FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) AND signature = ?1", (mempool_signature_enc,), self.app_log) ledger_in = bool(c.fetchone()) # remove from mempool if it's in both ledger and mempool already @@ -560,8 +595,8 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): mempool_result.append("That transaction is already in our ledger") # Can be a syncing node. Do not request mempool from this peer until FREEZE_MIN min # ledger_in is the ts of the tx in ledger. if it's recent, maybe the peer is just one block late. - # give him 3 minute margin. - if (peer_ip != '127.0.0.1') and (ledger_in < time_now - 60 * 3): + # give him 15 minute margin. 
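The signature test below rebuilds the signed buffer from the transaction fields and lets polysign raise on mismatch, which merge() now converts into an appended reason plus continue instead of aborting the whole batch. A sketch of the same check as a boolean helper, assuming polysign is importable (the helper name is mine; the buffer layout and call are as in this diff):

    from polysign.signerfactory import SignerFactory

    def signature_ok(timestamp, address, recipient, amount, operation, openfield,
                     signature_enc, public_key_b64encoded):
        """True if the tx signature verifies against the rebuilt buffer."""
        buffer = str((timestamp, address, recipient, amount,
                      operation, openfield)).encode("utf-8")
        try:
            # Raises on mismatch; the factory knows the different tx schemes
            SignerFactory.verify_bis_signature(signature_enc, public_key_b64encoded,
                                               buffer, address)
            return True
        except Exception as e:
            print(f"Mempool: Signature did not match for address ({e})")
            return False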
+ if (peer_ip != '127.0.0.1') and (ledger_in < time_now - 60 * 15): with self.peers_lock: self.peers_sent[peer_ip] = time.time() + FREEZE_MIN * 60 self.app_log.warning("Freezing mempool from {} for {} min.".format(peer_ip, FREEZE_MIN)) @@ -612,15 +647,17 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): "SELECT sum(reward) FROM transactions WHERE recipient = ?", (mempool_address,), self.app_log): rewards = quantize_eight(rewards) + quantize_eight(entry[0]) + # error conversion from NoneType to Decimal is not supported balance = quantize_eight(credit - debit - fees + rewards - quantize_eight(mempool_amount)) balance_pre = quantize_eight(credit - debit_ledger - fees + rewards) fee = essentials.fee_calculate(mempool_openfield, mempool_operation, last_block) - #print("Balance", balance, fee) + # print("Balance", balance, fee) - if quantize_eight(mempool_amount) > quantize_eight(balance_pre): #mp amount is already included in "balance" var! also, that tx might already be in the mempool + if quantize_eight(mempool_amount) > quantize_eight(balance_pre): + # mp amount is already included in "balance" var! also, that tx might already be in the mempool mempool_result.append("Mempool: Sending more than owned") continue if quantize_eight(balance) - quantize_eight(fee) < 0: @@ -630,10 +667,10 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): # Pfew! we can finally insert into mempool - all is str, type converted and enforced above self.execute("INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?)", (mempool_timestamp, mempool_address, mempool_recipient, mempool_amount, - mempool_signature_enc, mempool_public_key_hashed, mempool_operation, + mempool_signature_enc, mempool_public_key_b64encoded, mempool_operation, mempool_openfield, int(time_now))) mempool_result.append("Mempool updated with a received transaction from {}".format(peer_ip)) - mempool_result.append("Success") + mempool_result.append("Success") # WARNING: Do not change string or case ever! self.commit() # Save (commit) the changes to mempool db mempool_size += sys.getsizeof(str(transaction)) / 1000000.0 @@ -647,5 +684,10 @@ def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False): except Exception as e: self.app_log.warning("Mempool: Error processing: {} {}".format(data, e)) if self.config.debug: + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + self.app_log.warning("{} {} {}".format(exc_type, fname, exc_tb.tb_lineno)) + mempool_result.append("Exception: {}".format(str(e))) + # if left there, means debug can *not* be used in production, or exception is not sent back to the client. 
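The balance section above checks two quantities: balance_pre (ledger-only funds, so a tx already sitting in the mempool is not double-counted against itself) and balance (funds after this tx debits), which must still cover the fee. A worked example with illustrative figures, using a local stand-in for quantizer.quantize_eight and ignoring the other pending mempool debits that the real debit figure includes:

    from decimal import Decimal

    def q8(value):  # local stand-in for quantizer.quantize_eight
        return Decimal(str(value)).quantize(Decimal("0.00000000"))

    # 10 BIS received, 2 spent on-chain, 0.01 paid in past fees, no rewards;
    # the incoming tx tries to send 7.5 with a 0.01 fee.
    credit, debit_ledger, fees, rewards = q8(10), q8(2), q8("0.01"), q8(0)
    mempool_amount, fee = q8("7.5"), q8("0.01")

    balance_pre = credit - debit_ledger - fees + rewards  # 7.99000000
    balance = balance_pre - mempool_amount                # 0.49000000
    print(mempool_amount > balance_pre)  # False -> not "Sending more than owned"
    print(balance - fee < 0)             # False -> fee still covered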
raise return mempool_result diff --git a/mining.py b/mining.py index 0c78ed4..43a957a 100644 --- a/mining.py +++ b/mining.py @@ -3,7 +3,7 @@ """ import hashlib -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten __version__ = '0.0.1' diff --git a/mining_heavy3.py b/mining_heavy3.py index 0957c72..800c59a 100644 --- a/mining_heavy3.py +++ b/mining_heavy3.py @@ -12,14 +12,15 @@ import sys from hashlib import sha224 from hmac_drbg import DRBG -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten +from decimal import Decimal import regnet from fork import Fork fork = Fork() -__version__ = '0.1.3' +__version__ = '0.1.4' print("Mining_Heavy3 v{}".format(__version__)) @@ -46,7 +47,7 @@ def anneal3(mmap, n): h7 = n & 0xffffffff n = n >> 32 index = ((h7 & ~0x7) % RND_LEN) * 4 - f1 = struct.unpack('I', mmap[index:index + 4])[0] + # f1 = struct.unpack('I', mmap[index:index + 4])[0] value = h7 ^ struct.unpack('I', mmap[index:index + 4])[0] res = "{:08x}".format(value) for i in range(6): @@ -66,9 +67,9 @@ def diffme_heavy3(pool_address, nonce, db_block_hash): # minimum possible diff diff = 1 diff_result = 0 - hash = sha224((pool_address + nonce + db_block_hash).encode("utf-8")).digest() - hash = int.from_bytes(hash, 'big') - annealed_sha = anneal3(MMAP, hash) + hash224 = sha224((pool_address + nonce + db_block_hash).encode("utf-8")).digest() + hash224 = int.from_bytes(hash224, 'big') + annealed_sha = anneal3(MMAP, hash224) bin_annealed_sha = bin_convert(annealed_sha) mining_condition = bin_convert(db_block_hash) while mining_condition[:diff] in bin_annealed_sha: @@ -135,7 +136,7 @@ def check_block(block_height_new, miner_address, nonce, db_block_hash, diff0, re return diff_save -def create_heavy3a(file_name): +def create_heavy3a(file_name="heavy3a.bin"): print("Creating Junction Noise file, this usually takes a few minutes...") gen = DRBG(b"Bismuth is a chemical element with symbol Bi and atomic number 83. 
It is a pentavalent post-transition metal and one of the pnictogens with chemical properties resembling its lighter homologs arsenic and antimony.") # Size in Gb - No more than 4Gb from a single seed @@ -148,26 +149,34 @@ def create_heavy3a(file_name): f.write(gen.generate(CHUNK_SIZE)) -def mining_open(): +def mining_open(file_name="heavy3a.bin"): """ Opens the Junction MMapped file """ global F global MMAP global RND_LEN - map = './heavy3a.bin' if os.path.isfile('./heavy3a.bin') else '../CSPRNG/rnd.bin' - if not os.path.isfile(map): - create_heavy3a('./heavy3a.bin') - map = './heavy3a.bin' + if os.path.isfile(file_name): + size = os.path.getsize(file_name) + if size != 1073741824: + print("Invalid size of heavy file {}.".format(file_name)) + try: + os.remove(file_name) + print("Deleted, Will be re-created") + except Exception as e: + print(e) + sys.exit() + if not os.path.isfile(file_name): + create_heavy3a(file_name) try: - F = open(map, "rb+") + F = open(file_name, "rb+") # memory-map the file, size 0 means whole file MMAP = mmap.mmap(F.fileno(), 0) - RND_LEN = os.path.getsize(map) // 4 + RND_LEN = os.path.getsize(file_name) // 4 if read_int_from_map(MMAP, 0) != 3786993664: - raise ValueError("Wrong file: {}".format(map)) + raise ValueError("Wrong file: {}".format(file_name)) if read_int_from_map(MMAP, 1024) != 1742706086: - raise ValueError("Wrong file: {}".format(map)) + raise ValueError("Wrong file: {}".format(file_name)) except Exception as e: print("Error while loading Junction file: {}".format(e)) sys.exit() @@ -190,4 +199,4 @@ def mining_close(): assert F F.close() except: - pass \ No newline at end of file + pass diff --git a/node.py b/node.py index 7b6e480..3865fd4 100644 --- a/node.py +++ b/node.py @@ -11,7 +11,7 @@ # issues with db? perhaps you missed a commit() or two -VERSION = "4.3.0.1" # Post fork candidate +VERSION = "4.4.0.1" # Post fork candidate 1 import functools import glob @@ -21,8 +21,11 @@ import sqlite3 import tarfile import threading +from sys import version_info + +import aliases # PREFORK_ALIASES +# import aliasesv2 as aliases # POSTFORK_ALIASES -import aliases # Bis specific modules import apihandler import connectionmanager @@ -31,7 +34,7 @@ import options import peershandler import plugins -import tokensv2 as tokens +import tokensv2 as tokens # TODO: unused here import wallet_keys from connections import send, receive from digest import * @@ -46,8 +49,6 @@ import base64 #/todo -getcontext().rounding = ROUND_HALF_EVEN - fork = Fork() appname = "Bismuth" @@ -55,12 +56,14 @@ # nodes_ban_reset=config.nodes_ban_reset + def sql_trace_callback(log, id, statement): line = f"SQL[{id}] {statement}" log.warning(line) def bootstrap(): + # TODO: Candidate for single user mode try: types = ['static/*.db-wal', 'static/*.db-shm'] for t in types: @@ -80,7 +83,12 @@ def bootstrap(): def check_integrity(database): + # TODO: Candidate for single user mode # check ledger integrity + + if not os.path.exists("static"): + os.mkdir("static") + with sqlite3.connect(database) as ledger_check: if node.trace_db_calls: ledger_check.set_trace_callback(functools.partial(sql_trace_callback,node.logger.app_log,"CHECK_INTEGRITY")) @@ -106,18 +114,18 @@ def check_integrity(database): def rollback(node, db_handler, block_height): node.logger.app_log.warning(f"Status: Rolling back below: {block_height}") - db_handler.rollback_to(block_height) + db_handler.rollback_under(block_height) # rollback indices db_handler.tokens_rollback(node, block_height) db_handler.aliases_rollback(node, 
block_height) - db_handler.staking_rollback(node, block_height) # rollback indices node.logger.app_log.warning(f"Status: Chain rolled back below {block_height} and will be resynchronized") def recompress_ledger(node, rebuild=False, depth=15000): + # TODO: Candidate for single user mode node.logger.app_log.warning(f"Status: Recompressing, please be patient") files_remove = [node.ledger_path + '.temp',node.ledger_path + '.temp-shm',node.ledger_path + '.temp-wal'] @@ -192,15 +200,14 @@ def recompress_ledger(node, rebuild=False, depth=15000): hyp.execute("VACUUM") hyper.close() - - if os.path.exists(node.hyper_path) and rebuild: + if os.path.exists(node.hyper_path): os.remove(node.hyper_path) # remove the old hyperblocks to rebuild os.rename(node.ledger_path + '.temp', node.hyper_path) def ledger_check_heights(node, db_handler): + # TODO: Candidate for single user mode """conversion of normal blocks into hyperblocks from ledger.db or hyper.db to hyper.db""" - if os.path.exists(node.hyper_path): # cross-integrity check @@ -213,37 +220,36 @@ def ledger_check_heights(node, db_handler): if hdd_block_max == hdd2_block_last == hdd2_block_last_misc == hdd_block_max_diff and node.hyper_recompress: # cross-integrity check node.logger.app_log.warning("Status: Recompressing hyperblocks (keeping full ledger)") - recompress = True + node.recompress = True #print (hdd_block_max,hdd2_block_last,node.hyper_recompress) elif hdd_block_max == hdd2_block_last and not node.hyper_recompress: node.logger.app_log.warning("Status: Hyperblock recompression skipped") - recompress = False + node.recompress = False else: lowest_block = min(hdd_block_max, hdd2_block_last, hdd_block_max_diff, hdd2_block_last_misc) highest_block = max(hdd_block_max, hdd2_block_last, hdd_block_max_diff, hdd2_block_last_misc) - node.logger.app_log.warning( f"Status: Cross-integrity check failed, {highest_block} will be rolled back below {lowest_block}") rollback(node,db_handler_initial,lowest_block) #rollback to the lowest value - - recompress = False + node.recompress = False else: node.logger.app_log.warning("Status: Compressing ledger to Hyperblocks") - recompress = True + node.recompress = True + - if recompress: - recompress_ledger(node) def bin_convert(string): + # TODO: Move to essentials.py return ''.join(format(ord(x), '8b').replace(' ', '0') for x in string) def balanceget(balance_address, db_handler): + # TODO: To move in db_handler, call by db_handler.balance_get(address) # verify balance # node.logger.app_log.info("Mempool: Verifying balance") @@ -326,6 +332,13 @@ def balanceget(balance_address, db_handler): def blocknf(node, block_hash_delete, peer_ip, db_handler, hyperblocks=False): + """ + Rolls back a single block, updates node object variables. + Rollback target must be above checkpoint. + Hash to rollback must match in case our ledger moved. + Not trusting hyperblock nodes for old blocks because of trimming, + they wouldn't find the hash and cause rollback. 
+ """ node.logger.app_log.info(f"Rollback operation on {block_hash_delete} initiated by {peer_ip}") my_time = time.time() @@ -342,7 +355,6 @@ def blocknf(node, block_hash_delete, peer_ip, db_handler, hyperblocks=False): db_block_height = block_max_ram ['block_height'] db_block_hash = block_max_ram ['block_hash'] - ip = {'ip': peer_ip} node.plugin_manager.execute_filter_hook('filter_rollback_ip', ip) if ip['ip'] == 'no': @@ -369,22 +381,27 @@ def blocknf(node, block_hash_delete, peer_ip, db_handler, hyperblocks=False): node.logger.app_log.warning(f"Node {peer_ip} didn't find block {db_block_height}({db_block_hash})") # roll back hdd too - db_handler.rollback_to(db_block_height) - node.hdd_block = db_handler.block_height_max() + db_handler.rollback_under(db_block_height) # /roll back hdd too # rollback indices db_handler.tokens_rollback(node, db_block_height) db_handler.aliases_rollback(node, db_block_height) - db_handler.staking_rollback(node, db_block_height) # /rollback indices + node.last_block_timestamp = db_handler.last_block_timestamp() + node.last_block_hash = db_handler.last_block_hash() + node.last_block = db_block_height - 1 + node.hdd_hash = db_handler.last_block_hash() + node.hdd_block = db_block_height - 1 + tokens.tokens_update(node, db_handler) + except Exception as e: node.logger.app_log.warning(e) - finally: node.db_lock.release() + node.logger.app_log.warning(f"Database lock released") if skip: @@ -425,6 +442,7 @@ def blocknf(node, block_hash_delete, peer_ip, db_handler, hyperblocks=False): def sequencing_check(db_handler): + # TODO: Candidate for single user mode try: with open("sequencing_last", 'r') as filename: sequencing_last = int(filename.read()) @@ -435,7 +453,6 @@ def sequencing_check(db_handler): node.logger.app_log.warning(f"Status: Testing chain sequencing, starting with block {sequencing_last}") - chains_to_check = [node.ledger_path, node.hyper_path] for chain in chains_to_check: @@ -471,7 +488,6 @@ def sequencing_check(db_handler): # rollback indices db_handler.tokens_rollback(node, y) db_handler.aliases_rollback(node, y) - db_handler.staking_rollback(node, y) # rollback indices @@ -500,22 +516,26 @@ def sequencing_check(db_handler): conn2.set_trace_callback(functools.partial(sql_trace_callback,node.logger.app_log,"SEQUENCE-CHECK-CHAIN2B")) c2 = conn2.cursor() node.logger.app_log.warning( - f"Status: Chain {chain} difficulty sequencing error at: {row[0]} {row[0]} instead of {y}") - c2.execute("DELETE FROM transactions WHERE block_height >= ?", row[0],) + f"Status: Chain {chain} difficulty sequencing error at: {row[0]}. 
{row[0]} instead of {y}") + c2.execute("DELETE FROM transactions WHERE block_height >= ?", (row[0],)) conn2.commit() - c2.execute("DELETE FROM misc WHERE block_height >= ?", row[0],) + c2.execute("DELETE FROM misc WHERE block_height >= ?", (row[0],)) conn2.commit() db_handler.execute_param(conn2, ( 'DELETE FROM transactions WHERE address = "Development Reward" AND block_height <= ?'), (-row[0],)) conn2.commit() + + db_handler.execute_param(conn2, ( + 'DELETE FROM transactions WHERE address = "Hypernode Payouts" AND block_height <= ?'), + (-row[0],)) + conn2.commit() conn2.close() # rollback indices db_handler.tokens_rollback(node, y) db_handler.aliases_rollback(node, y) - db_handler.staking_rollback(node, y) # rollback indices node.logger.app_log.warning(f"Status: Due to a sequencing issue at block {y}, {chain} has been rolled back and will be resynchronized") @@ -535,7 +555,10 @@ def sequencing_check(db_handler): class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler): def handle(self): - #this is a single thread + # this is a dedicated thread for each client (not ip) + if node.IS_STOPPING: + node.logger.app_log.warning("Inbound: Rejected incoming cnx, node is stopping") + return db_handler_instance = dbhandler.DbHandler(node.index_db, node.ledger_path, node.hyper_path, node.ram, node.ledger_ram_file, node.logger, trace_db_calls=node.trace_db_calls) @@ -567,18 +590,23 @@ def handle(self): node.plugin_manager.execute_filter_hook('peer_ip', dict_ip) if node.peers.is_banned(peer_ip) or dict_ip['ip'] == 'banned': - client_instance.banned = True self.request.close() node.logger.app_log.info(f"IP {peer_ip} banned, disconnected") + # TODO: I'd like to call + """ + node.peers.peersync({peer_ip: node.port}) + so we can save the peers that connected to us. + But not ok in current architecture: would delay the command, and we're not even sure it would be saved. + TODO: Workaround: make sure our external ip and port is present in the peers we announce, or new nodes are likely never to be announced. + Warning: needs public ip/port, not local ones! 
+ """ timeout_operation = 120 # timeout timer_operation = time.time() # start counting - while not client_instance.banned and node.peers.version_allowed(peer_ip, node.version_allow) and client_instance.connected: + while not node.peers.is_banned(peer_ip) and node.peers.version_allowed(peer_ip, node.version_allow) and client_instance.connected: try: - - # Failsafe if self.request == -1: raise ValueError(f"Inbound: Closed socket from {peer_ip}") @@ -626,6 +654,7 @@ def handle(self): # receive theirs segments = receive(self.request) node.logger.app_log.info(mp.MEMPOOL.merge(segments, peer_ip, db_handler_instance.c, False)) + #improvement possible - pass peer_ip from worker # receive theirs @@ -645,8 +674,6 @@ def handle(self): # if len(mempool_txs) > 0: same as the other send(self.request, mempool_txs) - # send own - elif data == "hello": if node.is_regnet: node.logger.app_log.info("Inbound: Got hello but I'm in regtest mode, closing.") @@ -692,7 +719,7 @@ def handle(self): block_req = node.peers.consensus_max node.logger.app_log.warning("Longest chain rule triggered") - if int(received_block_height) >= block_req: + if int(received_block_height) >= block_req and int(received_block_height) > node.last_block: try: # they claim to have the longest chain, things must go smooth or ban node.logger.app_log.warning(f"Confirming to sync from {peer_ip}") @@ -711,8 +738,7 @@ def handle(self): node.logger.app_log.warning(f"Rejecting to sync from {peer_ip}") send(self.request, "blocksrj") node.logger.app_log.info( - f"Inbound: Distant peer {peer_ip} is at {received_block_height}, should be at least {block_req}") - + f"Inbound: Distant peer {peer_ip} is at {received_block_height}, should be at least {max(block_req,node.last_block+1)}") send(self.request, "sync") elif data == "blockheight": @@ -724,35 +750,29 @@ def handle(self): # consensus pool 1 (connection from them) consensus_blockheight = int(received_block_height) # str int to remove leading zeros # consensus_add(peer_ip, consensus_blockheight, self.request) - node.peers.consensus_add(peer_ip, consensus_blockheight, self.request, node.last_block) + node.peers.consensus_add(peer_ip, consensus_blockheight, self.request, node.hdd_block) # consensus pool 1 (connection from them) - db_block_height = db_handler_instance.block_height_max() - # append zeroes to get static length - send(self.request, db_block_height) + send(self.request, node.hdd_block) # send own block height - if int(received_block_height) > db_block_height: + if int(received_block_height) > node.hdd_block: node.logger.app_log.warning("Inbound: Client has higher block") - db_handler_instance.execute(db_handler_instance.c, - 'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1') - db_block_hash = db_handler_instance.c.fetchone()[0] # get latest block_hash - - node.logger.app_log.info(f"Inbound: block_hash to send: {db_block_hash}") - send(self.request, db_block_hash) + node.logger.app_log.info(f"Inbound: block_hash to send: {node.hdd_hash}") + send(self.request, node.hdd_hash) # receive their latest sha_hash # confirm you know that sha_hash or continue receiving - elif int(received_block_height) <= db_block_height: - if int(received_block_height) == db_block_height: + elif int(received_block_height) <= node.hdd_block: + if int(received_block_height) == node.hdd_block: node.logger.app_log.info( f"Inbound: We have the same height as {peer_ip} ({received_block_height}), hash will be verified") else: node.logger.app_log.warning( - f"Inbound: We have higher ({db_block_height}) 
block height than {peer_ip} ({received_block_height}), hash will be verified") + f"Inbound: We have higher ({node.hdd_block}) block height than {peer_ip} ({received_block_height}), hash will be verified") data = receive(self.request) # receive client's last block_hash # send all our followup hashes @@ -760,26 +780,26 @@ def handle(self): node.logger.app_log.info(f"Inbound: Will seek the following block: {data}") try: - db_handler_instance.execute_param(db_handler_instance.h, - "SELECT block_height FROM transactions WHERE block_hash = ?;", (data,)) - client_block = db_handler_instance.h.fetchone()[0] + + client_block = db_handler_instance.block_height_from_hash(data) except Exception: node.logger.app_log.warning(f"Inbound: Block {data[:8]} of {peer_ip} not found") if node.full_ledger: - send(self.request, "blocknf") #announce block hash was not found + send(self.request, "blocknf") # announce block hash was not found else: - send(self.request, "blocknfhb") #announce we are on hyperblocks + send(self.request, "blocknfhb") # announce we are on hyperblocks send(self.request, data) + if node.peers.warning(self.request, peer_ip, "Forked", 2): + node.logger.app_log.info(f"{peer_ip} banned") + break + else: node.logger.app_log.info(f"Inbound: Client is at block {client_block}") # now check if we have any newer - db_handler_instance.execute(db_handler_instance.h, - 'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1') - db_block_hash = db_handler_instance.h.fetchone()[0] # get latest block_hash - if db_block_hash == data or not node.egress: + if node.hdd_hash == data or not node.egress: if not node.egress: - node.logger.app_log.warning(f"Outbound: Egress disabled for {peer_ip}") + node.logger.app_log.warning(f"Inbound: Egress disabled for {peer_ip}") else: node.logger.app_log.info(f"Inbound: Client {peer_ip} has the latest block") @@ -788,22 +808,9 @@ def handle(self): else: - blocks_fetched = [] - del blocks_fetched[:] - while sys.getsizeof( - str(blocks_fetched)) < 500000: # limited size based on txs in blocks - db_handler_instance.execute_param(db_handler_instance.h, ( - "SELECT timestamp,address,recipient,amount,signature,public_key,operation,openfield FROM transactions WHERE block_height > ? 
AND block_height <= ?;"), - (str(int(client_block)), str(int(client_block + 1)),)) - result = db_handler_instance.h.fetchall() - if not result: - break - blocks_fetched.extend([result]) - client_block = int(client_block) + 1 + blocks_fetched = db_handler_instance.blocksync(client_block) - # blocks_send = [[l[1:] for l in group] for _, group in groupby(blocks_fetched, key=itemgetter(0))] # remove block number - - # node.logger.app_log.info("Inbound: Selected " + str(blocks_fetched) + " to send") + node.logger.app_log.info(f"Inbound: Selected {blocks_fetched}") send(self.request, "blocksfnd") @@ -817,8 +824,6 @@ def handle(self): node.logger.app_log.info( "Inbound: Client rejected to sync from us because we're don't have the latest block") - - except Exception as e: node.logger.app_log.info(f"Inbound: Sync failed {e}") @@ -833,7 +838,7 @@ def handle(self): if node.peers.warning(self.request, peer_ip, "Rollback", 2): node.logger.app_log.info(f"{peer_ip} banned") break - node.logger.app_log.info("Outbound: Deletion complete, sending sync request") + node.logger.app_log.info("Inbound: Deletion complete, sending sync request") while node.db_lock.locked(): time.sleep(node.pause) @@ -847,7 +852,7 @@ def handle(self): if node.peers.warning(self.request, peer_ip, "Rollback", 2): node.logger.app_log.info(f"{peer_ip} banned") break - node.logger.app_log.info("Outbound: Deletion complete, sending sync request") + node.logger.app_log.info("Inbound: Deletion complete, sending sync request") while node.db_lock.locked(): time.sleep(node.pause) @@ -857,52 +862,62 @@ def handle(self): # if (peer_ip in allowed or "any" in allowed): # from miner if node.peers.is_allowed(peer_ip, data): # from miner # TODO: rights management could be done one level higher instead of repeating the same check everywhere - - node.logger.app_log.info(f"Outbound: Received a block from miner {peer_ip}") + node.logger.app_log.info(f"Inbound: Received a block from miner {peer_ip}") # receive block segments = receive(self.request) # node.logger.app_log.info("Inbound: Combined mined segments: " + segments) - - # check if we have the latest block - - db_block_height = db_handler_instance.block_height_max() - - # check if we have the latest block - - mined = {"timestamp": time.time(), "last": db_block_height, "ip": peer_ip, "miner": "", + mined = {"timestamp": time.time(), "last": node.last_block, "ip": peer_ip, "miner": "", "result": False, "reason": ''} try: mined['miner'] = segments[0][-1][2] except: - pass + # Block is sent by miners/pools, we can drop the connection + # If there is a reason not to, use "continue" here and below instead of returns. 
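The inline fetch loop deleted above moved into DbHandler.blocksync(); a reconstruction from the removed lines, assuming the handler keeps the same one-block-at-a-time walk and roughly 500 kB response cap (h is a ledger cursor, as in the old code):

    import sys

    def blocksync(h, client_block: int) -> list:
        """Collect successive blocks after client_block until ~500 kB is reached."""
        blocks_fetched = []
        while sys.getsizeof(str(blocks_fetched)) < 500000:  # cap the reply size
            h.execute("SELECT timestamp,address,recipient,amount,signature,"
                      "public_key,operation,openfield FROM transactions "
                      "WHERE block_height > ? AND block_height <= ?;",
                      (str(client_block), str(client_block + 1)))
            result = h.fetchall()
            if not result:  # no newer block on disk, stop here
                break
            blocks_fetched.append(result)
            client_block += 1
        return blocks_fetched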
+ return # missing info, bye if node.is_mainnet: if len(node.peers.connection_pool) < 5 and not node.peers.is_whitelisted(peer_ip): - reason = "Outbound: Mined block ignored, insufficient connections to the network" + reason = "Inbound: Mined block ignored, insufficient connections to the network" mined['reason'] = reason node.plugin_manager.execute_action_hook('mined', mined) node.logger.app_log.info(reason) + return elif node.db_lock.locked(): - reason = "Outbound: Block from miner skipped because we are digesting already" + reason = "Inbound: Block from miner skipped because we are digesting already" mined['reason'] = reason node.plugin_manager.execute_action_hook('mined', mined) node.logger.app_log.warning(reason) - elif db_block_height >= node.peers.consensus_max - 3: + return + elif node.last_block >= node.peers.consensus_max - 3: mined['result'] = True node.plugin_manager.execute_action_hook('mined', mined) - node.logger.app_log.info("Outbound: Processing block from miner") - digest_block(node, segments, self.request, peer_ip, db_handler_instance) + node.logger.app_log.info("Inbound: Processing block from miner") + try: + digest_block(node, segments, self.request, peer_ip, db_handler_instance) + except ValueError as e: + node.logger.app_log.warning("Inbound: block {}".format(str(e))) + return + except Exception as e: + node.logger.app_log.error("Inbound: Processing block from miner {}".format(e)) + return # This new block may change the int(diff). Trigger the hook whether it changed or not. #node.difficulty = difficulty(node, db_handler_instance) - else: - reason = f"Outbound: Mined block was orphaned because node was not synced, we are at block {db_block_height}, should be at least {node.peers.consensus_max - 3}" + reason = f"Inbound: Mined block was orphaned because node was not synced, " \ + f"we are at block {node.last_block}, " \ + f"should be at least {node.peers.consensus_max - 3}" mined['reason'] = reason node.plugin_manager.execute_action_hook('mined', mined) node.logger.app_log.warning(reason) - else: - digest_block(node, segments, self.request, peer_ip, db_handler_instance) - + # Not mainnet + try: + digest_block(node, segments, self.request, peer_ip, db_handler_instance) + except ValueError as e: + node.logger.app_log.warning("Inbound: block {}".format(str(e))) + return + except Exception as e: + node.logger.app_log.error("Inbound: Processing block from miner {}".format(e)) + return else: receive(self.request) # receive block, but do nothing about it node.logger.app_log.info(f"{peer_ip} not whitelisted for block command") @@ -910,8 +925,9 @@ def handle(self): elif data == "blocklast": # if (peer_ip in allowed or "any" in allowed): # only sends the miner part of the block! 
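On mainnet, the block command now returns early on each failed precondition instead of falling through an if/elif chain. A condensed restatement of that gating (the helper name and tuple return are mine; the thresholds are from the code above):

    def mined_block_gate(last_block, consensus_max, pool_size, whitelisted, digesting):
        """(accept, reason) for a block submitted by a miner or pool."""
        if pool_size < 5 and not whitelisted:
            return False, "insufficient connections to the network"
        if digesting:
            return False, "we are digesting already"
        if last_block < consensus_max - 3:
            return False, "node was not synced"
        return True, ""  # digest_block() runs, wrapped in its new try/except

    print(mined_block_gate(last_block=700000, consensus_max=700002,
                           pool_size=8, whitelisted=False, digesting=False))
    # (True, '')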
if node.peers.is_allowed(peer_ip, data): - db_handler_instance.execute(db_handler_instance.c, - "SELECT * FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;") + db_handler_instance.execute(db_handler_instance.c, "SELECT * FROM transactions " + "WHERE reward != 0 " + "ORDER BY block_height DESC LIMIT 1;") block_last = db_handler_instance.c.fetchall()[0] send(self.request, block_last) @@ -990,7 +1006,6 @@ def handle(self): if node.peers.is_allowed(peer_ip, data): mempool_insert = receive(self.request) node.logger.app_log.warning("mpinsert command") - mpinsert_result = mp.MEMPOOL.merge(mempool_insert, peer_ip, db_handler_instance.c, True, True) node.logger.app_log.warning(f"mpinsert result: {mpinsert_result}") send(self.request, mpinsert_result) @@ -1071,7 +1086,7 @@ def handle(self): response_list.append(response) - # node.logger.app_log.info("Outbound: Extracted from the mempool: " + str(mempool_txs)) # improve: sync based on signatures only + # node.logger.app_log.info("Inbound: Extracted from the mempool: " + str(mempool_txs)) # improve: sync based on signatures only # if len(mempool_txs) > 0: #wont sync mempool until we send something, which is bad # send own @@ -1080,7 +1095,7 @@ def handle(self): elif data == "mpget" and node.peers.is_allowed(peer_ip, data): mempool_txs = mp.MEMPOOL.fetchall(mp.SQL_SELECT_TX_TO_SEND) - # node.logger.app_log.info("Outbound: Extracted from the mempool: " + str(mempool_txs)) # improve: sync based on signatures only + # node.logger.app_log.info("Inbound: Extracted from the mempool: " + str(mempool_txs)) # improve: sync based on signatures only # if len(mempool_txs) > 0: #wont sync mempool until we send something, which is bad # send own @@ -1253,15 +1268,13 @@ def handle(self): response_list.append(response) send(self.request, response_list) - send(self.request, result) else: node.logger.app_log.info(f"{peer_ip} not whitelisted for addlistlimmir command") - elif data == "aliasget": # all for a single address, no protection against overlapping if node.peers.is_allowed(peer_ip, data): - aliases.aliases_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, trace_db_calls = node.trace_db_calls) + aliases.aliases_update(node, db_handler_instance) alias_address = receive(self.request) result = db_handler_instance.aliasget(alias_address) @@ -1272,7 +1285,7 @@ def handle(self): elif data == "aliasesget": # only gets the first one, for multiple addresses if node.peers.is_allowed(peer_ip, data): - aliases.aliases_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, trace_db_calls = node.trace_db_calls) + aliases.aliases_update(node, db_handler_instance) aliases_request = receive(self.request) results = db_handler_instance.aliasesget(aliases_request) send(self.request, results) @@ -1280,18 +1293,12 @@ def handle(self): node.logger.app_log.info(f"{peer_ip} not whitelisted for aliasesget command") # Not mandatory, but may help to reindex with minimal sql queries - elif data == "tokensupdate": - if node.peers.is_allowed(peer_ip, data): - tokens.tokens_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, - node.plugin_manager, trace_db_calls = node.trace_db_calls) - # + elif data == "tokensget": + # TODO: to be handled by token modules, with no sql here in node. if node.peers.is_allowed(peer_ip, data): - tokens.tokens_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, - node. 
- _manager, trace_db_calls = node.trace_db_calls) - tokens_address = receive(self.request) + tokens_address = receive(self.request) tokens_user = db_handler_instance.tokens_user(tokens_address) tokens_list = [] @@ -1320,7 +1327,8 @@ def handle(self): elif data == "addfromalias": if node.peers.is_allowed(peer_ip, data): - aliases.aliases_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, trace_db_calls = node.trace_db_calls) + aliases.aliases_update(node, db_handler_instance) + alias_address = receive(self.request) address_fetch = db_handler_instance.addfromalias(alias_address) node.logger.app_log.warning(f"Fetched the following alias address: {address_fetch}") @@ -1332,8 +1340,9 @@ def handle(self): elif data == "pubkeyget": if node.peers.is_allowed(peer_ip, data): pub_key_address = receive(self.request) - target_public_key_hashed = db_handler_instance.pubkeyget(pub_key_address) - send(self.request, target_public_key_hashed) + target_public_key_b64encoded = db_handler_instance.pubkeyget(pub_key_address) + # returns as stored in the DB, that is b64 encoded, except for RSA where it's b64 encoded twice. + send(self.request, target_public_key_b64encoded) else: node.logger.app_log.info(f"{peer_ip} not whitelisted for pubkeyget command") @@ -1357,7 +1366,14 @@ def handle(self): node.logger.app_log.info(f"{peer_ip} not whitelisted for aliascheck command") elif data == "txsend": + """ + This is most unsafe and should never be used. + - node gets the privkey + - dup code for assembling and signing the TX + TODO: DEPRECATED + """ if node.peers.is_allowed(peer_ip, data): + node.logger.app_log.warning("txsend is unsafe and deprecated, please don't use.") tx_remote = receive(self.request) # receive data necessary for remote tx construction @@ -1373,7 +1389,7 @@ def handle(self): tx_remote_key = RSA.importKey(remote_tx_privkey) remote_tx_pubkey = tx_remote_key.publickey().exportKey().decode("utf-8") - remote_tx_pubkey_hashed = base64.b64encode(remote_tx_pubkey.encode('utf-8')).decode("utf-8") + remote_tx_pubkey_b64encoded = base64.b64encode(remote_tx_pubkey.encode('utf-8')).decode("utf-8") remote_tx_address = hashlib.sha224(remote_tx_pubkey.encode("utf-8")).hexdigest() # derive remaining data @@ -1392,7 +1408,7 @@ def handle(self): # insert to mempool, where everything will be verified mempool_data = ((str(remote_tx_timestamp), str(remote_tx_address), str(remote_tx_recipient), '%.8f' % quantize_eight(remote_tx_amount), str(remote_signature_enc), - str(remote_tx_pubkey_hashed), str(remote_tx_operation), + str(remote_tx_pubkey_b64encoded), str(remote_tx_operation), str(remote_tx_openfield))) node.logger.app_log.info(mp.MEMPOOL.merge(mempool_data, peer_ip, db_handler_instance.c, True, True)) @@ -1446,49 +1462,45 @@ def handle(self): elif data == "statusget": if node.peers.is_allowed(peer_ip, data): - nodes_count = node.peers.consensus_size nodes_list = node.peers.peer_opinion_dict threads_count = threading.active_count() uptime = int(time.time() - node.startup_time) diff = node.difficulty server_timestamp = '%.2f' % time.time() - if node.reveal_address: revealed_address = node.keys.address - else: revealed_address = "private" - send(self.request, ( revealed_address, nodes_count, nodes_list, threads_count, uptime, node.peers.consensus, node.peers.consensus_percentage, VERSION, diff, server_timestamp)) - else: node.logger.app_log.info(f"{peer_ip} not whitelisted for statusget command") elif data == "statusjson": + # not only sends as an explicit dict, but also embeds extra info if 
node.peers.is_allowed(peer_ip, data): uptime = int(time.time() - node.startup_time) tempdiff = node.difficulty - if node.reveal_address: revealed_address = node.keys.address else: revealed_address = "private" - status = {"protocolversion": node.version, "address": revealed_address, "walletversion": VERSION, - "testnet": node.is_testnet, # config data - "blocks": node.last_block, "timeoffset": 0, + "testnet": node.is_testnet, + "blocks": node.hdd_block, "timeoffset": 0, "connections": node.peers.consensus_size, "connections_list": node.peers.peer_opinion_dict, - "difficulty": tempdiff[0], # live status, bitcoind format + "difficulty": tempdiff[0], "threads": threading.active_count(), "uptime": uptime, "consensus": node.peers.consensus, "consensus_percent": node.peers.consensus_percentage, - "server_timestamp": '%.2f' % time.time()} # extra data + "python_version": str(version_info[:3]), + "last_block_ago": node.last_block_ago, + "server_timestamp": '%.2f' % time.time()} if node.is_regnet: status['regnet'] = True send(self.request, status) @@ -1499,7 +1511,10 @@ def handle(self): try: node.apihandler.dispatch(data, self.request, db_handler_instance, node.peers) except Exception as e: - print(e) + if node.debug: + raise + else: + node.logger.app_log.warning(e) elif data == "diffget": if node.peers.is_allowed(peer_ip, data): @@ -1508,6 +1523,12 @@ def handle(self): else: node.logger.app_log.info(f"{peer_ip} not whitelisted for diffget command") + elif data == "portget": + if node.peers.is_allowed(peer_ip, data): + send(self.request, {"port": node.port}) + else: + node.logger.app_log.info(f"{peer_ip} not whitelisted for portget command") + elif data == "diffgetjson": if node.peers.is_allowed(peer_ip, data): diff = node.difficulty @@ -1547,10 +1568,26 @@ def handle(self): if node.peers.is_allowed(peer_ip, data): node.logger.app_log.warning(f"Received stop from {peer_ip}") node.IS_STOPPING = True + else: + node.logger.app_log.info(f"{peer_ip} not whitelisted for stop command") + elif data == "block_height_from_hash": + if node.peers.is_allowed(peer_ip, data): + hash = receive(self.request) + response = db_handler_instance.block_height_from_hash(hash) + send(self.request, response) + else: + node.logger.app_log.info(f"{peer_ip} not whitelisted for block_height_from_hash command") - elif data == "hyperlane": - pass + elif data == "addpeers": + if node.peers.is_allowed(peer_ip, data): + data = receive(self.request) + # peersync expects a dict encoded as json string, not a straight dict + res = node.peers.peersync(data) + send(self.request, {"added": res}) + node.logger.app_log.warning(f"{res} peers added") + else: + node.logger.app_log.warning(f"{peer_ip} not whitelisted for addpeers") else: if data == '*': @@ -1586,18 +1623,16 @@ def handle(self): return if not node.peers.version_allowed(peer_ip, node.version_allow): - node.logger.app_log.warning(f"Inbound: Closing connection to old {peer_ip} node: {node.peers.ip_to_mainnet[peer_ip]}") + node.logger.app_log.warning(f"Inbound: Closing connection to old {peer_ip} node: {node.peers.ip_to_mainnet['peer_ip']}") return -# client thread -# if you "return" from the function, the exception code will node be executed and client thread will hang - class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): pass def just_int_from(s): + # TODO: move to essentials.py return int(''.join(i for i in s if i.isdigit())) @@ -1605,6 +1640,7 @@ def setup_net_type(): """ Adjust globals depending on mainnet, testnet or regnet """ + # TODO: only deals 
with 'node' structure, candidate for single user mode. # Defaults value, dup'd here for clarity sake. node.is_mainnet = True node.is_testnet = False @@ -1629,29 +1665,30 @@ def setup_net_type(): node.index_db = "static/index.db" if node.is_mainnet: - # Allow only 20 and 21 - if node.version != 'mainnet0020': - node.version = 'mainnet0020' # Force in code. - if "mainnet0020" not in node.version_allow: - node.version_allow = ['mainnet0020', 'mainnet0021'] + # Allow only 21 and up + if node.version != 'mainnet0021': + node.version = 'mainnet0021' # Force in code. + if "mainnet0021" not in node.version_allow: + node.version_allow = ['mainnet0021', 'mainnet0022'] # Do not allow bad configs. if not 'mainnet' in node.version: node.logger.app_log.error("Bad mainnet version, check config.txt") sys.exit() num_ver = just_int_from(node.version) - if num_ver < 20: + if num_ver <21: node.logger.app_log.error("Too low mainnet version, check config.txt") sys.exit() for allowed in node.version_allow: num_ver = just_int_from(allowed) if num_ver < 20: - node.logger.app_log.error("Too low allowed version, min 0020: check config.txt") + node.logger.app_log.error("Too low allowed version, check config.txt") sys.exit() if "testnet" in node.version or node.is_testnet: node.port = 2829 - node.hyper_path = "static/test.db" - node.ledger_path = "static/test.db" # for tokens + node.hyper_path = "static/hyper_test.db" + node.ledger_path = "static/ledger_test.db" + node.ledger_ram_file = "file:ledger_testnet?mode=memory&cache=shared" #node.hyper_recompress = False node.peerfile = "peers_test.txt" @@ -1661,14 +1698,15 @@ def setup_net_type(): sys.exit() redownload_test = input("Status: Welcome to the testnet. Redownload test ledger? y/n") - if redownload_test == "y" or not os.path.exists("static/test.db"): - types = ['static/test.db-wal', 'static/test.db-shm', 'static/index_test.db'] + if redownload_test == "y": + types = ['static/ledger_test.db-wal', 'static/ledger_test.db-shm', 'static/index_test.db', 'static/hyper_test.db-wal', 'static/hyper_test.db-shm'] for type in types: for file in glob.glob(type): os.remove(file) print(file, "deleted") - download_file("https://bismuth.cz/test.db", "static/test.db") - download_file("https://bismuth.cz/index_test.db", "static/index_test.db") + download_file("https://bismuth.cz/test.tar.gz", "static/test.tar.gz") + with tarfile.open("static/test.tar.gz") as tar: + tar.extractall("static/") # NOT COMPATIBLE WITH CUSTOM PATH CONFS else: print("Not redownloading test db") @@ -1692,13 +1730,28 @@ def setup_net_type(): sys.exit() """ + def node_block_init(database): + # TODO: candidate for single user mode node.hdd_block = database.block_height_max() - node.last_block = node.hdd_block # ram equals drive at this point - checkpoint_set(node, node.hdd_block) node.difficulty = difficulty(node, db_handler_initial) # check diff for miner + node.last_block = node.hdd_block # ram equals drive at this point + + node.last_block_hash = database.last_block_hash() + node.hdd_hash = node.last_block_hash # ram equals drive at this point + + node.last_block_timestamp = database.last_block_timestamp() + + checkpoint_set(node) + + node.logger.app_log.warning("Status: Indexing aliases") + + aliases.aliases_update(node, database) + + def ram_init(database): + # TODO: candidate for single user mode try: if node.ram: node.logger.app_log.warning("Status: Moving database to RAM") @@ -1738,10 +1791,12 @@ def ram_init(database): node.logger.app_log.warning(e) raise + def initial_db_check(): """ Initial bootstrap 
check and chain validity control """ + # TODO: candidate for single user mode # force bootstrap via adding an empty "fresh_sync" file in the dir. if os.path.exists("fresh_sync") and node.is_mainnet: node.logger.app_log.warning("Status: Fresh sync required, bootstrapping from the website") @@ -1765,26 +1820,18 @@ def initial_db_check(): print("Database needs upgrading, bootstrapping...") bootstrap() - node.logger.app_log.warning(f"Status: Indexing tokens from ledger {node.ledger_path}") - tokens.tokens_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, node.plugin_manager, trace_db_calls = node.trace_db_calls) - node.logger.app_log.warning("Status: Indexing aliases") - aliases.aliases_update(node.index_db, node.ledger_path, "normal", node.logger.app_log, trace_db_calls = node.trace_db_calls) - - - - def load_keys(): """Initial loading of crypto keys""" - + # TODO: candidate for single user mode essentials.keys_check(node.logger.app_log, "wallet.der") - node.keys.key, node.keys.public_key_readable, node.keys.private_key_readable, _, _, node.keys.public_key_hashed, node.keys.address, node.keys.keyfile = essentials.keys_load( + node.keys.key, node.keys.public_key_readable, node.keys.private_key_readable, _, _, node.keys.public_key_b64encoded, node.keys.address, node.keys.keyfile = essentials.keys_load( "privkey.der", "pubkey.der") if node.is_regnet: regnet.PRIVATE_KEY_READABLE = node.keys.private_key_readable - regnet.PUBLIC_KEY_HASHED = node.keys.public_key_hashed + regnet.PUBLIC_KEY_B64ENCODED = node.keys.public_key_b64encoded regnet.ADDRESS = node.keys.address regnet.KEY = node.keys.key @@ -1792,6 +1839,7 @@ def load_keys(): def verify(db_handler): + # TODO: candidate for single user mode try: node.logger.app_log.warning("Blockchain verification started...") # verify blockchain @@ -1899,6 +1947,7 @@ def verify(db_handler): db_transaction = str((db_timestamp, db_address, db_recipient, db_amount, db_operation, db_openfield)).encode("utf-8") try: + # Signer factory is aware of the different tx schemes, and will b64 decode public_key once or twice as needed. SignerFactory.verify_bis_signature(db_signature_enc, db_public_key_b64encoded, db_transaction, db_address) except Exception as e: sha_hash = SHA.new(db_transaction) @@ -1917,6 +1966,28 @@ def verify(db_handler): node.logger.app_log.warning("Error: {}".format(e)) raise + +def add_indices(db_handler: dbhandler.DbHandler): + CREATE_TXID4_INDEX_IF_NOT_EXISTS = "CREATE INDEX IF NOT EXISTS TXID4_Index ON transactions(substr(signature,1,4))" + CREATE_MISC_BLOCK_HEIGHT_INDEX_IF_NOT_EXISTS = "CREATE INDEX IF NOT EXISTS 'Misc Block Height Index' on misc(block_height)" + + node.logger.app_log.warning("Creating indices") + + # ledger.db + db_handler.execute(db_handler.h, CREATE_TXID4_INDEX_IF_NOT_EXISTS) + db_handler.execute(db_handler.h, CREATE_MISC_BLOCK_HEIGHT_INDEX_IF_NOT_EXISTS) + + # hyper.db + db_handler.execute(db_handler.h2, CREATE_TXID4_INDEX_IF_NOT_EXISTS) + db_handler.execute(db_handler.h2, CREATE_MISC_BLOCK_HEIGHT_INDEX_IF_NOT_EXISTS) + + # RAM or hyper.db + db_handler.execute(db_handler.c, CREATE_TXID4_INDEX_IF_NOT_EXISTS) + db_handler.execute(db_handler.c, CREATE_MISC_BLOCK_HEIGHT_INDEX_IF_NOT_EXISTS) + + node.logger.app_log.warning("Finished creating indices") + + if __name__ == "__main__": # classes node = node.Node() @@ -1934,6 +2005,10 @@ def verify(db_handler): # classes node.app_version = VERSION + # TODO: we could just loop over config items, and assign them to node. 
+ # or just do node.config = config + # and use node.config.port... aso + node.version = config.version node.debug_level = config.debug_level node.port = config.port @@ -1956,6 +2031,7 @@ def verify(db_handler): node.accept_peers = config.accept_peers node.full_ledger = config.full_ledger node.trace_db_calls = config.trace_db_calls + node.heavy3_path = config.heavy3_path node.logger.app_log = log.log("node.log", node.debug_level, node.terminal_output) node.logger.app_log.warning("Configuration settings loaded") @@ -1973,8 +2049,10 @@ def verify(db_handler): if not node.full_ledger: node.logger.app_log.warning("Cloning hyperblocks to ledger file") shutil.copy(node.hyper_path, node.ledger_path) # hacked to remove all the endless checks - - mining_heavy3.mining_open() + # needed for docker logs + node.logger.app_log.warning(f"Checking Heavy3 file, can take up to 5 minutes...") + mining_heavy3.mining_open(node.heavy3_path) + node.logger.app_log.warning(f"Heavy3 file Ok!") try: # create a plugin manager, load all plugin modules and init node.plugin_manager = plugins.PluginManager(app_log=node.logger.app_log, init=True) @@ -1990,7 +2068,7 @@ def verify(db_handler): node.startup_time = time.time() try: - node.peers = peershandler.Peers(node.logger.app_log, config, node) + node.peers = peershandler.Peers(node.logger.app_log, config=config, node=node) # print(peers.peer_list_old_format()) # sys.exit() @@ -2010,18 +2088,28 @@ def verify(db_handler): db_handler_initial = dbhandler.DbHandler(node.index_db, node.ledger_path, node.hyper_path, node.ram, node.ledger_ram_file, node.logger, trace_db_calls=node.trace_db_calls) ledger_check_heights(node, db_handler_initial) + + + if node.recompress: + #todo: do not close database and move files, swap tables instead + db_handler_initial.close() + recompress_ledger(node) + db_handler_initial = dbhandler.DbHandler(node.index_db, node.ledger_path, node.hyper_path, node.ram, node.ledger_ram_file, node.logger, trace_db_calls=node.trace_db_calls) + ram_init(db_handler_initial) node_block_init(db_handler_initial) initial_db_check() - if not node.is_regnet: sequencing_check(db_handler_initial) if node.verify: verify(db_handler_initial) - #db_handler_initial.close() + add_indices(db_handler_initial) + + # TODO: until here, we are in single user mode. + # All the above goes into a "bootup" function, with methods from single_user module only. 
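The TODO above suggests collapsing the long node.x = config.x block into a loop (or a plain node.config = config). One possible shape of that loop, honouring the renamed properties that options.Get.vars can declare; _Cfg and _Node are throwaway stand-ins for the real classes:

    class _Cfg:  # stand-in for options.Get after load_file()
        vars = {"port": ["str"], "verify": ["bool"], "ram": ["bool", "ram"]}
        port, verify, ram = "5658", True, True

    class _Node:  # stand-in for node.Node
        pass

    config, node = _Cfg(), _Node()
    for param, spec in config.vars.items():
        attr = spec[1] if len(spec) > 1 else param  # honour renamed properties
        if hasattr(config, attr):
            setattr(node, attr, getattr(config, attr))
    print(node.port, node.verify, node.ram)  # 5658 True True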
if not node.tor: # Port 0 means to select an arbitrary unused port @@ -2047,15 +2135,11 @@ def verify(db_handler): else: node.logger.app_log.warning("Status: Not starting a local server to conceal identity on Tor network") - # hyperlane_manager = hyperlane.HyperlaneManager(node.logger.app_log) - # hyperlane_manager.start() - # start connection manager connection_manager = connectionmanager.ConnectionManager(node, mp) connection_manager.start() # start connection manager - except Exception as e: node.logger.app_log.info(e) raise @@ -2066,7 +2150,6 @@ def verify(db_handler): node.logger.app_log.warning("Status: Bismuth loop running.") - while True: if node.IS_STOPPING: if node.db_lock.locked(): @@ -2076,3 +2159,4 @@ def verify(db_handler): node.logger.app_log.warning("Status: Securely disconnected main processes, subprocess termination in progress.") break time.sleep(0.1) + node.logger.app_log.warning("Status: Clean Stop") diff --git a/options.py b/options.py index 745257c..360a87b 100644 --- a/options.py +++ b/options.py @@ -1,8 +1,8 @@ import os.path as path from sys import exit +import json class Get: - # "param_name":["type"] or "param_name"=["type","property_name"] vars={ "port":["str"], @@ -26,7 +26,7 @@ class Get: "allowed":["str","allowed"], "ram":["bool","ram"], "node_ip":["str","node_ip"], - "light_ip":["list"], + "light_ip":["dict"], "reveal_address":["bool"], "accept_peers":["bool"], "banlist":["list"], @@ -38,6 +38,8 @@ class Get: "mempool_ram": ["bool"], "egress": ["bool"], "trace_db_calls": ["bool"], + "heavy3_path": ["str"], + "mempool_path": ["str"], } # Optional default values so we don't bug if they are not in the config. @@ -47,37 +49,45 @@ class Get: "regnet": False, "trace_db_calls": False, "mempool_ram": True, + "heavy3_path": "./heavy3a.bin", + "mempool_path": "./mempool.db", } def load_file(self, filename): #print("Loading",filename) - for line in open(filename): - if '=' in line: - left,right = map(str.strip,line.rstrip("\n").split("=")) - if "mempool_ram_conf" == left: - print("Inconsistent config, param is now mempool_ram in config.txt") - exit() - if not left in self.vars: - # Warn for unknown param? - continue - params = self.vars[left] - if params[0] == "int": - right = int(right) - elif params[0] == "list": - right = [item.strip() for item in right.split(",")] - elif params[0] == "bool": - if right.lower() in ["false", "0", "", "no"]: - right = False - else: - right = True + with open(filename) as fp: + for line in fp: + if '=' in line: + left,right = map(str.strip,line.rstrip("\n").split("=")) + if "mempool_ram_conf" == left: + print("Inconsistent config, param is now mempool_ram in config.txt") + exit() + if not left in self.vars: + # Warn for unknown param? + continue + params = self.vars[left] + if params[0] == "int": + right = int(right) + elif params[0] == "dict": + try: + right = json.loads(right) + except: #compatibility + right = [item.strip() for item in right.split(",")] + elif params[0] == "list": + right = [item.strip() for item in right.split(",")] + elif params[0] == "bool": + if right.lower() in ["false", "0", "", "no"]: + right = False + else: + right = True - else: - # treat as "str" - pass - if len(params) > 1: - # deal with properties that do not match the config name. - left = params[1] - setattr(self, left, right) + else: + # treat as "str" + pass + if len(params) > 1: + # deal with properties that do not match the config name. 
+ left = params[1] + setattr(self, left, right) # Default genesis to keep compatibility self.genesis = "4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed" for key, default in self.defaults.items(): diff --git a/peers.txt b/peers.txt index 6f3e1a9..e5d3c0d 100644 --- a/peers.txt +++ b/peers.txt @@ -1 +1 @@ -{"127.0.0.1": "5658", "34.192.6.105": "5658", "66.70.181.150": "5658", "31.31.75.71": "5658", "62.112.10.156": "5658", "178.62.68.118": "5658", "51.15.228.170": "5658", "80.240.18.114": "5658", "91.121.77.179": "5658", "51.15.47.212": "5658", "46.171.63.219": "5658", "109.92.6.40": "5658", "51.15.211.92": "5658", "163.172.166.207": "5658", "212.24.111.139": "5658", "104.248.73.153": "5658", "194.19.235.82": "5658", "108.61.90.91": "5658", "149.28.120.120": "5658", "149.28.53.219": "5658", "51.68.190.246": "5658", "51.15.90.15": "5658", "159.69.12.98": "5658", "159.69.147.99": "5658", "51.15.211.156": "5658", "162.213.123.200": "5658", "209.246.143.198": "5658", "51.15.234.210": "5658", "163.172.161.7": "5658", "188.165.199.153": "5658", "217.163.23.242": "5658", "159.69.147.101": "5658", "185.125.46.56": "5658", "18.184.255.105": "5658", "46.101.186.35": "5658", "192.99.34.19": "5658", "45.76.15.224": "5658", "217.23.4.201": "5658", "204.12.231.58": "5658", "51.15.225.223": "5658"} \ No newline at end of file +{"127.0.0.1": "5658", "34.192.6.105": "5658", "51.15.46.90": "5658", "198.245.62.30": "5658", "51.15.90.15": "5658", "51.15.213.94": "5658", "163.172.222.163": "5658", "91.121.87.99": "5658", "91.121.77.179": "5658", "51.15.47.212": "5658", "188.165.199.153": "5658", "51.15.122.148": "5658", "51.15.211.156": "5658", "51.15.118.29": "5658", "51.15.254.16": "5658", "46.105.43.213": "5658", "194.19.235.82": "5658", "159.89.123.247": "5658", "217.23.4.201": "5658", "209.250.238.142": "5658", "185.125.46.56": "5658", "46.171.63.219": "5658", "139.59.25.152": "5658", "45.32.115.135": "5658", "104.238.173.26": "5658", "178.62.68.118": "5658", "109.236.83.141": "5658", "180.68.191.77": "5658", "107.191.39.23": "5658", "108.61.90.91": "5658", "51.68.190.246": "5658", "149.28.53.219": "5658", "149.28.46.106": "5658", "140.82.11.77": "5658", "139.180.199.99": "5658", "139.59.91.47": "5658", "62.112.10.156": "5658", "46.170.129.81": "5658", "109.236.82.102": "5658", "149.28.120.120": "5658", "208.167.245.204": "5658", "109.92.6.40": "5658", "162.213.123.200": "5658", "45.76.15.224": "5658", "45.77.6.146": "5658", "176.31.245.46": "5658", "188.166.118.218": "5658", "46.101.186.35": "5658", "217.163.23.242": "5658", "209.246.143.198": "5658", "104.248.73.153": "5658", "149.28.162.236": "5658", "159.89.10.229": "5658", "109.190.174.238": "5658", "178.128.222.221": "5658", "80.240.18.114": "5658", "192.99.34.19": "5658", "bismuth.live": "5658", "18.184.255.105": "5658", "217.23.14.6": "5658", "82.16.135.238": "5658"} \ No newline at end of file diff --git a/peershandler.py b/peershandler.py index 9a8e2f0..a2c1a0d 100644 --- a/peershandler.py +++ b/peershandler.py @@ -3,25 +3,23 @@ @EggPoolNet """ -import connections import json import os -import re +import shutil +# import re import sys import threading -import time -import shutil +from time import time +import random import socks +import connections import regnet +from essentials import most_common_dict, percentage_in -from essentials import most_common, most_common_dict, percentage_in +__version__ = "0.0.18" -__version__ = "0.0.12" - - -# TODO : some config options are and others without => clean up later on class Peers: """The peers 
manager. A thread safe peers manager""" @@ -29,15 +27,14 @@ class Peers: __slots__ = ('app_log','config','logstats','node','peersync_lock','startup_time','reset_time','warning_list','stats', 'connection_pool','peer_opinion_dict','consensus_percentage','consensus', 'tried','peer_dict','peerfile','suggested_peerfile','banlist','whitelist','ban_threshold', - 'ip_to_mainnet', 'peers', 'first_run', 'accept_peers', 'peerlist_updated') + 'ip_to_mainnet', 'peers', 'accept_peers', 'peerlist_updated') def __init__(self, app_log, config=None, logstats=True, node=None): self.app_log = app_log self.config = config self.logstats = logstats - self.peersync_lock = threading.Lock() - self.startup_time = time.time() + self.startup_time = time() self.reset_time = self.startup_time self.warning_list = [] self.stats = [] @@ -56,7 +53,6 @@ def __init__(self, app_log, config=None, logstats=True, node=None): self.peerfile = "peers.txt" self.suggested_peerfile = "suggested_peers.txt" - self.first_run = True self.peerlist_updated = False self.node = node @@ -87,6 +83,11 @@ def is_regnet(self): return True return "regnet" in self.config.version + def dict_shuffle(self, dictinary): + l = list(dictinary.items()) + random.shuffle(l) + return dict(l) + def status_dict(self): """Returns a status as a dict""" status = {"version": self.config.VERSION, "stats": self.stats} @@ -109,65 +110,70 @@ def version_allowed(self, ip, version_allow): return True return self.ip_to_mainnet[ip] in version_allow - def peer_dump(self, file, peer): - """saves single peer to drive""" - with open(file, "r") as peer_file: - peers_pairs = json.load(peer_file) - peers_pairs[peer] = self.config.port #make this dynamic once - with open(file, "w") as peer_file: - json.dump(peers_pairs, peer_file) - - def peers_dump(self, file, peerdict): + def peers_test(self, file, peerdict: dict, strict=True): """Validates then adds a peer to the peer list on disk""" # called by Sync, should not be an issue, but check if needs to be thread safe or not. + # also called by self.client_loop, which is to be reworked + # Egg: Needs to be thread safe. self.peerlist_updated = False + try: + with open(file, "r") as peer_file: + peers_pairs = json.load(peer_file) + # TODO: rework, because this takes too much time and freezes the status thread. + # to be done in a dedicated thread, with one peer per xx seconds, not all at once, and added properties. 
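
# The TODO above sketches the intended fix: test peers from a dedicated thread,
# one peer every few seconds, instead of all at once. A minimal illustration of
# that idea, assuming a hypothetical `test_one(ip, port)` callable standing in
# for the socks connect + getversion exchange below; none of these names exist
# in the actual code.
import queue
import threading
from time import sleep

def peer_test_worker(peer_queue: queue.Queue, test_one, interval: float = 5.0):
    """Drain (ip, port) pairs from a queue, testing one peer per `interval` seconds."""
    while True:
        ip, port = peer_queue.get()  # blocks until the sync code enqueues a peer
        try:
            test_one(ip, port)
        except Exception:
            pass                     # one unreachable peer must not kill the worker
        finally:
            peer_queue.task_done()
        sleep(interval)              # spread the load so the status thread never freezes

# Usage sketch:
# pending = queue.Queue()
# threading.Thread(target=peer_test_worker, args=(pending, my_test), daemon=True).start()
# ...and peers_test would enqueue candidates instead of connecting inline.
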
+ for ip, port in dict(peerdict).items(): + # I do create a new dict copy above, because logs showed that the dict can change while iterating + if self.node.IS_STOPPING: + # Early exit if stopping + return + try: + if ip not in peers_pairs: + self.app_log.info(f"Testing connectivity to: {ip}:{port}") + s = socks.socksocket() + try: + # connect timeout + s.settimeout(5) + if self.config.tor: + s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) + if strict: + s.connect((ip, int(port))) + connections.send(s, "getversion") + versiongot = connections.receive(s, timeout=1) + if versiongot == "*": + raise ValueError("peer busy") + if versiongot not in self.config.version_allow: + raise ValueError(f"cannot save {ip}, incompatible protocol version {versiongot} " + f"not in {self.config.version_allow}") + self.app_log.info(f"Inbound: Distant peer {ip}:{port} responding: {versiongot}") + else: + s.connect((ip, int(port))) + finally: + # properly end the connection in all cases + try: + s.close() + except: + pass + peers_pairs[ip] = port + self.app_log.info(f"Inbound: Peer {ip}:{port} saved to peers") + self.peerlist_updated = True + else: + self.app_log.info("Distant peer already in peers") - with open(file, "r") as peer_file: - peers_pairs = json.load(peer_file) - - for ip, port in peerdict.items(): - - try: - if ip not in peers_pairs: - self.app_log.info(f"Testing connectivity to: {ip}:{port}") - s = socks.socksocket() - if self.config.tor: - s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) - - s.connect((ip, port)) - connections.send(s, "getversion") - versiongot = connections.receive(s, timeout=1) - if versiongot == "*": - raise ValueError("Peer busy") - self.app_log.info(f"Inbound: Distant peer {ip}:{port} responding: {versiongot}") - - try: - connections.send(s,"getversion") - versiongot = connections.receive(s, timeout=1) - self.app_log.info(f"Inbound: Distant peer {ip}:{port} responding: {versiongot}") - except Exception as e: - self.app_log.info(f"Inbound: Distant peer {ip}:{port} not responding: {e}") - - s.close() - # properly end the connection - - #peers_pairs[ip] = port - - self.app_log.info(f"Inbound: Peer {ip}:{port} saved to peer list") - self.peerlist_updated = True - - else: - self.app_log.info("Distant peer already in peer list") + except Exception as e: + # exception for a single peer + self.app_log.info(f"Inbound: Distant peer not connectible ({e})") - except: - self.app_log.info("Inbound: Distant peer not connectible") - pass + if self.peerlist_updated: + self.app_log.warning(f"{file} peerlist updated ({len(peers_pairs)}) total") # the whole dict is saved + with open(f"{file}.tmp", "w") as peer_file: + json.dump(peers_pairs, peer_file) + shutil.move(f"{file}.tmp",file) + else: + self.app_log.warning(f"{file} peerlist update skipped, no changes") - if self.peerlist_updated: - self.app_log.warning("Peerlist updated") - with open(f"{file}.tmp", "w") as peer_file: - json.dump(peers_pairs, peer_file) - shutil.move(f"{file}.tmp",file) + except Exception as e: + # Exception for the file itself. + self.app_log.info(f"Error reading {file}: '{e}'") def append_client(self, client): """ @@ -198,11 +204,11 @@ def warning(self, sdef, ip, reason, count): """Adds a weighted warning to a peer.""" # TODO: Not thread safe atm. 
Should use a thread aware list or some lock if ip not in self.whitelist: - # TODO: use a dict instead of several occurences in a list + # TODO: use a dict instead of several occurrences in a list for x in range(count): self.warning_list.append(ip) - self.app_log.warning(f"Added {count} warning(s) to {ip}: {reason} ({self.warning_list.count(ip)} / {self.ban_threshold})") - + self.app_log.warning(f"Added {count} warning(s) to {ip}: {reason} " + f"({self.warning_list.count(ip)} / {self.ban_threshold})") if self.warning_list.count(ip) >= self.ban_threshold: self.banlist.append(ip) self.app_log.warning(f"{ip} is banned: {reason}") @@ -210,17 +216,22 @@ def warning(self, sdef, ip, reason, count): else: return False - def peers_get(self, peerfile=''): - """Returns a peerfile from disk as a dict {ip:port}""" + def peers_get(self, peer_file=''): + """Returns a peer_file from disk as a dict {ip:port}""" peer_dict = {} - if not peerfile: - peerfile = self.peerfile - if not os.path.exists(peerfile): - with open(peerfile, "a"): - self.app_log.warning("Peer file created") - else: - with open(peerfile, "r") as peer_file: - peer_dict = json.load(peer_file) + try: + if not peer_file: + peer_file = self.peerfile + if not os.path.exists(peer_file): + with open(peer_file, "w") as fp: + # was "a": append would risk adding stuff to a file create in the mean time. + self.app_log.warning("Peer file created") + fp.write("{}") # empty dict. An empty string is not json valid. + else: + with open(peer_file, "r") as fp: + peer_dict = json.load(fp) + except Exception as e: + self.app_log.warning(f"Error peers_get {e} reading {peer_file}") return peer_dict def peer_list_disk_format(self): @@ -258,8 +269,11 @@ def is_allowed(self, peer_ip, command=''): # Always allow whitelisted ip to post as block if 'block' == command and self.is_whitelisted(peer_ip): return True - # only allow local host for "stop" command - if 'stop' == command: + # always allowed commands, only required and non cpu intensive. + if command in ('portget',): + return True + # only allow local host for "stop" and addpeers command + if command in ('stop', 'addpeers'): return peer_ip == '127.0.0.1' return peer_ip in self.config.allowed or "any" in self.config.allowed @@ -267,124 +281,67 @@ def is_whitelisted(self, peer_ip, command=''): # TODO: could be handled later on via "allowed" and rights. return peer_ip in self.whitelist or "127.0.0.1" == peer_ip - def is_banned(self, peer_ip): + def is_banned(self, peer_ip) -> bool: return peer_ip in self.banlist - def peers_test(self, peerfile): - """Tests all peers from a list.""" - # TODO: lengthy, no need to test everyone at once? 
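
# The new save path above writes the whole peer dict to "<file>.tmp" and then
# moves it over the real file. A small sketch of that pattern, with a
# hypothetical helper name: the final move is atomic when source and target
# share a filesystem, so a crash mid-write can never leave a half-written
# peers.txt behind.
import json
import shutil

def atomic_json_dump(obj, path: str) -> None:
    """Write JSON to `path` without ever exposing a partially written file."""
    tmp = f"{path}.tmp"
    with open(tmp, "w") as fp:
        json.dump(obj, fp)   # a crash here only corrupts the temporary copy
    shutil.move(tmp, path)   # atomic replace on a same-filesystem move

# atomic_json_dump({"127.0.0.1": "5658"}, "peers.txt")
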
- if not self.peersync_lock.locked() and self.config.accept_peers: - self.peersync_lock.acquire() - try: - peer_dict = self.peers_get(peerfile) - peers_remove = {} + def dict_validate(self, json_dict: str) -> str: + """temporary fix for broken peerlists""" + if json_dict.count("}") > 1: + result = json_dict.split("}")[0] + "}" + else: + result = json_dict + return result - for key, value in peer_dict.items(): - ip, port = key, int(value) - try: - s = socks.socksocket() - if self.config.tor: - s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) - - s.connect((ip, port)) - connections.send(s, "getversion") - versiongot = connections.receive(s, timeout=1) - if versiongot == "*": - raise ValueError ("Peer busy") - self.app_log.info(f"Inbound: Distant peer {ip}:{port} responding: {versiongot}") - s.close() - - self.app_log.warning(f"Connection to {ip}:{port} successful, keeping the peer") - except Exception as e: - if self.config.purge and not self.is_testnet: - # remove from peerfile if not connectible - self.app_log.info(f"Inbound: Distant peer {ip}:{port} not responding: {e}") - - peers_remove[key] = value - pass - - for key in peers_remove: - del peer_dict[key] - self.app_log.info(f"Removed formerly active peer {key}") - - with open(peerfile, "w") as output: - json.dump(peer_dict, output) - finally: - self.peersync_lock.release() - - def peersync(self, subdata): - """Got a peers list from a peer, process. From worker().""" + def peersync(self, subdata: str) -> int: + """Got a peers list from a peer, process. From worker(). + returns the number of added peers, -1 if it was locked or not accepting new peers + subdata is a dict, { 'ip': 'port'}""" # early exit to reduce future levels if not self.config.accept_peers: - return + return -1 if self.peersync_lock.locked(): + # TODO: means we will lose those peers forever. + # TODO: buffer, and keep track of recently tested peers. 
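
# One possible shape for the two TODOs above: buffer peers that arrive while
# peersync is busy, and remember when each was last tested so nothing is lost
# forever or re-tested immediately. Sketch only; `PeerBuffer` and its methods
# are hypothetical names, not part of peershandler.py.
from time import time

class PeerBuffer:
    """Holds peers that could not be processed yet, with a retest cooldown."""

    def __init__(self, cooldown: float = 600.0):
        self.pending = {}      # ip -> port, waiting for the next unlocked pass
        self.last_tested = {}  # ip -> timestamp of the last connectivity test
        self.cooldown = cooldown

    def offer(self, ip: str, port: str) -> None:
        self.pending[ip] = port

    def due(self):
        """Yield and remove buffered peers whose cooldown has expired."""
        for ip in list(self.pending):
            if time() - self.last_tested.get(ip, 0) >= self.cooldown:
                self.last_tested[ip] = time()
                yield ip, self.pending.pop(ip)

# Instead of returning -1 and dropping the list when the lock is busy, the
# worker would call buffer.offer(ip, port); a later pass consumes buffer.due().
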
self.app_log.info("Outbound: Peer sync occupied") - return - self.peersync_lock.acquire() - try: - if "(" in str(subdata): # OLD WAY - server_peer_tuples = re.findall("'([\d.]+)', '([\d]+)'", subdata) - - self.app_log.info(f"Received following {len(server_peer_tuples)} peers: {server_peer_tuples}") - with open(self.peerfile, "r") as peer_file: - peers = json.load(peer_file) - - for pair in set(server_peer_tuples): # set removes duplicates - if pair not in peers and self.accept_peers: - self.app_log.info(f"Outbound: {pair} is a new peer, saving if connectible") - try: - s_purge = socks.socksocket() - if self.config.tor: - s_purge.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) - - s_purge.connect((pair[0], int(pair[1]))) # save a new peer file with only active nodes - s_purge.close() - # suggested - - with open(self.suggested_peerfile) as peers_existing: - peers_suggested = json.load(peers_existing) - if pair not in peers_suggested: - peers_suggested[pair[0]] = pair[1] - - with open(self.suggested_peerfile, "w") as peer_list_file: - json.dump(peers_suggested, peer_list_file) - # suggested - peers[pair[0]] = pair[1] - with open(self.peerfile, "w") as peer_file: - json.dump(peers, peer_file) - - except: - pass - self.app_log.info("Not connectible") - else: - self.app_log.info(f"Outbound: {pair} is not a new peer") + return -1 + with self.peersync_lock: + try: + total_added = 0 - else: # json format - self.app_log.info(f"Received following {len(json.loads(subdata))} peers: {subdata}") - for ip,port in json.loads(subdata).items(): + subdata = self.dict_validate(subdata) + data_dict = json.loads(subdata) - if ip not in self.peer_dict.keys(): + self.app_log.info(f"Received {len(data_dict)} peers.") + # Simplified the log, every peers then has a ok or ko status anyway. + for ip, port in data_dict.items(): + if ip not in self.peer_dict: self.app_log.info(f"Outbound: {ip}:{port} is a new peer, saving if connectible") try: s_purge = socks.socksocket() + s_purge.settimeout(5) if self.config.tor: s_purge.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050) s_purge.connect((ip, int(port))) # save a new peer file with only active nodes s_purge.close() - - if ip not in self.peer_dict.keys(): + # This only adds to our local dict, does not force save. + if ip not in self.peer_dict: + total_added += 1 self.peer_dict[ip] = port - + self.app_log.info(f"Inbound: Peer {ip}:{port} saved to local peers") except: - pass self.app_log.info("Not connectible") else: self.app_log.info(f"Outbound: {ip}:{port} is not a new peer") - finally: - self.peersync_lock.release() + except Exception as e: + self.app_log.warning(e) + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + print(exc_type, fname, exc_tb.tb_lineno) + raise + return total_added def consensus_add(self, peer_ip, consensus_blockheight, sdef, last_block): # obviously too old blocks, we have half a day worth of validated blocks after them @@ -414,7 +371,6 @@ def consensus_add(self, peer_ip, consensus_blockheight, sdef, last_block): print(exc_type, fname, exc_tb.tb_lineno) raise - def consensus_remove(self, peer_ip): if peer_ip in self.peer_opinion_dict: try: @@ -424,7 +380,6 @@ def consensus_remove(self, peer_ip): raise def can_connect_to(self, host, port): - """ Tells if we can connect to this host :param host: @@ -440,7 +395,7 @@ def can_connect_to(self, host, port): tries, timeout = self.tried[host_port] except: tries, timeout = 0, 0 # unknown host for now, never tried. 
- if timeout > time.time(): + if timeout > time(): return False # We tried before, timeout is not expired. if self.is_whitelisted(host): return True # whitelisted peers are always connectible, without variability condition. @@ -478,7 +433,7 @@ def add_try(self, host, port): tries += 1 if tries > 3: tries = 3 - self.tried[host_port] = (tries, time.time() + delay) + self.tried[host_port] = (tries, time() + delay) # Temp self.app_log.info(f"Set timeout {delay} try {tries} for {host_port}") @@ -507,7 +462,7 @@ def reset_tried(self): Remove the older timeouts from the tried list. Keep the recent ones or we end up trying the first ones again and again """ - limit = time.time() + 12*60 # matches 2.5 tries :) + limit = time() + 12*60 # matches 2.5 tries :) remove = [client for client in self.tried if self.tried[client][1] > limit] for client in remove: del self.tried[client] @@ -515,7 +470,9 @@ def reset_tried(self): def client_loop(self, node, this_target): """Manager loop called every 30 sec. Handles maintenance""" try: - for key, value in self.peer_dict.items(): + for key, value in dict(self.dict_shuffle(self.peer_dict)).items(): + # The dict() above is not an error or a cast, + # it's to make a copy of the dict and avoid "dictionary changed size during iteration" host = key port = int(value) @@ -529,12 +486,12 @@ def client_loop(self, node, this_target): t.daemon = True t.start() - if len(self.peer_dict) < 3 and int(time.time() - self.startup_time) > 60: + if len(self.peer_dict) < 6 and int(time() - self.startup_time) > 30: # join in random peers after x seconds self.app_log.warning("Not enough peers in consensus, joining in peers suggested by other nodes") self.peer_dict.update(self.peers_get(self.suggested_peerfile)) - if len(self.connection_pool) < self.config.nodes_ban_reset and int(time.time() - self.startup_time) > 15: + if len(self.connection_pool) < self.config.nodes_ban_reset and int(time() - self.startup_time) > 15: # do not reset before 30 secs have passed self.app_log.warning(f"Only {len(self.connection_pool)} connections active, resetting banlist") del self.banlist[:] @@ -542,35 +499,38 @@ def client_loop(self, node, this_target): del self.warning_list[:] if len(self.connection_pool) < 10: - self.app_log.warning(f"Only {len(self.connection_pool)} connections active, resetting the connection history") + self.app_log.warning(f"Only {len(self.connection_pool)} connections active, " + f"resetting the connection history") # TODO: only reset large timeouts, or we end up trying the sames over and over if we never get to 10. # self. self.reset_tried() - if self.config.nodes_ban_reset and len(self.connection_pool) <= len(self.banlist) \ - and int(time.time() - self.reset_time) > 60*10: + if self.config.nodes_ban_reset <= len(self.banlist) and len(self.connection_pool) <= len(self.banlist) \ + and (time() - self.reset_time) > 60 * 10: # do not reset too often. 
10 minutes here - self.app_log.warning(f"Less active connections ({len(self.connection_pool)}) than banlist ({len(self.banlist)}), resetting banlist and tried list") + self.app_log.warning(f"Less active connections ({len(self.connection_pool)}) " + f"than banlist ({len(self.banlist)}), resetting banlist and tried list") del self.banlist[:] self.banlist.extend(self.config.banlist) # reset to config version del self.warning_list[:] self.reset_tried() - self.reset_time = time.time() - - if self.first_run and int(time.time() - self.startup_time) > 60: - self.app_log.warning("Status: Testing peers") - self.peers_test(self.peerfile) - self.peers_test(self.suggested_peerfile) - self.first_run = False - - if int(time.time() - self.startup_time) > 15: # refreshes peers from drive - self.peer_dict.update(self.peers_get(self.peerfile)) + self.reset_time = time() - self.peers_dump(self.suggested_peerfile,self.peer_dict) + self.app_log.warning("Status: Testing peers") + self.peer_dict.update(self.peers_get(self.peerfile)) + # self.peer_dict.update(self.peers_get(self.suggested_peerfile)) + # TODO: this is not OK. client_loop is called every 30 sec and should NOT contain any lengthy calls. + self.peers_test(self.suggested_peerfile, self.peer_dict, strict=False) + self.peers_test(self.peerfile, self.peer_dict, strict=True) except Exception as e: - self.app_log.warning(f"Status: Manager run skipped due to error: {e}") + self.app_log.warning(f"Status: peers client loop skipped due to error: {e}") + # raise + """We do not want to raise here, since the rest of the calling method would be skipped also. + It's ok to skip this part only + The calling method has other important subsequent calls that have to be done. + """ def status_log(self): """Prints the peers part of the node status""" @@ -588,4 +548,4 @@ def status_log(self): if self.consensus: # once the consensus is filled self.app_log.warning(f"Status: Consensus height: {self.consensus} = {self.consensus_percentage}%") self.app_log.warning(f"Status: Last block opinion: {self.peer_opinion_dict}") - self.app_log.warning(f"Status: Total number of nodes: {len(self.peer_opinion_dict)}") \ No newline at end of file + self.app_log.warning(f"Status: Total number of nodes: {len(self.peer_opinion_dict)}") diff --git a/polysign/__init__.py b/polysign/__init__.py deleted file mode 100644 index d18f409..0000000 --- a/polysign/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '0.0.2' diff --git a/polysign/signer.py b/polysign/signer.py deleted file mode 100644 index dcdedc7..0000000 --- a/polysign/signer.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -Abstract and Factory class to handle various Bismuth signature and addresses schemes -""" - -import json - -from abc import ABC, abstractmethod -from enum import Enum -from typing import Union - - -class SignerType(Enum): - """ - Possible signing schemes - """ - NONE = 0 - RSA = 1 - ECDSA = 2 - ED25519 = 3 - BTC = 1000 # Tests - CRW = 1001 # Tests - - -class SignerSubType(Enum): - """ - Possible addresses subtype - """ - NONE = 0 - MAINNET_REGULAR = 1 - MAINNET_MULTISIG = 2 - TESTNET_REGULAR = 3 - TESTNET_MULTISIG = 4 - - -class Signer(ABC): - - # Slots allow to spare ram when there can be several instances - __slot__ = ('_private_key', '_public_key', '_address', '_type', '_subtype', '_compressed', 'verbose') - - _address_versions = {SignerSubType.MAINNET_REGULAR: b'\x00'} - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed: bool=True, subtype: 
SignerSubType=SignerSubType.MAINNET_REGULAR): - self._private_key = private_key - self._public_key = public_key - self._address = address - self._type = SignerType.NONE - self._subtype = subtype - self.verbose = False - self._compressed = compressed - - @property - def compressed(self): - return self._compressed - - @property - def type(self): - """Name of the signer instance""" - return self._type.name - - @abstractmethod - def from_private_key(self, private_key: Union[bytes, str], subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - pass - - @abstractmethod - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = SignerSubType.MAINNET_REGULAR, verify: bool=True): - pass - - @abstractmethod - def from_seed(self, seed: str='', subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Use seed == '' to generate a random key""" - pass - - @classmethod - @abstractmethod - def public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - pass - - @classmethod - def address_version_for_subtype(cls, subtype: SignerSubType) -> bytes: - # Specific one if exists, else mainnet regular, else \x00 - return cls._address_versions.get(subtype, cls._address_versions.get(subtype.MAINNET_REGULAR, b'\x00')) - - @classmethod - @abstractmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str=''): - """Verify signature from raw signature & pubkey. Address may be used to determine the sig type""" - pass - - @classmethod - @abstractmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str=''): - """Verify signature from bismuth tx network format - pubkey is b64 encoded twice - ecdsa and ed25519 are b64 encoded)""" - pass - - @abstractmethod - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, sends a raw bytes array""" - pass - - @abstractmethod - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - pass - - def to_dict(self): - """Returns core properties as dict, compact bin form""" - info = {'address': self._address, 'private_key': self._private_key, 'public_key': self._public_key, - 'compressed': self._compressed, 'type': self._type.name, 'sub_type': self._subtype.name} - return info - - def to_json(self): - """Returns a json string, with bin items as hex strings""" - info = self.to_dict() - info['private_key'] = info['private_key'].hex() - info['public_key'] = info['public_key'].hex() - return json.dumps(info) diff --git a/polysign/signer_btc.py b/polysign/signer_btc.py deleted file mode 100644 index ea190f6..0000000 --- a/polysign/signer_btc.py +++ /dev/null @@ -1,118 +0,0 @@ -""" - -""" - -import base58 -import hashlib -import random -from os import urandom -from polysign.signer import Signer, SignerType, SignerSubType -from typing import Union -from hashlib import sha256 -from base64 import b64decode, b64encode -from coincurve import PrivateKey - - -class SignerBTC(Signer): - - __slots__ = ('_key', ) - - _address_versions = {SignerSubType.MAINNET_REGULAR: b'\x00', SignerSubType.TESTNET_REGULAR: b'\x6f'} - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed: bool=True, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - 
super().__init__(private_key, public_key, address, compressed, subtype=subtype) - self._key = None - self._type = SignerType.ECDSA - - def from_private_key(self, private_key: Union[bytes, str], subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Accepts both bytes[32] or str (hex format)""" - if type(private_key) == str: - return self.from_seed(private_key) - return self.from_seed(private_key.hex()) - - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = SignerSubType.MAINNET_REGULAR, verify: bool=True): - print('TODO - SignerBTC.from_full_info') - - def from_seed(self, seed: str='', subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Creates key from seed - for ecdsa, seed = pk - 32 bytes random buffer""" - if subtype != SignerSubType.MAINNET_REGULAR: - self._subtype = subtype - if len(seed) > 64: - # Too long seed, trim (could use better scheme for more entropy) - seed = seed[:64] - elif seed == '': - # No seed, use urandom - seed = urandom(32) - elif len(seed) < 64: - # Too short seed, use as PRNG seed - random.seed(seed) - seed = random.getrandbits(32*8).hex() - try: - key = PrivateKey.from_hex(seed) - public_key = key.public_key.format(compressed=True).hex() - # print("Public Key", public_key) - self._key = key - self._private_key = key.to_hex() # == seed - self._public_key = public_key - except Exception as e: - print("Exception {} reading RSA private key".format(e)) - # print("identifier", self.identifier().hex()) - self._address = self.address() - - def identifier(self): - """Returns double hash of pubkey as per btc standards""" - return hashlib.new('ripemd160', sha256(bytes.fromhex(self._public_key)).digest()).digest() - - def address(self): - """Returns properly serialized address from pubkey as per btc standards""" - vh160 = self.address_version_for_subtype(self._subtype) + self.identifier() # raw content - chk = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + chk).decode('utf-8') - - @classmethod - def public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - if type(public_key) == str: - identifier = hashlib.new('ripemd160', sha256(bytes.fromhex(public_key)).digest()).digest() - else: - identifier = hashlib.new('ripemd160', sha256(public_key).digest()).digest() - vh160 = cls.address_version_for_subtype(subtype) + identifier # raw content - checksum = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + checksum).decode('utf-8') - - @classmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str='') -> None: - """Verify signature from raw signature. 
Address may be used to determine the sig type""" - raise ValueError("SignerBTC.verify_signature not impl.") - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str = '') -> None: - """Verify signature from bismuth tx network format (ecdsa sig and pubkey are b64 encoded) - Returns None, but raises ValueError if needed.""" - public_key = b64decode(public_key).decode('utf-8') - # print(public_key) - - """ TODO - public_key_object = RSA.importKey(public_key_pem) - signature_decoded = b64decode(signature) - verifier = PKCS1_v1_5.new(public_key_object) - sha_hash = SHA.new(buffer) - if not verifier.verify(sha_hash, signature_decoded): - raise ValueError(f"Invalid signature from {address}") - """ - # Reconstruct address from pubkey to make sure it matches - if address != cls.public_key_to_address(public_key): - raise ValueError("Attempt to spend from a wrong address") - - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, sends a raw bytes array""" - pass - - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - # sig is b64 encoded - return b64encode(self.sign_buffer_raw(buffer)) diff --git a/polysign/signer_crw.py b/polysign/signer_crw.py deleted file mode 100644 index e9ad8aa..0000000 --- a/polysign/signer_crw.py +++ /dev/null @@ -1,104 +0,0 @@ -""" - -""" - -import base58 -import hashlib -from polysign.signer import Signer, SignerType, SignerSubType -from typing import Union -from hashlib import sha256 -from base64 import b64decode, b64encode -from coincurve import PrivateKey - - -class SignerCRW(Signer): - - __slots__ = ('_key', ) - - _address_versions = {SignerSubType.MAINNET_REGULAR: b'\x01\x75\x07'} - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed=False, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - super().__init__(private_key, public_key, address, compressed=compressed, subtype=subtype) - self._key = None - self._type = SignerType.ECDSA - - def from_private_key(self, private_key: Union[bytes, str], subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - print('TODO') - - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = SignerSubType.MAINNET_REGULAR, verify: bool=True): - print('TODO') - - def from_seed(self, seed: str='', subtype: SignerSubType = SignerSubType.MAINNET_REGULAR): - print('crw from seed {}'.format(seed)) - if subtype != SignerSubType.MAINNET_REGULAR: - self._subtype = subtype - try: - key = PrivateKey.from_hex(seed) - public_key = key.public_key.format(compressed=False).hex() - print("Public Key", public_key) - self._key = key - self._private_key = key.to_hex() # == seed - self._public_key = public_key - except Exception as e: - print("Exception {} reading RSA private key".format(e)) - print("identifier", self.identifier().hex()) - self._address = self.address() - - def identifier(self): - """Returns double hash of pubkey""" - # faafd1966c79c472360ef1cf8860169df6e7554a - return hashlib.new('ripemd160', sha256(bytes.fromhex(self._public_key)).digest()).digest() - - def address(self): - # 1PrWZ4CXSXWbg87XS9ShhwMV6TiSXtycT7 - vh160 = self.address_version_for_subtype(self._subtype) + self.identifier() # raw content - chk = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + chk).decode('utf-8') - - @classmethod - def 
public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - if type(public_key) == str: - identifier = hashlib.new('ripemd160', sha256(bytes.fromhex(public_key)).digest()).digest() - else: - identifier = hashlib.new('ripemd160', sha256(public_key).digest()).digest() - vh160 = cls.address_version_for_subtype(subtype) + identifier # raw content - checksum = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + checksum).decode('utf-8') - - @classmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str='') -> None: - """Verify signature from raw signature. Address may be used to determine the sig type""" - raise ValueError("SignerCRW.verify_signature not impl.") - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str = '') -> None: - """Verify signature from bismuth tx network format (ecdsa sig and pubkey are b64 encoded) - Returns None, but raises ValueError if needed.""" - public_key = b64decode(public_key).decode('utf-8') - # print(public_key) - - """ TODO - public_key_object = RSA.importKey(public_key_pem) - signature_decoded = b64decode(signature) - verifier = PKCS1_v1_5.new(public_key_object) - sha_hash = SHA.new(buffer) - if not verifier.verify(sha_hash, signature_decoded): - raise ValueError(f"Invalid signature from {address}") - """ - # Reconstruct address from pubkey to make sure it matches - if address != cls.public_key_to_address(public_key): - raise ValueError("Attempt to spend from a wrong address") - - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, sends a raw bytes array""" - pass - - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - # sig is b64 encoded - return b64encode(self.sign_buffer_raw(buffer)) diff --git a/polysign/signer_ecdsa.py b/polysign/signer_ecdsa.py deleted file mode 100644 index 43a0ed8..0000000 --- a/polysign/signer_ecdsa.py +++ /dev/null @@ -1,115 +0,0 @@ -""" - -""" - -import hashlib -import random -from base64 import b64decode, b64encode -from hashlib import sha256 -from os import urandom -from typing import Union - -import base58 -from coincurve import PrivateKey, verify_signature -from polysign.signer import Signer, SignerType, SignerSubType - - -class SignerECDSA(Signer): - - __slots__ = ('_key', ) - - _address_versions = {SignerSubType.MAINNET_REGULAR: b'\x4f\x54\x5b', - SignerSubType.MAINNET_MULTISIG: b'\x4f\x54\xc8', - SignerSubType.TESTNET_REGULAR: b'\x01\x7a\xb6\x85', - SignerSubType.TESTNET_MULTISIG: b'\x01\x46\xeb\xa5'} - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed: bool=True, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - super().__init__(private_key, public_key, address, compressed=compressed, subtype=subtype) - self._key = None - self._type = SignerType.ECDSA - - def from_private_key(self, private_key: Union[bytes, str], subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Accepts both bytes[32] or str (hex format)""" - if type(private_key) == str: - return self.from_seed(private_key) - return self.from_seed(private_key.hex()) - - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = 
SignerSubType.MAINNET_REGULAR, verify: bool=True): - raise ValueError("SignerRsa.from_full_info not impl.") - - def from_seed(self, seed: str='', subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Creates key from seed - for ecdsa, seed = pk - 32 bytes random buffer""" - if subtype != SignerSubType.MAINNET_REGULAR: - self._subtype = subtype - if len(seed) > 64: - # Too long seed, trim (could use better scheme for more entropy) - seed = seed[:64] - elif seed == '': - # No seed, use urandom - seed = urandom(32) - elif len(seed) < 64: - # Too short seed, use as PRNG seed - random.seed(seed) - seed = random.getrandbits(32*8).hex() - try: - key = PrivateKey.from_hex(seed) - public_key = key.public_key.format(compressed=True).hex() - # print("Public Key", public_key) - self._key = key - self._private_key = key.to_hex() # == seed - self._public_key = public_key - except Exception as e: - print("Exception {} reading RSA private key".format(e)) - # print("identifier", self.identifier().hex()) - self._address = self.address() - - def identifier(self): - """Returns double hash of pubkey as per btc standards""" - return hashlib.new('ripemd160', sha256(bytes.fromhex(self._public_key)).digest()).digest() - - def address(self): - """Returns properly serialized address from pubkey as per btc standards""" - vh160 = self.address_version_for_subtype(self._subtype) + self.identifier() # raw content - chk = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + chk).decode('utf-8') - - @classmethod - def public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - if type(public_key) == str: - identifier = hashlib.new('ripemd160', sha256(bytes.fromhex(public_key)).digest()).digest() - else: - identifier = hashlib.new('ripemd160', sha256(public_key).digest()).digest() - vh160 = cls.address_version_for_subtype(subtype) + identifier # raw content - checksum = sha256(sha256(vh160).digest()).digest()[:4] - return base58.b58encode(vh160 + checksum).decode('utf-8') - - @classmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str='') -> None: - """Verify signature from raw signature. 
Address may be used to determine the sig type""" - raise ValueError("SignerECDSA.verify_signature not impl.") - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str = '') -> None: - """Verify signature from bismuth tx network format (ecdsa sig and pubkey are b64 encoded) - Returns None, but raises ValueError if needed.""" - public_key = b64decode(public_key) - valid = verify_signature(b64decode(signature), buffer, public_key) - if not valid: - raise ValueError(f"Invalid signature from {address}") - # Reconstruct address from pubkey to make sure it matches - if address != cls.public_key_to_address(public_key): - raise ValueError("Attempt to spend from a wrong address") - - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, sends a raw bytes array""" - # TODO: see "custom_nonce" optional item - return self._key.sign(buffer) - - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - return b64encode(self.sign_buffer_raw(buffer)).decode('utf-8') diff --git a/polysign/signer_ed25519.py b/polysign/signer_ed25519.py deleted file mode 100644 index c17d5ed..0000000 --- a/polysign/signer_ed25519.py +++ /dev/null @@ -1,125 +0,0 @@ -""" - -""" - -import random -from base64 import b64decode, b64encode -from hashlib import sha256 -from os import urandom -from typing import Union - -import base58 -import ed25519 -from polysign.signer import Signer, SignerType, SignerSubType - - -class SignerED25519(Signer): - - __slots__ = ('_key', ) - - _address_versions = {SignerSubType.MAINNET_REGULAR: b'\x03\xb8\x6c\xf3', - SignerSubType.MAINNET_MULTISIG: b'\x03\xb8\x72\x14', - SignerSubType.TESTNET_REGULAR: b'\x11\xc2\xce\x7c', - SignerSubType.TESTNET_MULTISIG: b'\x0f\x54\xfd\x2d'} - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed: bool=False, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - super().__init__(private_key, public_key, address, compressed=compressed, subtype=subtype) - self._key = None - self._type = SignerType.ED25519 - - def from_private_key(self, private_key: Union[bytes, str], subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Accepts both bytes[32] or str (hex format)""" - if type(private_key) == str: - return self.from_seed(private_key) - return self.from_seed(private_key.hex()) - - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = SignerSubType.MAINNET_REGULAR, verify: bool=True): - raise ValueError("SignerED25519.from_full_info not impl.") - - def from_seed(self, seed: str='', subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - """Creates key from seed - for ED25519, seed = pk - 32 bytes random buffer""" - if subtype != SignerSubType.MAINNET_REGULAR: - self._subtype = subtype - if len(seed) > 64: - # Too long seed, trim (could use better scheme for more entropy) - seed = seed[:64] - elif seed == '': - # No seed, use urandom - seed = urandom(32) - elif len(seed) < 64: - # Too short seed, use as PRNG seed - random.seed(seed) - seed = random.getrandbits(32*8).hex() - try: - print("SEED", seed) - # TODO: check flow, there may be many unnecessary hex-byte-hex-bytes conversions from top to bottom - key = ed25519.SigningKey(bytes.fromhex(seed)) - hexa = key.to_ascii(encoding="hex").decode('utf-8') - # print("ED25519 Privk Key", hexa) # 
e5b42f3c-3fe02e16-1d42ff47-07a174a5 715b2badc7d4d3aebbea9081bd9123d5 - verifying_key = key.get_verifying_key() - public_key = verifying_key.to_ascii(encoding="hex").decode('utf-8') - # public_key = hexa[32:] - # print("ED25519 Public Key", public_key) - self._key = key - self._private_key = hexa - self._public_key = public_key - except Exception as e: - print("Exception {} reading ED25519 private key".format(e)) - # print("identifier", self.identifier().hex()) - self._address = self.address() - - """ - def identifier(self): - #Returns double hash of pubkey as per btc standards - return hashlib.new('ripemd160', sha256(bytes.fromhex(self._public_key)).digest()).digest() - """ - - def address(self) -> str: - """Returns properly serialized address from pubkey""" - # No double hash for pubkey, nor for checksum - base = self.address_version_for_subtype(self._subtype) + bytes.fromhex(self._public_key) # raw content - chk = sha256(base).digest()[:4] - return base58.b58encode(base + chk).decode('utf-8') - - @classmethod - def public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - # TODO: same for this family, could factorize in an ancestor with other methods - if type(public_key) == str: - public_key = bytes.fromhex(public_key) - base = cls.address_version_for_subtype(subtype) + public_key # raw content - checksum = sha256(base).digest()[:4] - return base58.b58encode(base + checksum).decode('utf-8') - - @classmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str='') -> None: - """Verify signature from raw signature. Address may be used to determine the sig subtype""" - try: - # print("verif", signature, public_key, len(public_key)) - verifying_key = ed25519.VerifyingKey(public_key) - verifying_key.verify(signature, buffer) - except Exception as e: - print(e) - raise ValueError(f"Invalid ED25519 signature from {address}") - # Reconstruct address from pubkey to make sure it matches - address_rebuild = cls.public_key_to_address(public_key) - if address != address_rebuild: - raise ValueError(f"Attempt to spend from a wrong address {address} instead of {address_rebuild}") - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str = '') -> None: - """Verify signature from bismuth tx network format (ecdsa sig and pubkey are b64 encoded) - Returns None, but raises ValueError if needed.""" - cls.verify_signature(b64decode(signature), b64decode(public_key), buffer, address) - - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, sends a raw bytes array""" - return self._key.sign(buffer) - - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - return b64encode(self.sign_buffer_raw(buffer)).decode('utf-8') diff --git a/polysign/signer_rsa.py b/polysign/signer_rsa.py deleted file mode 100644 index 3070317..0000000 --- a/polysign/signer_rsa.py +++ /dev/null @@ -1,121 +0,0 @@ - -import json -import re -from base64 import b64encode, b64decode -from hashlib import sha224 -from typing import Union - -from Cryptodome.Hash import SHA -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 - -from polysign.signer import Signer, SignerType, SignerSubType - -# Compile once and for all -PEM_BEGIN = re.compile(r"\s*-----BEGIN (.*)-----\s+") -PEM_END = 
re.compile(r"-----END (.*)-----\s*$") - - -class SignerRSA(Signer): - - __slots__ = ('_key', ) - - def __init__(self, private_key: Union[bytes, str]=b'', public_key: Union[bytes, str]=b'', address: str='', - compressed: bool = True, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR): - super().__init__(private_key, public_key, address, compressed=compressed, subtype=subtype) - # RSA does not have compressed format - self._type = SignerType.RSA - # For the Key object - self._key = None - - @classmethod - def validate_pem(cls, pem_data: str) -> None: - """ Validate PEM data - returns None, raise on error. - """ - # verify pem as cryptodome does - match = PEM_BEGIN.match(pem_data) - if not match: - raise ValueError("Not a valid PEM pre boundary") - marker = match.group(1) - match = PEM_END.search(pem_data) - if not match or match.group(1) != marker: - raise ValueError("Not a valid PEM post boundary") - # verify pem as cryptodome does - - @classmethod - def public_key_to_address(cls, public_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> str: - """Reconstruct an address from the public key""" - if type(public_key) != str: - # But union annotation kept for common interface sake. - raise ValueError("RSA pubkey are str, pem format") - return sha224(public_key.encode('utf-8')).hexdigest() - - def to_json(self) -> str: - """for RSA, keys are stored as PEM format, not binary""" - info = self.to_dict() - return json.dumps(info) - - def from_private_key(self, private_key: Union[bytes, str], - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> None: - if type(private_key) is not str: - raise RuntimeError('RSA private key have to be strings') - try: - key = RSA.importKey(private_key) - public_key_readable = key.publickey().exportKey().decode("utf-8") - if len(public_key_readable) not in (271, 799): - raise ValueError("Invalid public key length: {}".format(len(public_key_readable))) - address = sha224(public_key_readable.encode('utf-8')).hexdigest() - # If we had no error, we can store - self._key = key - self._private_key = private_key - self._public_key = public_key_readable - self._address = address - except Exception as e: - print("Exception {} reading RSA private key".format(e)) - - def from_seed(self, seed: str='', subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> None: - """ - if subtype != SignerSubType.MAINNET_REGULAR: - self._subtype = subtype - """ - raise ValueError("SignerRsa.from_seed not impl. - seed {}".format(seed)) - - def from_full_info(self, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - subtype: SignerSubType = SignerSubType.MAINNET_REGULAR, verify: bool=True): - raise ValueError("SignerRsa.from_full_info not impl.") - - @classmethod - def verify_signature(cls, signature: Union[bytes, str], public_key: Union[bytes, str], buffer: bytes, - address: str='') -> None: - """Verify signature from raw signature. 
Address may be used to determine the sig type""" - raise ValueError("SignerRsa.verify_signature not impl.") - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str = '') -> None: - """Verify signature from bismuth tx network format (rsa sig is b64 encoded twice) - Returns None, but raises ValueError if needed.""" - public_key_pem = b64decode(public_key).decode('utf-8') - # Will raise if does not match - cls.validate_pem(public_key_pem) - public_key_object = RSA.importKey(public_key_pem) - signature_decoded = b64decode(signature) - verifier = PKCS1_v1_5.new(public_key_object) - sha_hash = SHA.new(buffer) - if not verifier.verify(sha_hash, signature_decoded): - raise ValueError(f"Invalid signature from {address}") - # Reconstruct address from pubkey to make sure it matches - if address != cls.public_key_to_address(public_key_pem): - raise ValueError("Attempt to spend from a wrong address") - - def sign_buffer_raw(self, buffer: bytes) -> bytes: - """Sign a buffer, returns a raw bytes array""" - h = SHA.new(buffer) - signer = PKCS1_v1_5.new(self._key) - return signer.sign(h) - - def sign_buffer_for_bis(self, buffer: bytes) -> str: - """Sign a buffer, sends under the format expected by bismuth network format""" - # For RSA, sig is b64 encoded - return b64encode(self.sign_buffer_raw(buffer)) diff --git a/polysign/signerfactory.py b/polysign/signerfactory.py deleted file mode 100644 index 02c6dd0..0000000 --- a/polysign/signerfactory.py +++ /dev/null @@ -1,77 +0,0 @@ -import re -from os import urandom -from typing import Type, Union - -from polysign.signer import Signer, SignerType, SignerSubType -from polysign.signer_btc import SignerBTC -from polysign.signer_crw import SignerCRW -from polysign.signer_rsa import SignerRSA -from polysign.signer_ecdsa import SignerECDSA -from polysign.signer_ed25519 import SignerED25519 - -RE_RSA_ADDRESS = re.compile(r"[abcdef0123456789]{56}") -# TODO: improve that ECDSA one -RE_ECDSA_ADDRESS = re.compile(r"^Bis") - - -def signer_for_type(signer_type: SignerType) -> Union[Type[Signer], None]: - """Returns the class matching a signer type.""" - links = {SignerType.RSA: SignerRSA, SignerType.ED25519: SignerED25519, - SignerType.ECDSA: SignerECDSA, SignerType.BTC: SignerBTC, - SignerType.CRW: SignerCRW, - } - return links.get(signer_type, None) - - -class SignerFactory: - """""" - - @classmethod - def from_private_key(cls, private_key: Union[bytes, str], signer_type: SignerType=SignerType.RSA, - subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> Signer: - """Detect the type of the key, creates and return the matching signer""" - # TODO: detect by private_key - signer_class = signer_for_type(signer_type) - if signer_class is None: - raise ValueError("Unsupported Key type") - signer = signer_class() - signer.from_private_key(private_key, subtype) - return signer - - @classmethod - def from_full_info(cls, private_key: Union[bytes, str], public_key: Union[bytes, str]=b'', address: str='', - signer_type: SignerType=SignerType.RSA, subtype: SignerSubType=SignerSubType.MAINNET_REGULAR, - verify: bool=True) -> Signer: - pass - - @classmethod - def address_to_signer(cls, address: str) -> Type[Signer]: - if RE_RSA_ADDRESS.match(address): - return SignerRSA - elif RE_ECDSA_ADDRESS.match(address): - if len(address) > 50: - return SignerED25519 - else: - return SignerECDSA - - raise ValueError("Unsupported Address type") - - @classmethod - def from_seed(cls, seed: str='', signer_type: SignerType=SignerType.RSA, - 
subtype: SignerSubType=SignerSubType.MAINNET_REGULAR) -> Signer: - if seed == '': - seed = urandom(32).hex() - signer_class = signer_for_type(signer_type) - if signer_class is None: - raise ValueError("Unsupported Key type") - signer = signer_class() - signer.from_seed(seed, subtype) - return signer - - @classmethod - def verify_bis_signature(cls, signature: str, public_key: str, buffer: bytes, address: str) -> None: - """Verify signature from bismuth tx network format""" - # Find the right signer class - verifier = cls.address_to_signer(address) - # let it do the job - verifier.verify_bis_signature(signature, public_key, buffer, address) diff --git a/polysign/todo.md b/polysign/todo.md deleted file mode 100644 index d44261e..0000000 --- a/polysign/todo.md +++ /dev/null @@ -1,33 +0,0 @@ - -regtest add -2c57b1d58f79a4cf821a5ce2b77d5ddf45961ebde33ea48ff7a40439 - - -# Get some cash -python3 commands.py generate 2 -# check our balance (address likely not that one) -python3 commands.py balanceget 2c57b1d58f79a4cf821a5ce2b77d5ddf45961ebde33ea48ff7a40439 -# Send one to the test vector ecdsa address -python3 send_nogui.py 1 Bis1SAk19HCWpDAThwFiaP9xA6zWjzsga7Hog -# Mine one block so the tx goes in -python3 commands.py generate 1 -# Check our ecdsa address -python3 commands.py balanceget Bis1SAk19HCWpDAThwFiaP9xA6zWjzsga7Hog -# send back some BIS to the RSA address -python3 ecdsa_send_regtest.py - -# send to ed25519 address -python3 send_nogui.py 1 Bis13AbAZwMeY1C5GuFuVuVKLSjr3RdKG63g4CEx6epwSbhpuDU3rj -# Mine one block so the tx goes in -python3 commands.py generate 1 -# Check our ecdsa address -python3 commands.py balanceget Bis13AbAZwMeY1C5GuFuVuVKLSjr3RdKG63g4CEx6epwSbhpuDU3rj -# send back some BIS to the RSA address -python3 ed25519_send_regtest.py -# Mine one block so the tx goes in -python3 commands.py generate 1 -# Check our ecdsa address -python3 commands.py balanceget Bis13AbAZwMeY1C5GuFuVuVKLSjr3RdKG63g4CEx6epwSbhpuDU3rj - - -- Enforce RSA for coinbase diff --git a/quantizer.py b/quantizer.py index 93513be..45db584 100644 --- a/quantizer.py +++ b/quantizer.py @@ -1,16 +1,30 @@ -from decimal import * +from decimal import Decimal + +DECIMAL_ZERO_2DP = Decimal('0.00') +DECIMAL_ZERO_8DP = Decimal('0.00000000') +DECIMAL_ZERO_10DP = Decimal('0.0000000000') + def quantize_two(value): + if not value: + return DECIMAL_ZERO_2DP value = Decimal(value) - value = value.quantize(Decimal('0.00')) + value = value.quantize(DECIMAL_ZERO_2DP) return value + def quantize_eight(value): + if not value: + # Will match 0 as well as False and None + return DECIMAL_ZERO_8DP value = Decimal(value) - value = value.quantize(Decimal('0.00000000')) + value = value.quantize(DECIMAL_ZERO_8DP) return value + def quantize_ten(value): + if not value: + return DECIMAL_ZERO_10DP value = Decimal(value) - value = value.quantize(Decimal('0.0000000000')) - return value \ No newline at end of file + value = value.quantize(DECIMAL_ZERO_10DP) + return value diff --git a/recovery.py b/recovery.py deleted file mode 100644 index 3771d5d..0000000 --- a/recovery.py +++ /dev/null @@ -1,19 +0,0 @@ -from Cryptodome.PublicKey import RSA -import hashlib -import json - -def recover(key): - private_key_readable = key.exportKey().decode("utf-8") - public_key_readable = key.publickey().exportKey().decode("utf-8") - address = hashlib.sha224(public_key_readable.encode("utf-8")).hexdigest() - - wallet_dict = {} - wallet_dict['Private Key'] = private_key_readable - wallet_dict['Public Key'] = public_key_readable - wallet_dict['Address'] = address - - 
with open ("wallet_recovered.der", 'w') as wallet_file: - json.dump (wallet_dict, wallet_file) - - print ("Wallet recovered to: wallet_recovered.der") - return (address, "wallet_recovered.der") \ No newline at end of file diff --git a/regnet.py b/regnet.py index afc9481..090a98d 100644 --- a/regnet.py +++ b/regnet.py @@ -8,6 +8,7 @@ import sqlite3 import sys import time +import functools from Cryptodome.PublicKey import RSA from Cryptodome.Hash import SHA from Cryptodome.Signature import PKCS1_v1_5 @@ -31,7 +32,6 @@ REGNET_SUGGESTED_PEERS = "peers_reg.txt" SQL_INDEX = [ "CREATE TABLE aliases (block_height INTEGER, address, alias)", - "CREATE TABLE staking (block_height INTEGER, timestamp NUMERIC, address, balance)", "CREATE TABLE tokens (block_height INTEGER, timestamp, token, address, recipient, txid, amount INTEGER)" ] SQL_LEDGER = [ "CREATE TABLE misc (block_height INTEGER, difficulty TEXT)", @@ -63,13 +63,17 @@ ADDRESS = 'This is a fake address placeholder for regtest mode only' KEY = None PRIVATE_KEY_READABLE = 'matching priv key' -PUBLIC_KEY_HASHED = 'matching pub key b64' +PUBLIC_KEY_B64ENCODED = 'matching pub key b64' DIGEST_BLOCK = None # because of compatibility - huge node refactor wanted. +def sql_trace_callback(log, id, statement): + line = f"SQL[{id}] {statement}" + log.warning(line) + def generate_one_block(blockhash, mempool_txs, node, db_handler): try: @@ -121,7 +125,7 @@ def generate_one_block(blockhash, mempool_txs, node, db_handler): if signer.verify(hash, signature): node.logger.app_log.warning("Signature valid") block_send.append((str(block_timestamp), str(ADDRESS[:56]), str(ADDRESS[:56]), '%.8f' % float(0), - str(signature_enc.decode("utf-8")), str(PUBLIC_KEY_HASHED.decode("utf-8")), + str(signature_enc.decode("utf-8")), str(PUBLIC_KEY_B64ENCODED.decode("utf-8")), "0", str(nonce))) # mining reward tx node.logger.app_log.warning("Block to send: {}".format(block_send)) # calc hash @@ -138,6 +142,7 @@ def generate_one_block(blockhash, mempool_txs, node, db_handler): fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] node.logger.app_log.warning(exc_type, fname, exc_tb.tb_lineno) + def command(sdef, data, blockhash, node, db_handler): try: node.logger.app_log.warning("Regnet got command {}".format(data)) @@ -168,14 +173,14 @@ def init(app_log, trace_db_calls=False): # create empty index db with sqlite3.connect(REGNET_DB) as source_db: if trace_db_calls: - upgrade.set_trace_callback(functools.partial(sql_trace_callback,app_log,"REGNET-INIT")) + source_db.set_trace_callback(functools.partial(sql_trace_callback,app_log,"REGNET-INIT")) for request in SQL_LEDGER: source_db.execute(request) source_db.commit() # create empty reg db with sqlite3.connect(REGNET_INDEX) as source_db: if trace_db_calls: - upgrade.set_trace_callback(functools.partial(sql_trace_callback,app_log,"REGNET-INIT-INDEX")) + source_db.set_trace_callback(functools.partial(sql_trace_callback,app_log,"REGNET-INIT-INDEX")) for request in SQL_INDEX: source_db.execute(request) source_db.commit() diff --git a/requirements-node.txt b/requirements-node.txt index 804ae4e..d563966 100644 --- a/requirements-node.txt +++ b/requirements-node.txt @@ -8,6 +8,7 @@ PySocks pycryptodomex requests -ed25519 +polysign>=0.1.0 base58 -coincurve +# coincurve +# ed25519 diff --git a/requirements.txt b/requirements.txt index a542de8..6c43e7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -# Pip3 requirements for a node and wallet setup +# Pip3 requirements - synced with requirements-node since wallet is 
now separate. # You need to manually run the following line first # pip3 install simple-crypt --no-deps @@ -7,16 +7,8 @@ PySocks pycryptodomex -pillow -pyqrcode -matplotlib -ed25519 -base58 -coincurve - -# request may be needed by helper scripts requests - -# These are no more used -# pypng -# pyinstaller +polysign>=0.1.0 +base58 +# coincurve +# ed25519 diff --git a/send_csv.py b/send_csv.py index 1766f05..6ba41cb 100644 --- a/send_csv.py +++ b/send_csv.py @@ -1,4 +1,5 @@ import time +import sys """ Call send_nogui.py (edit not to require manual confirmation) @@ -34,8 +35,11 @@ parser = argparse.ArgumentParser(description='Bismuth Batch reward sender') # parser.add_argument("-v", "--verbose", action="count", default=False, help='Be verbose.') parser.add_argument("-y", "--yes", action="count", default=False, help='Do send') +parser.add_argument("-w", "--wallet", help='Path to wallet, use quotation marks') args = parser.parse_args() +print(sys.argv[3]) + total = 0 nb = 0 for line in open('rewards.csv' , 'r'): @@ -45,15 +49,16 @@ try: total += float(data[1]) data[1] = float(data[1]) - 0.01 - command = "{} {} {} {} tx ".format(PYTHON_EXECUTABLE, SEND_PATH, data[1], data[0]) + command = f"{PYTHON_EXECUTABLE} {SEND_PATH} {data[1]} {data[0]} {None} {None} {sys.argv[3]} " #arguments are passed here if args.yes: - print("Running: {}".format(command)) + print(f"Running: {command} tx") os.system(command) else: - print("Check: {}, didn't you forget the magic word?".format(command)) + print(f"Check: {command}, didn't you forget the magic word?") + sys.exit(0) nb += 1 time.sleep(1) except Exception as e: print (e) -print("{} Transactions, {} $BIS total.".format(nb, total)) +print(f"{nb} Transactions, {total} $BIS total.") diff --git a/send_nogui.py b/send_nogui.py deleted file mode 100644 index cc8bc34..0000000 --- a/send_nogui.py +++ /dev/null @@ -1,151 +0,0 @@ -# this file takes optional arguments, arg1 = amount to spend, arg2 = recipient address, arg3 = keep forever (0/1), arg4=OpenField data -# args3+4 are not prompted if ran without args - -from Cryptodome.Signature import PKCS1_v1_5 -from Cryptodome.Hash import SHA -from essentials import fee_calculate, address_validate - -import base64 -import time -import sqlite3 -import essentials -import sys -import options -import re -import socks -import connections - -config = options.Get() -config.read() -ledger_path = config.ledger_path -hyper_path = config.hyper_path - - -key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, address, keyfile = essentials.keys_load("privkey.der", "pubkey.der") - -if encrypted: - key, private_key_readable = essentials.keys_unlock(private_key_readable) - -print('Number of arguments: %d arguments.' 
% len(sys.argv)) -print('Argument List: %s' % ', '.join(sys.argv)) - -# get balance - -if 'regnet' in config.version: - debit_mempool = 0 -else: - # include mempool fees - mempool = sqlite3.connect('mempool.db') - mempool.text_factory = str - m = mempool.cursor() - m.execute("SELECT count(amount), sum(amount) FROM transactions WHERE address = ?;", (address,)) - result = m.fetchall()[0] - if result[1] != None: - debit_mempool = float('%.8f' % (float(result[1]) + float(result[1]) * 0.001 + int(result[0]) * 0.01)) - else: - debit_mempool = 0 - # include mempool fees - - -conn = sqlite3.connect(ledger_path) -conn.text_factory = str -c = conn.cursor() - -s = socks.socksocket() -s.settimeout(10) -# s.connect(("bismuth.live", 5658)) -if 'regnet' in config.version: - s.connect(("127.0.0.1", 3030)) -else: - s.connect(("127.0.0.1", 5658)) - -connections.send (s, "balanceget", 10) -connections.send (s, address, 10) # change address here to view other people's transactions -stats_account = connections.receive (s, 10) -balance = stats_account[0] -#credit = stats_account[1] -#debit = stats_account[2] -#fees = stats_account[3] -#rewards = stats_account[4] - - -print("Transaction address: %s" % address) -print("Transaction address balance: %s" % balance) - -try: - amount_input = sys.argv[1] -except IndexError: - amount_input = input("Amount: ") - -try: - recipient_input = sys.argv[2] -except IndexError: - recipient_input = input("Recipient: ") - -if not address_validate(recipient_input): - print("Wrong recipient address format") - exit(1) - -try: - operation_input = sys.argv[3] -except IndexError: - operation_input = 0 - -try: - openfield_input = sys.argv[4] -except IndexError: - openfield_input = input("Enter openfield data (message): ") - - -# hardfork fee display -fee = fee_calculate(openfield_input) -print("Fee: %s" % fee) - -confirm = input("Confirm (y/n): ") - -if confirm != 'y': - print("Transaction cancelled, user confirmation failed") - exit(1) - -# hardfork fee display -try: - float(amount_input) - is_float = 1 -except ValueError: - is_float = 0 - exit(1) - - -timestamp = '%.2f' % time.time() -transaction = (str(timestamp), str(address), str(recipient_input), '%.8f' % float(amount_input), str(operation_input), str(openfield_input)) # this is signed -print(transaction) - -h = SHA.new(str(transaction).encode("utf-8")) -signer = PKCS1_v1_5.new(key) -signature = signer.sign(h) -signature_enc = base64.b64encode(signature) -txid = signature_enc[:56] - -print("Encoded Signature: %s" % signature_enc.decode("utf-8")) -print("Transaction ID: %s" % txid.decode("utf-8")) - -verifier = PKCS1_v1_5.new(key) - -if verifier.verify(h, signature): - if float(amount_input) < 0: - print("Signature OK, but cannot use negative amounts") - - elif float(amount_input) + float(fee) > float(balance): - print("Mempool: Sending more than owned") - - else: - tx_submit = (str (timestamp), str (address), str (recipient_input), '%.8f' % float (amount_input), str (signature_enc.decode ("utf-8")), str (public_key_hashed.decode("utf-8")), str (operation_input), str (openfield_input)) - connections.send (s, "mpinsert", 10) - connections.send (s, tx_submit, 10) - reply = connections.receive (s, 10) - print ("Client: {}".format (reply)) -else: - print("Invalid signature") - # enter transaction end - -s.close() diff --git a/send_nogui_noconf.py b/send_nogui_noconf.py index 20c64f1..2419d9a 100644 --- a/send_nogui_noconf.py +++ b/send_nogui_noconf.py @@ -1,145 +1,130 @@ -# this file takes optional arguments, arg1 = amount to spend, arg2 
= recipient address, arg3 = keep forever (0/1), arg4=OpenField data
-# args3+4 are not prompted if ran without args
+"""
+Send a transaction from console, with no password or confirmation asked.
+To be used for unattended, automated processes.
-from Cryptodome.PublicKey import RSA
-from Cryptodome.Signature import PKCS1_v1_5
-from Cryptodome.Hash import SHA
-from essentials import fee_calculate
+This file takes optional arguments:
+
+arg1: amount to send
+arg2: recipient address
+arg3: operation
+arg4: OpenField data
+arg5: wallet file
+arg6: request confirmation for every transaction
+
+Args 3, 4 and 6 are not prompted for if run without args.
+"""
-from simplecrypt import encrypt, decrypt
 import base64
-import time
-import sqlite3
-import os
-import essentials
 import sys
-import options
-import getpass
-import re
+import time
+
 import socks
+from Cryptodome.Hash import SHA
+from Cryptodome.Signature import PKCS1_v1_5
-import connections
+from bismuthclient import rpcconnections
+from bisbasic import essentials, options
+from bisbasic.essentials import fee_calculate
+from polysign.signerfactory import SignerFactory
-config = options.Get()
-config.read()
-ledger_path = config.ledger_path
-hyper_path = config.hyper_path
+def connect():
+    if 'regnet' in config.version:
+        port = 3030
+    elif 'testnet' in config.version:
+        port = 2829
+    else:
+        port = 5658
-key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, address, keyfile = essentials.keys_load_new("hypernode_contract.der")
+    return rpcconnections.Connection(("127.0.0.1", int(port)))
-if encrypted:
-    key, private_key_readable = essentials.keys_unlock(private_key_readable)
+
+if __name__ == "__main__":
+    config = options.Get()
+    config.read()
-print('Number of arguments: %d arguments.' % len(sys.argv))
-print('Argument List: %s' % ', '.join(sys.argv))
+    try:
+        wallet_file = sys.argv[5]
+    except:
+        wallet_file = input("Path to wallet: ")
-# get balance
+    try:
+        request_confirmation = sys.argv[6]
+    except:
+        request_confirmation = False
-# include mempool fees
-mempool = sqlite3.connect('mempool.db')
-mempool.text_factory = str
-m = mempool.cursor()
-m.execute("SELECT count(amount), sum(amount) FROM transactions WHERE address = ?;", (address,))
-result = m.fetchall()[0]
-if result[1] != None:
-    debit_mempool = float('%.8f' % (float(result[1]) + float(result[1]) * 0.001 + int(result[0]) * 0.01))
-else:
-    debit_mempool = 0
-# include mempool fees
+    key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_b64encoded, address, keyfile = essentials.keys_load_new(wallet_file)
-conn = sqlite3.connect(ledger_path)
+    if encrypted:
+        key, private_key_readable = essentials.keys_unlock(private_key_readable)
-conn.text_factory = str
-c = conn.cursor()
+    print(f'Number of arguments: {len(sys.argv)} arguments.')
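+    # Editor's note -- usage sketch with hypothetical values (argv layout per
+    # the module docstring: amount, recipient, operation, data, wallet, confirm):
+    #     python3 send_nogui_noconf.py 1.0 <56-hex-recipient> "" "" wallet.der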
+    print(f'Argument list: {", ".join(sys.argv)}')
+    print(f'Using address: {address}')
-try:
-    amount_input = sys.argv[1]
-except IndexError:
-    amount_input = input("Amount: ")
+    # get balance
-try:
-    recipient_input = sys.argv[2]
-except IndexError:
-    recipient_input = input("Recipient: ")
+    s = connect()
+    s._send("balanceget")
+    s._send(address)  # change address here to view other people's transactions
+    stats_account = s._receive()
+    balance = stats_account[0]
+    print("Transaction address: %s" % address)
+    print("Transaction address balance: %s" % balance)
+    try:
+        amount_input = sys.argv[1]
+    except IndexError:
+        amount_input = input("Amount: ")
-if not address_validate(recipient_input):
-    print("Wrong address format")
-    exit(1)
+    try:
+        recipient_input = sys.argv[2]
+    except IndexError:
+        recipient_input = input("Recipient: ")
-try:
-    operation_input = sys.argv[3]
-except IndexError:
-    operation_input = 0
+    if not SignerFactory.address_is_valid(recipient_input):
+        print("Wrong address format")
+        sys.exit(1)
-try:
-    openfield_input = sys.argv[4]
-except IndexError:
-    openfield_input = ""
+    try:
+        operation_input = sys.argv[3]
+    except IndexError:
+        operation_input = ""
+    try:
+        openfield_input = sys.argv[4]
+    except IndexError:
+        openfield_input = ""
-# hardfork fee display
-fee = fee_calculate(openfield_input)
-print("Fee: %s" % fee)
+    fee = fee_calculate(openfield_input)
+    print("Fee: %s" % fee)
-# confirm = input("Confirm (y/n): ")
+    if request_confirmation:
+        confirm = input("Confirm (y/n): ")
-# if confirm != 'y':
-#     print("Transaction cancelled, user confirmation failed")
-#     exit(1)
+        if confirm != 'y':
+            print("Transaction cancelled, user confirmation failed")
+            sys.exit(1)
-# hardfork fee display
-try:
-    float(amount_input)
-    is_float = 1
-except ValueError:
-    is_float = 0
-    exit(1)
+    try:
+        float(amount_input)
+        is_float = 1
+    except ValueError:
+        is_float = 0
+        sys.exit(1)
-if len(str(recipient_input)) != 56:
-    print("Wrong address length")
-else:
-    timestamp = '%.2f' % time.time()
+    timestamp = '%.2f' % (time.time() - 5)  # remote proofing
+    # TODO: use transaction object, no dup code for buffer assembling
     transaction = (str(timestamp), str(address), str(recipient_input), '%.8f' % float(amount_input), str(operation_input), str(openfield_input))  # this is signed
-    # print transaction
-
+    # TODO: use polysign here
     h = SHA.new(str(transaction).encode("utf-8"))
     signer = PKCS1_v1_5.new(key)
     signature = signer.sign(h)
     signature_enc = base64.b64encode(signature)
     txid = signature_enc[:56]
-    print("Encoded Signature: %s" % signature_enc.decode("utf-8"))
-    print("Transaction ID: %s" % txid.decode("utf-8"))
+    print(f"Encoded Signature: {signature_enc.decode('utf-8')}")
+    print(f"Transaction ID: {txid.decode('utf-8')}")
 
     verifier = PKCS1_v1_5.new(key)
 
@@ -151,24 +136,23 @@ def address_validate(address):
         print("Mempool: Sending more than owned")
 
     else:
-        tx_submit = (str (timestamp), str (address), str (recipient_input), '%.8f' % float (amount_input), str (signature_enc.decode ("utf-8")), str (public_key_hashed.decode("utf-8")), str (operation_input), str (openfield_input))
+        tx_submit = (str (timestamp), str (address), str (recipient_input), '%.8f' % float (amount_input), str (signature_enc.decode ("utf-8")), str (public_key_b64encoded.decode("utf-8")), str (operation_input), str (openfield_input))
         while True:
             try:
-                connections.send (s, "mpinsert", 10)
-                connections.send (s, tx_submit, 10)
-                reply = connections.receive (s, 10)
+                s._send("mpinsert")
+                s._send(tx_submit)
+                reply = s._receive()
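+                # Editor's note (assumption): _send/_receive are the framing
+                # helpers of bismuthclient.rpcconnections, one payload per call;
+                # a "*" reply below is read as an empty frame caused by a
+                # timeout mismatch, hence the retry instead of accepting it.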
print ("Client: {}".format (reply)) - if reply != "*": #response can be empty due to different timeout setting + if reply != "*": # response can be empty due to different timeout setting break else: print("Connection cut, retrying") except Exception as e: - print("A problem occurred: {}, retrying".format(e)) + print(f"A problem occurred: {e}, retrying") s = connect() pass else: print("Invalid signature") - # enter transaction end -s.close() + s.close() diff --git a/setup.iss b/setup.iss index 3ea6e02..586f1a7 100644 --- a/setup.iss +++ b/setup.iss @@ -1,17 +1,12 @@ -; -- Example1.iss -- -; Demonstrates copying 3 files and creating an icon. - -; SEE THE DOCUMENTATION FOR DETAILS ON CREATING .ISS SCRIPT FILES! - [Setup] -AppName=Bismuth +AppName=Bismuth Node AppVersion=4.x -DefaultDirName={pf}\Bismuth -DefaultGroupName=Bismuth +DefaultDirName={pf}\Bismuth Node +DefaultGroupName=Bismuth Node UninstallDisplayIcon={app}\node.exe Compression=lzma2 SolidCompression=yes -OutputBaseFilename=Bismuth_installer +OutputBaseFilename=Bismuth_node_setup SetupIconFile=graphics\icon.ico DisableDirPage=no @@ -19,31 +14,30 @@ WizardImageFile=graphics\left.bmp WizardSmallImageFile=graphics\mini.bmp [Files] -Source: "Dist\*" ; DestDir: "{app}"; Flags: recursesubdirs; +Source: "dist\*" ; DestDir: "{app}"; Flags: recursesubdirs; [Icons] -Name: "{group}\Full Node"; Filename: "{app}\node.exe" -Name: "{group}\Wallet"; Filename: "{app}\wallet.exe" -Name: "{group}\Uninstall Bismuth"; Filename: "{uninstallexe}" +Name: "{group}\Bismuth Node"; Filename: "{app}\node.exe" +Name: "{group}\Stop Bismuth Node"; Filename: "{app}\node_stop.exe" +Name: "{group}\Uninstall Bismuth Node"; Filename: "{uninstallexe}" -Name: "{commondesktop}\Full Node"; Filename: "{app}\node.exe" -Name: "{commondesktop}\Wallet"; Filename: "{app}\wallet.exe" +Name: "{commondesktop}\Bismuth Node"; Filename: "{app}\node.exe" +Name: "{commondesktop}\Stop Bismuth Node"; Filename: "{app}\node_stop.exe" [Registry] -; keys for 32-bit systems -Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 ; keys for 64-bit systems Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 - ; keys for 64-bit systems -Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 +Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: 
String; ValueName: "{app}\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 +Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 + ; keys for 64-bit systems Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 +; keys for 64-bit systems +Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 +Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 [Run] Filename: "{app}\node.exe"; Description: "Full Node"; Flags: shellexec postinstall skipifsilent unchecked -Filename: "{app}\wallet.exe"; Description: "Wallet"; Flags: shellexec postinstall skipifsilent diff --git a/setup_nuitka.iss b/setup_nuitka.iss deleted file mode 100644 index 848191a..0000000 --- a/setup_nuitka.iss +++ /dev/null @@ -1,58 +0,0 @@ -; -- Example1.iss -- -; Demonstrates copying 3 files and creating an icon. - -; SEE THE DOCUMENTATION FOR DETAILS ON CREATING .ISS SCRIPT FILES! 
- -[Setup] -AppName=Bismuth -AppVersion=4.x -DefaultDirName={pf}\Bismuth -DefaultGroupName=Bismuth -UninstallDisplayIcon={app}\files\node.exe -Compression=lzma2 -SolidCompression=yes -OutputBaseFilename=Bismuth_installer_nuitka -SetupIconFile=graphics\icon.ico -DisableDirPage=no - -WizardImageFile=graphics\left.bmp -WizardSmallImageFile=graphics\mini.bmp - -[Files] -Source: "Dist\*" ; DestDir: "{app}"; Flags: recursesubdirs; - -[Icons] -Name: "{group}\Full Node"; Filename: "{app}\files\node.exe" -Name: "{group}\Wallet"; Filename: "{app}\files\wallet.exe" -Name: "{group}\Stop Node"; Filename: "{app}\files\node_stop.exe" -Name: "{group}\Uninstall Bismuth"; Filename: "{uninstallexe}" - -Name: "{commondesktop}\Full Node"; Filename: "{app}\files\node.exe" -Name: "{commondesktop}\Wallet"; Filename: "{app}\files\wallet.exe" -Name: "{commondesktop}\Stop Node"; Filename: "{app}\files\node_stop.exe" - -[Registry] -; keys for 32-bit systems -Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -; keys for 64-bit systems -Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -; keys for 64-bit systems -Root: HKCU32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 -Root: HKLM32; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: not IsWin64 - - -; keys for 64-bit systems -Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\wallet.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -; keys for 64-bit systems -Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -; keys for 64-bit systems -Root: HKCU64; Subkey: "SOFTWARE\Microsoft\Windows 
NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 -Root: HKLM64; Subkey: "SOFTWARE\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers"; ValueType: String; ValueName: "{app}\files\node_stop.exe"; ValueData: "RUNASADMIN"; Flags: uninsdeletekeyifempty uninsdeletevalue; Check: IsWin64 - -[Run] -Filename: "{app}\files\node.exe"; Description: "Full Node"; Flags: shellexec postinstall skipifsilent unchecked -Filename: "{app}\files\wallet.exe"; Description: "Wallet"; Flags: shellexec postinstall skipifsilent diff --git a/staking.py b/staking.py index 2fc5817..9807f1a 100644 --- a/staking.py +++ b/staking.py @@ -3,10 +3,11 @@ #todo: rollbacks inside node; make sure delagete/ip is only allowed characters #operation: staking:register +#last working integrated implementation: https://github.com/bismuthfoundation/Bismuth/commit/69fd3aafd31cce6def1b09ffc64c95d457243c8a import sqlite3 import log -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten import mempool as mp from hashlib import blake2b import re @@ -201,7 +202,7 @@ def staking_revalidate(conn,c,index,index_cursor,block,app_log): mp.MEMPOOL = mp.Mempool (app_log,config,None,False) - conn = sqlite3.connect('static/test.db') + conn = sqlite3.connect('static/ledger_test.db') conn.text_factory = str c = conn.cursor() @@ -212,4 +213,4 @@ def staking_revalidate(conn,c,index,index_cursor,block,app_log): address = "4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed" staking_update(conn, c,index,index_cursor, "normal", 736600, app_log) staking_payout(conn, c,index,index_cursor,736600, 1525304875, app_log) - staking_revalidate (conn, c,index, index_cursor,736600, app_log) \ No newline at end of file + staking_revalidate (conn, c,index, index_cursor,736600, app_log) diff --git a/static/tar.py b/static/tar.py index 7663b37..82de835 100644 --- a/static/tar.py +++ b/static/tar.py @@ -5,8 +5,8 @@ import tarfile import sys import sqlite3 -from decimal import * -from quantizer import * +from decimal import Decimal +from quantizer import quantize_two, quantize_eight, quantize_ten import process_search from essentials import address_validate @@ -106,10 +106,6 @@ def balance_differences(): check = '> Ko' tar_obj.errors += 1 - """ - if address.lower() != address or len(address) != 56 and (balance1 or balance2) != 0: - print (f"{address} > wrong recipient") - """ if not address_validate(address) and (balance1 or balance2) != 0: print (f"{address} > wrong recipient") diff --git a/static/tar_testnet.py b/static/tar_testnet.py new file mode 100644 index 0000000..324fb75 --- /dev/null +++ b/static/tar_testnet.py @@ -0,0 +1,164 @@ +#this file is marginally dynamic, make sure you know what you run it against +import sys +sys.path.append("../") + +import tarfile +import sys +import sqlite3 +from decimal import Decimal +from quantizer import quantize_two, quantize_eight, quantize_ten +import process_search +from essentials import address_validate + + +class Tar: + def __init__(self): + self.hdd = sqlite3.connect("ledger_test.db", timeout=1) + self.hdd.text_factory = str + self.h = self.hdd.cursor() + + self.hdd2 = sqlite3.connect("hyper_test.db", timeout=1) + self.hdd2.text_factory = str + self.h2 = self.hdd2.cursor() + + self.errors = 0 + self.h_name = "ledger" + self.h2_name = "hyperblocks" + + +tar_obj = Tar() + + +def vacuum(cursor, name): + print(f"Vacuuming {name}") + 
cursor.execute("VACUUM") + + +def dupes_check_sigs(cursor, name): + print (f"Testing {name} for sig duplicates") + + cursor.execute("SELECT * FROM transactions WHERE signature IN (SELECT signature FROM transactions WHERE signature != '0' GROUP BY signature HAVING COUNT(*) >1)") + results = cursor.fetchall() + + dupes_allowed = [708334,708335] + + for result in results: + if result[0] not in dupes_allowed: + print (f"Duplicate entry on block: {result}") + tar_obj.errors += 1 + + +def dupes_check_rows_transactions(cursor, name): + print (f"Testing {name} for transaction row duplicates") + + cursor.execute("SELECT block_height, timestamp, address, recipient, amount, signature, public_key, block_hash, fee, reward, operation, openfield, COUNT(*) FROM transactions GROUP BY block_height, timestamp, address, recipient, amount, signature, public_key, block_hash, fee, reward, operation, openfield HAVING COUNT(*) > 1") + result = cursor.fetchall() + for entry in result: + print(f"Duplicate entry on block: {entry}") + tar_obj.errors += 1 + + +def dupes_check_rows_misc(cursor, name): + print (f"Testing {name} for misc row duplicates") + + cursor.execute("SELECT block_height, difficulty, COUNT(*) FROM misc GROUP BY block_height, difficulty HAVING COUNT(*) > 1") + result = cursor.fetchall() + for entry in result: + print(f"Duplicate entry on block: {entry}") + tar_obj.errors += 1 + + +def balance_from_cursor(cursor, address): + credit = Decimal("0") + debit = Decimal("0") + for entry in cursor.execute("SELECT amount,reward FROM transactions WHERE recipient = ? ",(address, )): + try: + #result = cursor.fetchall() + credit = credit + quantize_eight(entry[0]) + quantize_eight(entry[1]) + #print (result) + credit = 0 if credit is None else credit + except Exception as e: + credit = 0 + #print (credit) + + for entry in cursor.execute("SELECT amount,fee FROM transactions WHERE address = ? 
",(address, )): + try: + # result = cursor.fetchall() + debit = debit + quantize_eight(entry[0]) + quantize_eight(entry[1]) + # print (result) + debit = 0 if debit is None else debit + except Exception as e: + debit = 0 + # print (debit) + + return quantize_eight(credit-debit) + + +def balance_differences(): + + print ("Selecting all addresses from full ledger for errors") + tar_obj.h.execute ("SELECT distinct(recipient) FROM transactions group by recipient;") + addresses = tar_obj.h.fetchall () + + for address in addresses: + address = address[0] + balance1 = balance_from_cursor(tar_obj.h, address) + balance2 = balance_from_cursor(tar_obj.h2, address) + if (balance1 == balance2): + check = ' Ok' + else: + check = '> Ko' + tar_obj.errors += 1 + + if not address_validate(address) and (balance1 or balance2) != 0: + print (f"{address} > wrong recipient") + + print(f"{check} {address} {balance1} {balance2}") + + if (Decimal(balance1) < 0 or Decimal(balance2) < 0): + print(address,balance1,balance2) + + print(f"Done, {tar_obj.errors} errors.") + + +balance_differences() +dupes_check_rows_transactions(tar_obj.h, tar_obj.h_name) +dupes_check_rows_transactions(tar_obj.h2, tar_obj.h2_name) +dupes_check_rows_misc(tar_obj.h, tar_obj.h_name) +dupes_check_rows_misc(tar_obj.h2, tar_obj.h2_name) +dupes_check_sigs(tar_obj.h, tar_obj.h_name) +dupes_check_sigs(tar_obj.h2, tar_obj.h2_name) + + +if tar_obj.errors > 0: + print("There were errors, cannot continue") + tar_obj.hdd.close() + tar_obj.hdd2.close() + +else: + vacuum(tar_obj.h, tar_obj.h_name) + vacuum(tar_obj.h2, tar_obj.h2_name) + tar_obj.hdd.close() + tar_obj.hdd2.close() + + if not process_search.proccess_presence ("node.py"): + files = ["ledger_test.db-wal","ledger_test.db-shm","ledger_test.db","hyper_test.db-shm", "hyper_test.db-wal", "hyper_test.db", "index_test.db"] + + tar = tarfile.open("test.tar.gz", "w:gz") + + for file in files: + try: + print ("Compressing", file) + tar.add(file, arcname=file) + except: + "Error compressing {}".format(file) + + print("Compression finished for", files) + tar.close() + + else: + print ("Node is running, cannot continue") + +input("Press any key to continue") + + diff --git a/suggested_peers.txt b/suggested_peers.txt index 18990c1..71ea558 100644 --- a/suggested_peers.txt +++ b/suggested_peers.txt @@ -1 +1 @@ -{"127.0.0.1": "5658", "34.192.6.105": "5658", "198.245.62.30": "5658", "66.70.181.150": "5658", "51.15.211.156": "5658", "91.121.87.99": "5658", "51.15.47.212": "5658", "31.31.75.71": "5658", "62.112.10.156": "5658", "178.62.68.118": "5658", "51.15.228.170": "5658", "80.240.18.114": "5658", "91.121.77.179": "5658", "46.171.63.219": "5658", "109.92.6.40": "5658", "51.15.211.92": "5658", "163.172.166.207": "5658", "212.24.111.139": "5658", "104.248.73.153": "5658", "194.19.235.82": "5658", "108.61.90.91": "5658", "149.28.120.120": "5658", "149.28.53.219": "5658", "51.68.190.246": "5658", "51.15.90.15": "5658", "159.69.12.98": "5658", "159.69.147.99": "5658", "162.213.123.200": "5658", "209.246.143.198": "5658", "51.15.234.210": "5658", "163.172.161.7": "5658", "188.165.199.153": "5658", "217.163.23.242": "5658", "159.69.147.101": "5658", "185.125.46.56": "5658", "18.184.255.105": "5658", "46.101.186.35": "5658", "192.99.34.19": "5658", "45.76.15.224": "5658", "217.23.4.201": "5658", "204.12.231.58": "5658", "51.15.225.223": "5658", "109.236.83.141": "5658"} \ No newline at end of file +{"127.0.0.1": "5658", "34.192.6.105": "5658", "176.31.245.46": "5658", "45.77.6.146": "5658", "104.238.173.26": "5658", 
"162.213.123.200": "5658", "46.101.186.35": "5658", "212.24.111.139": "5658", "159.89.10.229": "5658", "209.246.143.198": "5658", "51.15.90.15": "5658", "209.250.238.142": "5658", "51.15.211.92": "5658", "139.59.25.152": "5658", "149.28.53.219": "5658", "185.125.46.56": "5658", "51.15.213.94": "5658", "217.23.4.201": "5658", "62.112.10.156": "5658", "198.245.62.30": "5658", "163.172.222.163": "5658", "51.68.190.246": "5658", "142.93.243.200": "5658", "178.128.222.221": "5658", "91.121.87.99": "5658", "139.59.91.47": "5658", "109.236.82.102": "5658", "91.121.77.179": "5658", "194.19.235.82": "5658", "46.105.43.213": "5658", "188.166.118.218": "5658", "108.61.90.91": "5658", "149.28.120.120": "5658", "107.191.39.23": "5658", "51.15.46.90": "5658", "51.15.234.210": "5658", "51.15.228.170": "5658", "159.69.147.99": "5658", "45.32.115.135": "5658", "51.15.47.212": "5658", "46.171.63.219": "5658", "80.240.18.114": "5658", "217.163.23.242": "5658", "163.172.166.207": "5658", "109.92.6.40": "5658", "51.15.95.155": "5658", "45.76.15.224": "5658", "188.165.199.153": "5658", "159.89.123.247": "5658", "109.236.83.141": "5658", "51.15.122.148": "5658", "81.169.153.71": "5658", "217.23.14.6": "5658", "163.172.161.7": "5658", "192.99.34.19": "5658", "51.15.225.223": "5658", "51.15.211.156": "5658", "140.82.11.77": "5658", "159.69.147.101": "5658", "104.248.73.153": "5658", "18.184.255.105": "5658", "51.15.118.29": "5658", "139.180.199.99": "5658", "51.15.254.16": "5658", "149.28.181.100": "5658", "180.68.191.77": "5658", "142.93.93.4": "5658", "82.16.135.238": "5658", "208.167.245.204": "5658", "109.190.174.238": "5658", "149.28.162.236": "5658", "94.156.35.223": "5658", "185.206.145.85": "5658", "91.92.136.22": "5658", "79.123.162.82": "5658", "94.156.144.5": "5658", "94.156.189.200": "5658", "185.177.59.161": "5658", "94.156.35.6": "5658", "212.73.150.90": "5658", "91.92.136.130": "5658", "85.217.170.187": "5658", "185.203.118.56": "5658", "185.206.147.64": "5658", "94.156.35.150": "5658", "85.217.170.190": "5658", "185.206.146.219": "5658", "85.217.171.9": "5658", "91.92.128.156": "5658", "94.156.35.111": "5658", "91.92.128.46": "5658", "91.92.136.112": "5658", "178.62.68.118": "5658", "149.28.46.106": "5658", "46.170.129.81": "5658", "bismuth.live": "5658"} \ No newline at end of file diff --git a/tests/ledger_balance.py b/tests/ledger_balance.py index 1db416a..085985b 100644 --- a/tests/ledger_balance.py +++ b/tests/ledger_balance.py @@ -8,7 +8,7 @@ # custom modules sys.path.append('../') -from quantizer import * +from quantizer import quantize_two, quantize_eight, quantize_ten LEDGER_PATH = '../static/ledger.db' HYPER_PATH = '../static/hyper.db' diff --git a/themes/Aaron.jpg b/themes/Aaron.jpg deleted file mode 100644 index 88ecc7f..0000000 Binary files a/themes/Aaron.jpg and /dev/null differ diff --git a/themes/Bismuth.jpg b/themes/Bismuth.jpg deleted file mode 100644 index e945450..0000000 Binary files a/themes/Bismuth.jpg and /dev/null differ diff --git a/themes/Clockwork.jpg b/themes/Clockwork.jpg deleted file mode 100644 index 7bf2ec5..0000000 Binary files a/themes/Clockwork.jpg and /dev/null differ diff --git a/themes/Cockpit.jpg b/themes/Cockpit.jpg deleted file mode 100644 index 62bd776..0000000 Binary files a/themes/Cockpit.jpg and /dev/null differ diff --git a/themes/Diamonds.jpg b/themes/Diamonds.jpg deleted file mode 100644 index 37833b6..0000000 Binary files a/themes/Diamonds.jpg and /dev/null differ diff --git a/themes/Gold.jpg b/themes/Gold.jpg deleted file mode 100644 index 
002ec5a..0000000 Binary files a/themes/Gold.jpg and /dev/null differ diff --git a/themes/Hamcat.jpg b/themes/Hamcat.jpg deleted file mode 100644 index cde2b71..0000000 Binary files a/themes/Hamcat.jpg and /dev/null differ diff --git a/themes/Huracan.jpg b/themes/Huracan.jpg deleted file mode 100644 index 5841c9f..0000000 Binary files a/themes/Huracan.jpg and /dev/null differ diff --git a/themes/Knives.jpg b/themes/Knives.jpg deleted file mode 100644 index 5983f45..0000000 Binary files a/themes/Knives.jpg and /dev/null differ diff --git a/themes/Koi.jpg b/themes/Koi.jpg deleted file mode 100644 index 5da8cf5..0000000 Binary files a/themes/Koi.jpg and /dev/null differ diff --git a/themes/Mharti.jpg b/themes/Mharti.jpg deleted file mode 100644 index aad88fd..0000000 Binary files a/themes/Mharti.jpg and /dev/null differ diff --git a/themes/Mocho.jpg b/themes/Mocho.jpg deleted file mode 100644 index 906ffb3..0000000 Binary files a/themes/Mocho.jpg and /dev/null differ diff --git a/themes/Mountain.jpg b/themes/Mountain.jpg deleted file mode 100644 index d516609..0000000 Binary files a/themes/Mountain.jpg and /dev/null differ diff --git a/themes/Skynet.jpg b/themes/Skynet.jpg deleted file mode 100644 index a35debc..0000000 Binary files a/themes/Skynet.jpg and /dev/null differ diff --git a/themes/Stardust.jpg b/themes/Stardust.jpg deleted file mode 100644 index df313c8..0000000 Binary files a/themes/Stardust.jpg and /dev/null differ diff --git a/themes/Terminator.jpg b/themes/Terminator.jpg deleted file mode 100644 index 0476d01..0000000 Binary files a/themes/Terminator.jpg and /dev/null differ diff --git a/tokensv2.py b/tokensv2.py index 4db0e8c..18e5bc1 100644 --- a/tokensv2.py +++ b/tokensv2.py @@ -10,104 +10,84 @@ __version__ = '0.0.2' + def blake2bhash_generate(data): # new hash blake2bhash = blake2b(str(data).encode(), digest_size=20).hexdigest() return blake2bhash # new hash -import functools -def sql_trace_callback(log, id, statement): - line = f"SQL[{id}] {statement}" - log.warning(line) - -def tokens_update(file, ledger, mode, app_log, plugin_manager=None, trace_db_calls = False): - if mode not in ("normal", "reindex"): - raise ValueError("Wrong value for tokens_update function") - - conn = sqlite3.connect(ledger) - if trace_db_calls: - conn.set_trace_callback(functools.partial(sql_trace_callback,app_log,"TOKENS-UPDATE-LEDGER")) - conn.text_factory = str - c = conn.cursor() - - tok = sqlite3.connect(file) - if trace_db_calls: - tok.set_trace_callback(functools.partial(sql_trace_callback,app_log,"TOKENS-UPDATE-FILE")) - tok.text_factory = str - t = tok.cursor() - t.execute("CREATE TABLE IF NOT EXISTS tokens (block_height INTEGER, timestamp, token, address, recipient, txid, amount INTEGER)") - tok.commit() - - if mode == "reindex": - app_log.warning("Token database will be reindexed") - t.execute("DELETE FROM tokens") - tok.commit() - - t.execute("SELECT block_height FROM tokens ORDER BY block_height DESC LIMIT 1;") +def tokens_update(node, db_handler_instance): + + db_handler_instance.index_cursor.execute("CREATE TABLE IF NOT EXISTS tokens (block_height INTEGER, timestamp, token, address, recipient, txid, amount INTEGER)") + db_handler_instance.index.commit() + + db_handler_instance.index_cursor.execute("SELECT block_height FROM tokens ORDER BY block_height DESC LIMIT 1;") try: - token_last_block = int(t.fetchone()[0]) + token_last_block = int(db_handler_instance.index_cursor.fetchone()[0]) except: token_last_block = 0 - app_log.warning("Token anchor block: {}".format(token_last_block)) + 
node.logger.app_log.warning("Token anchor block: {}".format(token_last_block)) - # app_log.warning all token issuances - c.execute("SELECT block_height, timestamp, address, recipient, signature, operation, openfield FROM transactions WHERE block_height >= ? AND operation = ? AND reward = 0 ORDER BY block_height ASC;", (token_last_block, "token:issue",)) - results = c.fetchall() - app_log.warning(results) + # node.logger.app_log.warning all token issuances + db_handler_instance.c.execute("SELECT block_height, timestamp, address, recipient, signature, operation, openfield FROM transactions WHERE block_height >= ? AND operation = ? AND reward = 0 ORDER BY block_height ASC;", (token_last_block, "token:issue",)) + results = db_handler_instance.c.fetchall() + node.logger.app_log.warning(results) tokens_processed = [] for x in results: - token_name = x[6].split(":")[0].lower().strip() try: - t.execute("SELECT * from tokens WHERE token = ?", (token_name,)) - dummy = t.fetchall()[0] # check for uniqueness - app_log.warning("Token issuance already processed: {}".format(token_name,)) - except: - if token_name not in tokens_processed: - block_height = x[0] - app_log.warning("Block height {}".format(block_height)) - - timestamp = x[1] - app_log.warning("Timestamp {}".format(timestamp)) + token_name = x[6].split(":")[0].lower().strip() + try: + db_handler_instance.index_cursor.execute("SELECT * from tokens WHERE token = ?", (token_name,)) + dummy = db_handler_instance.index_cursor.fetchall()[0] # check for uniqueness + node.logger.app_log.warning("Token issuance already processed: {}".format(token_name,)) + except: + if token_name not in tokens_processed: + block_height = x[0] + node.logger.app_log.warning("Block height {}".format(block_height)) + timestamp = x[1] + node.logger.app_log.warning("Timestamp {}".format(timestamp)) - tokens_processed.append(token_name) - app_log.warning("Token: {}".format(token_name)) + tokens_processed.append(token_name) + node.logger.app_log.warning("Token: {}".format(token_name)) - issued_by = x[3] - app_log.warning("Issued by: {}".format(issued_by)) + issued_by = x[3] + node.logger.app_log.warning("Issued by: {}".format(issued_by)) - txid = x[4][:56] - app_log.warning("Txid: {}".format(txid)) + txid = x[4][:56] + node.logger.app_log.warning("Txid: {}".format(txid)) - total = x[6].split(":")[1] - app_log.warning("Total amount: {}".format(total)) + total = x[6].split(":")[1] + node.logger.app_log.warning("Total amount: {}".format(total)) - t.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", - (block_height, timestamp, token_name, "issued", issued_by, txid, total)) + db_handler_instance.index_cursor.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", + (block_height, timestamp, token_name, "issued", issued_by, txid, total)) - if plugin_manager: - plugin_manager.execute_action_hook('token_issue', - {'token': token_name, 'issuer': issued_by, - 'txid': txid, 'total': total}) + if node.plugin_manager: + node.plugin_manager.execute_action_hook('token_issue', + {'token': token_name, 'issuer': issued_by, + 'txid': txid, 'total': total}) - else: - app_log.warning("This token is already registered: {}".format(x[1])) + else: + node.logger.app_log.warning("This token is already registered: {}".format(x[1])) + except: + node.logger.app_log.warning("Error parsing") - tok.commit() - # app_log.warning all token issuances + db_handler_instance.index.commit() + # node.logger.app_log.warning all token issuances - # app_log.warning("---") + # node.logger.app_log.warning("---") - # 
app_log.warning all transfers of a given token + # node.logger.app_log.warning all transfers of a given token # token = "worthless" - c.execute("SELECT operation, openfield FROM transactions WHERE (block_height >= ? OR block_height <= ?) AND operation = ? and reward = 0 ORDER BY block_height ASC;", + db_handler_instance.c.execute("SELECT operation, openfield FROM transactions WHERE (block_height >= ? OR block_height <= ?) AND operation = ? and reward = 0 ORDER BY block_height ASC;", (token_last_block, -token_last_block, "token:transfer",)) #includes mirror blocks - openfield_transfers = c.fetchall() + openfield_transfers = db_handler_instance.c.fetchall() # print(openfield_transfers) tokens_transferred = [] @@ -117,97 +97,108 @@ def tokens_update(file, ledger, mode, app_log, plugin_manager=None, trace_db_cal tokens_transferred.append(token_name) if tokens_transferred: - app_log.warning("Token transferred: {}".format(tokens_transferred)) + node.logger.app_log.warning("Token transferred: {}".format(tokens_transferred)) for token in tokens_transferred: - app_log.warning("processing {}".format(token)) - c.execute("SELECT block_height, timestamp, address, recipient, signature, operation, openfield FROM transactions WHERE (block_height >= ? OR block_height <= ?) AND operation = ? AND openfield LIKE ? AND reward = 0 ORDER BY block_height ASC;", - (token_last_block, -token_last_block, "token:transfer",token + '%',)) - results2 = c.fetchall() - app_log.warning(results2) - - for r in results2: - block_height = r[0] - app_log.warning("Block height {}".format(block_height)) - - timestamp = r[1] - app_log.warning("Timestamp {}".format(timestamp)) - - token = r[6].split(":")[0] - app_log.warning("Token {} operation".format(token)) - - sender = r[2] - app_log.warning("Transfer from {}".format(sender)) - - recipient = r[3] - app_log.warning("Transfer to {}".format(recipient)) - - txid = r[4][:56] - if txid == "0": - txid = blake2bhash_generate(r) - app_log.warning("Txid: {}".format(txid)) - - try: - transfer_amount = int(r[6].split(":")[1]) - except: - transfer_amount = 0 - - app_log.warning("Transfer amount {}".format(transfer_amount)) - - # calculate balances - t.execute("SELECT sum(amount) FROM tokens WHERE recipient = ? AND block_height < ? AND token = ?", - (sender,block_height,token,)) - - try: - credit_sender = int(t.fetchone()[0]) - except: - credit_sender = 0 - app_log.warning("Sender's credit {}".format(credit_sender)) + try: + node.logger.app_log.warning("processing {}".format(token)) + db_handler_instance.c.execute("SELECT block_height, timestamp, address, recipient, signature, operation, openfield FROM transactions WHERE (block_height >= ? OR block_height <= ?) AND operation = ? AND openfield LIKE ? 
AND reward = 0 ORDER BY block_height ASC;", + (token_last_block, -token_last_block, "token:transfer",token + ':%',)) + results2 = db_handler_instance.c.fetchall() + node.logger.app_log.warning(results2) + + for r in results2: + block_height = r[0] + node.logger.app_log.warning("Block height {}".format(block_height)) + + timestamp = r[1] + node.logger.app_log.warning("Timestamp {}".format(timestamp)) + + token = r[6].split(":")[0] + node.logger.app_log.warning("Token {} operation".format(token)) + + sender = r[2] + node.logger.app_log.warning("Transfer from {}".format(sender)) + + recipient = r[3] + node.logger.app_log.warning("Transfer to {}".format(recipient)) + + txid = r[4][:56] + if txid == "0": + txid = blake2bhash_generate(r) + node.logger.app_log.warning("Txid: {}".format(txid)) + + try: + transfer_amount = int(r[6].split(":")[1]) + except: + transfer_amount = 0 + + node.logger.app_log.warning("Transfer amount {}".format(transfer_amount)) + + # calculate balances + db_handler_instance.index_cursor.execute("SELECT sum(amount) FROM tokens WHERE recipient = ? AND block_height < ? AND token = ?", + (sender,block_height,token,)) + + try: + credit_sender = int(db_handler_instance.index_cursor.fetchone()[0]) + except: + credit_sender = 0 + node.logger.app_log.warning("Sender's credit {}".format(credit_sender)) + + db_handler_instance.index_cursor.execute("SELECT sum(amount) FROM tokens WHERE address = ? AND block_height <= ? AND token = ?", + (sender,block_height,token,)) + try: + debit_sender = int(db_handler_instance.index_cursor.fetchone()[0]) + except: + debit_sender = 0 + node.logger.app_log.warning("Sender's debit: {}".format(debit_sender)) + # calculate balances + + # node.logger.app_log.warning all token transfers + balance_sender = credit_sender - debit_sender + + node.logger.app_log.warning("Sender's balance {}".format(balance_sender)) + + try: + db_handler_instance.index_cursor.execute("SELECT txid from tokens WHERE txid = ?", (txid,)) + dummy = db_handler_instance.index_cursor.fetchone() # check for uniqueness + if dummy: + node.logger.app_log.warning("Token operation already processed: {} {}".format(token, txid)) + else: + if (balance_sender - transfer_amount >= 0 and transfer_amount > 0): + db_handler_instance.index_cursor.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", + (abs(block_height), timestamp, token, sender, recipient, txid, transfer_amount)) + if node.plugin_manager: + node.plugin_manager.execute_action_hook('token_transfer', + {'token': token, 'from': sender, + 'to': recipient, 'txid': txid, 'amount': transfer_amount}) + + else: # save block height and txid so that we do not have to process the invalid transactions again + node.logger.app_log.warning("Invalid transaction by {}".format(sender)) + db_handler_instance.index_cursor.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", (block_height, "", "", "", "", txid, "")) + except Exception as e: + node.logger.app_log.warning("Exception {}".format(e)) + + node.logger.app_log.warning("Processing of {} finished".format(token)) + except: + node.logger.app_log.warning("Error parsing") - t.execute("SELECT sum(amount) FROM tokens WHERE address = ? AND block_height <= ? 
AND token = ?", - (sender,block_height,token,)) - try: - debit_sender = int(t.fetchone()[0]) - except: - debit_sender = 0 - app_log.warning("Sender's debit: {}".format(debit_sender)) - # calculate balances - - # app_log.warning all token transfers - balance_sender = credit_sender - debit_sender - if balance_sender < 0 and sender == "staking": - app_log.warning("Total staked {}".format(abs(balance_sender))) - else: - app_log.warning("Sender's balance {}".format(balance_sender)) - try: - t.execute("SELECT txid from tokens WHERE txid = ?", (txid,)) - dummy = t.fetchone() # check for uniqueness - if dummy: - app_log.warning("Token operation already processed: {} {}".format(token, txid)) - else: - if (balance_sender - transfer_amount >= 0 and transfer_amount > 0) or (sender == "staking"): - t.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", - (abs(block_height), timestamp, token, sender, recipient, txid, transfer_amount)) - if plugin_manager: - plugin_manager.execute_action_hook('token_transfer', - {'token': token, 'from': sender, - 'to': recipient, 'txid': txid, 'amount': transfer_amount}) + db_handler_instance.index.commit() - else: # save block height and txid so that we do not have to process the invalid transactions again - app_log.warning("Invalid transaction by {}".format(sender)) - t.execute("INSERT INTO tokens VALUES (?,?,?,?,?,?,?)", (block_height, "", "", "", "", txid, "")) - except Exception as e: - app_log.warning("Exception {}".format(e)) - app_log.warning("Processing of {} finished".format(token)) +if __name__ == "__main__": + from libs import node,logger + import dbhandler - tok.commit() + node = node.Node() + node.debug_level = "WARNING" + node.terminal_output = True - tok.close() - conn.close() + node.logger = logger.Logger() + node.logger.app_log = log.log("local_test.log", node.debug_level, node.terminal_output) + node.logger.app_log.warning("Configuration settings loaded") + db_handler = dbhandler.DbHandler("static/index_local_test.db","static/ledger.db","static/hyper.db", False, None, node.logger, False) -if __name__ == "__main__": - app_log = log.log("tokens.log", "WARNING", True) - tokens_update("static/index_test.db", "static/test.db", "normal", app_log) + tokens_update(node, db_handler) # tokens_update("tokens.db","reindex") diff --git a/wallet.py b/wallet.py deleted file mode 100644 index 222d64a..0000000 --- a/wallet.py +++ /dev/null @@ -1,2139 +0,0 @@ -# add manual refresh, objectify - -# icons created using http://www.winterdrache.de/freeware/png2ico/ - -import threading -import csv -import glob -import os -import platform -import tarfile -import time -import webbrowser -from datetime import datetime -from decimal import * -# from operator import itemgetter -from tkinter import * -from tkinter import filedialog, messagebox, ttk -import ast - -import socks -from Cryptodome.Cipher import AES, PKCS1_OAEP -from Cryptodome.Hash import SHA -from Cryptodome.PublicKey import RSA -from Cryptodome.Random import get_random_bytes -from Cryptodome.Signature import PKCS1_v1_5 - -import connections -import essentials -import log -import lwbench -import options -import recovery -import requests -from bisurl import * -from essentials import fee_calculate -from quantizer import quantize_eight -from simplecrypt import encrypt, decrypt -from tokensv2 import * - - -class Keys: - def __init__(self): - self.key = None - self.public_key_readable = None - self.private_key_readable = None - self.encrypted = None - self.unlocked = None - self.public_key_hashed = None - self.myaddress = 
None - self.keyfile = None - - -# Wallet needs a version for itself -__version__ = '0.8.3' - -# upgrade wallet location after nuitka-required "files" folder introduction -if os.path.exists("../wallet.der") and not os.path.exists("wallet.der") and "Windows" in platform.system(): - print("Upgrading wallet location") - os.rename("../wallet.der", "wallet.der") -# upgrade wallet location after nuitka-required "files" folder introduction - - -"""nuitka -import PIL.Image, PIL.ImageTk, pyqrcode -import matplotlib -from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk -matplotlib.use('TkAgg') -from matplotlib.figure import Figure3 -""" -# import keys - -class Wallet(): - def __init__(self): - self.block_height_old = None - self.statusget = None - self.s = None - - self.ip = None - self.port = None - - self.tx_tree = None - - self.balance = None - self.block_height_old = None - self.mempool_total = None - self.stats_timestamp = None - - -def mempool_clear(s): - connections.send(s, "mpclear", 10) - - -def mempool_get(s): - # result = [['1524749959.44', '2ac10094cc1d3dd2375f8e1aa51115afd33926e3fa69472f2ea987f5', 'edf2d63cdf0b6275ead22c9e6d66aa8ea31dc0ccb367fad2e7c08a25', '11.15000000', 'rd7Op7gZlp7bBkdL5EogrhkHB3WFGNKfc2cGqzrKzwtFCJf/3nKt13y/1MggR5ioA1RAHFn/8m5q+ot7nv6bTcAOhXHgK/CcqplNBNEp3J+RFf1IbEbhbckJsdVbRvrkQoMRZmSrwCwJm+v/pB9KYqG3R5OVKmEyc5KbUZsRuTUrZjPL6lPd+VfYy6x2Wnr5JgC+q7zvQPH0+yqVqOxcbgYggbbNyHHfYIj+0k0uK+8hwmRCe+SfG+/JZwiSp8ZO4Teyd6/NDmss0AaDRYfAVmxLMEg0aOf1xPbURL3D9gyUsDWupRFVT88eS22cRTPgvS5fwpPt/rV8QUa58eRHSMJ3MDwxpkJ7KeMkN5dfiQ/NZ0HFQc3vDwnGJ1ybyVDnw/i7ioRsJtTC0hGNO33lNVqKnbQ8yQ/7yihqfUsCM1qXA/a5Q+9bRx1mr0ff+h7BYxK7qoVF/2KeiS7Ia8OiX8EPUSiwFxCnGsY+Ie+AgQlaiUIlen2LoGw41NGmZufvWXFqK9ww8e50n35OmKw0MQrJrzOr/7iBoCOXmW0/jEzbJNM7NKEni7iFNwbfk3Xzkoh8A2/m9hdDPKZASdF1hFpNVnGJnDvuINRNn3xBUqoxCQUY50b9mGO4hdkLgVOQOVvzYvdYjB0B+XJTvmfLtWQKOcAJ4/E7tr8dSrC7sPY=', 'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQ0lqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FnOEFNSUlDQ2dLQ0FnRUE0SjdSS2VPWGN2OXhaTGN6R2IvSQprV2MvanU3cktvLzIrNGJuS0NsQituT0VwNDY5Vzd5YmF3eW1mR2xVUmpvYjg3MjZ6eWFDdDVrOEJqNXU1Y25MCk1XaENueGNwdGltUytmeHA1WGx5NGs5TUNQUDlYODZFc1U0ZjBrcVBhZjhnais1MG5LdjM4a01ZMHFSR0k0U0QKNS9wVlpCY1ptRjN0eVFPYzh0SWJERk9vUHJta0FpTy9LQnAxWHA4Q0dFK24zaTdKdS9zUFlzcDZFRERobjVrVAptVDMxUGVOZ2tUOTh4OW5rSmhSTmxmQTE2Mi9ia2gva2JISE1hUE1JYUhsUDhSbGVNazlqS0hCNjVOWFVMVHNLCjZZa2FNK2F3aGVpUWIwVDE2cm5tY3N4NHZBbWViUEFBWTQ1WWNqMWx3L3lpU0ZXWWpvdkcrQjBkZ0JuTDVXbUUKb2d6bnQxN04zYzZnU1JBNEYrUUhrVlA1RjBUejdTSXFuWnZDeCtEMDhjam9hVWgxUi9SeFNlT1Mwd3pEdWdNOQpMZjhSQXZweVRxR0xmUWpYY252YnVaMGNBc1g4SzFCR3lvTDZIZ3h2U3kzeUJBMlZvSjlnM1JncVUxU3NraHgrCktsdlg0S0VWeXUxLzlMbXRpc3dKSFZGTitEdVhTV1VqMjk0RURsZktsTlRKY0h1LytQWHFyeGVzbkpjOGttYisKWVlYS0R3YnRKNDFRMnRZalBwd1BOSmpDdm1Ca2haSzR2VEFIQXNKVTVEV1pQZkRJeEN6WDVFbFFRUGNhVUV6MApvbnAzNDVpeVV0TFZZcmdIdTJCNmIvNkNqMWlCNm90SitNV1RUYXVOUHcrYXczeVRHK1NUM3dxeG5qS3I3YkoyCnJGVkFnUFBCRlI5cmVoUUpmTXBoTGtVQ0F3RUFBUT09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==', '0', '672ce4daaeb73565'], ['1524749904.95', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '0.00000000', 'bQmnTD79aL1hjoyVF/ARfMLFfMtQiqpmvk88fPAGW1LUqLQen87+6i+2flBCuSPOWvjHQBMJ3Ctyk5MtuWj6KtoltWSKXev2tYfgNSiAOuo1YIbUhDwTBtHI5UY6X9eNmFjB5Iny0/7VB+cotV1ZBPpgCx1xQn45CtAVk4IYaXc=', 
'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlHZk1BMEdDU3FHU0liM0RRRUJBUVVBQTRHTkFEQ0JpUUtCZ1FES3ZMVGJEeDg1YTF1Z2IvNnhNTWhWT3E2VQoyR2VZVDgrSXEyejlGd0lNUjQwbDJ0dEdxTks3dmFyTmNjRkxJdThLbjRvZ0RRczNXU1dRQ3hOa2haaC9GcXpGCllZYTMvSXRQUGZ6clhxZ2Fqd0Q4cTRadDRZbWp0OCsyQmtJbVBqakZOa3VUUUl6Mkl1M3lGcU9JeExkak13N24KVVZ1OXRGUGlVa0QwVm5EUExRSURBUUFCCi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==', '0', '']] - - mempool_window = Toplevel() - mempool_window.title("Mempool") - - def update(): - for child in mempool_window.winfo_children(): # prevent hangup - child.destroy() - - mp_tree = ttk.Treeview(mempool_window, selectmode="extended", columns=('sender', 'recipient', 'amount')) - # mp_tree.grid(row=0, column=0) - - # table - - mp_tree.heading("#0", text='time') - mp_tree.column("#0", anchor='center', width=100) - - mp_tree.heading("#1", text='sender') - mp_tree.column("#1", anchor='center', width=350) - - mp_tree.heading("#2", text='recipient') - mp_tree.column("#2", anchor='center', width=350) - - mp_tree.heading("#3", text='amount') - mp_tree.column("#3", anchor='center', width=100) - - mp_tree.grid(sticky=N + S + W + E) - # table - - for tx in wallet.mempool_total: - mp_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[0])).strftime('%y-%m-%d %H:%M'), values=(tx[1], tx[2], tx[3])) - - clear_mempool_b = Button(mempool_window, text="Clear Mempool", command=lambda: mempool_clear(s), height=1, width=20, font=("Tahoma", 8)) - clear_mempool_b.grid(row=1, column=0, sticky=N + S + W + E) - close_mempool_b = Button(mempool_window, text="Close", command=lambda: mempool_window.destroy(), height=1, width=20, font=("Tahoma", 8)) - close_mempool_b.grid(row=2, column=0, sticky=N + S + W + E) - - def refresh_mp_auto(): - try: - # global frame_chart - root.after(0, update()) - root.after(10000, refresh_mp_auto) - - except Exception as e: - print("Mempool window closed, disabling auto-refresh ({})".format(e)) - - refresh_mp_auto() - - -def recover(): - result = recovery.recover(keyring.key) - messagebox.showinfo("Recovery Result", result) - - -def address_validate(address): - if re.match('[abcdef0123456789]{56}', address): - return True - else: - return False - - -def create_url_clicked(app_log, command, recipient, amount, operation, openfield): - """isolated function so no GUI leftovers are in bisurl.py""" - - result = create_url(app_log, command, recipient, amount, operation, openfield) - url_r.delete(0, END) - url_r.insert(0, result) - - -def read_url_clicked(app_log, url): - """isolated function so no GUI leftovers are in bisurl.py""" - result = (read_url(app_log, url)) - - recipient.delete(0, END) - amount.delete(0, END) - operation.delete(0, END) - openfield.delete("1.0", END) - - recipient.insert(0, result[1]) # amount - amount.insert(0, result[2]) # recipient - - operation.insert(INSERT, result[3]) # operation - openfield.insert(INSERT, result[4]) # openfield - - -def convert_ip_port(ip): - """ - Get ip and port, but extract port from ip if ip was as ip:port - :param ip: - :param some_port: default port - :return: (ip, port) - """ - if ':' in ip: - ip, port = ip.split(':') - - return ip, port - - -def node_connect(): - - keep_trying = True - while keep_trying: - for pair in light_ip: - try: - connect_ip, connect_port = convert_ip_port(pair) - wallet.ip = connect_ip - app_log.warning("Status: Attempting to connect to {}:{} out of {}".format(connect_ip, connect_port, light_ip)) - wallet.s = socks.socksocket() - wallet.s.settimeout(3) - wallet.s.connect((connect_ip, int(connect_port))) - connections.send(wallet.s, 
"statusget", 10) - result = connections.receive(wallet.s, 10) # validate the connection - app_log.warning("Connection OK") - app_log.warning("Status: Wallet connected to {}:{}".format(connect_ip, connect_port)) - ip_connected_var.set("{}:{}".format(connect_ip, connect_port)) - keep_trying = False - break - except Exception as e: - app_log.warning("Status: Cannot connect to {}:{}".format(connect_ip, connect_port)) - time.sleep(1) - - -def node_connect_once(ip): # Connect a light-wallet-ip directly from menu - try: - connect_ip, connect_port = convert_ip_port(ip) - wallet.ip = connect_ip - app_log.warning("Status: Attempting to connect to {}:{} out of {}".format(ip, connect_port, light_ip)) - wallet.s = socks.socksocket() - wallet.s.settimeout(3) - wallet.s.connect((connect_ip, int(connect_port))) - connections.send(wallet.s, "statusget", 10) - result = connections.receive(wallet.s, 10) # validate the connection - app_log.warning("Connection OK") - app_log.warning("Status: Wallet connected to {}:{}".format(connect_ip, connect_port)) - ip_connected_var.set("{}:{}".format(connect_ip, connect_port)) - except Exception as e: - app_log.warning("Status: Cannot connect to {}:{}".format(connect_ip, connect_port)) - node_connect() - - -def replace_regex(string, replace): - replaced_string = re.sub(r'^{}'.format(replace), "", string) - return replaced_string - - -def alias_register(alias_desired): - connections.send(wallet.s, "aliascheck", 10) - connections.send(wallet.s, alias_desired, 10) - - result = connections.receive(wallet.s, 10) - - if result == "Alias free": - send("0", keyring.myaddress, "", "alias=" + alias_desired) - pass - else: - messagebox.showinfo("Conflict", "Name already registered") - - -def help(): - top13 = Toplevel() - top13.title("Help") - aliases_box = Text(top13, width=100) - aliases_box.grid(row=0, pady=0) - - aliases_box.insert(INSERT, "Encrypt with PK:\n Encrypt the data with the recipient's private key. Only they will be able to view it.") - aliases_box.insert(INSERT, "\n\n") - aliases_box.insert(INSERT, "Mark as Message:\n Mark data as message. 
- def help_dialog():
-     top13 = Toplevel()
-     top13.title("Help")
-     aliases_box = Text(top13, width=100)
-     aliases_box.grid(row=0, pady=0)
-
-     aliases_box.insert(INSERT, "Encrypt with PK:\n Encrypt the data with the recipient's public key. Only they will be able to view it.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Mark as Message:\n Mark data as message. The recipient will be able to view it in the message section.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Base64 Encoding:\n Encode the data with base64, a binary-to-text encoding scheme that represents binary data in an ASCII string format by translating it into a radix-64 representation.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Operation:\n A static operation for blockchain programmability.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Data:\n Variable data for blockchain programmability.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Alias Recipient:\n Use an alias of the recipient in the recipient field if they have one registered.")
-     aliases_box.insert(INSERT, "\n\n")
-     aliases_box.insert(INSERT, "Resolve Aliases:\n Show aliases instead of addresses where applicable in the table below.")
-     aliases_box.insert(INSERT, "\n\n")
-
-     close = Button(top13, text="Close", command=top13.destroy)
-     close.grid(row=3, column=0, sticky=W + E)
-
-
- def data_insert_clear():
-     openfield.delete('1.0', END)  # remove previous
-
-
- def all_spend_clear():
-     all_spend_var.set(False)
-
-     amount.delete(0, END)
-     amount.insert(0, 0)
-
-
- def all_spend():
-     # all_spend_var.set(True)
-     all_spend_check()
-
-
- def all_spend_check():
-     if all_spend_var.get():
-         openfield_fee_calc = openfield.get("1.0", END).strip()
-
-         if encode_var.get() and not msg_var.get():
-             openfield_fee_calc = base64.b64encode(openfield_fee_calc.encode("utf-8")).decode("utf-8")
-
-         if msg_var.get() and encode_var.get():
-             openfield_fee_calc = "bmsg=" + base64.b64encode(openfield_fee_calc.encode("utf-8")).decode("utf-8")
-         if msg_var.get() and not encode_var.get():
-             openfield_fee_calc = "msg=" + openfield_fee_calc
-         if encrypt_var.get():
-             openfield_fee_calc = "enc=" + str(openfield_fee_calc)
-
-         fee_from_all = fee_calculate(openfield_fee_calc)
-         amount.delete(0, END)
-         amount.insert(0, (Decimal(balance_raw.get()) - Decimal(fee_from_all)))
-
-
- def fingerprint():
-     root.filename = filedialog.askopenfilename(multiple=True, initialdir="", title="Select files for fingerprinting")
-
-     fingerprints = {}  # file name -> blake2b digest
-
-     for file in root.filename:
-         with open(file, 'rb') as fp:
-             data = hashlib.blake2b(fp.read()).hexdigest()
-         fingerprints[os.path.split(file)[-1]] = data
-
-     openfield.insert(INSERT, fingerprints)
-
-
- def keys_untar(archive):
-     with tarfile.open(archive) as tar:
-         name = tar.getnames()
-         tar.extractall()
-     app_log.warning("{} file untarred successfully".format(name))
-     return name
-
-
- def keys_load_dialog():
-     wallet_load = filedialog.askopenfilename(multiple=False, initialdir="", title="Select wallet")
-
-     if wallet_load.endswith('.gz'):
-         print(wallet_load)
-         wallet_load = keys_untar(wallet_load)[0]
-
-     keyring.key, keyring.public_key_readable, keyring.private_key_readable, keyring.encrypted, keyring.unlocked, keyring.public_key_hashed, keyring.myaddress, keyring.keyfile = essentials.keys_load_new(wallet_load)  # upgrade later, remove blanks
-
-     encryption_button_refresh()
-
-     gui_address_t.delete(0, END)
-     gui_address_t.insert(INSERT, keyring.myaddress)
-
-     recipient_address.config(state=NORMAL)
-     recipient_address.delete(0, END)
-     recipient_address.insert(INSERT, keyring.myaddress)
-     recipient_address.config(state=DISABLED)
-
-     sender_address.config(state=NORMAL)
-     sender_address.delete(0, END)
-     sender_address.insert(INSERT, keyring.myaddress)
-     sender_address.config(state=DISABLED)
-
-     t = 
threading.Thread(target=refresh,args=(keyring.myaddress, wallet.s)) - t.start() - - - - -def keys_backup(): - root.filename = filedialog.asksaveasfilename(initialdir="", title="Select backup file") - - if not root.filename == "": - if not root.filename.endswith(".tar.gz"): - root.filename = root.filename + ".tar.gz" - - der_files = glob.glob("*.der") - - tar = tarfile.open(root.filename, "w:gz") - for der_file in der_files: - tar.add(der_file, arcname=der_file) - tar.close() - - -def watch(): - address = gui_address_t.get() - t = threading.Thread(target=refresh,args=(address, wallet.s)) - t.start() - - -def unwatch(): - gui_address_t.delete(0, END) - gui_address_t.insert(INSERT, keyring.myaddress) - t = threading.Thread(target=refresh,args=(keyring.myaddress, wallet.s)) - t.start() - - -def aliases_list(): - top12 = Toplevel() - top12.title("Your aliases") - aliases_box = Text(top12, width=100) - aliases_box.grid(row=0, pady=0) - - connections.send(wallet.s, "aliasget", 10) - connections.send(wallet.s, keyring.myaddress, 10) - - aliases_self = connections.receive(wallet.s, 10) - - for x in aliases_self: - aliases_box.insert(INSERT, replace_regex(x[0], "alias=")) - aliases_box.insert(INSERT, "\n") - - close = Button(top12, text="Close", command=top12.destroy) - close.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5)) - - -def recipient_insert(): - recipient.delete(0, END) - recipient.insert(0, root.clipboard_get()) - - -def address_insert(): - gui_address_t.delete(0, END) - gui_address_t.insert(0, root.clipboard_get()) - - -def data_insert(): - openfield.delete('1.0', END) # remove previous - openfield.insert(INSERT, root.clipboard_get()) - - -def data_insert_r(): - openfield_r.delete('1.0', END) # remove previous - openfield_r.insert(INSERT, root.clipboard_get()) - - -def url_insert(): - url.delete(0, END) # remove previous - url.insert(0, root.clipboard_get()) - - -def address_copy(): - root.clipboard_clear() - root.clipboard_append(keyring.myaddress) - - -def url_copy(): - root.clipboard_clear() - root.clipboard_append(url_r.get()) - - -def recipient_copy(): - root.clipboard_clear() - root.clipboard_append(recipient.get()) - - -def percentage(percent, whole): - return (Decimal(percent) * Decimal(whole) / 100) - - -def alias(): - alias_var = StringVar() - - # enter password - top8 = Toplevel() - top8.title("Enter Desired Name") - - alias_label = Label(top8, text="Input name") - alias_label.grid(row=0, column=0, sticky=N + W, padx=15, pady=(5, 0)) - - input_alias = Entry(top8, textvariable=alias_var) - input_alias.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 5)) - - dismiss = Button(top8, text="Register", command=lambda: alias_register(alias_var.get().strip())) - dismiss.grid(row=2, column=0, sticky=W + E, padx=15, pady=(15, 0)) - - dismiss = Button(top8, text="Dismiss", command=top8.destroy) - dismiss.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5)) - - -def encrypt_get_password(): - if keyring.encrypted: - messagebox.showwarning("Error", "Already encrypted") - return - - # enter password - top3 = Toplevel() - top3.title("Enter Password") - - password_label = Label(top3, text="Input password") - password_label.grid(row=0, column=0, sticky=N + W, padx=15, pady=(5, 0)) - - password_var_enc.set("") - input_password = Entry(top3, textvariable=password_var_enc, show='*') - input_password.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 5)) - - confirm_label = Label(top3, text="Confirm password") - confirm_label.grid(row=2, column=0, sticky=N + W, padx=15, pady=(5, 
0))
-
-     password_var_con.set("")
-     input_password_con = Entry(top3, textvariable=password_var_con, show='*')
-     input_password_con.grid(row=3, column=0, sticky=N + E, padx=15, pady=(0, 5))
-
-     enter = Button(top3, text="Encrypt", command=lambda: encrypt_fn(top3))
-     enter.grid(row=4, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-     cancel = Button(top3, text="Cancel", command=top3.destroy)
-     cancel.grid(row=5, column=0, sticky=W + E, padx=15, pady=(5, 5))
-     # enter password
-
-
- def lock_fn(button):
-     keyring.key = None  # drop the decrypted key so signing is impossible while locked
-     decrypt_b.configure(text="Unlock", state=NORMAL)
-     lock_b.configure(text="Locked", state=DISABLED)
-     messagemenu.entryconfig("Sign Messages", state=DISABLED)  # messages
-     walletmenu.entryconfig("Recovery", state=DISABLED)  # recover
-     password_var_dec.set("")
-
-
- def encrypt_fn(destroy_this):
-     password = password_var_enc.get()
-     password_conf = password_var_con.get()
-
-     if password == password_conf:
-         busy(destroy_this)
-         try:
-             ciphertext = encrypt(password, keyring.private_key_readable)
-             ciphertext_export = base64.b64encode(ciphertext).decode()
-             essentials.keys_save(ciphertext_export, keyring.public_key_readable, keyring.myaddress, keyring.keyfile)
-
-             # encrypt_b.configure(text="Encrypted", state=DISABLED)
-
-             keyring.key, keyring.public_key_readable, keyring.private_key_readable, keyring.encrypted, keyring.unlocked, keyring.public_key_hashed, keyring.myaddress, keyring.keyfile = essentials.keys_load_new(keyring.keyfile.name)
-
-             encryption_button_refresh()
-         finally:
-             notbusy(destroy_this)
-             destroy_this.destroy()
-             # lock_b.configure(text="Lock", state=NORMAL)
-     else:
-         messagebox.showwarning("Mismatch", "Password Mismatch")
-
-
- def decrypt_get_password():
-     # enter password
-     top4 = Toplevel()
-     top4.title("Enter Password")
-
-     input_password = Entry(top4, textvariable=password_var_dec, show='*')
-     input_password.grid(row=0, column=0, sticky=N + E, padx=15, pady=(5, 5))
-
-     enter = Button(top4, text="Unlock", command=lambda: decrypt_fn(top4))
-     enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-     cancel = Button(top4, text="Cancel", command=top4.destroy)
-     cancel.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5))
-     # enter password
-
-
- def decrypt_fn(destroy_this):
-     busy(destroy_this)
-     try:
-         keyring.password = password_var_dec.get()
-
-         keyring.decrypted_privkey = decrypt(keyring.password, base64.b64decode(keyring.private_key_readable))  # decrypt privkey
-
-         keyring.key = RSA.importKey(keyring.decrypted_privkey)  # be able to sign
-
-         notbusy(destroy_this)
-         destroy_this.destroy()
-
-         decrypt_b.configure(text="Unlocked", state=DISABLED)
-         lock_b.configure(text="Lock", state=NORMAL)
-         messagemenu.entryconfig("Sign Messages", state=NORMAL)  # messages
-         walletmenu.entryconfig("Recovery", state=NORMAL)  # recover
-     except:
-         notbusy(destroy_this)
-         messagebox.showwarning("Locked", "Wrong password")
-
-     password_var_dec.set("")
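The encrypt()/decrypt() helpers used above live outside this file. A minimal sketch of a compatible password-based flow with PyCryptodome, purely illustrative; the real module's on-disk format may differ:

    from Crypto.Cipher import AES
    from Crypto.Protocol.KDF import PBKDF2
    from Crypto.Random import get_random_bytes

    def encrypt_sketch(password, plaintext_bytes):
        # Derive a key from the password, then seal with AES-GCM.
        salt = get_random_bytes(16)
        key = PBKDF2(password, salt, dkLen=32, count=100000)
        cipher = AES.new(key, AES.MODE_GCM)
        ciphertext, tag = cipher.encrypt_and_digest(plaintext_bytes)
        return salt + cipher.nonce + tag + ciphertext

    def decrypt_sketch(password, blob):
        # A wrong password fails the GCM tag check, which is what
        # decrypt_fn's except branch reports as "Wrong password".
        salt, nonce, tag, ciphertext = blob[:16], blob[16:32], blob[32:48], blob[48:]
        key = PBKDF2(password, salt, dkLen=32, count=100000)
        return AES.new(key, AES.MODE_GCM, nonce=nonce).decrypt_and_verify(ciphertext, tag)

    assert decrypt_sketch("pw", encrypt_sketch("pw", b"secret")) == b"secret"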
- def send_confirm(amount_input, recipient_input, operation_input, openfield_input):
-     amount_input = quantize_eight(amount_input)
-
-     # Exchange check
-     exchange_addresses = {
-         "edf2d63cdf0b6275ead22c9e6d66aa8ea31dc0ccb367fad2e7c08a25": "Cryptopia",
-         "f6c0363ca1c5aa28cc584252e65a63998493ff0a5ec1bb16beda9bac": "qTrade",
-     }
-     if recipient_input in exchange_addresses and len(openfield_input) < 16:
-         messagebox.showinfo("Cannot send",
-                             "Identification message is missing for {}, please include it"
-                             .format(exchange_addresses[recipient_input]))
-         return
-
-     top10 = Toplevel()
-     top10.title("Confirm")
-
-     if alias_cb_var.get():  # alias check
-         connections.send(wallet.s, "addfromalias", 10)
-         connections.send(wallet.s, recipient_input, 10)
-         recipient_input = connections.receive(wallet.s, 10)
-
-     # encr check
-     if encrypt_var.get():
-         # get recipient's public key
-         connections.send(wallet.s, "pubkeyget", 10)
-         connections.send(wallet.s, recipient_input, 10)
-         target_public_key_hashed = connections.receive(wallet.s, 10)
-
-         recipient_key = RSA.importKey(base64.b64decode(target_public_key_hashed).decode("utf-8"))
-
-         # openfield_input = str(target_public_key.encrypt(openfield_input.encode("utf-8"), 32))
-
-         data = openfield_input.encode("utf-8")
-         session_key = get_random_bytes(16)
-         cipher_aes = AES.new(session_key, AES.MODE_EAX)
-
-         # Encrypt the session key with the public RSA key
-         cipher_rsa = PKCS1_OAEP.new(recipient_key)
-
-         # Encrypt the data with the AES session key
-         ciphertext, tag = cipher_aes.encrypt_and_digest(data)
-         enc_session_key = (cipher_rsa.encrypt(session_key))
-         openfield_input = str([x for x in (cipher_aes.nonce, tag, ciphertext, enc_session_key)])
-     # encr check
-
-     if encode_var.get() and not msg_var.get():
-         openfield_input = base64.b64encode(openfield_input.encode("utf-8")).decode("utf-8")
-     if msg_var.get() and encode_var.get():
-         openfield_input = "bmsg=" + base64.b64encode(openfield_input.encode("utf-8")).decode("utf-8")
-     if msg_var.get() and not encode_var.get():
-         openfield_input = "msg=" + openfield_input
-     if encrypt_var.get():
-         openfield_input = "enc=" + str(openfield_input)
-
-     fee = fee_calculate(openfield_input, operation_input)
-
-     confirmation_dialog = Text(top10, width=100)
-     confirmation_dialog.insert(INSERT, ("Amount: {}\nFee: {}\nTotal: {}\nTo: {}\nOperation: {}\nData: {}".format('{:.8f}'.format(amount_input), '{:.8f}'.format(fee), '{:.8f}'.format(Decimal(amount_input) + Decimal(fee)), recipient_input, operation_input, openfield_input)))
-     confirmation_dialog.configure(state="disabled")
-     confirmation_dialog.grid(row=0, pady=0)
-
-     enter = Button(top10, text="Confirm", command=lambda: send_confirmed(amount_input, recipient_input, operation_input, openfield_input, top10))
-     enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-     done = Button(top10, text="Cancel", command=top10.destroy)
-     done.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-
- def send_confirmed(amount_input, recipient_input, operation_input, openfield_input, top10):
-     send(amount_input, recipient_input, operation_input, openfield_input)
-     top10.destroy()
-
-
- def send(amount_input, recipient_input, operation_input, openfield_input):
-     all_spend_check()
-
-     if keyring.key is None:
-         messagebox.showerror("Locked", "Wallet is locked")
-         return  # abort: cannot sign without a decrypted key
-
-     app_log.warning("Received tx command")
-
-     try:
-         Decimal(amount_input)
-     except:
-         messagebox.showerror("Invalid Amount", "Amount must be a number")
-         return  # abort: a bad amount must not reach the signing path
-
-     if not address_validate(recipient_input):
-         messagebox.showerror("Invalid Address", "Invalid address format")
-     else:
-         app_log.warning("Amount: {}".format(amount_input))
-         app_log.warning("Recipient: {}".format(recipient_input))
-         app_log.warning("Data: {}".format(openfield_input))
-
-         tx_timestamp = '%.2f' % (float(wallet.stats_timestamp) - abs(float(wallet.stats_timestamp) - time.time()))  # randomize timestamp for unique signatures
-         transaction = (str(tx_timestamp), str(keyring.myaddress), str(recipient_input), '%.8f' % float(amount_input), str(operation_input), str(openfield_input))  # this is 
signed, float kept for compatibility - - h = SHA.new(str(transaction).encode("utf-8")) - signer = PKCS1_v1_5.new(keyring.key) - signature = signer.sign(h) - signature_enc = base64.b64encode(signature) - app_log.warning("Client: Encoded Signature: {}".format(signature_enc.decode("utf-8"))) - - verifier = PKCS1_v1_5.new(keyring.key) - - if verifier.verify(h, signature): - - app_log.warning("Client: The signature is valid, proceeding to save transaction, signature, new txhash and the public key to mempool") - - # print(str(timestamp), str(address), str(recipient_input), '%.8f' % float(amount_input),str(signature_enc), str(public_key_hashed), str(keep_input), str(openfield_input)) - tx_submit = str(tx_timestamp), str(keyring.myaddress), str(recipient_input), '%.8f' % float(amount_input), str(signature_enc.decode("utf-8")), str(keyring.public_key_hashed.decode("utf-8")), str(operation_input), str(openfield_input) # float kept for compatibility - - while True: - connections.send(wallet.s, "mpinsert", 10) - connections.send(wallet.s, tx_submit, 10) - reply = connections.receive(wallet.s, 10) - app_log.warning("Client: {}".format(reply)) - if reply[-1] == "Success": - messagebox.showinfo("OK", "Transaction accepted to mempool") - else: - messagebox.showerror("Error", "There was a problem with transaction processing. Full message: {}".format(reply)) - break - - t = threading.Thread(target=refresh, args=(gui_address_t.get(), wallet.s)) - t.start() - - else: - app_log.warning("Client: Invalid signature") - # enter transaction end - - -# def app_quit(): -# app_log.warning("Received quit command") -# root.destroy() - - -def qr(address): - """nuitka - address_qr = pyqrcode.create(address) - address_qr.png('address_qr.png') - - # popup - top = Toplevel() - top.title("Address QR Code") - - im = PIL.Image.open("address_qr.png") - - photo = PIL.ImageTk.PhotoImage(im.resize((320, 320))) - label = Label(top, image=photo) - label.image = photo # keep a reference! 
- label.pack() - - # msg = Message(top, text="hi") - # msg.pack() - - button = Button(top, text="Dismiss", command=top.destroy) - button.pack() - # popup - """ - - -def msg_dialogue(address): - connections.send(wallet.s, "addlist", 10) - connections.send(wallet.s, keyring.myaddress, 10) - addlist = connections.receive(wallet.s, 10) - print(addlist) - - def msg_received_get(addlist): - - for x in addlist: - if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[3] == address: - # print(x[11]) - - connections.send(wallet.s, "aliasget", 10) - connections.send(wallet.s, x[2], 10) - - msg_address = connections.receive(wallet.s, 10)[0][0] - - if x[11].startswith("enc=msg="): - msg_received_digest = replace_regex(x[11], "enc=msg=") - try: - # msg_received_digest = key.decrypt(ast.literal_eval(msg_received_digest)).decode("utf-8") - - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(keyring.key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_received_digest = "Could not decrypt message" - - elif x[11].startswith("enc=bmsg="): - msg_received_digest = replace_regex(x[11], "enc=bmsg=") - try: - msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8") - - # msg_received_digest = key.decrypt(ast.literal_eval(msg_received_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(keyring.key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_received_digest = "Could not decrypt message" - - - elif x[11].startswith("bmsg="): - msg_received_digest = replace_regex(x[11], "bmsg=") - try: - msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8") - except: - msg_received_digest = "Could not decode message" - - elif x[11].startswith("msg="): - msg_received_digest = replace_regex(x[11], "msg=") - - msg_received.insert(INSERT, ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(Decimal(x[1])))) + " From " + replace_regex(msg_address, "alias=") + ": " + msg_received_digest) + "\n") - - def msg_sent_get(addlist): - - for x in addlist: - if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[2] == address: - # print(x[11]) - - connections.send(wallet.s, "aliasget", 10) - connections.send(wallet.s, x[3], 10) - received_aliases = connections.receive(wallet.s, 10) - msg_recipient = received_aliases[0][0] - - if x[11].startswith("enc=msg="): - msg_sent_digest = replace_regex(x[11], "enc=msg=") - try: - # msg_sent_digest = key.decrypt(ast.literal_eval(msg_sent_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_sent_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(keyring.key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_sent_digest = 
cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_sent_digest = "Could not decrypt message" - - - elif x[11].startswith("enc=bmsg="): - msg_sent_digest = replace_regex(x[11], "enc=bmsg=") - try: - msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8") - # msg_sent_digest = key.decrypt(ast.literal_eval(msg_sent_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_sent_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(keyring.key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_sent_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - except: - msg_sent_digest = "Could not decrypt message" - - elif x[11].startswith("bmsg="): - msg_sent_digest = replace_regex(x[11], "bmsg=") - try: - msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8") - except: - msg_sent_digest = "Could not decode message" - - elif x[11].startswith("msg="): - msg_sent_digest = replace_regex(x[11], "msg=") - - msg_sent.insert(INSERT, ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(Decimal(x[1])))) + " To " + replace_regex(msg_recipient, "alias=") + ": " + msg_sent_digest) + "\n") - - # popup - top11 = Toplevel() - top11.title("Messaging") - - Label(top11, text="Received:", width=20).grid(row=0) - - msg_received = Text(top11, width=100, height=20, font=("Tahoma", 8)) - msg_received.grid(row=1, column=0, sticky=W, padx=5, pady=(5, 5)) - msg_received_get(addlist) - - Label(top11, text="Sent:", width=20).grid(row=2) - - msg_sent = Text(top11, width=100, height=20, font=("Tahoma", 8)) - msg_sent.grid(row=3, column=0, sticky=W, padx=5, pady=(5, 5)) - msg_sent_get(addlist) - - dismiss = Button(top11, text="Dismiss", command=top11.destroy) - dismiss.grid(row=5, column=0, sticky=W + E, padx=15, pady=(5, 5)) - - # popup - - -def refresh_auto(): - t = threading.Thread(target=refresh, args=(gui_address_t.get(), wallet.s)) - root.after(0, t.start()) - - - root.after(30000, refresh_auto) - - -def stats(): - """nuitka - - stats_window = Toplevel() - stats_window.title("Node Statistics") - stats_window.resizable(0, 0) - - frame_chart = Frame(stats_window, height=100, width=100) - frame_chart.grid(row=0, column=1, rowspan=999) - f = Figure(figsize=(11, 7), dpi=100) - f.set_facecolor('silver') - f.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.5) - - canvas = FigureCanvasTkAgg(f, master=frame_chart) - canvas.get_tk_widget().grid(row=0, column=1, sticky=W, padx=15, pady=(0, 0)) - - def chart_fill(): - print("Filling the chart") - f.clear() - - rows = 4 - columns = 2 - - # f.remove(first) - first = f.add_subplot(rows, columns, 1) - first.plot((range(len(stats_nodes_count_list))), (stats_nodes_count_list)) - first.ticklabel_format(useOffset=False) - - first_2 = f.add_subplot(rows, columns, 1) - first_2.plot((range(len(stats_thread_count_list))), (stats_thread_count_list)) - first_2.ticklabel_format(useOffset=False) - first.legend(('Nodes', 'Threads'), loc='best', shadow=True) - - second = f.add_subplot(rows, columns, 2) - second.plot((range(len(stats_consensus_list))), (stats_consensus_list)) - second.legend(('Consensus Block',), loc='best', shadow=True) - second.ticklabel_format(useOffset=False) - - third = f.add_subplot(rows, columns, 3) - third.plot((range(len(stats_consensus_percentage_list))), 
(stats_consensus_percentage_list)) - third.legend(('Consensus Level',), loc='best', shadow=True) - third.ticklabel_format(useOffset=False) - - fourth = f.add_subplot(rows, columns, 4) - fourth.plot((range(len(stats_diff_list_2))), (stats_diff_list_2)) - fourth.legend(('Time To Generate Block',), loc='best', shadow=True) - fourth.ticklabel_format(useOffset=False) - - fifth = f.add_subplot(rows, columns, 5) - fifth.plot((range(len(stats_diff_list_0))), (stats_diff_list_0)) - fifth.ticklabel_format(useOffset=False) - - fifth_2 = f.add_subplot(rows, columns, 5) - fifth_2.plot((range(len(stats_diff_list_1))), (stats_diff_list_1)) - fifth_2.ticklabel_format(useOffset=False) - - fifth_3 = f.add_subplot(rows, columns, 5) - fifth_3.plot((range(len(stats_diff_list_3))), (stats_diff_list_3)) - fifth_3.ticklabel_format(useOffset=False) - fifth.legend(('Diff 1', 'Diff 2', 'Diff Current',), loc='best', shadow=True) - - sixth = f.add_subplot(rows, columns, 6) - sixth.plot((range(len(stats_diff_list_4))), (stats_diff_list_4)) - sixth.legend(('Block Time',), loc='best', shadow=True) - sixth.ticklabel_format(useOffset=False) - - seventh = f.add_subplot(rows, columns, 7) - seventh.plot((range(len(stats_diff_list_5))), (stats_diff_list_5)) - seventh.legend(('Hashrate',), loc='best', shadow=True) - seventh.ticklabel_format(useOffset=False) - - eigth = f.add_subplot(rows, columns, 8) - eigth.plot((range(len(stats_diff_list_6))), (stats_diff_list_6)) - eigth.legend(('Difficulty Adjustment',), loc='best', shadow=True) - eigth.ticklabel_format(useOffset=False) - - # a tk.DrawingArea - canvas.draw() - - def update(): - print("Statistics update triggered") - stats_address = statusget[0] - stats_nodes_count = statusget[1] - stats_nodes_list = statusget[2] - stats_thread_count = statusget[3] - stats_uptime = statusget[4] - stats_consensus = statusget[5] - stats_consensus_percentage = statusget[6] - stats_version = statusget[7] - stats_diff = statusget[8] - - stats_address_label_var.set("Node Address: {}".format(stats_address)) - stats_nodes_count_label_var.set("Number of Nodes: {}".format(stats_nodes_count)) - stats_nodes_list_text_var.delete(0, END) - for entry in stats_nodes_list: - stats_nodes_list_text_var.insert(END, entry) - stats_nodes_list_text_var.grid(row=2, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_thread_count_var.set("Number of Threads: {}".format(stats_thread_count)) - stats_uptime_var.set("Uptime: {:.2f} hours".format(stats_uptime / 60 / 60)) - stats_consensus_var.set("Consensus Block: {}".format(stats_consensus)) - stats_consensus_consensus_percentage_var.set("Consensus Level: {:.2f}%".format(stats_consensus_percentage)) - stats_version_var.set("Node: {}".format(stats_version)) - stats_diff_var_0.set("Difficulty 1: {}".format(stats_diff[0])) - stats_diff_var_1.set("Difficulty 2: {}".format(stats_diff[1])) - stats_diff_var_2.set("Time to Generate Block: {}".format(stats_diff[2])) - stats_diff_var_3.set("Current Block Difficulty: {}".format(stats_diff[3])) - stats_diff_var_4.set("Block Time: {}".format(stats_diff[4])) - stats_diff_var_5.set("Hashrate: {}".format(stats_diff[5])) - stats_diff_var_6.set("Difficulty Adjustment: {}".format(stats_diff[6])) - - stats_address_label_var = StringVar() - stats_address_label = Label(stats_window, textvariable=stats_address_label_var) - stats_address_label.grid(row=0, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_nodes_count_label_var = StringVar() - stats_nodes_count_label = Label(stats_window, textvariable=stats_nodes_count_label_var) - 
stats_nodes_count_label.grid(row=1, column=0, sticky=E, padx=15, pady=(0, 0)) - - scrollbar = Scrollbar(stats_window) - scrollbar.grid(row=2, column=0, sticky=N + S + E, padx=140) - stats_nodes_list_text_var = Listbox(stats_window, width=20, height=10, font=("Tahoma", 8)) - scrollbar.config(command=stats_nodes_list_text_var.yview) - - stats_thread_count_var = StringVar() - stats_thread_count_label = Label(stats_window, textvariable=stats_thread_count_var) - stats_thread_count_label.grid(row=3, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_uptime_var = StringVar() - stats_uptime_label = Label(stats_window, textvariable=stats_uptime_var) - stats_uptime_label.grid(row=4, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_consensus_var = StringVar() - stats_consensus_label = Label(stats_window, textvariable=stats_consensus_var) - stats_consensus_label.grid(row=5, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_consensus_consensus_percentage_var = StringVar() - stats_consensus_consensus_percentage_label = Label(stats_window, textvariable=stats_consensus_consensus_percentage_var) - stats_consensus_consensus_percentage_label.grid(row=6, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_version_var = StringVar() - stats_version_label = Label(stats_window, textvariable=stats_version_var) - stats_version_label.grid(row=7, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_0 = StringVar() - stats_diff_label_0 = Label(stats_window, textvariable=stats_diff_var_0) - stats_diff_label_0.grid(row=8, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_1 = StringVar() - stats_diff_label_1 = Label(stats_window, textvariable=stats_diff_var_1) - stats_diff_label_1.grid(row=9, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_2 = StringVar() - stats_diff_label_2 = Label(stats_window, textvariable=stats_diff_var_2) - stats_diff_label_2.grid(row=10, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_3 = StringVar() - stats_diff_label_3 = Label(stats_window, textvariable=stats_diff_var_3) - stats_diff_label_3.grid(row=11, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_4 = StringVar() - stats_diff_label_4 = Label(stats_window, textvariable=stats_diff_var_4) - stats_diff_label_4.grid(row=12, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_5 = StringVar() - stats_diff_label_5 = Label(stats_window, textvariable=stats_diff_var_5) - stats_diff_label_5.grid(row=13, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_6 = StringVar() - stats_diff_label_6 = Label(stats_window, textvariable=stats_diff_var_6) - stats_diff_label_6.grid(row=14, column=0, sticky=E, padx=15, pady=(0, 0)) - - def refresh_stats_auto(): - try: - # global frame_chart - root.after(0, update()) - root.after(10000, refresh_stats_auto) - - chart_fill() - except Exception as e: - print("Statistics window closed, disabling auto-refresh ({})".format(e)) - - refresh_stats_auto() - """ - - -def csv_export(s): - connections.send(s, "addlist", 10) # senders - connections.send(s, keyring.myaddress, 10) - - tx_list = connections.receive(s, 10) - print(tx_list) - - root.filename = filedialog.asksaveasfilename(initialdir="", title="Select CSV file") - - with open(root.filename, 'w', newline='') as csvfile: - for transaction in tx_list: - writer = csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL) - writer.writerow([transaction[0], transaction[1], transaction[3], transaction[4], transaction[5], transaction[6], transaction[7], transaction[8], transaction[9], transaction[10], 
transaction[11]])
-
-     return
-
-
- def token_transfer(token, amount, window):
-     operation.delete(0, END)
-     operation.insert(0, "token:transfer")
-
-     openfield.delete('1.0', END)  # remove previous
-     openfield.insert(INSERT, "{}:{}".format(token, amount))
-     window.destroy()
-
-     send_confirm(0, recipient.get(), "token:transfer", "{}:{}".format(token, amount))
-
-
- def token_issue(token, amount, window):
-     operation.delete(0, END)
-     operation.insert(0, "token:issue")
-
-     openfield.delete('1.0', END)  # remove previous
-     openfield.insert(INSERT, "{}:{}".format(token, amount))
-     recipient.delete(0, END)
-     recipient.insert(INSERT, keyring.myaddress)
-     window.destroy()
-
-     send_confirm(0, recipient.get(), "token:issue", "{}:{}".format(token, amount))
-
-
- def tokens():
-     tokens_main = Frame(tab_tokens, relief='ridge', borderwidth=0)
-     tokens_main.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S)
-     # tokens_main.title ("Tokens")
-
-     token_box = Listbox(tokens_main, width=100)
-     token_box.grid(row=0, pady=0)
-
-     scrollbar_v = Scrollbar(tokens_main, command=token_box.yview)
-     scrollbar_v.grid(row=0, column=1, sticky=N + S + E)
-
-     connections.send(wallet.s, "tokensget", 10)
-     connections.send(wallet.s, gui_address_t.get(), 10)
-     tokens_results = connections.receive(wallet.s, 10)
-     print(tokens_results)
-
-     for pair in tokens_results:
-         try:
-             token = pair[0]
-             balance = pair[1]
-             token_box.insert(END, (token, ":", balance))
-         except:
-             app_log.warning("There was an issue fetching tokens")
-
-     # callback
-     def callback(event):
-         if not token_box.curselection():  # selection events can fire with nothing selected
-             return
-         token_select = (token_box.get(token_box.curselection()[0]))
-         token_name_var.set(token_select[0])
-         token_amount_var.set(token_select[2])
-
-     token_box.bind('<<ListboxSelect>>', callback)  # assumed event: the original marker was lost in transit, and selection drives the curselection() handler
-     # callback
-
-     token_name_var = StringVar()
-     token_name = Entry(tokens_main, textvariable=token_name_var, width=80)
-     token_name.grid(row=2, column=0, sticky=E, padx=15, pady=(5, 5))
-
-     token_name_label_var = StringVar()
-     token_name_label_var.set("Token Name:")
-     token_name_label = Label(tokens_main, textvariable=token_name_label_var)
-     token_name_label.grid(row=2, column=0, sticky=W, padx=15, pady=(0, 0))
-
-     # balance_var = StringVar()
-     # balance_msg_label = Label(frame_buttons, textvariable=balance_var)
-
-     token_amount_var = StringVar()
-     token_amount = Entry(tokens_main, textvariable=token_amount_var, width=80)
-     token_amount.grid(row=3, column=0, sticky=E, padx=15, pady=(5, 5))
-
-     token_amount_label_var = StringVar()
-     token_amount_label_var.set("Token Amount:")
-     token_amount_label = Label(tokens_main, textvariable=token_amount_label_var)
-     token_amount_label.grid(row=3, column=0, sticky=W, padx=15, pady=(0, 0))
-
-     transfer = Button(tokens_main, text="Transfer", command=lambda: token_transfer(token_name_var.get(), token_amount_var.get(), tokens_main))
-     transfer.grid(row=4, column=0, sticky=W + E, padx=5)
-
-     issue = Button(tokens_main, text="Issue", command=lambda: token_issue(token_name_var.get(), token_amount_var.get(), tokens_main))
-     issue.grid(row=5, column=0, sticky=W + E, padx=5)
-
-     # cancel = Button (tokens_main, text="Cancel", command=tokens_main.destroy)
-     # cancel.grid (row=6, column=0, sticky=W + E, padx=5)
-
-
- def tx_tree_define():
-     wallet.tx_tree = ttk.Treeview(tab_transactions, selectmode="extended", columns=('sender', 'recipient', 'amount', 'type'), height=20)
-     wallet.tx_tree.grid(row=1, column=0)
-
-     # table
-     wallet.tx_tree.heading("#0", text='time')
-     wallet.tx_tree.column("#0", anchor='center', width=100)
-
-     wallet.tx_tree.heading("#1", 
text='sender') - wallet.tx_tree.column("#1", anchor='center', width=347) - - wallet.tx_tree.heading("#2", text='recipient') - wallet.tx_tree.column("#2", anchor='center', width=347) - - wallet.tx_tree.heading("#3", text='amount') - wallet.tx_tree.column("#3", anchor='center', width=35) - - wallet.tx_tree.heading("#4", text='type') - wallet.tx_tree.column("#4", anchor='center', width=40) - - wallet.tx_tree.grid(sticky=N + S + W + E) - - -def table(address, addlist_20, mempool_total): - # transaction table - # data - try: - wallet.tx_tree.destroy() - except: - pass - tx_tree_define() - - for tx in mempool_total: - tag = "mempool" - - if tx[1] == address: - wallet.tx_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[0])).strftime('%y-%m-%d %H:%M'), values=(tx[1], tx[2], tx[3], "?"), tags=tag) - - # aliases - addlist_addressess = [] - reclist_addressess = [] - - for tx in addlist_20: - addlist_addressess.append(tx[2]) # append address - reclist_addressess.append(tx[3]) # append recipient - - if resolve_var.get(): - connections.send(wallet.s, "aliasesget", 10) # senders - connections.send(wallet.s, addlist_addressess, 10) - aliases_address_results = connections.receive(wallet.s, 10) - - connections.send(wallet.s, "aliasesget", 10) # recipients - connections.send(wallet.s, reclist_addressess, 10) - aliases_rec_results = connections.receive(wallet.s, 10) - - for index, tx in enumerate(addlist_20): - tx[2] = aliases_address_results[index] - tx[3] = aliases_rec_results[index] - # aliases - - # bind local address to local alias - if resolve_var.get(): - connections.send(wallet.s, "aliasesget", 10) # local - connections.send(wallet.s, [gui_address_t.get()], 10) - alias_local_result = connections.receive(wallet.s, 10)[0] - # bind local address to local alias - - for tx in addlist_20: - if tx[3] == gui_address_t.get(): - tag = "received" - else: - tag = "sent" - - # case for alias = this address - if resolve_var.get(): - print(tx[3], alias_local_result) - if tx[3] == alias_local_result: - tag = "received" - # case for alias = this address - - if Decimal(tx[9]) > 0: - symbol = "MIN" - elif tx[11].startswith("bmsg"): - symbol = "B64M" - elif tx[11].startswith("msg"): - symbol = "MSG" - else: - symbol = "TX" - - wallet.tx_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[1])).strftime('%y-%m-%d %H:%M'), values=(tx[2], tx[3], tx[4], symbol), tags=tag) - - wallet.tx_tree.tag_configure("received", background='palegreen1') - wallet.tx_tree.tag_configure("sent", background='chocolate1') - - # table - - -def refresh(address, s): - - - - - # print "refresh triggered" - try: - connections.send(s, "statusget", 10) - wallet.statusget = connections.receive(s, 10) - wallet.status_version = wallet.statusget[7] - wallet.stats_timestamp = wallet.statusget[9] - server_timestamp_var.set("GMT: {}".format(time.strftime("%H:%M:%S", time.gmtime(int(float(wallet.stats_timestamp)))))) - - # data for charts - - """ - block_height = statusget[8][7] # move chart only if the block height changes, returned from diff 7 - try: - block_height_old - except: - block_height_old = block_height # init - - if block_height_old != block_height or not stats_nodes_count_list: # or if list is empty - print("Chart update in progress") - - stats_nodes_count_list.append(statusget[1]) - stats_thread_count_list.append(statusget[3]) - stats_consensus_list.append(statusget[5]) - stats_consensus_percentage_list.append(statusget[6]) - - stats_diff_list_0.append(statusget[8][0]) - stats_diff_list_1.append(statusget[8][1]) - 
stats_diff_list_2.append(statusget[8][2]) - stats_diff_list_3.append(statusget[8][3]) - stats_diff_list_4.append(statusget[8][4]) - stats_diff_list_5.append(statusget[8][5]) - stats_diff_list_6.append(statusget[8][6]) - - block_height_old = block_height - else: - print("Chart update skipped, block hasn't moved") - # data for charts - """ - - connections.send(s, "balanceget", 10) - connections.send(s, address, 10) # change address here to view other people's transactions - stats_account = connections.receive(s, 10) - balance = stats_account[0] - credit = stats_account[1] - debit = stats_account[2] - fees = stats_account[3] - rewards = stats_account[4] - - app_log.warning("Transaction address balance: {}".format(balance)) - - # 0000000011"statusget" - # 0000000011"blocklast" - connections.send(s, "blocklast", 10) - block_get = connections.receive(s, 10) - bl_height = block_get[0] - db_timestamp_last = block_get[1] - hash_last = block_get[7] - - # check difficulty - connections.send(s, "diffget", 10) - diff = connections.receive(s, 10) - # check difficulty - - print(diff) - diff_msg = int(diff[1]) # integer is enough - - # network status - time_now = str(time.time()) - last_block_ago = Decimal(time_now) - Decimal(db_timestamp_last) - if last_block_ago > 300: - sync_msg = "{}m behind".format((int(last_block_ago / 60))) - sync_msg_label.config(fg='red') - else: - sync_msg = "Last block: {}s ago".format((int(last_block_ago))) - sync_msg_label.config(fg='green') - - # network status - - connections.send(s, "mpget", 10) # senders - wallet.mempool_total = connections.receive(s, 10) - print(wallet.mempool_total) - - # fees_current_var.set("Current Fee: {}".format('%.8f' % float(fee))) - balance_var.set("Balance: {:.8f} BIS".format(Decimal(balance))) - balance_raw.set(balance) - # address_var.set("Address: {}".format(address)) - debit_var.set("Sent Total: {:.8f} BIS".format(Decimal(debit))) - credit_var.set("Received Total: {:.8f} BIS".format(Decimal(credit))) - fees_var.set("Fees Paid: {:.8f} BIS".format(Decimal(fees))) - rewards_var.set("Rewards: {:.8f} BIS".format(Decimal(rewards))) - bl_height_var.set("Block: {}".format(bl_height)) - diff_msg_var.set("Difficulty: {}".format(diff_msg)) - sync_msg_var.set(sync_msg) - - hash_var.set("Hash: {}...".format(hash_last[:6])) - mempool_count_var.set("Mempool txs: {}".format(len(wallet.mempool_total))) - - connections.send(s, "annverget", 10) - annverget = connections.receive(s, 10) - version_var.set("Node: {}/{}".format(wallet.status_version, annverget)) - - # if status_version != annverget: - # version_color = "red" - # else: - # version_color = "green" - # version_var_label.config (fg=version_color) - - connections.send(s, "addlistlim", 10) - connections.send(s, address, 10) - connections.send(s, "20", 10) - addlist = connections.receive(s, 10) - print(addlist) - - table(address, addlist, wallet.mempool_total) - # root.after(1000, refresh) - - # canvas bg - root.update() - width_root = root.winfo_width() - height_root = root.winfo_height() - - # frame_main.update() - width_main = tab_main.winfo_width() - height_main = tab_main.winfo_height() - - canvas_main.configure(width=width_main, height=height_main) - # photo_main.resize (width_main,height_main) - - # canvas bg - - connections.send(s, "annget", 10) - annget = connections.receive(s, 10) - - ann_var_text.config(state=NORMAL) - ann_var_text.delete('1.0', END) - ann_var_text.insert(INSERT, annget) - ann_var_text.config(state=DISABLED) - - all_spend_check() - - - except Exception as e: - 
app_log.warning(e)
-         node_connect()
-
-
- def sign():
-     def verify_this():
-         try:
-             received_public_key = RSA.importKey(public_key_gui.get("1.0", END))
-             verifier = PKCS1_v1_5.new(received_public_key)
-             msg_hash = SHA.new(input_text.get("1.0", END).encode("utf-8"))
-             received_signature_dec = base64.b64decode(output_signature.get("1.0", END))
-
-             if verifier.verify(msg_hash, received_signature_dec):
-                 messagebox.showinfo("Validation Result", "Signature valid")
-             else:
-                 messagebox.showinfo("Validation Result", "Signature invalid")
-         except:
-             messagebox.showerror("Validation Result", "Signature invalid")
-
-     def sign_this():
-         h = SHA.new(input_text.get("1.0", END).encode("utf-8"))
-         signer = PKCS1_v1_5.new(keyring.key)
-         signature = signer.sign(h)
-         signature_enc = base64.b64encode(signature)
-
-         output_signature.delete('1.0', END)  # remove previous
-         output_signature.insert(INSERT, signature_enc.decode("utf-8"))  # base64 is ASCII; insert text, not bytes
-
-     # popup
-     top = Toplevel()
-     top.title("Sign message")
-     # top.geometry("%dx%d%+d%+d" % (800, 600, 0, 0))
-     # top.grid_propagate(False)
-
-     Label(top, text="Message:", width=20).grid(row=0, pady=0)
-     input_text = Text(top, height=10)
-     input_text.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 0))
-
-     Label(top, text="Public Key:", width=20).grid(row=2, pady=0)
-     public_key_gui = Text(top, height=10)
-     public_key_gui.insert(INSERT, keyring.public_key_readable)
-     public_key_gui.grid(row=3, column=0, sticky=N + E, padx=15, pady=(0, 0))
-
-     Label(top, text="Signature:", width=20).grid(row=4, pady=0)
-     output_signature = Text(top, height=10)
-     output_signature.grid(row=5, column=0, sticky=N + E, padx=15, pady=(0, 0))
-
-     sign_message = Button(top, text="Sign Message", command=sign_this)
-     sign_message.grid(row=6, column=0, sticky=W + E, padx=15, pady=(5, 0))
-
-     verify_message = Button(top, text="Verify Message", command=verify_this)
-     verify_message.grid(row=7, column=0, sticky=W + E, padx=15, pady=(15, 0))
-
-     dismiss = Button(top, text="Dismiss", command=top.destroy)
-     dismiss.grid(row=8, column=0, sticky=W + E, padx=15, pady=(15, 5))
-     # popup
-
-
- def hyperlink_howto():
-     url = "https://github.com/EggPool/BismuthHowto"
-     webbrowser.open(url, new=1)
-
-
- def hyperlink_BE():
-     url = "https://bismuth.online"
-     webbrowser.open(url, new=1)
-
-
- def hyperlink_BISGit():
-     url = "https://github.com/hclivess/Bismuth/releases"
-     webbrowser.open(url, new=1)
-
-
- def hyperlink_bct():
-     url = "https://bitcointalk.org/index.php?topic=1896497.0"
-     webbrowser.open(url, new=1)
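sign() above pairs SHA hashing with PKCS#1 v1.5 from PyCryptodome. The same round trip, condensed into a standalone sketch with a throwaway key instead of the wallet key (illustrative only):

    import base64
    from Crypto.PublicKey import RSA
    from Crypto.Signature import PKCS1_v1_5
    from Crypto.Hash import SHA

    throwaway = RSA.generate(2048)  # stand-in for keyring.key
    digest = SHA.new("hello bismuth".encode("utf-8"))
    sig = base64.b64encode(PKCS1_v1_5.new(throwaway).sign(digest))
    # verify() returns a boolean, which is why verify_this branches on it
    assert PKCS1_v1_5.new(throwaway.publickey()).verify(
        SHA.new("hello bismuth".encode("utf-8")), base64.b64decode(sig))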
"\nNode Version: {}".format(version)) - collection_box.insert(INSERT, "\nConnected to: {}".format(wallet.ip)) - collection_box.insert(INSERT, "\nLast Block: {}".format(bl_height)) - collection_box.insert(INSERT, "\nSeconds since Last Block: {}".format(last_block_ago)) - collection_box.insert(INSERT, "\nNode GMT: {}".format(time.strftime("%H:%M:%S", time.gmtime(int(float(stats_timestamp)))))) - - close = Button(sup_col, text="Close", command=sup_col.destroy) - close.grid(row=3, column=0, sticky=W + E) - - -def click_on_tab_tokens(event): - if str(nbtabs.index(nbtabs.select())) == "4": - tokens() - - -def themes(theme): - """nuitka - - # global photo_bg, photo_main - global photo_main - - if theme == "Barebone" or None: - # canvas_bg.delete("all") - canvas_main.delete("all") - - else: - # img_bg = PIL.Image.open ("themes/{}_bg.jpg".format(theme)) - # photo_bg = PIL.ImageTk.PhotoImage (img_bg) - # canvas_bg.create_image (0, 0, image=photo_bg, anchor=NW) - - width_main = tab_main.winfo_width() - height_main = tab_main.winfo_height() - - main_bg = PIL.Image.open("themes/{}.jpg".format(theme)).resize((width_main, height_main), PIL.Image.ANTIALIAS) - photo_main = PIL.ImageTk.PhotoImage(main_bg) - canvas_main.create_image(0, 0, image=photo_main, anchor=NW) - - with open("theme", "w") as theme_file: - theme_file.write(theme) - """ - - -def encryption_button_refresh(): - if keyring.unlocked: - decrypt_b.configure(text="Unlocked", state=DISABLED) - if not keyring.unlocked: - decrypt_b.configure(text="Unlock", state=NORMAL) - messagemenu.entryconfig("Sign Messages", state="disabled") # messages - walletmenu.entryconfig("Recovery", state="disabled") # recover - if not keyring.encrypted: - encrypt_b.configure(text="Encrypt", state=NORMAL) - if keyring.encrypted: - encrypt_b.configure(text="Encrypted", state=DISABLED) - lock_b.configure(text="Lock", state=DISABLED) - - -def get_best_ipport_to_use(light_ip_list): - """Use different methods to return the best possible ip:port""" - while True: - # If we have 127.0.0.1 in the list, first try it - if '127.0.0.1:5658' in light_ip_list or '127.0.0.1' in light_ip_list: - if lwbench.connectible('127.0.0.1:5658'): - # No need to go further. - return ['127.0.0.1:5658'] - - # Then try the new API - wallets = [] - try: - rep = requests.get("http://api.bismuth.live/servers/wallet/legacy.json") - if rep.status_code == 200: - wallets = rep.json() - # print(wallets) - except Exception as e: - app_log.warning("Error {} getting Server list from API, using lwbench instead".format(e)) - - if not wallets: - # no help from api, use previous benchmark - ipport_list = lwbench.time_measure(light_ip_list, app_log) - return ipport_list - - # We have a server list, order by load - sorted_wallets = sorted([wallet for wallet in wallets if wallet['active']], key=lambda k: (k['clients'] + 1) / (k['total_slots'] + 2)) - # print(sorted_wallets) - """ - # try to connect in sequence, keep the first one ok. - for wallet in sorted_wallets: - print(wallet) - ipport = "{}:{}".format(wallet['ip'], wallet['port']) - print(ipport) - if lwbench.connectible(ipport): - return [ipport] - """ - if sorted_wallets: - return ["{}:{}".format(wallet['ip'], wallet['port']) for wallet in sorted_wallets] - - # If we get here, all hope is lost! - app_log.warning("No connectible server... 
- def busy(an_item=None):
-     an_item = an_item if an_item else root
-     an_item.config(cursor="watch")
-
-
- def notbusy(an_item=None):
-     an_item = an_item if an_item else root
-     an_item.config(cursor="")
-
-
- if __name__ == "__main__":
-     keyring = Keys()
-     wallet = Wallet()
-
-     # data for charts
-     stats_nodes_count_list = []
-     stats_thread_count_list = []
-     stats_consensus_list = []
-     stats_consensus_percentage_list = []
-     stats_diff_list_0 = []
-     stats_diff_list_1 = []
-     stats_diff_list_2 = []
-     stats_diff_list_3 = []
-     stats_diff_list_4 = []
-     stats_diff_list_5 = []
-     stats_diff_list_6 = []
-     # data for charts
-
-     if os.path.exists("privkey.der"):
-         private_key_load = "privkey.der"
-     else:
-         private_key_load = "privkey_encrypted.der"
-     public_key_load = "pubkey.der"
-
-     # print(getcontext())
-     config = options.Get()
-
-     config.read()
-     debug_level = config.debug_level
-     light_ip = config.light_ip
-     node_ip = config.node_ip
-     version = config.version
-     terminal_output = config.terminal_output
-     gui_scaling = config.gui_scaling
-
-     wallet.port = config.port
-     if "testnet" in version:
-         wallet.port = 2829
-         light_ip = ["127.0.0.1"]
-
-     app_log = log.log("wallet.log", debug_level, terminal_output)
-
-     essentials.keys_check(app_log, "wallet.der")
-
-     keyring.key, keyring.public_key_readable, keyring.private_key_readable, keyring.encrypted, keyring.unlocked, keyring.public_key_hashed, keyring.myaddress, keyring.keyfile = essentials.keys_load(private_key_load, public_key_load)
-     print("Keyfile: {}".format(keyring.keyfile))
-
-     light_ip_conf = light_ip
-
-     light_ip = get_best_ipport_to_use(light_ip_conf)
-     # light_ip.insert(0,node_ip)
-     # light_ip = "127.0.0.1:8150"
-
-     root = Tk()
-
-     root.wm_title("Bismuth Light Wallet - v{}".format(__version__))
-     # root.geometry("1310x700")  # optional fixed window size
-
-     # root['bg']="black"
-
-     """nuitka
-     root.resizable(0, 0)  # Don't allow resizing in the x or y direction / resize #nuitka
-     img_icon = PIL.Image.open("graphics/icon.jpg")  #nuitka
-     photo_icon = PIL.ImageTk.PhotoImage(img_icon)  #nuitka
-     root.tk.call('wm', 'iconphoto', root._w, photo_icon, )  #nuitka
-     """
-
-     if gui_scaling == "adapt":
-         dpi_value = root.winfo_fpixels('1i')
-         root.tk.call('tk', 'scaling', dpi_value / 72)
-
-     elif gui_scaling != "default":
-         root.tk.call("tk", "scaling", gui_scaling)
-
-     password_var_enc = StringVar()
-     password_var_con = StringVar()
-     password_var_dec = StringVar()
-
-     frame_bottom = Frame(root, relief='sunken', borderwidth=1)
-     frame_bottom.grid(row=5, column=0, sticky='NESW', pady=5, padx=5)
-
-     # notebook widget
-     nbtabs = ttk.Notebook(root)
-     nbtabs.grid(row=1, column=0, sticky='NESW', pady=5, padx=5)
-
-     # tab_main Main
-     tab_main = ttk.Frame(nbtabs)
-     nbtabs.add(tab_main, text='Overview')
-
-     canvas_main = Canvas(tab_main, highlightthickness=0)
-     canvas_main.grid(row=0, column=0, sticky=W + E + N + S, columnspan=99, rowspan=99)
-
-     frame_logo = Frame(tab_main, relief='ridge', borderwidth=4)
-     frame_logo.grid(row=1, column=0, pady=5, padx=5, sticky=W)
-
-     frame_coins = Frame(tab_main, relief='ridge', borderwidth=4)
-     frame_coins.grid(row=0, column=0, sticky=W + E + N, pady=5, padx=5)
-
-     frame_hyperlinks = Frame(tab_main, relief='ridge', borderwidth=4)
-     frame_hyperlinks.grid(row=0, column=98, pady=5, padx=5, sticky=W + N)
-
-     frame_support = Frame(tab_main, relief='ridge', borderwidth=4)
-     frame_support.grid(row=98, column=98, pady=5, padx=5, sticky=W + N)
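The "adapt" branch above rescales Tk by the display's reported pixels per inch. The same probe in isolation (assumes a running display; 72 is Tk's points-per-inch baseline):

    from tkinter import Tk

    probe = Tk()
    dpi = probe.winfo_fpixels('1i')           # pixels per logical inch
    probe.tk.call('tk', 'scaling', dpi / 72)  # scale factor relative to 72 dpi
    print("DPI {} -> tk scaling {:.2f}".format(dpi, dpi / 72))
    probe.destroy()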
-     # frame_mainstats = Frame(tab_main, relief='ridge', borderwidth=4)
-     # frame_mainstats.grid(row=5, column=1, sticky=W + E + N, pady=5, padx=5)
-
-     # tab_transactions transactions
-     tab_transactions = ttk.Frame(nbtabs)
-
-     nbtabs.add(tab_transactions, text='History')
-
-     frame_entries_t = Frame(tab_transactions, relief='ridge', borderwidth=0)
-     frame_entries_t.grid(row=0, column=0, pady=5, padx=5)
-
-     # frame_labels_t = Frame(tab_transactions, relief='ridge', borderwidth=0)
-     # frame_labels_t.grid(row=0, column=0, pady=5, padx=5, sticky=N+W+E+S)
-
-     frame_table = Frame(tab_transactions, relief='ridge', borderwidth=0)
-     frame_table.grid(row=1, column=0, sticky=W + E + N, pady=5, padx=5)
-
-     # refresh(myaddress, s)
-
-     # tab_send sendcoin tab
-     tab_send = ttk.Frame(nbtabs)
-     nbtabs.add(tab_send, text='Send')
-
-     frame_entries = Frame(tab_send)
-     frame_entries.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S)
-
-     frame_send = Frame(tab_send, relief='ridge', borderwidth=1)
-     frame_send.grid(row=0, column=2, pady=5, padx=5, sticky=N)
-
-     frame_tick = Frame(frame_send, relief='ridge', borderwidth=1)
-     frame_tick.grid(row=4, column=0, pady=5, padx=5, sticky=S)
-
-     # tab_receive receive
-     tab_receive = ttk.Frame(nbtabs)
-     nbtabs.add(tab_receive, text='Receive')
-
-     frame_entries_r = Frame(tab_receive, relief='ridge', borderwidth=0)
-     frame_entries_r.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S)
-
-     recipient_address = Entry(frame_entries_r, width=60)
-     recipient_address.insert(0, keyring.myaddress)
-
-     recipient_address.grid(row=0, column=1, sticky=W, pady=5, padx=5)
-     recipient_address.configure(state=DISABLED)
-
-     amount_r = Entry(frame_entries_r, width=60)
-     amount_r.grid(row=2, column=1, sticky=W, pady=5, padx=5)
-     amount_r.insert(0, "0.00000000")
-
-     openfield_r = Text(frame_entries_r, width=60, height=5, font=("Tahoma", 8))
-     openfield_r.grid(row=3, column=1, sticky=W, pady=5, padx=5)
-
-     operation_r = Entry(frame_entries_r, width=60)
-     operation_r.grid(row=4, column=1, sticky=W, pady=5, padx=5)
-
-     url_r = Entry(frame_entries_r, width=60)
-     url_r.grid(row=5, column=1, sticky=W, pady=5, padx=5)
-     url_r.insert(0, "bis://")
-
-     # tab5 tokens
-     tab_tokens = ttk.Frame(nbtabs)
-     nbtabs.add(tab_tokens, text='Tokens')
-
-     nbtabs.bind('<<NotebookTabChanged>>', click_on_tab_tokens)  # assumed event: the original marker was lost in transit; this is the standard ttk virtual event for tab switches
-
-     # frames
-     # menu
-
-     # canvas
-     menubar = Menu(root)
-     walletmenu = Menu(menubar, tearoff=0)
-     menubar.add_cascade(label="Wallet", menu=walletmenu)
-     walletmenu.add_command(label="Load Wallet...", command=keys_load_dialog)
-     walletmenu.add_command(label="Backup Wallet...", command=keys_backup)
-     walletmenu.add_command(label="Encrypt Wallet...", command=encrypt_get_password)
-     walletmenu.add_separator()
-     walletmenu.add_command(label="Recovery", command=recover)
-     walletmenu.add_separator()
-     # walletmenu.add_command(label="Spending URL QR", command=lambda: qr(url.get()))
-     # walletmenu.add_command(label="Reception URL QR", command=lambda: qr(url_r.get()))
-     walletmenu.add_command(label="Alias Registration...", command=alias)
-     walletmenu.add_command(label="Show Aliases", command=aliases_list)
-     walletmenu.add_command(label="Fingerprint...", command=fingerprint)
-     walletmenu.add_separator()
-     walletmenu.add_command(label="Exit", command=root.quit)
-
-     messagemenu = Menu(menubar, tearoff=0)
-     menubar.add_cascade(label="Message", menu=messagemenu)
-     messagemenu.add_command(label="Show Messages", command=lambda: msg_dialogue(gui_address_t.get()))
-     messagemenu.add_command(label="Sign Messages", command=sign)
-
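The <<NotebookTabChanged>> binding assumed above fires whenever the selected tab changes, which is what lets click_on_tab_tokens lazily build the Tokens tab. A minimal standalone demo of the same pattern:

    from tkinter import Tk
    from tkinter import ttk

    demo = Tk()
    nb = ttk.Notebook(demo)
    for name in ("Overview", "Tokens"):
        nb.add(ttk.Frame(nb), text=name)
    nb.pack()
    # index(select()) yields the position of the newly selected tab,
    # exactly as click_on_tab_tokens compares it against "4".
    nb.bind('<<NotebookTabChanged>>', lambda event: print("tab:", nb.index(nb.select())))
    demo.mainloop()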
-     if not os.path.exists("theme"):
-         with open("theme", "w") as theme_file:
-             theme_file.write("Barebone")
-
-     theme_menu = Menu(menubar, tearoff=0)
-
-     theme_list = []
-     for theme_picture in glob.glob('themes/*.jpg'):
-         theme_picture = os.path.basename(theme_picture).split('.jpg')[0]
-         theme_list.append(theme_picture)
-         theme_menu.add_command(label=theme_picture, command=lambda theme_picture=theme_picture: themes(theme_picture))  # default arg freezes the current name per menu entry
-
-     theme_menu.add_command(label="Barebone", command=lambda: themes("Barebone"))
-     menubar.add_cascade(label="Themes", menu=theme_menu)
-
-     miscmenu = Menu(menubar, tearoff=0)
-     menubar.add_cascade(label="Misc", menu=miscmenu)
-     miscmenu.add_command(label="Mempool", command=lambda: mempool_get(wallet.s))
-     miscmenu.add_command(label="CSV Export...", command=lambda: csv_export(wallet.s))
-     miscmenu.add_command(label="Statistics", command=stats)
-     miscmenu.add_command(label="Help", command=help_dialog)
-
-     connect_menu = Menu(menubar, tearoff=0)
-     menubar.add_cascade(label="Connection", menu=connect_menu)
-     connect_list = []
-
-     for ip_once in light_ip:
-         connect_list.append(ip_once)
-         connect_menu.add_command(label=ip_once, command=lambda ip_once=ip_once: node_connect_once(ip_once))
-
-     # labels
-     Label(frame_entries, text="My Address:").grid(row=0, sticky=W + N, pady=5, padx=5)
-     Label(frame_entries, text="Recipient:").grid(row=1, sticky=W, pady=5, padx=5)
-     Label(frame_entries, text="Amount:").grid(row=2, sticky=W, pady=5, padx=5)
-     Label(frame_entries, text="Data:", height=4).grid(row=3, sticky=W, pady=5, padx=5)
-     Label(frame_entries, text="Operation:", height=4).grid(row=4, sticky=W, pady=5, padx=5)
-     Label(frame_entries, text="URL:").grid(row=5, sticky=W + S, pady=5, padx=5)
-     Label(frame_entries, text="If you have a BIS URL, copy it, click the paste button\n"
-                               "on the URL field and then click 'read'. "
-                               "If you want to send Bismuth\n"
-                               "to the shown recipient, click send and then\n"
-                               "the confirmation dialog opens.", justify=LEFT).grid(row=6, column=1, sticky=W + S, pady=1, padx=1, columnspan=2)
-
-     Label(frame_entries_r, text="Recipient:").grid(row=0, sticky=W, pady=5, padx=5)
-     Label(frame_entries_r, text="Amount:").grid(row=2, sticky=W, pady=5, padx=5)
-     Label(frame_entries_r, text="Data:", height=4).grid(row=3, sticky=W, pady=5, padx=5)
-     Label(frame_entries_r, text="Operation:", height=4).grid(row=4, sticky=W, pady=5, padx=5)
-     Label(frame_entries_r, text="URL:").grid(row=5, sticky=W + S, pady=5, padx=5)
-
-     Label(frame_entries_r, text="Enter an amount and, if wanted, a message in the Data field.\n"
-                                 "Your address is automatically used. 
Click create and copy the url.", justify=LEFT).grid(row=6, column=1, sticky=W + S, pady=1, padx=1, columnspan=2) - - Label(frame_entries_t, text="Address:").grid(row=0, column=0, sticky=W + N, pady=5, padx=5) - - resolve_var = BooleanVar() - resolve = Checkbutton(frame_entries_t, text="Aliases", variable=resolve_var, command=lambda: refresh(gui_address_t.get(), wallet.s), width=14, anchor=W) - resolve.grid(row=0, column=5, sticky=W) - - # canvas - - # display the menu - root.config(menu=menubar) - # menu - - # buttons - - send_b = Button(frame_send, text="Send Bismuth", command=lambda: send_confirm(str(amount.get()).strip(), recipient.get().strip(), operation.get().strip(), (openfield.get("1.0", END)).strip()), height=2, width=22, font=("Tahoma", 12)) - send_b.grid(row=0, column=0) - - frame_logo_buttons = Frame(frame_send) - frame_logo_buttons.grid(row=5, column=0, padx=5, pady=5) - - encrypt_b = Button(frame_logo_buttons, text="Encrypt", command=encrypt_get_password, height=1, width=8) - encrypt_b.grid(row=0, column=0) - decrypt_b = Button(frame_logo_buttons, text="Unlock", command=decrypt_get_password, height=1, width=8) - decrypt_b.grid(row=0, column=1) - lock_b = Button(frame_logo_buttons, text="Locked", command=lambda: lock_fn(lock_b), height=1, width=8, state=DISABLED) - lock_b.grid(row=0, column=2) - - encryption_button_refresh() - # buttons - - # refreshables - - # update balance label - balance_raw = StringVar() - balance_var = StringVar() - - # address_var = StringVar() - # address_var_label = Label(frame_coins, textvariable=address_var, font=("Tahoma", 8, "bold")) - # address_var_label.grid(row=0, column=0, sticky=S, padx=15) - - balance_msg_label = Label(frame_coins, textvariable=balance_var, font=("Tahoma", 16, "bold")) - balance_msg_label.grid(row=1, column=0, sticky=S, padx=15) - - balance_msg_label_sendtab = Label(frame_send, textvariable=balance_var, font=("Tahoma", 10)) - balance_msg_label_sendtab.grid(row=3, column=0, sticky=N + S) - - debit_var = StringVar() - spent_msg_label = Label(frame_coins, textvariable=debit_var, font=("Tahoma", 12)) - spent_msg_label.grid(row=2, column=0, sticky=N + E, padx=15) - - credit_var = StringVar() - received_msg_label = Label(frame_coins, textvariable=credit_var, font=("Tahoma", 12)) - received_msg_label.grid(row=3, column=0, sticky=N + E, padx=15) - - fees_var = StringVar() - fees_paid_msg_label = Label(frame_coins, textvariable=fees_var, font=("Tahoma", 12)) - fees_paid_msg_label.grid(row=4, column=0, sticky=N + E, padx=15) - - rewards_var = StringVar() - rewards_paid_msg_label = Label(frame_coins, textvariable=rewards_var, font=("Tahoma", 12)) - rewards_paid_msg_label.grid(row=5, column=0, sticky=N + E, padx=15) - - bl_height_var = StringVar() - block_height_label = Label(frame_bottom, textvariable=bl_height_var) - block_height_label.grid(row=0, column=7, sticky=S + E, padx=5) - - ip_connected_var = StringVar() - ip_connected_label = Label(frame_bottom, textvariable=ip_connected_var) - ip_connected_label.grid(row=0, column=8, sticky=S + E, padx=5) - - diff_msg_var = StringVar() - diff_msg_label = Label(frame_bottom, textvariable=diff_msg_var) - diff_msg_label.grid(row=0, column=5, sticky=S + E, padx=5) - - sync_msg_var = StringVar() - sync_msg_label = Label(frame_bottom, textvariable=sync_msg_var) - sync_msg_label.grid(row=0, column=0, sticky=N + E, padx=15) - - version_var = StringVar() - version_var_label = Label(frame_bottom, textvariable=version_var) - version_var_label.grid(row=0, column=2, sticky=N + E, padx=15) - - hash_var = 
StringVar() - hash_var_label = Label(frame_bottom, textvariable=hash_var) - hash_var_label.grid(row=0, column=4, sticky=S + E, padx=5) - - mempool_count_var = StringVar() - mempool_count_var_label = Label(frame_bottom, textvariable=mempool_count_var) - mempool_count_var_label.grid(row=0, column=3, sticky=S + E, padx=5) - - server_timestamp_var = StringVar() - server_timestamp_label = Label(frame_bottom, textvariable=server_timestamp_var) - server_timestamp_label.grid(row=0, column=9, sticky=S + E, padx=5) - - ann_var = StringVar() - ann_var_text = Text(frame_logo, width=20, height=4, font=("Tahoma", 8)) - ann_var_text.grid(row=1, column=0, sticky=E + W, padx=5, pady=5) - ann_var_text.config(wrap=WORD) - ann_var_text.config(background="grey75") - - encode_var = BooleanVar() - alias_cb_var = BooleanVar() - msg_var = BooleanVar() - encrypt_var = BooleanVar() - all_spend_var = BooleanVar() - - # address and amount - - # gui_address.configure(state="readonly") - - gui_copy_address = Button(frame_entries, text="Copy", command=address_copy, font=("Tahoma", 7)) - gui_copy_address.grid(row=0, column=2, sticky=W) - - gui_copy_recipient = Button(frame_entries, text="Copy", command=recipient_copy, font=("Tahoma", 7)) - gui_copy_recipient.grid(row=1, column=2, sticky=W) - - gui_insert_recipient = Button(frame_entries, text="Paste", command=recipient_insert, font=("Tahoma", 7)) - gui_insert_recipient.grid(row=1, column=3, sticky=W) - - # gui_help = Button(frame_entries, text="Help", command=help, font=("Tahoma", 7)) - # gui_help.grid(row=4, column=2, sticky=W + E, padx=(5, 0)) - - gui_all_spend = Checkbutton(frame_entries, text="All", variable=all_spend_var, command=all_spend, font=("Tahoma", 7)) - gui_all_spend.grid(row=2, column=2, sticky=W) - - gui_all_spend_clear = Button(frame_entries, text="Clear", command=all_spend_clear, font=("Tahoma", 7)) - gui_all_spend_clear.grid(row=2, column=3, sticky=W) - - data_insert_clipboard = Button(frame_entries, text="Paste", command=data_insert, font=("Tahoma", 7)) - data_insert_clipboard.grid(row=3, column=2) - - data_insert_clear = Button(frame_entries, text="Clear", command=data_insert_clear, font=("Tahoma", 7)) - data_insert_clear.grid(row=3, column=3, sticky=W) - - url_insert_clipboard = Button(frame_entries, text="Paste", command=url_insert, font=("Tahoma", 7)) - url_insert_clipboard.grid(row=5, column=2, sticky=W) - - read_url_b = Button(frame_entries, text="Read", command=lambda: read_url_clicked(app_log, url.get()), font=("Tahoma", 7)) - read_url_b.grid(row=5, column=3, sticky=W) - - data_insert_clipboard = Button(frame_entries_r, text="Paste", command=data_insert_r, font=("Tahoma", 7)) - data_insert_clipboard.grid(row=3, column=2) - - data_insert_clear = Button(frame_entries_r, text="Clear", command=data_insert_clear, font=("Tahoma", 7)) - data_insert_clear.grid(row=3, column=3, sticky=W) - - gui_copy_address_r = Button(frame_entries_r, text="Copy", command=address_copy, font=("Tahoma", 7)) - gui_copy_address_r.grid(row=0, column=2, sticky=W) - - gui_copy_url_r = Button(frame_entries_r, text="Copy", command=url_copy, font=("Tahoma", 7)) - gui_copy_url_r.grid(row=5, column=3, sticky=W) - - create_url_b = Button(frame_entries_r, text="Create", command=lambda: create_url_clicked(app_log, "pay", gui_address_t.get(), amount_r.get(), operation_r.get(), openfield_r.get("1.0", END).strip()), font=("Tahoma", 7)) - create_url_b.grid(row=5, column=2, sticky=W) - - gui_paste_address = Button(frame_entries_t, text="Paste", command=address_insert, font=("Tahoma", 7)) 
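The Create/Read buttons above delegate to the create_url_clicked / read_url_clicked helpers, which are thin wrappers over bisurl.create_url and bisurl.read_url. A sketch of the round trip, assuming the field layout that read_url_clicked relies on (index 1 = recipient, 2 = amount, 3 = operation, 4 = data); the 56-character address is a placeholder, not a real wallet:

    from bisurl import create_url, read_url

    url_example = create_url(app_log, "pay", "ab" * 28, "1.00000000", "", "")
    fields = read_url(app_log, url_example)
    # expected: fields[1] == "ab" * 28 (recipient), fields[2] == "1.00000000" (amount)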
- gui_paste_address.grid(row=0, column=2, sticky=W) - - gui_watch = Button(frame_entries_t, text="Watch", command=watch, font=("Tahoma", 7)) - gui_watch.grid(row=0, column=3, sticky=W) - - gui_unwatch = Button(frame_entries_t, text="Reset", command=unwatch, font=("Tahoma", 7)) - gui_unwatch.grid(row=0, column=4, sticky=W, padx=(0, 5)) - - # hyperlinks - hyperlink_BISGit = Button(frame_hyperlinks, text="Bismuth@Github", command=hyperlink_BISGit, font=("Tahoma", 7)) - hyperlink_BISGit.grid(row=0, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_BE = Button(frame_hyperlinks, text="Official Block Explorer", command=hyperlink_BE, font=("Tahoma", 7)) - hyperlink_BE.grid(row=1, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_howto = Button(frame_hyperlinks, text="HowTos@Github", command=hyperlink_howto, font=("Tahoma", 7)) - hyperlink_howto.grid(row=2, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_bct = Button(frame_hyperlinks, text="BIS@Bitcointalk", command=hyperlink_bct, font=("Tahoma", 7)) - hyperlink_bct.grid(row=3, column=0, sticky=N + E + S + W, padx=1, pady=1) - # hyperlinks - - # supportbutton - dev_support = Button(frame_support, text="Collect Info for Support", command=lambda: support_collection(str(sync_msg_var), str(version_var)), font=("Tahoma", 7)) - dev_support.grid(row=98, column=98, sticky=N + E + S + W, padx=1, pady=1) - # supportbutton - - gui_address_t = Entry(frame_entries_t, width=60) - gui_address_t.grid(row=0, column=1, sticky=W, pady=5, padx=5) - gui_address_t.insert(0, keyring.myaddress) - - sender_address = Entry(frame_entries, width=60) - sender_address.insert(0, keyring.myaddress) - sender_address.grid(row=0, column=1, sticky=W, pady=5, padx=5) - sender_address.configure(state=DISABLED) - - recipient = Entry(frame_entries, width=60) - recipient.grid(row=1, column=1, sticky=W, pady=5, padx=5) - - amount = Entry(frame_entries, width=60) - amount.grid(row=2, column=1, sticky=W, pady=5, padx=5) - amount.insert(0, "0.00000000") - - openfield = Text(frame_entries, width=60, height=5, font=("Tahoma", 8)) - openfield.grid(row=3, column=1, sticky=W, pady=5, padx=5) - - operation = Entry(frame_entries, width=60) - operation.grid(row=4, column=1, sticky=W, pady=5, padx=5) - - url = Entry(frame_entries, width=60) - url.grid(row=5, column=1, sticky=W, pady=5, padx=5) - url.insert(0, "bis://") - - encode = Checkbutton(frame_tick, text="Base64 Encoding", variable=encode_var, command=all_spend_check, width=14, anchor=W) - encode.grid(row=0, column=0, sticky=W) - - msg = Checkbutton(frame_tick, text="Message", variable=msg_var, command=all_spend_check, width=14, anchor=W) - msg.grid(row=1, column=0, sticky=W) - - encr = Checkbutton(frame_tick, text="Encrypt with PK", variable=encrypt_var, command=all_spend_check, width=14, anchor=W) - encr.grid(row=2, column=0, sticky=W) - - alias_cb = Checkbutton(frame_tick, text="Alias Recipient", variable=alias_cb_var, command=None, width=14, anchor=W) - alias_cb.grid(row=4, column=0, sticky=W) - - balance_enumerator = Entry(frame_entries, width=5) - # address and amount - - # logo - - # logo_hash_decoded = base64.b64decode(icons.logo_hash) - # logo = PhotoImage(data="graphics/logo.png") - - """nuitka - logo_img = PIL.Image.open("graphics/logo.png") - logo = PIL.ImageTk.PhotoImage(logo_img) - - Label(frame_logo, image=logo).grid(column=0, row=0) - # logo - """ - node_connect() - refresh_auto() - - try: - themes(open("theme", "r").read()) # load last selected theme - except: - with open("theme", "w") as 
theme_file: - theme_file.write("Barebone") - - root.mainloop() \ No newline at end of file diff --git a/wallet_async.py b/wallet_async.py deleted file mode 100644 index 7a589d4..0000000 --- a/wallet_async.py +++ /dev/null @@ -1,2058 +0,0 @@ -# add manual refresh, objectify - -# icons created using http://www.winterdrache.de/freeware/png2ico/ - -import ast -import asyncio -import base64 -import csv -import glob -import hashlib -import os -import platform -import re -import sys -import tarfile -import threading -import time -import webbrowser -from datetime import datetime -from decimal import Decimal, getcontext -from tkinter import (DISABLED, END, INSERT, LEFT, NORMAL, NW, WORD, BooleanVar, - Button, Canvas, Checkbutton, E, Entry, Frame, Label, - Listbox, Menu, N, S, Scrollbar, StringVar, Text, Tk, - Toplevel, W, filedialog, messagebox, ttk) - -import matplotlib -import PIL.Image -import PIL.ImageTk -import pyqrcode -import socks -from Cryptodome.Cipher import AES, PKCS1_OAEP -from Cryptodome.Hash import SHA -from Cryptodome.PublicKey import RSA -from Cryptodome.Random import get_random_bytes -from Cryptodome.Signature import PKCS1_v1_5 -from matplotlib.backends.backend_tkagg import (FigureCanvasTkAgg, - NavigationToolbar2Tk) -from matplotlib.figure import Figure - -import async_client -import essentials -import icons -import log -import options -import recovery -from bisurl import create_url, read_url -from essentials import fee_calculate -from quantizer import quantize_eight -from simplecrypt import decrypt, encrypt -from lwbench import time_measure - -# from tokensv2 import * -# import sqlite3 -# from random import shuffle -# from tornado.ioloop import IOLoop -# import aioprocessing -# import connections - -matplotlib.use('TkAgg') - -__version__ = "1.0.4" - - -def mempool_clear(s): - async_client.connection.send("mpclear") - - -def mempool_get(s): - # result = [['1524749959.44', '2ac10094cc1d3dd2375f8e1aa51115afd33926e3fa69472f2ea987f5', 'edf2d63cdf0b6275ead22c9e6d66aa8ea31dc0ccb367fad2e7c08a25', '11.15000000', 'rd7Op7gZlp7bBkdL5EogrhkHB3WFGNKfc2cGqzrKzwtFCJf/3nKt13y/1MggR5ioA1RAHFn/8m5q+ot7nv6bTcAOhXHgK/CcqplNBNEp3J+RFf1IbEbhbckJsdVbRvrkQoMRZmSrwCwJm+v/pB9KYqG3R5OVKmEyc5KbUZsRuTUrZjPL6lPd+VfYy6x2Wnr5JgC+q7zvQPH0+yqVqOxcbgYggbbNyHHfYIj+0k0uK+8hwmRCe+SfG+/JZwiSp8ZO4Teyd6/NDmss0AaDRYfAVmxLMEg0aOf1xPbURL3D9gyUsDWupRFVT88eS22cRTPgvS5fwpPt/rV8QUa58eRHSMJ3MDwxpkJ7KeMkN5dfiQ/NZ0HFQc3vDwnGJ1ybyVDnw/i7ioRsJtTC0hGNO33lNVqKnbQ8yQ/7yihqfUsCM1qXA/a5Q+9bRx1mr0ff+h7BYxK7qoVF/2KeiS7Ia8OiX8EPUSiwFxCnGsY+Ie+AgQlaiUIlen2LoGw41NGmZufvWXFqK9ww8e50n35OmKw0MQrJrzOr/7iBoCOXmW0/jEzbJNM7NKEni7iFNwbfk3Xzkoh8A2/m9hdDPKZASdF1hFpNVnGJnDvuINRNn3xBUqoxCQUY50b9mGO4hdkLgVOQOVvzYvdYjB0B+XJTvmfLtWQKOcAJ4/E7tr8dSrC7sPY=', 
'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlJQ0lqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FnOEFNSUlDQ2dLQ0FnRUE0SjdSS2VPWGN2OXhaTGN6R2IvSQprV2MvanU3cktvLzIrNGJuS0NsQituT0VwNDY5Vzd5YmF3eW1mR2xVUmpvYjg3MjZ6eWFDdDVrOEJqNXU1Y25MCk1XaENueGNwdGltUytmeHA1WGx5NGs5TUNQUDlYODZFc1U0ZjBrcVBhZjhnais1MG5LdjM4a01ZMHFSR0k0U0QKNS9wVlpCY1ptRjN0eVFPYzh0SWJERk9vUHJta0FpTy9LQnAxWHA4Q0dFK24zaTdKdS9zUFlzcDZFRERobjVrVAptVDMxUGVOZ2tUOTh4OW5rSmhSTmxmQTE2Mi9ia2gva2JISE1hUE1JYUhsUDhSbGVNazlqS0hCNjVOWFVMVHNLCjZZa2FNK2F3aGVpUWIwVDE2cm5tY3N4NHZBbWViUEFBWTQ1WWNqMWx3L3lpU0ZXWWpvdkcrQjBkZ0JuTDVXbUUKb2d6bnQxN04zYzZnU1JBNEYrUUhrVlA1RjBUejdTSXFuWnZDeCtEMDhjam9hVWgxUi9SeFNlT1Mwd3pEdWdNOQpMZjhSQXZweVRxR0xmUWpYY252YnVaMGNBc1g4SzFCR3lvTDZIZ3h2U3kzeUJBMlZvSjlnM1JncVUxU3NraHgrCktsdlg0S0VWeXUxLzlMbXRpc3dKSFZGTitEdVhTV1VqMjk0RURsZktsTlRKY0h1LytQWHFyeGVzbkpjOGttYisKWVlYS0R3YnRKNDFRMnRZalBwd1BOSmpDdm1Ca2haSzR2VEFIQXNKVTVEV1pQZkRJeEN6WDVFbFFRUGNhVUV6MApvbnAzNDVpeVV0TFZZcmdIdTJCNmIvNkNqMWlCNm90SitNV1RUYXVOUHcrYXczeVRHK1NUM3dxeG5qS3I3YkoyCnJGVkFnUFBCRlI5cmVoUUpmTXBoTGtVQ0F3RUFBUT09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==', '0', '672ce4daaeb73565'], ['1524749904.95', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '4edadac9093d9326ee4b17f869b14f1a2534f96f9c5d7b48dc9acaed', '0.00000000', 'bQmnTD79aL1hjoyVF/ARfMLFfMtQiqpmvk88fPAGW1LUqLQen87+6i+2flBCuSPOWvjHQBMJ3Ctyk5MtuWj6KtoltWSKXev2tYfgNSiAOuo1YIbUhDwTBtHI5UY6X9eNmFjB5Iny0/7VB+cotV1ZBPpgCx1xQn45CtAVk4IYaXc=', 'LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUlHZk1BMEdDU3FHU0liM0RRRUJBUVVBQTRHTkFEQ0JpUUtCZ1FES3ZMVGJEeDg1YTF1Z2IvNnhNTWhWT3E2VQoyR2VZVDgrSXEyejlGd0lNUjQwbDJ0dEdxTks3dmFyTmNjRkxJdThLbjRvZ0RRczNXU1dRQ3hOa2haaC9GcXpGCllZYTMvSXRQUGZ6clhxZ2Fqd0Q4cTRadDRZbWp0OCsyQmtJbVBqakZOa3VUUUl6Mkl1M3lGcU9JeExkak13N24KVVZ1OXRGUGlVa0QwVm5EUExRSURBUUFCCi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==', '0', '']] - - mempool_window = Toplevel() - mempool_window.title("Mempool") - - def update(): - for child in mempool_window.winfo_children(): # prevent hangup - child.destroy() - - mp_tree = ttk.Treeview(mempool_window, selectmode="extended", columns=('sender', 'recipient', 'amount')) - # mp_tree.grid(row=0, column=0) - - # table - - mp_tree.heading("#0", text='time') - mp_tree.column("#0", anchor='center', width=100) - - mp_tree.heading("#1", text='sender') - mp_tree.column("#1", anchor='center', width=350) - - mp_tree.heading("#2", text='recipient') - mp_tree.column("#2", anchor='center', width=350) - - mp_tree.heading("#3", text='amount') - mp_tree.column("#3", anchor='center', width=100) - - mp_tree.grid(sticky=N + S + W + E) - # table - - for tx in mempool_total: - mp_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[0])).strftime('%y-%m-%d %H:%M'), values=(tx[1], tx[2], tx[3])) - - clear_mempool_b = Button(mempool_window, text="Clear Mempool", command=lambda: mempool_clear(s), height=1, width=20, font=("Tahoma", 8)) - clear_mempool_b.grid(row=1, column=0, sticky=N + S + W + E) - close_mempool_b = Button(mempool_window, text="Close", command=lambda: mempool_window.destroy(), height=1, width=20, font=("Tahoma", 8)) - close_mempool_b.grid(row=2, column=0, sticky=N + S + W + E) - - def refresh_mp_auto(): - try: - # global frame_chart - root.after(0, update()) - root.after(10000, refresh_mp_auto) - - except Exception as e: - print("Mempool window closed, disabling auto-refresh({})".format(e)) - - refresh_mp_auto() - - -def recover(): - result = recovery.recover(key) - messagebox.showinfo("Recovery Result", result) - - -def address_validate(address): - if re.match('[abcdef0123456789]{56}', address): - return True - else: 
-        return False
-
-
-def create_url_clicked(app_log, command, recipient, amount, operation, openfield):
-    """isolated function so no GUI leftovers are in bisurl.py"""
-
-    result = create_url(app_log, command, recipient, amount, operation, openfield)
-    url_r.delete(0, END)
-    url_r.insert(0, result)
-
-
-def read_url_clicked(app_log, url):
-    """isolated function so no GUI leftovers are in bisurl.py"""
-    result = read_url(app_log, url)
-
-    recipient.delete(0, END)
-    amount.delete(0, END)
-    operation.delete(0, END)
-    openfield.delete("1.0", END)
-
-    recipient.insert(0, result[1])  # recipient
-    amount.insert(0, result[2])  # amount
-
-    operation.insert(INSERT, result[3])  # operation
-    openfield.insert(INSERT, result[4])  # openfield
-
-
-def replace_regex(string, replace):
-    replaced_string = re.sub(r'^{}'.format(replace), "", string)
-    return replaced_string
-
-
-def alias_register(alias_desired):
-    # This will freeze, but let say it's ok, we're waiting for a feedback.
-    result = async_client.connection.command("aliascheck", alias_desired)
-
-    if result == "Alias free":
-        send("0", myaddress, "", "alias=" + alias_desired)
-    else:
-        messagebox.showinfo("Conflict", "Name already registered")
-
-
-def help():
-    top13 = Toplevel()
-    top13.title("Help")
-    aliases_box = Text(top13, width=100)
-    aliases_box.grid(row=0, pady=0)
-
-    aliases_box.insert(INSERT, "Encrypt with PK:\n Encrypt the data with the recipient's public key. Only they will be able to view it.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Mark as Message:\n Mark the data as a message. The recipient will be able to view it in the message section.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Base64 Encoding:\n Encode the data with base64, a binary-to-text encoding scheme that represents binary data in an ASCII string format by translating it into a radix-64 representation.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Operation:\n A static operation for blockchain programmability.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Data:\n A variable data field for blockchain programmability.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Alias Recipient:\n Use an alias of the recipient in the recipient field if they have one registered.")
-    aliases_box.insert(INSERT, "\n\n")
-    aliases_box.insert(INSERT, "Resolve Aliases:\n Show aliases instead of addresses where applicable in the table below.")
-    aliases_box.insert(INSERT, "\n\n")
-
-    close = Button(top13, text="Close", command=top13.destroy)
-    close.grid(row=3, column=0, sticky=W + E)
-
-
-def data_insert_clear():
-    openfield.delete('1.0', END)  # remove previous
-
-
-def all_spend_clear():
-    all_spend_var.set(False)
-
-    amount.delete(0, END)
-    amount.insert(0, 0)
-
-
-def all_spend():
-    # all_spend_var.set(True)
-    all_spend_check()
-
-
-def all_spend_check():
-    if all_spend_var.get():
-        openfield_fee_calc = openfield.get("1.0", END).strip()
-
-        if encode_var.get() and not msg_var.get():
-            openfield_fee_calc = base64.b64encode(openfield_fee_calc.encode("utf-8")).decode("utf-8")
-
-        if msg_var.get() and encode_var.get():
-            openfield_fee_calc = "bmsg=" + base64.b64encode(openfield_fee_calc.encode("utf-8")).decode("utf-8")
-        if msg_var.get() and not encode_var.get():
-            openfield_fee_calc = "msg=" + openfield_fee_calc
-        if encrypt_var.get():
-            openfield_fee_calc = "enc=" + str(openfield_fee_calc)
-
-        fee_from_all = fee_calculate(openfield_fee_calc)
-
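all_spend_check above derives the maximum spendable amount by composing the openfield exactly as a real send would (msg=/bmsg=/enc= prefixes, optional base64) and then subtracting the resulting fee from the balance. The same derivation condensed into one function, a sketch that assumes essentials.fee_calculate as imported by this file:

    import base64
    from decimal import Decimal
    from essentials import fee_calculate

    def spendable_max(balance, data, b64=False, msg=False, enc=False):
        # largest amount sendable once the openfield fee is accounted for
        if b64 and not msg:
            data = base64.b64encode(data.encode("utf-8")).decode("utf-8")
        if msg and b64:
            data = "bmsg=" + base64.b64encode(data.encode("utf-8")).decode("utf-8")
        if msg and not b64:
            data = "msg=" + data
        if enc:
            data = "enc=" + str(data)
        return Decimal(balance) - Decimal(fee_calculate(data))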
amount.delete(0, END) - amount.insert(0,(Decimal(balance_raw.get()) - Decimal(fee_from_all))) - - -def fingerprint(): - root.filename = filedialog.askopenfilename(multiple=True, initialdir="", title="Select files for fingerprinting") - - data_dict = {} - - for f in root.filename: - with open(f, 'rb') as fp: - data = hashlib.blake2b(fp.read()).hexdigest() - data_dict[os.path.split(f)[-1]] = data - - openfield.insert(INSERT, data_dict) - - -def keys_load_dialog(): - global key - global private_key_readable - global encrypted - global unlocked - global public_key_hashed - global myaddress - global private_key_load - global public_key_load - wallet_load = filedialog.askopenfilename(multiple=False, initialdir="", title="Select private key") - - key, _, private_key_readable, encrypted, unlocked, public_key_hashed, myaddress, keyfile = essentials.keys_load_new(wallet_load) # upgrade later, remove blanks - - encryption_button_refresh() - - gui_address_t.delete(0, END) - gui_address_t.insert(INSERT, myaddress) - - recipient_address.config(state=NORMAL) - recipient_address.delete(0, END) - recipient_address.insert(INSERT, myaddress) - recipient_address.config(state=DISABLED) - - sender_address.config(state=NORMAL) - sender_address.delete(0, END) - sender_address.insert(INSERT, myaddress) - sender_address.config(state=DISABLED) - - refresh(myaddress, s) - - -def keys_backup(): - root.filename = filedialog.asksaveasfilename(initialdir="", title="Select backup file") - - if not root.filename == "": - if not root.filename.endswith(".tar.gz"): - root.filename = root.filename + ".tar.gz" - - der_files = glob.glob("*.der") - - with tarfile.open(root.filename, "w:gz") as tar: - for der_file in der_files: - tar.add(der_file, arcname=der_file) - - -def watch(): - address = gui_address_t.get() - refresh(address, s) - - -def unwatch(): - gui_address_t.delete(0, END) - gui_address_t.insert(INSERT, myaddress) - refresh(myaddress, s) - - -def aliases_list(): - top12 = Toplevel() - top12.title("Your aliases") - aliases_box = Text(top12, width=100) - aliases_box.grid(row=0, pady=0) - - # This will freeze, but let say it's ok, we're waiting for a feedback. 
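Stripped of the Tk plumbing, the fingerprint dialog above reduces each selected file to a BLAKE2b digest keyed by basename:

    import hashlib
    import os

    def fingerprint_files(paths):
        # basename -> BLAKE2b hex digest of the file contents
        digests = {}
        for path in paths:
            with open(path, "rb") as fp:
                digests[os.path.basename(path)] = hashlib.blake2b(fp.read()).hexdigest()
        return digests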
-    aliases_self = async_client.connection.command("aliasget", myaddress)
-
-    for x in aliases_self:
-        aliases_box.insert(INSERT, replace_regex(x[0], "alias="))
-        aliases_box.insert(INSERT, "\n")
-
-    close = Button(top12, text="Close", command=top12.destroy)
-    close.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-
-def recipient_insert():
-    recipient.delete(0, END)
-    recipient.insert(0, root.clipboard_get())
-
-
-def address_insert():
-    gui_address_t.delete(0, END)
-    gui_address_t.insert(0, root.clipboard_get())
-
-
-def data_insert():
-    openfield.delete('1.0', END)  # remove previous
-    openfield.insert(INSERT, root.clipboard_get())
-
-
-def data_insert_r():
-    openfield_r.delete('1.0', END)  # remove previous
-    openfield_r.insert(INSERT, root.clipboard_get())
-
-
-def url_insert():
-    url.delete(0, END)  # remove previous
-    url.insert(0, root.clipboard_get())
-
-
-def address_copy():
-    root.clipboard_clear()
-    root.clipboard_append(myaddress)
-
-
-def url_copy():
-    root.clipboard_clear()
-    root.clipboard_append(url_r.get())
-
-
-def recipient_copy():
-    root.clipboard_clear()
-    root.clipboard_append(recipient.get())
-
-
-def alias():
-    alias_var = StringVar()
-
-    # enter desired alias
-    top8 = Toplevel()
-    top8.title("Enter Desired Name")
-
-    alias_label = Label(top8, text="Input name")
-    alias_label.grid(row=0, column=0, sticky=N + W, padx=15, pady=(5, 0))
-
-    input_alias = Entry(top8, textvariable=alias_var)
-    input_alias.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 5))
-
-    register = Button(top8, text="Register", command=lambda: alias_register(alias_var.get().strip()))
-    register.grid(row=2, column=0, sticky=W + E, padx=15, pady=(15, 0))
-
-    dismiss = Button(top8, text="Dismiss", command=top8.destroy)
-    dismiss.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-
-def encrypt_get_password():
-    # enter password
-    top3 = Toplevel()
-    top3.title("Enter Password")
-
-    password_label = Label(top3, text="Input password")
-    password_label.grid(row=0, column=0, sticky=N + W, padx=15, pady=(5, 0))
-
-    input_password = Entry(top3, textvariable=password_var_enc, show='*')
-    input_password.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 5))
-
-    confirm_label = Label(top3, text="Confirm password")
-    confirm_label.grid(row=2, column=0, sticky=N + W, padx=15, pady=(5, 0))
-
-    input_password_con = Entry(top3, textvariable=password_var_con, show='*')
-    input_password_con.grid(row=3, column=0, sticky=N + E, padx=15, pady=(0, 5))
-
-    enter = Button(top3, text="Encrypt", command=lambda: encrypt_fn(top3))
-    enter.grid(row=4, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-    cancel = Button(top3, text="Cancel", command=top3.destroy)
-    cancel.grid(row=5, column=0, sticky=W + E, padx=15, pady=(5, 5))
-    # enter password
-
-
-def lock_fn(button):
-    # key = None
-    decrypt_b.configure(text="Unlock", state=NORMAL)
-    lock_b.configure(text="Locked", state=DISABLED)
-    messagemenu.entryconfig("Sign Messages", state=DISABLED)  # messages
-    walletmenu.entryconfig("Recovery", state=DISABLED)  # recover
-    password_var_dec.set("")
-
-
-def encrypt_fn(destroy_this):
-    global key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, myaddress
-    password = password_var_enc.get()
-    password_confirm = password_var_con.get()
-
-    if password == password_confirm:
-
-        ciphertext = encrypt(password, private_key_readable)
-        ciphertext_export = base64.b64encode(ciphertext).decode()
-        essentials.keys_save(ciphertext_export, public_key_readable, myaddress, keyfile)
-
-        # encrypt_b.configure(text="Encrypted", state=DISABLED)
encrypt_b.configure(text="Encrypted", state=DISABLED) - - key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, myaddress, keyfile = essentials.keys_load(private_key_load, public_key_load) - encryption_button_refresh() - - destroy_this.destroy() - # lock_b.configure(text="Lock", state=NORMAL) - else: - messagebox.showwarning("Mismatch", "Password Mismatch") - - -def decrypt_get_password(): - # enter password - top4 = Toplevel() - top4.title("Enter Password") - - input_password = Entry(top4, textvariable=password_var_dec, show='*') - input_password.grid(row=0, column=0, sticky=N + E, padx=15, pady=(5, 5)) - - enter = Button(top4, text="Unlock", command=lambda: decrypt_fn(top4)) - enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5)) - - cancel = Button(top4, text="Cancel", command=top4.destroy) - cancel.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5)) - # enter password - - -def decrypt_fn(destroy_this): - global key - try: - password = password_var_dec.get() - - decrypted_privkey = decrypt(password, base64.b64decode(private_key_readable)) # decrypt privkey - - key = RSA.importKey(decrypted_privkey) # be able to sign - - destroy_this.destroy() - - decrypt_b.configure(text="Unlocked", state=DISABLED) - lock_b.configure(text="Lock", state=NORMAL) - messagemenu.entryconfig("Sign Messages", state=NORMAL) # messages - walletmenu.entryconfig("Recovery", state=NORMAL) # recover - except: - messagebox.showwarning("Locked", "Wrong password") - - return key - - -def sendirm(amount_input, recipient_input, operation_input, openfield_input): - amount_input = quantize_eight(amount_input) - - # Exchange check - exchange_addresses = { - "edf2d63cdf0b6275ead22c9e6d66aa8ea31dc0ccb367fad2e7c08a25": "Cryptopia", - "f6c0363ca1c5aa28cc584252e65a63998493ff0a5ec1bb16beda9bac": "qTrade", - } - if recipient_input in exchange_addresses and len(openfield_input) < 16: - messagebox.showinfo("Cannot send", - "Identification message is missing for {}, please include it" - .format(exchange_addresses[recipient_input])) - return - - top10 = Toplevel() - top10.title("Confirm") - - if alias_cb_var.get(): # alias check - """ - connections.send(s, "addfromalias", 10) - connections.send(s, recipient_input, 10) - recipient_input = connections.receive(s, 10) - """ - # This will freeze, but let say it's ok, we're waiting for a feedback. - recipient_input = async_client.connection.command("addfromalias", recipient_input) - - - # encr check - if encrypt_var.get(): - # get recipient's public key - """ - connections.send(s, "pubkeyget", 10) - connections.send(s, recipient_input, 10) - target_public_key_hashed = connections.receive(s, 10) - """ - # This will freeze, but let say it's ok, we're waiting for a feedback. 
-        target_public_key_hashed = async_client.connection.command("pubkeyget", recipient_input)
-
-        recipient_key = RSA.importKey(base64.b64decode(target_public_key_hashed).decode("utf-8"))
-
-        # openfield_input = str(target_public_key.encrypt(openfield_input.encode("utf-8"), 32))
-
-        data = openfield_input.encode("utf-8")
-        # print(open("pubkey.der").read())
-        session_key = get_random_bytes(16)
-        cipher_aes = AES.new(session_key, AES.MODE_EAX)
-
-        # Encrypt the session key with the public RSA key
-        cipher_rsa = PKCS1_OAEP.new(recipient_key)
-
-        # Encrypt the data with the AES session key
-        ciphertext, tag = cipher_aes.encrypt_and_digest(data)
-        enc_session_key = cipher_rsa.encrypt(session_key)
-        openfield_input = str([x for x in (cipher_aes.nonce, tag, ciphertext, enc_session_key)])
-
-    # encr check
-
-    if encode_var.get() and not msg_var.get():
-        openfield_input = base64.b64encode(openfield_input.encode("utf-8")).decode("utf-8")
-    if msg_var.get() and encode_var.get():
-        openfield_input = "bmsg=" + base64.b64encode(openfield_input.encode("utf-8")).decode("utf-8")
-    if msg_var.get() and not encode_var.get():
-        openfield_input = "msg=" + openfield_input
-    if encrypt_var.get():
-        openfield_input = "enc=" + str(openfield_input)
-
-    fee = fee_calculate(openfield_input)
-
-    confirmation_dialog = Text(top10, width=100)
-    confirmation_dialog.insert(INSERT, ("Amount: {}\nFee: {}\nTotal: {}\nTo: {}\nOperation: {}\nData: {}".format('{:.8f}'.format(amount_input), '{:.8f}'.format(fee), '{:.8f}'.format(Decimal(amount_input) + Decimal(fee)), recipient_input, operation_input, openfield_input)))
-    confirmation_dialog.configure(state="disabled")
-    confirmation_dialog.grid(row=0, pady=0)
-
-    enter = Button(top10, text="Confirm", command=lambda: send_confirmed(amount_input, recipient_input, operation_input, openfield_input, top10))
-    enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-    done = Button(top10, text="Cancel", command=top10.destroy)
-    done.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5))
-
-
-def send_confirmed(amount_input, recipient_input, operation_input, openfield_input, top10):
-    send(amount_input, recipient_input, operation_input, openfield_input)
-    top10.destroy()
-
-
-def send(amount_input, recipient_input, operation_input, openfield_input):
-    all_spend_check()
-
-    if key is None:
-        messagebox.showerror("Locked", "Wallet is locked")
-        return
-
-    app_log.warning("Received tx command")
-
-    try:
-        Decimal(amount_input)
-    except:
-        messagebox.showerror("Invalid Amount", "Amount must be a number")
-        return
-
-    # alias check
-
-    # alias check
-
-    if not address_validate(recipient_input):
-        messagebox.showerror("Invalid Address", "Invalid address format")
-    else:
-
-        app_log.warning("Amount: {}".format(amount_input))
-        app_log.warning("Recipient: {}".format(recipient_input))
-        app_log.warning("Data: {}".format(openfield_input))
-
-        tx_timestamp = '%.2f' % (float(stats_timestamp) - abs(float(stats_timestamp) - time.time()))  # randomize timestamp for unique signatures
-        transaction = (str(tx_timestamp), str(myaddress), str(recipient_input), '%.8f' % float(amount_input), str(operation_input), str(openfield_input))  # this is signed, float kept for compatibility
-
-        h = SHA.new(str(transaction).encode("utf-8"))
-        signer = PKCS1_v1_5.new(key)
-        signature = signer.sign(h)
-        signature_enc = base64.b64encode(signature)
-        app_log.warning("Client: Encoded Signature: {}".format(signature_enc.decode("utf-8")))
-
-        verifier = PKCS1_v1_5.new(key)
-
-        if verifier.verify(h, signature):
-
-            app_log.warning("Client: The
signature is valid, proceeding to save transaction, signature, new txhash and the public key to mempool") - - # print(str(timestamp), str(address), str(recipient_input), '%.8f' % float(amount_input),str(signature_enc), str(public_key_hashed), str(keep_input), str(openfield_input)) - tx_submit = str(tx_timestamp), str(myaddress), str(recipient_input), '%.8f' % float(amount_input), str(signature_enc.decode("utf-8")), str(public_key_hashed.decode("utf-8")), str(operation_input), str(openfield_input) # float kept for compatibility - - while True: - """ - connections.send(s, "mpinsert", 10) - connections.send(s, tx_submit, 10) - reply = connections.receive(s, 10) - """ - # This will freeze, but let say it's ok, we're waiting for a feedback. - reply = async_client.connection.command("mpinsert", tx_submit) - - app_log.warning("Client: {}".format(reply)) - if reply[-1] == "Success": - messagebox.showinfo("OK","Transaction accepted to mempool") - else: - messagebox.showerror("Error","There was a problem with transaction processing. Full message: {}".format(reply)) - break - - refresh(gui_address_t.get(), s) - else: - app_log.warning("Client: Invalid signature") - # enter transaction end - - -# def app_quit(): -# app_log.warning("Received quit command") -# root.destroy() - - -def qr(address): - address_qr = pyqrcode.create(address) - address_qr.png('address_qr.png') - - # popup - top = Toplevel() - top.title("Address QR Code") - - with PIL.Image.open("address_qr.png") as im: - photo = PIL.ImageTk.PhotoImage(im.resize((320, 320))) - label = Label(top, image=photo) - label.image = photo # keep a reference! - label.pack() - - # msg = Message(top, text="hi") - # msg.pack() - - button = Button(top, text="Dismiss", command=top.destroy) - button.pack() - # popup - - -def msg_dialogue(address): - """ - connections.send(s, "addlist", 10) - connections.send(s, myaddress, 10) - addlist = connections.receive(s, 10) - """ - # This will freeze, but let say it's ok, we're waiting for a feedback. - addlist = async_client.connection.command("addlist", myaddress) - print(addlist) - - def msg_received_get(addlist): - - for x in addlist: - if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[3] == address: - # print(x[11]) - """ - connections.send(s, "aliasget", 10) - connections.send(s, x[2], 10) - msg_address = connections.receive(s, 10)[0][0] - """ - # We could/should use the cache, but say we do a live request for now. 
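The mempool submission above is the heart of the legacy signing flow: the transaction tuple is stringified, hashed with SHA-1 (Cryptodome.Hash.SHA), signed with PKCS#1 v1.5, and the base64 signature travels with the "mpinsert" payload. The signing step in isolation, a sketch reusing the same PyCryptodome imports as this file:

    import base64
    from Cryptodome.Hash import SHA
    from Cryptodome.Signature import PKCS1_v1_5

    def sign_transaction(rsa_key, transaction):
        # transaction = (timestamp, sender, recipient, amount, operation, openfield)
        digest = SHA.new(str(transaction).encode("utf-8"))
        signature = PKCS1_v1_5.new(rsa_key).sign(digest)
        assert PKCS1_v1_5.new(rsa_key).verify(digest, signature)  # same self-check as send()
        return base64.b64encode(signature).decode("utf-8")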
- msg_address = async_client.connection.command("aliasget", x[2]) - - if x[11].startswith("enc=msg="): - msg_received_digest = replace_regex(x[11], "enc=msg=") - try: - # msg_received_digest = key.decrypt(ast.literal_eval(msg_received_digest)).decode("utf-8") - - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_received_digest = "Could not decrypt message" - - elif x[11].startswith("enc=bmsg="): - msg_received_digest = replace_regex(x[11], "enc=bmsg=") - try: - msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8") - - # msg_received_digest = key.decrypt(ast.literal_eval(msg_received_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_received_digest = "Could not decrypt message" - - - elif x[11].startswith("bmsg="): - msg_received_digest = replace_regex(x[11], "bmsg=") - try: - msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8") - except: - msg_received_digest = "Could not decode message" - - elif x[11].startswith("msg="): - msg_received_digest = replace_regex(x[11], "msg=") - - msg_received.insert(INSERT,((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(Decimal(x[1])))) + " From " + replace_regex(msg_address, "alias=") + ": " + msg_received_digest) + "\n") - - def msg_sent_get(addlist): - - for x in addlist: - if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[2] == address: - # print(x[11]) - """ - connections.send(s, "aliasget", 10) - connections.send(s, x[3], 10) - received_aliases = connections.receive(s, 10) - """ - # We could/should use the cache, but say we do a live request for now. 
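The enc=msg= branches above are the receiving half of the hybrid scheme used by send_confirm: RSA-OAEP recovers the 16-byte AES session key, then AES-EAX authenticates and decrypts the payload; the four components travel as a str()-serialised list. Both directions side by side, a sketch with PyCryptodome and the literal_eval serialisation this wallet uses:

    import ast
    from Cryptodome.Cipher import AES, PKCS1_OAEP
    from Cryptodome.Random import get_random_bytes

    def encrypt_for(recipient_rsa_key, data: bytes) -> str:
        session_key = get_random_bytes(16)
        cipher_aes = AES.new(session_key, AES.MODE_EAX)
        ciphertext, tag = cipher_aes.encrypt_and_digest(data)
        enc_session_key = PKCS1_OAEP.new(recipient_rsa_key).encrypt(session_key)
        return str([cipher_aes.nonce, tag, ciphertext, enc_session_key])

    def decrypt_with(private_rsa_key, payload: str) -> bytes:
        nonce, tag, ciphertext, enc_session_key = ast.literal_eval(payload)
        session_key = PKCS1_OAEP.new(private_rsa_key).decrypt(enc_session_key)
        return AES.new(session_key, AES.MODE_EAX, nonce).decrypt_and_verify(ciphertext, tag)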
- received_aliases = async_client.connection.command("aliasget", x[3]) - - msg_recipient = received_aliases[0][0] - - if x[11].startswith("enc=msg="): - msg_sent_digest = replace_regex(x[11], "enc=msg=") - try: - # msg_sent_digest = key.decrypt(ast.literal_eval(msg_sent_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_sent_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_sent_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - - except: - msg_sent_digest = "Could not decrypt message" - - - elif x[11].startswith("enc=bmsg="): - msg_sent_digest = replace_regex(x[11], "enc=bmsg=") - try: - msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8") - # msg_sent_digest = key.decrypt(ast.literal_eval(msg_sent_digest)).decode("utf-8") - (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_sent_digest) - # Decrypt the session key with the public RSA key - cipher_rsa = PKCS1_OAEP.new(key) - session_key = cipher_rsa.decrypt(enc_session_key) - # Decrypt the data with the AES session key - cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce) - msg_sent_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8") - except: - msg_sent_digest = "Could not decrypt message" - - elif x[11].startswith("bmsg="): - msg_sent_digest = replace_regex(x[11], "bmsg=") - try: - msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8") - except: - msg_sent_digest = "Could not decode message" - - elif x[11].startswith("msg="): - msg_sent_digest = replace_regex(x[11], "msg=") - - msg_sent.insert(INSERT,((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(Decimal(x[1])))) + " To " + replace_regex(msg_recipient, "alias=") + ": " + msg_sent_digest) + "\n") - - # popup - top11 = Toplevel() - top11.title("Messaging") - - Label(top11, text="Received:", width=20).grid(row=0) - - msg_received = Text(top11, width=100, height=20, font=("Tahoma", 8)) - msg_received.grid(row=1, column=0, sticky=W, padx=5, pady=(5, 5)) - msg_received_get(addlist) - - Label(top11, text="Sent:", width=20).grid(row=2) - - msg_sent = Text(top11, width=100, height=20, font=("Tahoma", 8)) - msg_sent.grid(row=3, column=0, sticky=W, padx=5, pady=(5, 5)) - msg_sent_get(addlist) - - dismiss = Button(top11, text="Dismiss", command=top11.destroy) - dismiss.grid(row=5, column=0, sticky=W + E, padx=15, pady=(5, 5)) - - # popup - - -def refresh_auto(): - status = async_client.connection.loop_status() - if status != True: - #benchmark light_ip-list - benchmark_lightip(app_log) - #restart loop - restart_loop() - root.after(0, refresh(gui_address_t.get(), s)) - root.after(10000, refresh_auto) - - -def stats(): - stats_window = Toplevel() - stats_window.title("Node Statistics") - stats_window.resizable(0, 0) - - # canvas_stats_bg = Canvas(root, highlightthickness=0) - # canvas_stats_bg.grid(row=0, column=0, rowspan=200, columnspan=200, sticky=W + E + S + N) - - # stats_window.update() - # width_stats = stats_window.winfo_width() - # height_stats = stats_window.winfo_height() - - # img_stats_bg = PhotoImage(file="graphics/brushed.png") - # canvas_bg.create_image(width_stats, height_stats, image=img_stats_bg) - - frame_chart = Frame(stats_window, height=100, width=100) - frame_chart.grid(row=0, column=1, rowspan=999) - f 
= Figure(figsize=(11, 7), dpi=100)
-    f.set_facecolor('silver')
-    f.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.5)
-
-    canvas = FigureCanvasTkAgg(f, master=frame_chart)
-    canvas.get_tk_widget().grid(row=0, column=1, sticky=W, padx=15, pady=(0, 0))
-
-    def chart_fill():
-        print("Filling the chart")
-        f.clear()
-
-        rows = 4
-        columns = 2
-
-        # f.remove(first)
-        first = f.add_subplot(rows, columns, 1)
-        first.plot((range(len(stats_nodes_count_list))), (stats_nodes_count_list))
-        first.ticklabel_format(useOffset=False)
-
-        first_2 = f.add_subplot(rows, columns, 1)
-        first_2.plot((range(len(stats_thread_count_list))), (stats_thread_count_list))
-        first_2.ticklabel_format(useOffset=False)
-        first.legend(('Nodes', 'Threads'), loc='best', shadow=True)
-
-        second = f.add_subplot(rows, columns, 2)
-        second.plot((range(len(stats_consensus_list))), (stats_consensus_list))
-        second.legend(('Consensus Block',), loc='best', shadow=True)
-        second.ticklabel_format(useOffset=False)
-
-        third = f.add_subplot(rows, columns, 3)
-        third.plot((range(len(stats_consensus_percentage_list))), (stats_consensus_percentage_list))
-        third.legend(('Consensus Level',), loc='best', shadow=True)
-        third.ticklabel_format(useOffset=False)
-
-        fourth = f.add_subplot(rows, columns, 4)
-        fourth.plot((range(len(stats_diff_list_2))), (stats_diff_list_2))
-        fourth.legend(('Time To Generate Block',), loc='best', shadow=True)
-        fourth.ticklabel_format(useOffset=False)
-
-        fifth = f.add_subplot(rows, columns, 5)
-        fifth.plot((range(len(stats_diff_list_0))), (stats_diff_list_0))
-        fifth.ticklabel_format(useOffset=False)
-
-        fifth_2 = f.add_subplot(rows, columns, 5)
-        fifth_2.plot((range(len(stats_diff_list_1))), (stats_diff_list_1))
-        fifth_2.ticklabel_format(useOffset=False)
-
-        fifth_3 = f.add_subplot(rows, columns, 5)
-        fifth_3.plot((range(len(stats_diff_list_3))), (stats_diff_list_3))
-        fifth_3.ticklabel_format(useOffset=False)
-        fifth.legend(('Diff 1', 'Diff 2', 'Diff Current',), loc='best', shadow=True)
-
-        sixth = f.add_subplot(rows, columns, 6)
-        sixth.plot((range(len(stats_diff_list_4))), (stats_diff_list_4))
-        sixth.legend(('Block Time',), loc='best', shadow=True)
-        sixth.ticklabel_format(useOffset=False)
-
-        seventh = f.add_subplot(rows, columns, 7)
-        seventh.plot((range(len(stats_diff_list_5))), (stats_diff_list_5))
-        seventh.legend(('Hashrate',), loc='best', shadow=True)
-        seventh.ticklabel_format(useOffset=False)
-
-        eighth = f.add_subplot(rows, columns, 8)
-        eighth.plot((range(len(stats_diff_list_6))), (stats_diff_list_6))
-        eighth.legend(('Difficulty Adjustment',), loc='best', shadow=True)
-        eighth.ticklabel_format(useOffset=False)
-
-        # a tk.DrawingArea
-        canvas.draw()
-
-    def update():
-        print("Statistics update triggered")
-        stats_address = statusget[0]
-        stats_nodes_count = statusget[1]
-        stats_nodes_list = statusget[2]
-        stats_thread_count = statusget[3]
-        stats_uptime = statusget[4]
-        stats_consensus = statusget[5]
-        stats_consensus_percentage = statusget[6]
-        stats_version = statusget[7]
-        stats_diff = statusget[8]
-
-        stats_address_label_var.set("Node Address: {}".format(stats_address))
-        stats_nodes_count_label_var.set("Number of Nodes: {}".format(stats_nodes_count))
-        stats_nodes_list_text_var.delete(0, END)
-        for entry in stats_nodes_list:
-            stats_nodes_list_text_var.insert(END, entry)
-        stats_nodes_list_text_var.grid(row=2, column=0, sticky=E, padx=15, pady=(0, 0))
-
-        stats_thread_count_var.set("Number of Threads: {}".format(stats_thread_count))
-        stats_uptime_var.set("Uptime: 
{:.2f} hours".format(stats_uptime / 60 / 60)) - stats_consensus_var.set("Consensus Block: {}".format(stats_consensus)) - stats_consensus_consensus_percentage_var.set("Consensus Level: {:.2f}%".format(stats_consensus_percentage)) - stats_version_var.set("Version: {}".format(stats_version)) - stats_diff_var_0.set("Difficulty 1: {}".format(stats_diff[0])) - stats_diff_var_1.set("Difficulty 2: {}".format(stats_diff[1])) - stats_diff_var_2.set("Time to Generate Block: {}".format(stats_diff[2])) - stats_diff_var_3.set("Current Block Difficulty: {}".format(stats_diff[3])) - stats_diff_var_4.set("Block Time: {}".format(stats_diff[4])) - stats_diff_var_5.set("Hashrate: {}".format(stats_diff[5])) - stats_diff_var_6.set("Difficulty Adjustment: {}".format(stats_diff[6])) - - stats_address_label_var = StringVar() - stats_address_label = Label(stats_window, textvariable=stats_address_label_var) - stats_address_label.grid(row=0, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_nodes_count_label_var = StringVar() - stats_nodes_count_label = Label(stats_window, textvariable=stats_nodes_count_label_var) - stats_nodes_count_label.grid(row=1, column=0, sticky=E, padx=15, pady=(0, 0)) - - scrollbar = Scrollbar(stats_window) - scrollbar.grid(row=2, column=0, sticky=N + S + E, padx=140) - stats_nodes_list_text_var = Listbox(stats_window, width=20, height=10, font=("Tahoma", 8)) - scrollbar.config(command=stats_nodes_list_text_var.yview) - - stats_thread_count_var = StringVar() - stats_thread_count_label = Label(stats_window, textvariable=stats_thread_count_var) - stats_thread_count_label.grid(row=3, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_uptime_var = StringVar() - stats_uptime_label = Label(stats_window, textvariable=stats_uptime_var) - stats_uptime_label.grid(row=4, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_consensus_var = StringVar() - stats_consensus_label = Label(stats_window, textvariable=stats_consensus_var) - stats_consensus_label.grid(row=5, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_consensus_consensus_percentage_var = StringVar() - stats_consensus_consensus_percentage_label = Label(stats_window, textvariable=stats_consensus_consensus_percentage_var) - stats_consensus_consensus_percentage_label.grid(row=6, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_version_var = StringVar() - stats_version_label = Label(stats_window, textvariable=stats_version_var) - stats_version_label.grid(row=7, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_0 = StringVar() - stats_diff_label_0 = Label(stats_window, textvariable=stats_diff_var_0) - stats_diff_label_0.grid(row=8, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_1 = StringVar() - stats_diff_label_1 = Label(stats_window, textvariable=stats_diff_var_1) - stats_diff_label_1.grid(row=9, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_2 = StringVar() - stats_diff_label_2 = Label(stats_window, textvariable=stats_diff_var_2) - stats_diff_label_2.grid(row=10, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_3 = StringVar() - stats_diff_label_3 = Label(stats_window, textvariable=stats_diff_var_3) - stats_diff_label_3.grid(row=11, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_4 = StringVar() - stats_diff_label_4 = Label(stats_window, textvariable=stats_diff_var_4) - stats_diff_label_4.grid(row=12, column=0, sticky=E, padx=15, pady=(0, 0)) - - stats_diff_var_5 = StringVar() - stats_diff_label_5 = Label(stats_window, textvariable=stats_diff_var_5) - 
stats_diff_label_5.grid(row=13, column=0, sticky=E, padx=15, pady=(0, 0))
-
-    stats_diff_var_6 = StringVar()
-    stats_diff_label_6 = Label(stats_window, textvariable=stats_diff_var_6)
-    stats_diff_label_6.grid(row=14, column=0, sticky=E, padx=15, pady=(0, 0))
-
-    def refresh_stats_auto():
-        try:
-            # global frame_chart
-            root.after(0, update())
-            root.after(10000, refresh_stats_auto)
-
-            chart_fill()
-        except Exception as e:
-            print("Statistics window closed, disabling auto-refresh({})".format(e))
-
-    refresh_stats_auto()
-
-
-def csv_export(s):
-    """
-    connections.send(s, "addlist", 10)  # senders
-    connections.send(s, myaddress, 10)
-    tx_list = connections.receive(s, 10)
-    print(tx_list)
-    """
-    # This will freeze, but let say it's ok, we're waiting for a feedback.
-    tx_list = async_client.connection.command("addlist", myaddress)
-
-    root.filename = filedialog.asksaveasfilename(initialdir="", title="Select CSV file")
-
-    with open(root.filename, 'w', newline='') as csvfile:
-        writer = csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL)
-        for transaction in tx_list:
-            writer.writerow([transaction[0], transaction[1], transaction[3], transaction[4], transaction[5], transaction[6], transaction[7], transaction[8], transaction[9], transaction[10], transaction[11]])
-
-    return
-
-
-def token_transfer(token, amount, window):
-    operation.delete(0, END)
-    operation.insert(0, "token:transfer")
-
-    openfield.delete('1.0', END)  # remove previous
-    openfield.insert(INSERT, "{}:{}".format(token, amount))
-    window.destroy()
-
-    send_confirm(0, recipient.get(), "token:transfer", "{}:{}".format(token, amount))
-
-
-def token_issue(token, amount, window):
-    operation.delete(0, END)
-    operation.insert(0, "token:issue")
-
-    openfield.delete('1.0', END)  # remove previous
-    openfield.insert(INSERT, "{}:{}".format(token, amount))
-    recipient.delete(0, END)
-    recipient.insert(INSERT, myaddress)
-    window.destroy()
-
-    send_confirm(0, recipient.get(), "token:issue", "{}:{}".format(token, amount))
-
-
-def tokens():
-    tokens_main = Frame(tab_tokens, relief='ridge', borderwidth=0)
-    tokens_main.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S)
-    # tokens_main.title("Tokens")
-    token_box = Listbox(tokens_main, width=100)
-    token_box.grid(row=0, pady=0)
-
-    scrollbar_v = Scrollbar(tokens_main, command=token_box.yview)
-    scrollbar_v.grid(row=0, column=1, sticky=N + S + E)
-
-    """
-    connections.send(s, "tokensget", 10)
-    connections.send(s, gui_address_t.get(), 10)
-    tokens_results = connections.receive(s, 10)
-    """
-    # This will freeze, but let say it's ok, we're waiting for a feedback.
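refresh_mp_auto earlier and refresh_stats_auto above both call root.after(0, update()), which executes update() immediately and hands its None result to after() (a plain 0 ms wait); the loops still work because the 10-second re-arm is a separate call. The conventional Tk idiom defers the callable itself, a sketch:

    def poll_every_10s():
        update()                            # do the work now
        root.after(10000, poll_every_10s)   # pass the callable, not its result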
-    tokens_results = async_client.connection.command("tokensget", gui_address_t.get())
-
-    print(tokens_results)
-
-    for pair in tokens_results:
-        token = pair[0]
-        balance = pair[1]
-        token_box.insert(END, (token, ":", balance))
-
-    # callback
-    def callback(event):
-        token_select = token_box.get(token_box.curselection()[0])
-        token_name_var.set(token_select[0])
-        token_amount_var.set(token_select[2])
-
-    token_box.bind('<<ListboxSelect>>', callback)
-
-    # callback
-
-    token_name_var = StringVar()
-    token_name = Entry(tokens_main, textvariable=token_name_var, width=80)
-    token_name.grid(row=2, column=0, sticky=E, padx=15, pady=(5, 5))
-
-    token_name_label_var = StringVar()
-    token_name_label_var.set("Token Name:")
-    token_name_label = Label(tokens_main, textvariable=token_name_label_var)
-    token_name_label.grid(row=2, column=0, sticky=W, padx=15, pady=(0, 0))
-
-    # balance_var = StringVar()
-    # balance_msg_label = Label(frame_buttons, textvariable=balance_var)
-
-    token_amount_var = StringVar()
-    token_amount = Entry(tokens_main, textvariable=token_amount_var, width=80)
-    token_amount.grid(row=3, column=0, sticky=E, padx=15, pady=(5, 5))
-
-    token_amount_label_var = StringVar()
-    token_amount_label_var.set("Token Amount:")
-    token_amount_label = Label(tokens_main, textvariable=token_amount_label_var)
-    token_amount_label.grid(row=3, column=0, sticky=W, padx=15, pady=(0, 0))
-
-    transfer = Button(tokens_main, text="Transfer", command=lambda: token_transfer(token_name_var.get(), token_amount_var.get(), tokens_main))
-    transfer.grid(row=4, column=0, sticky=W + E, padx=5)
-
-    issue = Button(tokens_main, text="Issue", command=lambda: token_issue(token_name_var.get(), token_amount_var.get(), tokens_main))
-    issue.grid(row=5, column=0, sticky=W + E, padx=5)
-
-    # cancel = Button(tokens_main, text="Cancel", command=tokens_main.destroy)
-    # cancel.grid(row=6, column=0, sticky=W + E, padx=5)
-
-
-def tx_tree_define():
-    global tx_tree
-
-    tx_tree = ttk.Treeview(tab_transactions, selectmode="extended", columns=('sender', 'recipient', 'amount', 'type'), height=20)
-    tx_tree.grid(row=1, column=0)
-
-    # table
-    tx_tree.heading("#0", text='time')
-    tx_tree.column("#0", anchor='center', width=100)
-
-    tx_tree.heading("#1", text='sender')
-    tx_tree.column("#1", anchor='center', width=347)
-
-    tx_tree.heading("#2", text='recipient')
-    tx_tree.column("#2", anchor='center', width=347)
-
-    tx_tree.heading("#3", text='amount')
-    tx_tree.column("#3", anchor='center', width=35)
-
-    tx_tree.heading("#4", text='type')
-    tx_tree.column("#4", anchor='center', width=40)
-
-    tx_tree.grid(sticky=N + S + W + E)
-
-
-def table(address, addlist_20, mempool_total):
-    global tx_tree
-    # transaction table
-    # data
-    try:
-        tx_tree.destroy()
-    except:
-        pass
-    tx_tree_define()
-
-    for tx in mempool_total:
-        tag = "mempool"
-
-        if tx[1] == address:
-            tx_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[0])).strftime('%y-%m-%d %H:%M'), values=(tx[1], tx[2], tx[3], "?"), tags=tag)
-
-    if resolve_var.get():
-        # aliases
-        # local address
-        needed_aliases = [gui_address_t.get()]
-        for tx in addlist_20:
-            needed_aliases.append(tx[2])  # append sender
-            needed_aliases.append(tx[3])  # append recipient
-        # no need to ask the same alias twice
-        needed_aliases = set(needed_aliases)
-        # ask for the cached version and trigger update in background
-        aliases = async_client.connection.aliases(needed_aliases)
-
-        for tx in addlist_20:
-            tx[2], tx[3] = aliases[tx[2]], aliases[tx[3]]
-
-    local_address = gui_address_t.get()
-    for tx in addlist_20:
-        if tx[3] == local_address:
-            tag = "received"
-        else:
-            tag = "sent"
-        # case for alias = this address
-        if resolve_var.get() and tx[3] == aliases[local_address]:
-            tag = "received"
-        # case for alias = this address
-
-        if Decimal(tx[9]) > 0:
-            symbol = "MIN"
-        elif tx[11].startswith("bmsg"):
-            symbol = "B64M"
-        elif tx[11].startswith("msg"):
-            symbol = "MSG"
-        else:
-            symbol = "TX"
-
-        tx_tree.insert('', 'end', text=datetime.fromtimestamp(float(tx[1])).strftime('%y-%m-%d %H:%M'), values=(tx[2], tx[3], tx[4], symbol), tags=tag)
-
-    tx_tree.tag_configure("received", background='palegreen1')
-    tx_tree.tag_configure("sent", background='chocolate1')
-
-    # table
-
-
-def refresh(address, s=None):
-    global balance
-    global statusget
-    global block_height_old
-    global mempool_total
-    global stats_timestamp
-
-    status = async_client.connection.status(address)
-    if not async_client.connection.connected:
-        app_log.warning("Not connected yet, please wait")
-        ip_connected_var.set("Connecting...")
-        frame_bottom.config(bg="red")
-        return
-    if "stats_account" not in status:
-        app_log.warning("No info yet, please wait")
-        ip_connected_var.set("Syncing...")
-        frame_bottom.config(bg="orange")
-        return
-
-    ip_connected_var.set(async_client.connection.ip_port)
-    frame_bottom.config(bg="")
-    # TEMP
-    # print("Status", status)
-
-    # print "refresh triggered"
-    try:
-        statusget = status['statusget']
-        status_version = statusget[7]
-        stats_timestamp = statusget[9]
-
-        server_timestamp_var.set("GMT: {}".format(time.strftime("%H:%M:%S", time.gmtime(int(float(stats_timestamp))))))
-
-        # data for charts
-        block_height = statusget[8][7]  # move chart only if the block height changes, returned from diff 7
-        try:
-            block_height_old
-        except:
-            block_height_old = block_height  # init
-
-        if block_height_old != block_height or not stats_nodes_count_list:  # or if list is empty
-            print("Chart update in progress")
-
-            stats_nodes_count_list.append(statusget[1])
-            stats_thread_count_list.append(statusget[3])
-            stats_consensus_list.append(statusget[5])
-            stats_consensus_percentage_list.append(statusget[6])
-
-            stats_diff_list_0.append(statusget[8][0])
-            stats_diff_list_1.append(statusget[8][1])
-            stats_diff_list_2.append(statusget[8][2])
-            stats_diff_list_3.append(statusget[8][3])
-            stats_diff_list_4.append(statusget[8][4])
-            stats_diff_list_5.append(statusget[8][5])
-            stats_diff_list_6.append(statusget[8][6])
-
-            block_height_old = block_height
-        else:
-            print("Chart update skipped, block hasn't moved")
-        # data for charts
-
-        stats_account = status['stats_account']
-        balance = stats_account[0]
-        credit = stats_account[1]
-        debit = stats_account[2]
-        fees = stats_account[3]
-        rewards = stats_account[4]
-
-        app_log.warning("Transaction address balance: {}".format(balance))
-
-        block_get = status['block_get']
-        bl_height = block_get[0]
-        db_timestamp_last = block_get[1]
-        hash_last = block_get[7]
-
-        # check difficulty
-        diff = status['diffget']
-        # check difficulty
-
-        print(diff)
-        diff_msg = int(diff[1])  # integer is enough
-
-        # network status
-        time_now = str(time.time())
-        last_block_ago = Decimal(time_now) - Decimal(db_timestamp_last)
-        if last_block_ago > 300:
-            sync_msg = "{}m behind".format((int(last_block_ago / 60)))
-            sync_msg_label.config(fg='red')
-        else:
-            sync_msg = "Last block: {}s ago".format((int(last_block_ago)))
-            sync_msg_label.config(fg='green')
-
-        # network status
-
-        mempool_total = status['mpget']
-        # print(mempool_total)
-
-        # fees_current_var.set("Current Fee: {}".format('%.8f' % float(fee)))
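The network-status block above flags the wallet as lagging when the tip is more than five minutes old. The heuristic in isolation:

    import time
    from decimal import Decimal

    def sync_message(db_timestamp_last):
        # db_timestamp_last: timestamp string of the last block, as above
        ago = Decimal(str(time.time())) - Decimal(db_timestamp_last)
        if ago > 300:
            return "{}m behind".format(int(ago / 60))
        return "Last block: {}s ago".format(int(ago))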
balance_var.set("Balance: {:.8f} BIS".format(Decimal(balance))) - balance_raw.set(balance) - debit_var.set("Sent Total: {:.8f} BIS".format(Decimal(debit))) - credit_var.set("Received Total: {:.8f} BIS".format(Decimal(credit))) - fees_var.set("Fees Paid: {:.8f} BIS".format(Decimal(fees))) - rewards_var.set("Rewards: {:.8f} BIS".format(Decimal(rewards))) - bl_height_var.set("Block: {}".format(bl_height)) - diff_msg_var.set("Difficulty: {}".format(diff_msg)) - sync_msg_var.set(sync_msg) - - hash_var.set("Hash: {}...".format(hash_last[:6])) - mempool_count_var.set("Mempool txs: {}".format(len(mempool_total))) - - annverget = status['annverget'] - version_var.set("Version: {}/{}".format(status_version, annverget)) - - # if status_version != annverget: - # version_color = "red" - # else: - # version_color = "green" - # version_var_label.config(fg=version_color) - - addlist = status['addlist'] - - table(address, addlist, mempool_total) - # root.after(1000, refresh) - - # canvas bg - root.update() - # width_root = root.winfo_width() - # height_root = root.winfo_height() - - # frame_main.update() - width_main = tab_main.winfo_width() - height_main = tab_main.winfo_height() - - canvas_main.configure(width=width_main, height=height_main) - # photo_main.resize(width_main,height_main) - - # canvas bg - - annget = status['annget'] - ann_var_text.config(state=NORMAL) - ann_var_text.delete('1.0', END) - ann_var_text.insert(INSERT, annget) - ann_var_text.config(state=DISABLED) - - all_spend_check() - - - except Exception as e: - app_log.warning(e) - exc_type, _, exc_tb = sys.exc_info() - fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - print(exc_type, fname, exc_tb.tb_lineno) - # node_connect() - - -def sign(): - def verify_this(): - try: - received_public_key = RSA.importKey(public_key_gui.get("1.0", END)) - verifier = PKCS1_v1_5.new(received_public_key) - h = SHA.new(input_text.get("1.0", END).encode("utf-8")) - received_signature_dec = base64.b64decode(output_signature.get("1.0", END)) - - if verifier.verify(h, received_signature_dec): - messagebox.showinfo("Validation Result", "Signature valid") - else: - raise ValueError("Invalid Signature") - except: - messagebox.showerror("Validation Result", "Signature invalid") - - def sign_this(): - h = SHA.new(input_text.get("1.0", END).encode("utf-8")) - signer = PKCS1_v1_5.new(key) - signature = signer.sign(h) - signature_enc = base64.b64encode(signature) - - output_signature.delete('1.0', END) # remove previous - output_signature.insert(INSERT, signature_enc) - - # popup - top = Toplevel() - top.title("Sign message") - # top.geometry("%dx%d%+d%+d" %(800, 600, 0, 0)) - # top.grid_propagate(False) - - Label(top, text="Message:", width=20).grid(row=0, pady=0) - input_text = Text(top, height=10) - # label.image = photo # keep a reference! 
- input_text.grid(row=1, column=0, sticky=N + E, padx=15, pady=(0, 0)) - - Label(top, text="Public Key:", width=20).grid(row=2, pady=0) - public_key_gui = Text(top, height=10) - public_key_gui.insert(INSERT, public_key_readable) - public_key_gui.grid(row=3, column=0, sticky=N + E, padx=15, pady=(0, 0)) - - Label(top, text="Signature:", width=20).grid(row=4, pady=0) - output_signature = Text(top, height=10) - output_signature.grid(row=5, column=0, sticky=N + E, padx=15, pady=(0, 0)) - - # msg = Message(top, text="hi") - # msg.pack() - - sign_message = Button(top, text="Sign Message", command=sign_this) - sign_message.grid(row=6, column=0, sticky=W + E, padx=15, pady=(5, 0)) - - verify_message = Button(top, text="Verify Message", command=verify_this) - verify_message.grid(row=7, column=0, sticky=W + E, padx=15, pady=(15, 0)) - - dismiss = Button(top, text="Dismiss", command=top.destroy) - dismiss.grid(row=8, column=0, sticky=W + E, padx=15, pady=(15, 5)) - # popup - - -def hyperlink_howto(): - url = "https://github.com/EggPool/BismuthHowto" - webbrowser.open(url, new=1) - - -def hyperlink_BE(): - url = "https://bismuth.online" - webbrowser.open(url, new=1) - - -def hyperlink_BISGit(): - url = "https://github.com/hclivess/Bismuth/releases" - webbrowser.open(url, new=1) - - -def hyperlink_bct(): - url = "https://bitcointalk.org/index.php?topic=1896497.0" - webbrowser.open(url, new=1) - -def support_collection(sync_msg_var, version_var): - sup_col = Toplevel() - sup_col.title("Collection of Basic Information") - collection_box = Text(sup_col, width=100) - collection_box.grid(row=0, pady=0) - - version = statusget[7] - stats_timestamp = statusget[9] - """ - connections.send(s, "blocklast", 10) - block_get = connections.receive(s, 10) - """ - # This will freeze, but let's say it's ok; we're waiting for feedback. 
- block_get = async_client.connection.command("blocklast") - - bl_height = block_get[0] - db_timestamp_last = block_get[1] - time_now = float(time.time()) - last_block_ago = int(time_now - db_timestamp_last) - ip = 'N/A' - - - collection_box.config(wrap=WORD) - collection_box.insert(INSERT, "If you have questions or want to report a problem, please copy the information below to provide it.") - collection_box.insert(INSERT, "\n\n") - collection_box.insert(INSERT, "Your OS: {} {}".format(platform.system(), platform.release())) - collection_box.insert(INSERT, "\nNode Version: {}".format(version)) - collection_box.insert(INSERT, "\nConnected to: {}".format(ip)) - collection_box.insert(INSERT, "\nLast Block: {}".format(bl_height)) - collection_box.insert(INSERT, "\nSeconds since Last Block: {}".format(last_block_ago)) - collection_box.insert(INSERT, "\nNode GMT: {}".format(time.strftime("%H:%M:%S", time.gmtime(int(float(stats_timestamp)))))) - - close = Button(sup_col, text="Close", command=sup_col.destroy) - close.grid(row=3, column=0, sticky=W + E) - - -def themes(theme): - # global photo_bg, photo_main - global photo_main - - if theme == "Barebone" or theme is None: - # canvas_bg.delete("all") - canvas_main.delete("all") - - else: - # img_bg = PIL.Image.open("themes/{}_bg.jpg".format(theme)) - # photo_bg = PIL.ImageTk.PhotoImage(img_bg) - # canvas_bg.create_image(0, 0, image=photo_bg, anchor=NW) - - width_main = tab_main.winfo_width() - height_main = tab_main.winfo_height() - - with PIL.Image.open("themes/{}.jpg".format(theme)) as main_bg: - photo_main = PIL.ImageTk.PhotoImage(main_bg.resize((width_main, height_main), PIL.Image.ANTIALIAS)) - canvas_main.create_image(0, 0, image=photo_main, anchor=NW) - - with open("theme", "w") as theme_file: - theme_file.write(theme) - - - -def encryption_button_refresh(): - if unlocked: - decrypt_b.configure(text="Unlocked", state=DISABLED) - if not unlocked: - decrypt_b.configure(text="Unlock", state=NORMAL) - messagemenu.entryconfig("Sign Messages", state="disabled") # messages - walletmenu.entryconfig("Recovery", state="disabled") # recover - if not encrypted: - encrypt_b.configure(text="Encrypt", state=NORMAL) - if encrypted: - encrypt_b.configure(text="Encrypted", state=DISABLED) - - -def connection_thread(): - """ - Run the connection client ioloop in a thread - :return: - """ - try: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - async_client.connection = async_client.AsyncClient(light_ip, app_log, loop, rebench_timer, address=myaddress) - loop.create_task(async_client.connection.background()) - try: - loop.run_forever() - except KeyboardInterrupt: - loop.stop() - app_log.info("exited from loop") - except Exception as e: - print("exc", e) - - -def restart_loop(): - loop = threading.Thread(target=connection_thread) - loop.daemon = True - loop.start() - - -def benchmark_lightip(app_log): - global light_ip - global rebench_timer - # benchmark light_ip list - light_ip = time_measure(light_ip, app_log) - rebench_timer = time.time() - - - -if __name__ == "__main__": - # globalize - global block_height_old - global statusget - global key - global private_key_readable - global encrypted - global unlocked - global public_key_hashed - global myaddress - global private_key_load - global public_key_load - global s - - # data for charts - stats_nodes_count_list = [] - stats_thread_count_list = [] - stats_consensus_list = [] - stats_consensus_percentage_list = [] - stats_diff_list_0 = [] - stats_diff_list_1 = [] - stats_diff_list_2 = [] - 
stats_diff_list_3 = [] - stats_diff_list_4 = [] - stats_diff_list_5 = [] - stats_diff_list_6 = [] - # data for charts - - if os.path.exists("privkey.der"): - private_key_load = "privkey.der" - else: - private_key_load = "privkey_encrypted.der" - - public_key_load = "pubkey.der" - - print(getcontext()) - - config = options.Get() - config.read() - port = config.port - light_ip = config.light_ip - version = config.version - gui_scaling = config.gui_scaling - - - if "testnet" in version: - port = 2829 - light_ip = ["127.0.0.1"] - - # app_log = log.log("gui.log", debug_level) - app_log = log.log("wallet.log", config.debug_level, config.terminal_output) - - essentials.keys_check(app_log, "wallet.der") - essentials.db_check(app_log) - - key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_hashed, myaddress, keyfile = essentials.keys_load(private_key_load, public_key_load) - - #benchmark light_ip-list - benchmark_lightip(app_log) - - # Build TK INTERFACE - - root = Tk() - - root.wm_title("Bismuth Light Wallet") - # root.geometry("1310x700") #You want the size of the app to be 500x500 - root.resizable(0, 0) # Don't allow resizing in the x or y direction / resize - # root['bg']="black" - - with PIL.Image.open("graphics/icon.jpg") as img_icon: - photo_icon = PIL.ImageTk.PhotoImage(img_icon) - root.tk.call('wm', 'iconphoto', root._w, photo_icon, ) - - if gui_scaling == "adapt": - dpi_value = root.winfo_fpixels('1i') - root.tk.call('tk', 'scaling', dpi_value / 72) - - elif gui_scaling != "default": - root.tk.call("tk", "scaling", gui_scaling) - - password_var_enc = StringVar() - password_var_con = StringVar() - password_var_dec = StringVar() - - # canvas_bg = Canvas(root,highlightthickness=0) - # canvas_bg.grid(row=0, column=0, rowspan=200,columnspan=200,sticky=W + E + S + N) - - frame_bottom = Frame(root, relief='sunken', borderwidth=1) - frame_bottom.grid(row=5, column=0, sticky='NESW', pady=5, padx=5) - - # notebook widget - nbtabs = ttk.Notebook(root) - nbtabs.grid(row=1, column=0, sticky='NESW', pady=5, padx=5) - - # tab_main Main - tab_main = ttk.Frame(nbtabs) - nbtabs.add(tab_main, text='Overview') - - canvas_main = Canvas(tab_main, highlightthickness=0) - canvas_main.grid(row=0, column=0, sticky=W + E + N + S, columnspan=99, rowspan=99) - - frame_logo = Frame(tab_main, relief='ridge', borderwidth=4) - frame_logo.grid(row=1, column=0, pady=5, padx=5, sticky=W) - - frame_coins = Frame(tab_main, relief='ridge', borderwidth=4) - frame_coins.grid(row=0, column=0, sticky=W + E + N, pady=5, padx=5) - - frame_hyperlinks = Frame(tab_main, relief='ridge', borderwidth=4) - frame_hyperlinks.grid(row=0, column=98, pady=5, padx=5, sticky=W + N) - - frame_support = Frame(tab_main, relief='ridge', borderwidth=4) - frame_support.grid(row=98, column=98, pady=5, padx=5, sticky=W + N) - - # frame_mainstats = Frame(tab_main, relief = 'ridge', borderwidth = 4) - # frame_mainstats.grid(row=5, column=1, sticky=W + E + N, pady=5, padx=5) - - # tab_transactions transactions - tab_transactions = ttk.Frame(nbtabs) - - nbtabs.add(tab_transactions, text='History') - - frame_entries_t = Frame(tab_transactions, relief='ridge', borderwidth=0) - frame_entries_t.grid(row=0, column=0, pady=5, padx=5) - - # frame_labels_t = Frame(tab_transactions,relief = 'ridge', borderwidth = 0) - # frame_labels_t.grid(row=0, column=0, pady=5, padx=5, sticky=N+W+E+S) - - frame_table = Frame(tab_transactions, relief='ridge', borderwidth=0) - frame_table.grid(row=1, column=0, sticky=W + E + N, pady=5, padx=5) - - # 
refresh(myaddress, s) - - # tab_send sendcoin tab - tab_send = ttk.Frame(nbtabs) - nbtabs.add(tab_send, text='Send') - - frame_entries = Frame(tab_send) - frame_entries.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S) - - frame_send = Frame(tab_send, relief='ridge', borderwidth=1) - frame_send.grid(row=0, column=2, pady=5, padx=5, sticky=N) - - frame_tick = Frame(frame_send, relief='ridge', borderwidth=1) - frame_tick.grid(row=4, column=0, pady=5, padx=5, sticky=S) - - # tab_receive receive - tab_receive = ttk.Frame(nbtabs) - nbtabs.add(tab_receive, text='Receive') - - frame_entries_r = Frame(tab_receive, relief='ridge', borderwidth=0) - frame_entries_r.grid(row=0, column=0, pady=5, padx=5, sticky=N + W + E + S) - - recipient_address = Entry(frame_entries_r, width=60, text=myaddress) - recipient_address.insert(0, myaddress) - - recipient_address.grid(row=0, column=1, sticky=W, pady=5, padx=5) - recipient_address.configure(state=DISABLED) - - amount_r = Entry(frame_entries_r, width=60) - amount_r.grid(row=2, column=1, sticky=W, pady=5, padx=5) - amount_r.insert(0, "0.00000000") - - openfield_r = Text(frame_entries_r, width=60, height=5, font=("Tahoma", 8)) - openfield_r.grid(row=3, column=1, sticky=W, pady=5, padx=5) - - operation_r = Entry(frame_entries_r, width=60) - operation_r.grid(row=4, column=1, sticky=W, pady=5, padx=5) - - url_r = Entry(frame_entries_r, width=60) - url_r.grid(row=5, column=1, sticky=W, pady=5, padx=5) - url_r.insert(0, "bis://") - # tab_tokens tokens - tab_tokens = ttk.Frame(nbtabs) - nbtabs.add(tab_tokens, text='Tokens') - - def click_on_tab_tokens(event): - if str(nbtabs.index(nbtabs.select())) == "4": - tokens() - - nbtabs.bind('<<NotebookTabChanged>>', click_on_tab_tokens) - - # tab_statistics statistics - # tab_statistics = ttk.Frame(nbtabs) - # nbtabs.add(tab_statistics, text='Statistics') - - # frames - # menu - - # canvas - menubar = Menu(root) - walletmenu = Menu(menubar, tearoff=0) - menubar.add_cascade(label="Wallet", menu=walletmenu) - walletmenu.add_command(label="Load Wallet", command=keys_load_dialog) - walletmenu.add_command(label="Backup Wallet", command=keys_backup) - walletmenu.add_command(label="Recovery", command=lambda: recover()) - walletmenu.add_separator() - walletmenu.add_command(label="Spending URL QR", command=lambda: qr(url.get())) - walletmenu.add_command(label="Reception URL QR", command=lambda: qr(url_r.get())) - walletmenu.add_command(label="Alias Registration", command=alias) - walletmenu.add_command(label="Show Alias", command=aliases_list) - walletmenu.add_command(label="Fingerprint", command=fingerprint) - walletmenu.add_separator() - walletmenu.add_command(label="Exit", command=root.quit) - - messagemenu = Menu(menubar, tearoff=0) - menubar.add_cascade(label="Message", menu=messagemenu) - messagemenu.add_command(label="Show Messages", command=lambda: msg_dialogue(gui_address_t.get())) - messagemenu.add_command(label="Sign Messages", command=sign) - - if not os.path.exists("theme"): - with open("theme", "w") as theme_file: - theme_file.write("Barebone") - - theme_menu = Menu(menubar, tearoff=0) - - theme_list = [] - for theme_picture in glob.glob('themes/*.jpg'): - theme_picture = os.path.basename(theme_picture).split('.jpg')[0] - theme_list.append(theme_picture) - theme_menu.add_command(label=theme_picture, command=lambda theme_picture=theme_picture: themes( - theme_picture)) # default argument binds the current theme_picture at definition time - - theme_menu.add_command(label="Barebone", command=lambda: themes("Barebone")) - menubar.add_cascade(label="Themes", menu=theme_menu) - - 
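The theme menu above relies on the default-argument idiom (`theme_picture=theme_picture`) to freeze the loop variable; without it, every menu entry would call `themes()` with whatever value `theme_picture` held after the loop finished. A minimal standalone sketch of the difference, using a hypothetical `themes()` stub so no Tk is needed:

    def themes(name):
        print(name)

    # late binding: both callbacks read n after the loop has ended
    callbacks_late = [lambda: themes(n) for n in ("a", "b")]
    # default argument: each callback captured its own value of n
    callbacks_bound = [lambda n=n: themes(n) for n in ("a", "b")]

    callbacks_late[0]()   # prints "b"
    callbacks_bound[0]()  # prints "a"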
miscmenu = Menu(menubar, tearoff=0) - menubar.add_cascade(label="Misc", menu=miscmenu) - miscmenu.add_command(label="Mempool", command=lambda: mempool_get(s)) - miscmenu.add_command(label="CSV Export", command=lambda: csv_export(s)) - miscmenu.add_command(label="Statistics", command=lambda: stats()) - miscmenu.add_command(label="Help", command=help) - #connect_menu = Menu (menubar, tearoff=0) - #menubar.add_cascade (label="Connection", menu=connect_menu) - #connect_list = [] - - #for ip_once in light_ip: - # connect_list.append (ip_once) - # connect_menu.add_command(label=ip_once, command=lambda ip_once=ip_once: force_connection (ip_once)) - - # labels - Label(frame_entries, text="My Address:").grid(row=0, sticky=W + N, pady=5, padx=5) - Label(frame_entries, text="Recipient:").grid(row=1, sticky=W, pady=5, padx=5) - Label(frame_entries, text="Amount:").grid(row=2, sticky=W, pady=5, padx=5) - Label(frame_entries, text="Data:", height=4).grid(row=3, sticky=W, pady=5, padx=5) - Label(frame_entries, text="Operation:", height=4).grid(row=4, sticky=W, pady=5, padx=5) - Label(frame_entries, text="URL:").grid(row=5, sticky=W + S, pady=5, padx=5) - Label(frame_entries, text="If you have a BIS URL, copy it, click the paste button\n" - "next to the URL field and then click 'Read'. " - "If you want to send Bismuth\n" - "to the shown recipient, click 'Send' and\n" - "a confirmation dialog will open.", justify=LEFT).grid(row=6, column=1, sticky=W + S, - pady=1, padx=1, columnspan=2) - - Label(frame_entries_r, text="Recipient:").grid(row=0, sticky=W, pady=5, padx=5) - Label(frame_entries_r, text="Amount:").grid(row=2, sticky=W, pady=5, padx=5) - Label(frame_entries_r, text="Data:", height=4).grid(row=3, sticky=W, pady=5, padx=5) - Label(frame_entries_r, text="Operation:", height=4).grid(row=4, sticky=W, pady=5, padx=5) - Label(frame_entries_r, text="URL:").grid(row=5, sticky=W + S, pady=5, padx=5) - - Label(frame_entries_r, text="Enter an amount and, if wanted, a message in the Data field.\n" - "Your address is automatically used. 
Click Create and copy the URL.", - justify=LEFT).grid(row=6, column=1, sticky=W + S, pady=1, padx=1, columnspan=2) - - Label(frame_entries_t, text="Address:").grid(row=0, column=0, sticky=W + N, pady=5, padx=5) - - resolve_var = BooleanVar() - resolve = Checkbutton(frame_entries_t, text="Aliases", variable=resolve_var, - command=lambda: refresh(gui_address_t.get(), s), width=14, anchor=W) - resolve.grid(row=0, column=5, sticky=W) - - # canvas - - # display the menu - root.config(menu=menubar) - # menu - - # buttons - - send_b = Button(frame_send, text="Send Bismuth", - command=lambda: send_confirm(str(amount.get()).strip(), recipient.get().strip(), - operation.get().strip(), (openfield.get("1.0", END)).strip()), - height=2, width=22, font=("Tahoma", 12)) - send_b.grid(row=0, column=0) - - frame_logo_buttons = Frame(frame_send) - frame_logo_buttons.grid(row=5, column=0, padx=5, pady=5) - - encrypt_b = Button(frame_logo_buttons, text="Encrypt", command=encrypt_get_password, height=1, width=8) - encrypt_b.grid(row=0, column=0) - decrypt_b = Button(frame_logo_buttons, text="Unlock", command=decrypt_get_password, height=1, width=8) - decrypt_b.grid(row=0, column=1) - lock_b = Button(frame_logo_buttons, text="Locked", command=lambda: lock_fn(lock_b), height=1, width=8, - state=DISABLED) - lock_b.grid(row=0, column=2) - - encryption_button_refresh() - # buttons - - # refreshables - - # update balance label - balance_raw = StringVar() - balance_var = StringVar() - - balance_msg_label = Label(frame_coins, textvariable=balance_var, font=("Tahoma", 16, "bold")) - balance_msg_label.grid(row=0, column=0, sticky=S, padx=15) - - balance_msg_label_sendtab = Label(frame_send, textvariable=balance_var, font=("Tahoma", 10)) - balance_msg_label_sendtab.grid(row=3, column=0, sticky=N + S) - - debit_var = StringVar() - spent_msg_label = Label(frame_coins, textvariable=debit_var, font=("Tahoma", 12)) - spent_msg_label.grid(row=1, column=0, sticky=N + E, padx=15) - - credit_var = StringVar() - received_msg_label = Label(frame_coins, textvariable=credit_var, font=("Tahoma", 12)) - received_msg_label.grid(row=2, column=0, sticky=N + E, padx=15) - - fees_var = StringVar() - fees_paid_msg_label = Label(frame_coins, textvariable=fees_var, font=("Tahoma", 12)) - fees_paid_msg_label.grid(row=3, column=0, sticky=N + E, padx=15) - - rewards_var = StringVar() - rewards_paid_msg_label = Label(frame_coins, textvariable=rewards_var, font=("Tahoma", 12)) - rewards_paid_msg_label.grid(row=4, column=0, sticky=N + E, padx=15) - - bl_height_var = StringVar() - block_height_label = Label(frame_bottom, textvariable=bl_height_var) - block_height_label.grid(row=0, column=7, sticky=S + E, padx=5) - - ip_connected_var = StringVar() - ip_connected_label = Label(frame_bottom, textvariable=ip_connected_var) - ip_connected_label.grid(row=0, column=8, sticky=S + E, padx=5) - - diff_msg_var = StringVar() - diff_msg_label = Label(frame_bottom, textvariable=diff_msg_var) - diff_msg_label.grid(row=0, column=5, sticky=S + E, padx=5) - - sync_msg_var = StringVar() - sync_msg_label = Label(frame_bottom, textvariable=sync_msg_var) - sync_msg_label.grid(row=0, column=0, sticky=N + E, padx=15) - - version_var = StringVar() - version_var_label = Label(frame_bottom, textvariable=version_var) - version_var_label.grid(row=0, column=2, sticky=N + E, padx=15) - - hash_var = StringVar() - hash_var_label = Label(frame_bottom, textvariable=hash_var) - hash_var_label.grid(row=0, column=4, sticky=S + E, padx=5) - - mempool_count_var = StringVar() - 
mempool_count_var_label = Label(frame_bottom, textvariable=mempool_count_var) - mempool_count_var_label.grid(row=0, column=3, sticky=S + E, padx=5) - - server_timestamp_var = StringVar() - server_timestamp_label = Label(frame_bottom, textvariable=server_timestamp_var) - server_timestamp_label.grid(row=0, column=9, sticky=S + E, padx=5) - - ann_var = StringVar() - ann_var_text = Text(frame_logo, width=20, height=4, font=("Tahoma", 8)) - ann_var_text.grid(row=1, column=0, sticky=E + W, padx=5, pady=5) - ann_var_text.config(wrap=WORD) - ann_var_text.config(background="grey75") - - encode_var = BooleanVar() - alias_cb_var = BooleanVar() - msg_var = BooleanVar() - encrypt_var = BooleanVar() - all_spend_var = BooleanVar() - - # address and amount - - # gui_address.configure(state="readonly") - - gui_copy_address = Button(frame_entries, text="Copy", command=address_copy, font=("Tahoma", 7)) - gui_copy_address.grid(row=0, column=2, sticky=W) - - gui_copy_recipient = Button(frame_entries, text="Copy", command=recipient_copy, font=("Tahoma", 7)) - gui_copy_recipient.grid(row=1, column=2, sticky=W) - - gui_insert_recipient = Button(frame_entries, text="Paste", command=recipient_insert, font=("Tahoma", 7)) - gui_insert_recipient.grid(row=1, column=3, sticky=W) - - # gui_help = Button(frame_entries, text="Help", command=help, font=("Tahoma", 7)) - # gui_help.grid(row=4, column=2, sticky=W + E, padx=(5, 0)) - - gui_all_spend = Checkbutton(frame_entries, text="All", variable=all_spend_var, command=all_spend, - font=("Tahoma", 7)) - gui_all_spend.grid(row=2, column=2, sticky=W) - - gui_all_spend_clear = Button(frame_entries, text="Clear", command=all_spend_clear, font=("Tahoma", 7)) - gui_all_spend_clear.grid(row=2, column=3, sticky=W) - - data_insert_clipboard = Button(frame_entries, text="Paste", command=data_insert, font=("Tahoma", 7)) - data_insert_clipboard.grid(row=3, column=2) - - data_insert_clear = Button(frame_entries, text="Clear", command=data_insert_clear, font=("Tahoma", 7)) - data_insert_clear.grid(row=3, column=3, sticky=W) - - url_insert_clipboard = Button(frame_entries, text="Paste", command=url_insert, font=("Tahoma", 7)) - url_insert_clipboard.grid(row=5, column=2, sticky=W) - - read_url_b = Button(frame_entries, text="Read", command=lambda: read_url_clicked(app_log, url.get()), - font=("Tahoma", 7)) - read_url_b.grid(row=5, column=3, sticky=W) - - data_insert_clipboard = Button(frame_entries_r, text="Paste", command=data_insert_r, font=("Tahoma", 7)) - data_insert_clipboard.grid(row=3, column=2) - - data_insert_clear = Button(frame_entries_r, text="Clear", command=data_insert_clear, font=("Tahoma", 7)) - data_insert_clear.grid(row=3, column=3, sticky=W) - - gui_copy_address_r = Button(frame_entries_r, text="Copy", command=address_copy, font=("Tahoma", 7)) - gui_copy_address_r.grid(row=0, column=2, sticky=W) - - gui_copy_url_r = Button(frame_entries_r, text="Copy", command=url_copy, font=("Tahoma", 7)) - gui_copy_url_r.grid(row=5, column=3, sticky=W) - - create_url_b = Button(frame_entries_r, text="Create", - command=lambda: create_url_clicked(app_log, "pay", gui_address_t.get(), amount_r.get(), - operation_r.get(), openfield_r.get("1.0", END).strip()), - font=("Tahoma", 7)) - create_url_b.grid(row=5, column=2, sticky=W) - - gui_paste_address = Button(frame_entries_t, text="Paste", command=address_insert, font=("Tahoma", 7)) - gui_paste_address.grid(row=0, column=2, sticky=W) - - gui_watch = Button(frame_entries_t, text="Watch", command=watch, font=("Tahoma", 7)) - 
gui_watch.grid(row=0, column=3, sticky=W) - - gui_unwatch = Button(frame_entries_t, text="Reset", command=unwatch, font=("Tahoma", 7)) - gui_unwatch.grid(row=0, column=4, sticky=W, padx=(0, 5)) - - # hyperlinks - hyperlink_BISGit = Button(frame_hyperlinks, text="Bismuth@Github", command=hyperlink_BISGit, font=("Tahoma", 7)) - hyperlink_BISGit.grid(row=0, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_BE = Button(frame_hyperlinks, text="Official Block Explorer", command=hyperlink_BE, font=("Tahoma", 7)) - hyperlink_BE.grid(row=1, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_howto = Button(frame_hyperlinks, text="HowTos@Github", command=hyperlink_howto, font=("Tahoma", 7)) - hyperlink_howto.grid(row=2, column=0, sticky=N + E + S + W, padx=1, pady=1) - - hyperlink_bct = Button(frame_hyperlinks, text="BIS@Bitcointalk", command=hyperlink_bct, font=("Tahoma", 7)) - hyperlink_bct.grid(row=3, column=0, sticky=N + E + S + W, padx=1, pady=1) - # hyperlinks - - # supportbutton - dev_support = Button(frame_support, text="Collect Info for Support", - command=lambda: support_collection(str(sync_msg_var), str(version_var)), font=("Tahoma", 7)) - dev_support.grid(row=98, column=98, sticky=N + E + S + W, padx=1, pady=1) - # supportbutton - - gui_address_t = Entry(frame_entries_t, width=60) - gui_address_t.grid(row=0, column=1, sticky=W, pady=5, padx=5) - gui_address_t.insert(0, myaddress) - - sender_address = Entry(frame_entries, width=60) - sender_address.insert(0, myaddress) - sender_address.grid(row=0, column=1, sticky=W, pady=5, padx=5) - sender_address.configure(state=DISABLED) - - recipient = Entry(frame_entries, width=60) - recipient.grid(row=1, column=1, sticky=W, pady=5, padx=5) - - amount = Entry(frame_entries, width=60) - amount.grid(row=2, column=1, sticky=W, pady=5, padx=5) - amount.insert(0, "0.00000000") - - openfield = Text(frame_entries, width=60, height=5, font=("Tahoma", 8)) - openfield.grid(row=3, column=1, sticky=W, pady=5, padx=5) - - operation = Entry(frame_entries, width=60) - operation.grid(row=4, column=1, sticky=W, pady=5, padx=5) - - url = Entry(frame_entries, width=60) - url.grid(row=5, column=1, sticky=W, pady=5, padx=5) - url.insert(0, "bis://") - - encode = Checkbutton(frame_tick, text="Base64 Encoding", variable=encode_var, command=all_spend_check, width=14, anchor=W) - encode.grid(row=0, column=0, sticky=W) - - msg = Checkbutton(frame_tick, text="Mark as Message", variable=msg_var, command=all_spend_check, width=14, anchor=W) - msg.grid(row=1, column=0, sticky=W) - - encr = Checkbutton(frame_tick, text="Encrypt with PK", variable=encrypt_var, command=all_spend_check, width=14, anchor=W) - encr.grid(row=2, column=0, sticky=W) - - alias_cb = Checkbutton(frame_tick, text="Alias Recipient", variable=alias_cb_var, command=None, width=14, anchor=W) - alias_cb.grid(row=4, column=0, sticky=W) - - balance_enumerator = Entry(frame_entries, width=5) - # address and amount - - # logo - - # logo_hash_decoded = base64.b64decode(icons.logo_hash) - # logo = PhotoImage(data="graphics/logo.png") - - with PIL.Image.open("graphics/logo.png") as logo_img: - logo = PIL.ImageTk.PhotoImage(logo_img) - - Label(frame_logo, image=logo).grid(column=0, row=0) - # logo - - # / Build TK INTERFACE - - # Run the threaded ioloop that handles the connection and refresh in the background - loop = threading.Thread(target=connection_thread) - loop.daemon = True - loop.start() - # let the object take a coffee and wake up. 
- time.sleep(0.1) - - s = None # Temp hack - refresh_auto() - - try: - with open('theme', 'r') as theme_file: - themes(theme_file.read()) # load last selected theme - except: - with open("theme", "w") as theme_file: - theme_file.write("Barebone") - - root.mainloop() diff --git a/wallet_keys.py b/wallet_keys.py index cb1c634..d8b86b9 100644 --- a/wallet_keys.py +++ b/wallet_keys.py @@ -1,6 +1,7 @@ import base64, hashlib, json from Cryptodome.PublicKey import RSA + def generate(): # generate key pair and an address key = RSA.generate(4096) @@ -10,6 +11,7 @@ def generate(): address = hashlib.sha224(public_key_readable.encode("utf-8")).hexdigest() # hashed public key return private_key_readable, public_key_readable, address + def read(): # import keys with open ("wallet.der", 'r') as wallet_file: @@ -21,8 +23,8 @@ def read(): if (len(public_key_readable)) != 271 and (len(public_key_readable)) != 799: raise ValueError("Invalid public key length: {}".format(len(public_key_readable))) - public_key_hashed = base64.b64encode(public_key_readable.encode("utf-8")).decode("utf-8") + public_key_b64encoded = base64.b64encode(public_key_readable.encode("utf-8")).decode("utf-8") address = hashlib.sha224(public_key_readable.encode("utf-8")).hexdigest() # import keys - return key, private_key_readable, public_key_readable, public_key_hashed, address \ No newline at end of file + return key, private_key_readable, public_key_readable, public_key_b64encoded, address diff --git a/worker.py b/worker.py index 82b4c9d..0386432 100644 --- a/worker.py +++ b/worker.py @@ -6,19 +6,19 @@ import dbhandler import socks from connections import send, receive -from decimal import * -from quantizer import * +from decimal import Decimal +from quantizer import quantize_two, quantize_eight, quantize_ten import mempool as mp from difficulty import * from libs import client -def sendsync(sdef, peer_ip, status, provider, node): + +def sendsync(sdef, peer_ip, status, node): """ Save peer_ip to peerlist and send `sendsync` :param sdef: socket object :param peer_ip: IP of peer synchronization has been completed with :param status: Status synchronization was completed in/as - :param provider: Provided a valid block Log the synchronization status Save peer IP to peers list if applicable @@ -27,21 +27,16 @@ def sendsync(sdef, peer_ip, status, provider, node): returns None """ - + # TODO: ERROR, does **not** save anything. code or comment wrong. 
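+    # The saving this docstring promises used to live in a `provider` branch,
+    # removed just below:
+    #     if provider:
+    #         node.logger.app_log.info(f"Outbound: Saving peer {peer_ip}")
+    #         node.peers.peer_dump(node.peerfile, peer_ip)
+    # Either restore such a call or drop the "Save peer IP" claim above.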
node.logger.app_log.info(f"Outbound: Synchronization with {peer_ip} finished after: {status}, sending new sync request") - - if provider: - node.logger.app_log.info(f"Outbound: Saving peer {peer_ip}") - node.peers.peer_dump(node.peerfile, peer_ip) - time.sleep(Decimal(node.pause)) while node.db_lock.locked(): if node.IS_STOPPING: return time.sleep(Decimal(node.pause)) - send(sdef, "sendsync") + def worker(host, port, node): logger = node.logger @@ -55,7 +50,6 @@ def worker(host, port, node): client_instance_worker = client.Client() if node.peers.is_banned(host) or dict_ip['ip'] == 'banned': - client_instance_worker.banned = True node.logger.app_log.warning(f"IP {host} is banned, won't connect") return @@ -111,10 +105,10 @@ def worker(host, port, node): node.logger.app_log.info(f"Connected to {this_client}") node.logger.app_log.info(f"Current active pool: {node.peers.connection_pool}") - if not client_instance_worker.banned and node.peers.version_allowed(host, node.version_allow) and not node.IS_STOPPING: + if not node.peers.is_banned(host) and node.peers.version_allowed(host, node.version_allow) and not node.IS_STOPPING: db_handler_instance = dbhandler.DbHandler(node.index_db, node.ledger_path, node.hyper_path, node.ram, node.ledger_ram_file, logger) - while not client_instance_worker.banned and node.peers.version_allowed(host, node.version_allow) and not node.IS_STOPPING: + while not node.peers.is_banned(host) and node.peers.version_allowed(host, node.version_allow) and not node.IS_STOPPING: try: #ensure_good_peer_version(host) @@ -122,7 +116,7 @@ def worker(host, port, node): # print(data) if data == "peers": - subdata = receive(s) + subdata = receive(s) # dict of "ip":"port" node.peers.peersync(subdata) elif data == "sync": @@ -141,20 +135,17 @@ def worker(host, port, node): # send block height, receive block height send(s, "blockheight") - db_handler_instance.execute(db_handler_instance.c, 'SELECT max(block_height) FROM transactions') - db_block_height = db_handler_instance.c.fetchone()[0] - - node.logger.app_log.info(f"Outbound: Sending block height to compare: {db_block_height}") + node.logger.app_log.info(f"Outbound: Sending block height to compare: {node.hdd_block}") # append zeroes to get static length - send(s, db_block_height) + send(s, node.hdd_block) received_block_height = receive(s) # receive node's block height node.logger.app_log.info( f"Outbound: Node {peer_ip} is at block height: {received_block_height}") - if int(received_block_height) < db_block_height: + if int(received_block_height) < node.hdd_block: node.logger.app_log.warning( - f"Outbound: We have a higher block ({db_block_height}) than {peer_ip} ({received_block_height}), sending") + f"Outbound: We have a higher block ({node.hdd_block}) than {peer_ip} ({received_block_height}), sending") data = receive(s) # receive client's last block_hash @@ -163,14 +154,12 @@ def worker(host, port, node): # consensus pool 2 (active connection) consensus_blockheight = int(received_block_height) - node.peers.consensus_add(peer_ip, consensus_blockheight, s, node.last_block) + node.peers.consensus_add(peer_ip, consensus_blockheight, s, node.hdd_block) # consensus pool 2 (active connection) - try: - db_handler_instance.execute_param(db_handler_instance.h, "SELECT block_height FROM transactions WHERE block_hash = ?;", - (data,)) - client_block = db_handler_instance.h.fetchone()[0] - except Exception: + client_block = db_handler_instance.block_height_from_hash(data) + + if not client_block: node.logger.app_log.warning(f"Outbound: 
Block {data[:8]} of {peer_ip} not found") if node.full_ledger: send(s, "blocknf") @@ -178,16 +167,14 @@ def worker(host, port, node): send(s, "blocknfhb") send(s, data) - else: + if node.peers.warning(s, peer_ip, "Forked", 1): + raise ValueError(f"{peer_ip} is banned") - node.logger.app_log.info( + else: + node.logger.app_log.warning( f"Outbound: Node is at block {client_block}") # now check if we have any newer - db_handler_instance.execute(db_handler_instance.h, - 'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1') - db_block_hash = db_handler_instance.h.fetchone()[0] # get latest block_hash - - if db_block_hash == data or not node.egress: + if node.hdd_hash == data or not node.egress: if not node.egress: node.logger.app_log.warning(f"Outbound: Egress disabled for {peer_ip}") time.sleep(int(node.pause)) # reduce CPU usage @@ -197,20 +184,7 @@ def worker(host, port, node): send(s, "nonewblk") else: - blocks_fetched = [] - while sys.getsizeof( - str(blocks_fetched)) < 500000: # limited size based on txs in blocks - # db_handler.execute_param(db_handler.h, ("SELECT block_height, timestamp,address,recipient,amount,signature,public_key,keep,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"),(str(int(client_block)),) + (str(int(client_block + 1)),)) - db_handler_instance.execute_param(db_handler_instance.h, ( - "SELECT timestamp,address,recipient,amount,signature,public_key,operation,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"), - (str(int(client_block)), str(int(client_block + 1)),)) - result = db_handler_instance.h.fetchall() - if not result: - break - blocks_fetched.extend([result]) - client_block = int(client_block) + 1 - - # blocks_send = [[l[1:] for l in group] for _, group in groupby(blocks_fetched, key=itemgetter(0))] # remove block number + blocks_fetched = db_handler_instance.blocksync(client_block) node.logger.app_log.info(f"Outbound: Selected {blocks_fetched}") @@ -226,25 +200,20 @@ def worker(host, port, node): node.logger.app_log.info( "Outbound: Client refused to sync from us because we don't have the latest block") - - - elif int(received_block_height) >= db_block_height: - if int(received_block_height) == db_block_height: + elif int(received_block_height) >= node.hdd_block: + if int(received_block_height) == node.hdd_block: node.logger.app_log.info(f"Outbound: We have the same block as {peer_ip} ({received_block_height}), hash will be verified") else: - node.logger.app_log.warning(f"Outbound: We have a lower block ({db_block_height}) than {peer_ip} ({received_block_height}), hash will be verified") + node.logger.app_log.warning(f"Outbound: We have a lower block ({node.hdd_block}) than {peer_ip} ({received_block_height}), hash will be verified") - db_handler_instance.execute(db_handler_instance.c, 'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1') - db_block_hash = db_handler_instance.c.fetchone()[0] # get latest block_hash - - node.logger.app_log.info(f"Outbound: block_hash to send: {db_block_hash}") - send(s, db_block_hash) + node.logger.app_log.info(f"Outbound: block_hash to send: {node.hdd_hash}") + send(s, node.hdd_hash) #ensure_good_peer_version(host) # consensus pool 2 (active connection) consensus_blockheight = int(received_block_height) # str int to remove leading zeros - node.peers.consensus_add(peer_ip, consensus_blockheight, s, node.last_block) + node.peers.consensus_add(peer_ip, consensus_blockheight, s, node.hdd_block) # consensus pool 2 (active connection) 
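The inline loop removed above is a good specification of what the new `db_handler_instance.blocksync()` helper has to do: fetch one block's worth of transactions at a time until the serialized payload approaches the 500 kB cap. A sketch of such a method, paraphrased from the deleted lines rather than taken from the actual dbhandler.py:

    import sys

    def blocksync(self, block):
        blocks_fetched = []
        while sys.getsizeof(str(blocks_fetched)) < 500000:  # limited size based on txs in blocks
            self.execute_param(self.h, (
                "SELECT timestamp,address,recipient,amount,signature,public_key,operation,openfield "
                "FROM transactions WHERE block_height > ? AND block_height <= ?;"),
                (str(int(block)), str(int(block) + 1)))
            result = self.h.fetchall()
            if not result:
                break  # ledger exhausted, nothing newer to send
            blocks_fetched.extend([result])
            block = int(block) + 1
        return blocks_fetched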
except Exception as e: @@ -252,7 +221,6 @@ def worker(host, port, node): finally: node.syncing.remove(peer_ip) - elif data == "blocknfhb": # one of the possible outcomes block_hash_delete = receive(s) # print peer_ip @@ -264,7 +232,7 @@ def worker(host, port, node): if node.peers.warning(s, peer_ip, "Rollback", 2): raise ValueError(f"{peer_ip} is banned") - sendsync(s, peer_ip, "Block not found", False, node) + sendsync(s, peer_ip, "Block not found", node) elif data == "blocknf": # one of the possible outcomes block_hash_delete = receive(s) @@ -277,7 +245,7 @@ def worker(host, port, node): if node.peers.warning(s, peer_ip, "Rollback", 2): raise ValueError(f"{peer_ip} is banned") - sendsync(s, peer_ip, "Block not found", False, node) + sendsync(s, peer_ip, "Block not found", node) elif data == "blocksfnd": node.logger.app_log.info(f"Outbound: Node {peer_ip} has the block(s)") # node should start sending txs in this step @@ -288,10 +256,6 @@ def worker(host, port, node): node.logger.app_log.warning(f"Skipping sync from {peer_ip}, syncing already in progress") else: - db_handler_instance.execute(db_handler_instance.c, - "SELECT timestamp FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;") # or it takes the first - node.last_block_timestamp = quantize_two(db_handler_instance.c.fetchone()[0]) - if int(node.last_block_timestamp) < (time.time() - 600): block_req = node.peers.consensus_most_common node.logger.app_log.warning("Most common block rule triggered") @@ -302,7 +266,7 @@ def worker(host, port, node): #ensure_good_peer_version(host) - if int(received_block_height) >= block_req: + if int(received_block_height) >= block_req and int(received_block_height) > node.last_block: try: # they claim to have the longest chain, things must go smooth or ban node.logger.app_log.warning(f"Confirming to sync from {peer_ip}") @@ -313,15 +277,15 @@ def worker(host, port, node): except: if node.peers.warning(s, peer_ip, "Failed to deliver the longest chain", 2): raise ValueError(f"{peer_ip} is banned") - else: digest_block(node, segments, s, peer_ip, db_handler_instance) + # receive theirs else: send(s, "blocksrj") - node.logger.app_log.warning(f"Inbound: Distant peer {peer_ip} is at {received_block_height}, should be at least {block_req}") + node.logger.app_log.warning(f"Inbound: Distant peer {peer_ip} is at {received_block_height}, should be at least {max(block_req,node.last_block+1)}") - sendsync(s, peer_ip, "Block found", True, node) + sendsync(s, peer_ip, "Block found", node) # block_hash validation end @@ -337,11 +301,13 @@ def worker(host, port, node): # send own # receive theirs segments = receive(s) + node.logger.app_log.info(mp.MEMPOOL.merge(segments, peer_ip, db_handler_instance.c, True)) + # receive theirs # Tell the mempool we just send our pool to a peer mp.MEMPOOL.sent(peer_ip) - sendsync(s, peer_ip, "No new block", True, node) + sendsync(s, peer_ip, "No new block", node) elif data == "hyperlane": pass @@ -374,6 +340,7 @@ def worker(host, port, node): # properly end the connection s.close() + # properly end the connection if node.debug: raise # major debug client @@ -382,4 +349,4 @@ def worker(host, port, node): return if not node.peers.version_allowed(host, node.version_allow): - node.logger.app_log.warning(f"Outbound: Ending thread, because {host} has too old a version: {node.peers.ip_to_mainnet[host]}") \ No newline at end of file + node.logger.app_log.warning(f"Outbound: Ending thread, because {host} has too old a version: {node.peers.ip_to_mainnet[host]}")
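The recurring refactor in these worker.py hunks is the replacement of per-message ledger queries with state the node object already maintains. Schematically (the SQL comes from the removed lines; `node.hdd_block` and `node.hdd_hash` are the cached attributes the patched code reads instead):

    # before: query the ledger on every height/hash comparison
    db_handler_instance.execute(db_handler_instance.c, 'SELECT max(block_height) FROM transactions')
    db_block_height = db_handler_instance.c.fetchone()[0]

    # after: reuse the height and hash the node keeps current as blocks are digested
    db_block_height = node.hdd_block  # latest block height on disk
    db_block_hash = node.hdd_hash     # latest block hash on disk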