From f4a7b14a1314e0ada29b8a6d5fb9e2e52bb4dfe8 Mon Sep 17 00:00:00 2001
From: Forrest Voight
Date: Sun, 2 Dec 2012 12:53:54 -0500
Subject: [PATCH] instead of 300, request a random length of shares so that it
 always eventually gets past a too-large chunk of the sharechain

---
 p2pool/node.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/p2pool/node.py b/p2pool/node.py
index f5bb93d56..bb5041e45 100644
--- a/p2pool/node.py
+++ b/p2pool/node.py
@@ -110,7 +110,7 @@ def download_shares():
                 try:
                     shares = yield peer.get_shares(
                         hashes=[share_hash],
-                        parents=300-1,
+                        parents=random.randrange(500), # randomize parents so that we eventually get past a too large block of shares
                         stops=list(set(self.node.tracker.heads) | set(
                             self.node.tracker.get_nth_parent_hash(head, min(max(0, self.node.tracker.get_height_and_last(head)[0] - 1), 10)) for head in self.node.tracker.heads
                         ))[:100],
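
Illustration (not part of the patch): a minimal standalone sketch of why retrying with a random request length eventually succeeds where a fixed length does not. MAX_PARENTS_PER_RESPONSE, try_fetch, download_fixed and download_random are hypothetical names invented for this sketch; it only assumes, as the commit message does, that a request for too many parents always fails and has to be retried.

    import random

    # Hypothetical setup (not p2pool code): pretend a peer can only answer a
    # request whose chunk of shares fits under some limit, so asking for too
    # many parents always fails and has to be retried.
    MAX_PARENTS_PER_RESPONSE = 200

    def try_fetch(parents):
        # Stand-in for peer.get_shares(): succeed only if the requested
        # number of parents fits in a single response.
        return parents <= MAX_PARENTS_PER_RESPONSE

    def download_fixed(attempts=10):
        # Old behaviour: every retry asks for the same 299 parents, so a
        # too-large chunk blocks the download forever.
        for i in range(attempts):
            if try_fetch(300 - 1):
                return i + 1
        return None

    def download_random(attempts=10):
        # Patched behaviour: each retry picks a fresh length in [0, 500), so
        # some attempt eventually falls under the limit and the node gets
        # past the too-large chunk.
        for i in range(attempts):
            if try_fetch(random.randrange(500)):
                return i + 1
        return None

    print('fixed length:  %r' % (download_fixed(),))   # None -- never succeeds
    print('random length: %r' % (download_random(),))  # usually a small attempt count

Under this assumption each randomized attempt succeeds with probability roughly 201/500, so the chance that ten retries in a row all fail is well under one percent, whereas the fixed 299-parent request can never succeed.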