-
Notifications
You must be signed in to change notification settings - Fork 8
/
merged-mine-proxy.py
469 lines (401 loc) · 17.4 KB
/
merged-mine-proxy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
#!/usr/bin/python
#
# Copyright (c) 2011 Vince Durham
# Copyright (c) 2013-2014 The Blakecoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING.
#
import logging
import argparse
import os
import sys
import traceback
import json
import base64
from binascii import a2b_hex, b2a_hex
import socket
from datetime import datetime
from twisted.internet import defer, reactor, threads
from twisted.web import server, resource
from twisted.internet.error import ConnectionRefusedError
import twisted.internet.error
from urlparse import urlsplit
import httplib
import thread
# Optional dependency: the real merkle tree implementation. main() refuses
# merkle_size > 1 when it is missing, so the fallback below is only ever
# relevant for the single-chain case.
try:
    from merkletree import MerkleTree
    have_merkletree = True
except ImportError:
    have_merkletree = False
    class MerkleTree:
        # Degenerate stand-in: keeps the input leaves unchanged and ignores
        # `detailed`. NOTE(review): update_auxs() only builds a MerkleTree
        # when there is more than one leaf, which main() forbids without the
        # real module — so this path appears unreachable in practice; kept
        # for safety.
        def __init__(self, L, detailed=False):
            # `detail` mirrors the real class's attribute holding the flat
            # node list; here it is simply the input leaves.
            self.detail = L
__version__ = '1.2.3'

'''
merge-mine-proxy
Run behind Eloipool for a set of auxiliary chains.
Output is in the form:
2014-05-05T00:00:00,solve,=,1,HASH
Where the fields are:
* UTC date and time in ISO format
* The word "solve"
* 1 if the proof of work was accepted by each aux chain
* extra ,1 per aux chain e.g solve,=,1,1,HASH
* HASH parent block hash
'''

# Seconds between polls of the aux chains for fresh work (see update_aux_process).
AUX_UPDATE_INTERVAL = 5
# How many recent merkle trees to remember so slightly stale solutions from
# the pool can still be matched and submitted (see Listener.rpc_gotwork).
MERKLE_TREES_TO_KEEP = 240

logger = logging.getLogger('merged-mine-proxy')
logger.setLevel(logging.DEBUG)
def reverse_chunks(s, l):
    """Split `s` into chunks of length `l` and join them in reverse order.

    Used throughout to flip the endianness of hex-encoded hashes and
    targets (l=2 reverses the byte order of a hex string).

    Fix: uses `range` instead of the original `xrange`, which does not
    exist under Python 3; behaviour under Python 2 is unchanged.
    """
    return ''.join(reversed([s[x:x+l] for x in range(0, len(s), l)]))
def getresponse(http, path, postdata, headers):
    """POST `postdata` to `path` on connection `http` and return the
    response body as a string."""
    http.request('POST', path, postdata, headers)
    response = http.getresponse()
    return response.read()
class Error(Exception):
    """A JSON-RPC level error: numeric code, unicode message, optional data.

    Instances are raised locally and also serialised into the 'error'
    member of a JSON-RPC response via _to_obj().
    """

    def __init__(self, code, message, data=''):
        if not isinstance(code, int):
            raise TypeError('code must be an int')
        if not isinstance(message, unicode):
            raise TypeError('message must be a unicode')
        self._code = code
        self._message = message
        self._data = data

    def __str__(self):
        return '%i %s %r' % (self._code, self._message, self._data)

    def _to_obj(self):
        # Shape matches the JSON-RPC 2.0 error object.
        obj = {}
        obj['code'] = self._code
        obj['message'] = self._message
        obj['data'] = self._data
        return obj
class Proxy(object):
    """JSON-RPC 2.0 client bound to one backend URL.

    Attribute access of the form `proxy.rpc_<name>(*params)` is forwarded
    as a JSON-RPC call of method `<name>`. The HTTP connection is opened
    lazily and dropped (to be reopened on the next call) whenever an
    HTTP-level failure occurs.
    """

    def __init__(self, url):
        schema, netloc, path, query, fragment = urlsplit(url)
        auth = None
        if '@' in netloc:
            auth, netloc = netloc.split("@")
        if not path:
            path = "/"
        self._url = "%s://%s%s" % (schema, netloc, path)
        self._path = path
        self._auth = auth            # "user:password" or None
        self._netloc = netloc        # "host:port"
        self._http = None            # lazily-created HTTPConnection

    def callRemote(self, method, *params):
        """Invoke `method(*params)` on the backend and return its result.

        Raises Error(-32099) when the backend cannot be reached, or an
        Error carrying the backend's own code/message when the response
        contains a JSON-RPC error object.
        """
        try:
            if self._http is None:
                host, port = self._netloc.split(":")
                self._http = httplib.HTTPConnection(host, port)
                try:
                    self._http.connect()
                except socket.error:
                    # funnel connect failures into the HTTP error path below
                    raise httplib.HTTPException()
            request_id = 0
            headers = {
                'Content-Type': 'text/json',
            }
            if self._auth is not None:
                headers['Authorization'] = 'Basic ' + base64.b64encode(self._auth)
            body = json.dumps({
                'jsonrpc': '2.0',
                'method': method,
                'params': params,
                'id': request_id,
            })
            resp = json.loads(getresponse(self._http, self._path, body, headers))
            if resp['id'] != request_id:
                raise ValueError('invalid id')
            if 'error' in resp and resp['error'] is not None:
                raise Error(resp['error']['code'], resp['error']['message'])
            return resp['result']
        except httplib.HTTPException:
            # drop the connection so the next call reconnects from scratch
            self._http = None
            raise Error(-32099, u'Could not connect to backend', self._url)

    def __getattr__(self, attr):
        if attr.startswith('rpc_'):
            method = attr[len('rpc_'):]
            return lambda *params: self.callRemote(method, *params)
        raise AttributeError('%r object has no attribute %r' % (self.__class__.__name__, attr))
class Server(resource.Resource):
extra_headers = None
def render(self, request):
def finish(x):
if request._disconnected:
return
if x is not None:
request.write(x)
request.finish()
def finish_error(fail):
if request._disconnected:
return
request.setResponseCode(500) # won't do anything if already written to
request.write('---ERROR---')
request.finish()
fail.printTraceback()
defer.maybeDeferred(resource.Resource.render, self, request).addCallbacks(finish, finish_error)
return server.NOT_DONE_YET
@defer.inlineCallbacks
def render_POST(self, request):
# missing batching, 1.0 notifications
data = request.content.read()
if self.extra_headers is not None:
for name, value in self.extra_headers.iteritems():
request.setHeader(name, value)
try:
try:
req = json.loads(data)
except Exception:
raise RemoteError(-32700, u'Parse error')
except Error, e:
# id unknown
request.write(json.dumps({
'jsonrpc': '2.0',
'id': None,
'result': None,
'error': e._to_obj(),
}))
id_ = req.get('id', None)
try:
try:
method = req['method']
if not isinstance(method, unicode):
raise ValueError()
params = req.get('params', [])
if not isinstance(params, list):
raise ValueError()
except Exception:
raise Error(-32600, u'Invalid Request')
method_name = 'rpc_' + method
if not hasattr(self, method_name):
raise Error(-32601, u'Method not found')
method_meth = getattr(self, method_name)
df = defer.maybeDeferred(method_meth, *params)
if id_ is None:
return
try:
result = yield df
#except Error, e:
#w raise e
except Exception, e:
logger.error(str(e))
raise Error(-32099, u'Unknown error: ' + str(e))
res = json.dumps({
'jsonrpc': '2.0',
'id': id_,
'result': result,
'error': None,
})
request.setHeader('content-length', str(len(res)))
request.write(res)
except Error, e:
res = json.dumps({
'jsonrpc': '2.0',
'id': id_,
'result': None,
'error': e._to_obj(),
})
request.setHeader('content-length', str(len(res)))
request.write(res)
class Listener(Server):
    """JSON-RPC endpoint the pool (Eloipool) talks to.

    Maintains a merkle tree whose leaves are the aux chains' current block
    hashes, pushes the tree root to the parent pool(s) via setworkaux, and
    forwards solved parent blocks to each aux chain as aux proof-of-work.
    """

    def __init__(self, parent, auxs, merkle_size, rewrite_target):
        Server.__init__(self)
        self.parent = parent                      # list of parent-pool Proxy objects
        self.auxs = auxs                          # list of aux-chain Proxy objects
        self.chain_ids = [None for i in auxs]     # chainid reported by each aux chain
        self.aux_targets = [None for i in auxs]   # byte-reversed hex target per aux chain
        self.merkle_size = merkle_size            # number of merkle leaves (<= 255)
        self.merkle_tree_queue = []               # roots in insertion order, for eviction
        self.merkle_trees = {}                    # root -> flat tree node list
        self.rewrite_target = None
        # Optional fixed share target (difficulty 32 or 1024), stored byte-reversed.
        if rewrite_target == 32:
            self.rewrite_target = reverse_chunks("0000000007ffffffffffffffffffffffffffffffffffffffffffffffffffffff", 2)
        elif rewrite_target == 1024:
            self.rewrite_target = reverse_chunks("00000000000fffffffffffffffffffffffffffffffffffffffffffffffffffff", 2)
        if merkle_size > 255:
            raise ValueError('merkle size up to 255')
        self.putChild('', self)

    def merkle_branch(self, chain_index, merkle_tree):
        """Return the merkle branch (sibling hashes, leaf level first) for
        the leaf at `chain_index`.

        `merkle_tree` is the flat node list built by update_auxs: all
        leaves first, then each successive level, root last.
        """
        step = self.merkle_size
        i1 = chain_index
        j = 0
        branch = []
        while step > 1:
            i = min(i1 ^ 1, step - 1)       # sibling index, clamped on odd-width levels
            branch.append(merkle_tree[i + j])
            i1 = i1 >> 1
            j += step
            # `//`: explicit integer division so this also works on Python 3
            # (`/` was fine on Python 2 ints only).
            step = (step + 1) // 2
        return branch

    def calc_merkle_index(self, chain):
        """Map aux chain `chain` to its deterministic merkle leaf slot.

        This is the standard merged-mining slot derivation (nonce = 0) and
        must match what the aux chain daemons compute on their side.
        """
        chain_id = self.chain_ids[chain]
        rand = 0  # nonce
        rand = (rand * 1103515245 + 12345) & 0xffffffff
        rand += chain_id
        rand = (rand * 1103515245 + 12345) & 0xffffffff
        return rand % self.merkle_size

    @defer.inlineCallbacks
    def update_auxs(self):
        """Poll every aux chain for fresh work and rebuild the merkle tree.

        When the root changes it is pushed to the parent pool(s) via
        setworkaux, and the tree is remembered (up to
        MERKLE_TREES_TO_KEEP) so slightly stale solutions can still be
        matched in rpc_gotwork.
        """
        # create merkle leaves with arbitrary (distinct) initial values
        merkle_leaves = [('0' * 62) + ("%02x" % x) for x in range(self.merkle_size)]
        # ask each aux chain for a block
        for chain in range(len(self.auxs)):
            aux_block = (yield self.auxs[chain].rpc_getauxblock())
            aux_block_hash = aux_block['hash']
            self.chain_ids[chain] = aux_block['chainid']
            chain_merkle_index = self.calc_merkle_index(chain)
            merkle_leaves[chain_merkle_index] = aux_block_hash
            self.aux_targets[chain] = reverse_chunks(aux_block['target'], 2)  # fix endian
        # create merkle tree (hashes are byte-reversed in and out of MerkleTree)
        if len(merkle_leaves) > 1:
            tree_nodes = MerkleTree([a2b_hex(leaf)[::-1] for leaf in merkle_leaves], detailed=True).detail
            merkle_tree = [b2a_hex(node[::-1]) for node in tree_nodes]
        else:
            merkle_tree = merkle_leaves
        merkle_root = merkle_tree[-1]
        if merkle_root not in self.merkle_trees:
            # Tell bitcoind the new merkle root:
            # root + one-byte merkle size (zero-padded) + 4-byte nonce (0),
            # prefixed with the merged-mining magic.
            MMAux = merkle_root + ("%02x000000" % self.merkle_size) + "00000000"
            MMAux = 'fabe6d6d' + MMAux
            for p in self.parent:
                p.rpc_setworkaux('MM', MMAux)
            # remember new tree
            self.merkle_trees[merkle_root] = merkle_tree
            self.merkle_tree_queue.append(merkle_root)
            if len(self.merkle_tree_queue) > MERKLE_TREES_TO_KEEP:
                # forget one tree
                old_root = self.merkle_tree_queue.pop(0)
                del self.merkle_trees[old_root]

    def update_aux_process(self):
        """Periodic driver: reschedule itself, then refresh aux work."""
        reactor.callLater(AUX_UPDATE_INTERVAL, self.update_aux_process)
        self.update_auxs()

    def rpc_getaux(self, data=None):
        '''Use this rpc call to get the aux chain merkle root and aux target.
        Pool software can then call getworkaux(aux) instead of going through
        this proxy. It is enough to call this once a second.
        '''
        try:
            # Get aux based on the latest tree
            merkle_root = self.merkle_tree_queue[-1]
            # nonce = 0, one byte merkle size
            aux = merkle_root + ("%02x000000" % self.merkle_size) + "00000000"
            result = {'aux': aux}
            if self.rewrite_target:
                result['aux_target'] = self.rewrite_target
            else:
                # Find highest target (i.e. the easiest aux chain)
                targets = []
                targets.extend(self.aux_targets)
                targets.sort()
                result['aux_target'] = reverse_chunks(targets[-1], 2)  # fix endian
            return result
        except Exception:
            logger.error(traceback.format_exc())
            raise

    @defer.inlineCallbacks
    def rpc_gotwork(self, solution):
        """Called by the pool when a parent block is solved.

        `solution` carries the parent block 'hash', its 'header' (hex) and
        the 'coinbaseMrkl' blob containing the merged-mining commitment.
        The proof of work is submitted to every aux chain; returns True if
        any aux chain accepted it.
        """
        try:
            # Submit work upstream
            any_solved = False
            aux_solved = []
            parent_hash = solution['hash']
            blkhdr = solution['header']
            coinbaseMerkle = solution['coinbaseMrkl']
            # BUG FIX: str.find returns -1 (not 0) when the magic is absent.
            # The original computed `find(...) + 8` first, so a missing
            # magic produced pos == 7 and the error branch was unreachable.
            pos = coinbaseMerkle.find('fabe6d6d')
            if pos < 0:
                logger.error("failed to find aux in coinbase")
                defer.returnValue(False)
                return
            pos += 8  # skip past the magic itself
            slnaux = coinbaseMerkle[pos:pos+80]
            merkle_root = slnaux[:-16]  # strip off size and nonce
            if merkle_root not in self.merkle_trees:
                logger.error("stale merkle root %s", merkle_root)
                defer.returnValue(False)
                return
            merkle_tree = self.merkle_trees[merkle_root]
            # submit to each aux chain
            for chain in range(len(self.auxs)):
                chain_merkle_index = self.calc_merkle_index(chain)
                aux_solved.append(False)
                # try submitting if under target
                # TODO: self.aux_targets[chain] > parent_hash and
                if chain_merkle_index is not None:
                    branch = self.merkle_branch(chain_merkle_index, merkle_tree)
                    # Assemble the auxpow blob: coinbase/merkle proof, branch
                    # length, byte-reversed branch hashes, the chain's merkle
                    # index (padded to 4 bytes), then the parent header.
                    auxpow = coinbaseMerkle + '%02x' % (len(branch),)
                    for mb in branch:
                        auxpow += b2a_hex(a2b_hex(mb)[::-1])
                    auxpow += ('%02x000000' % (chain_merkle_index,)) + blkhdr
                    aux_hash = merkle_tree[chain_merkle_index]
                    aux_solved[-1] = (
                        yield self.auxs[chain].rpc_getauxblock(aux_hash, auxpow))
                    any_solved = any_solved or aux_solved[-1]
            logger.info("%s,solve,%s,%s,%s", datetime.utcnow().isoformat(),
                        "=",
                        ",".join(["1" if solve else "0" for solve in aux_solved]),
                        parent_hash)
            if any_solved:
                self.update_auxs()
            defer.returnValue(any_solved)
        except Exception:
            # Exceptions here are normally already handled by the rpc functions
            raise
def main(args):
    """Wire everything up: connect to the parent pool(s) and aux chains,
    then start listening for pool RPC traffic on the worker port.

    Raises ValueError when the merkle size is too small for the number of
    aux chains, or when multiple chains are requested without the
    merkletree module.
    """
    # BUG FIX: fall back to the documented default when -p is omitted;
    # argparse leaves args.parent_url as None, which crashed here.
    parent_urls = args.parent_url or ['http://un:pw@127.0.0.1:8330/']
    parent = [Proxy(url) for url in parent_urls]
    aux_urls = args.aux_urls or ['http://un:pw@127.0.0.1:8342/']
    auxs = [Proxy(url) for url in aux_urls]
    if args.merkle_size is None:
        # Pick the smallest power of two that fits every aux chain.
        for i in range(8):
            if (1 << i) >= len(aux_urls):
                # BUG FIX: the merkle size is the power of two itself
                # (1 << i), not the exponent i; the original stored `i`,
                # which failed the size check below for 3+ aux chains.
                # Results for 1 and 2 chains are unchanged (1 and 2).
                args.merkle_size = 1 << i
                logger.info('merkle size = %d', args.merkle_size)
                break
    if len(aux_urls) > args.merkle_size:
        raise ValueError('the merkle size must be at least as large as the number of aux chains')
    if args.merkle_size > 1 and not have_merkletree:
        raise ValueError('Missing merkletree module. Only a single subchain will work.')
    if args.pidfile:
        # record our pid so init scripts can signal us
        with open(args.pidfile, 'w') as pidfile:
            pidfile.write(str(os.getpid()))
    listener = Listener(parent, auxs, args.merkle_size, args.rewrite_target)
    listener.update_aux_process()
    reactor.listenTCP(args.worker_port, server.Site(listener))
def run():
    """Parse command-line arguments, configure logging, and start the
    twisted reactor running main()."""
    parser = argparse.ArgumentParser(description='merge-mine-proxy (version %s)' % (__version__,))
    parser.add_argument('--version', action='version', version=__version__)
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners asking for work and providing responses (default: 8772)',
        type=int, action='store', default=8772, dest='worker_port')
    parent_group = parser.add_argument_group('parent chain (Eloipool) interface')
    # BUG FIX: supply the default the help text promises; without it
    # args.parent_url was None and main() crashed when -p was omitted.
    parent_group.add_argument('-p', '--parent-url', metavar='PARENT_URL',
        help='connect to Eloipool at this address (default: http://un:pw@127.0.0.1:8330/)',
        type=str, action='store', nargs='+',
        default=['http://un:pw@127.0.0.1:8330/'], dest='parent_url')
    aux_group = parser.add_argument_group('aux chain (e.g. Photon, B+ etc..) interface(s)')
    aux_group.add_argument('-x', '--aux-url', metavar='AUX_URL',
        help='connect to the aux RPCs at this address (default: http://un:pw@127.0.0.1:8342/)',
        type=str, action='append', default=[], dest='aux_urls')
    aux_group.add_argument('-s', '--merkle-size', metavar='SIZE',
        help='use these many entries in the merkle tree. Must be a power of 2. Default is lowest power of 2 greater than number of aux chains.',
        type=int, action='store', default=None, dest='merkle_size')
    parser.add_argument('-r', '--rewrite-target', help='rewrite target difficulty to 32',
        action='store_const', const=32, default=False, dest='rewrite_target')
    parser.add_argument('-R', '--rewrite-target-1024', help='rewrite target difficulty to 1024',
        action='store_const', const=1024, default=False, dest='rewrite_target')
    parser.add_argument('-i', '--pidfile', metavar='PID', type=str, action='store', default=None, dest='pidfile')
    parser.add_argument('-l', '--logfile', metavar='LOG', type=str, action='store', default=None, dest='logfile')
    args = parser.parse_args()
    # Log to the requested file, or to stderr by default.
    if args.logfile:
        logger.addHandler(logging.FileHandler(args.logfile))
    else:
        logger.addHandler(logging.StreamHandler())
    reactor.callWhenRunning(main, args)
    reactor.run()
# Script entry point: parse CLI arguments and run the proxy forever.
if __name__ == "__main__":
    run()