// Copyright (c) 2015-2018 The Gulden developers
// Authored by: Frank (dt_cdog@yahoo.com) and Malcolm MacLeod (mmacleod@gmx.com)
// Distributed under the GULDEN software license, see the accompanying
// file COPYING
//
// This file contains Delta, the Gulden Difficulty Re-adjustment algorithm developed by Frank (dt_cdog@yahoo.com) with various enhancements by Malcolm MacLeod (mmacleod@gmx.com)
// The core algorithm works by taking time measurements of four periods (last block; short window; middle window; long window) and then applying a weighting to them.
// This allows the algorithm to react to short term fluctuations while still taking long term block targets into account, which helps prevent it from overreacting.
//
// In addition to the core algorithm several extra rules are then applied in certain situations (e.g. multiple quick blocks) to enhance the behaviour.
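//
// As a rough illustration (simply restating the constants used further down, not a separate rule): with the default
// weights of 64 / 8 / 2 / 1 over windows of 1 / 3 / 24 / 576 blocks, the weighted timespan works out to
//   (nLBTimespan*64 + nShortTimespan*8 + nMiddleTimespan*2 + nLongTimespan*1) / (1*64 + 3*8 + 24*2 + 576*1)
// i.e. a divisor of 712 once all four windows are available.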
#include "diff_common.h"
#include "diff_delta.h"
#include "chainparams.h"
#include <stdint.h>
unsigned int GetNextWorkRequired_DELTA (const CBlockIndex* pindexLast, const CBlockHeader* block, int nPowTargetSpacing, unsigned int nPowLimit, unsigned int nFirstDeltaBlock)
{
// These two variables are not used in the calculation at all, but only for logging when -debug is set, to prevent logging the same calculation repeatedly.
static int64_t nPrevHeight = 0;
static int64_t nPrevDifficulty = 0;
static bool debugLogging = LogAcceptCategory(BCLog::DELTA);
std::string sLogInfo;
//uint32_t nDeltaVersion=3;
// The spacing we want our blocks to come in at.
int64_t nRetargetTimespan = nPowTargetSpacing;
// The minimum difficulty that is allowed, this is set on a per algorithm basis.
const unsigned int nProofOfWorkLimit = nPowLimit;
// How many blocks to use to calculate each of the four algorithm-specific time windows (last block; short window; middle window; long window)
const unsigned int nLastBlock = 1;
const unsigned int nShortFrame = 3;
const unsigned int nMiddleFrame = 24;
const unsigned int nLongFrame = 576;
// Weighting to use for each of the four algo specific time windows.
const int64_t nLBWeight = 64;
const int64_t nShortWeight = 8;
int64_t nMiddleWeight = 2;
int64_t nLongWeight = 1;
// Minimum threshold for the short window; if it exceeds this threshold then favour a larger swing in difficulty.
const int64_t nQBFrame = nShortFrame + 1;
// Any block with a time lower than nBadTimeLimit is considered to have a 'bad' time, the time is replaced with the value of nBadTimeReplace.
const int64_t nBadTimeLimit = 0;
const int64_t nBadTimeReplace = nRetargetTimespan / 10;
// Used for 'exception 1' (see code below): if the block time is lower than 'nLowTimeLimit' then prevent the algorithm from decreasing difficulty any further.
// If the block time is lower than 'nFloorTimeLimit' then impose a minor increase in difficulty.
// This helps to prevent the algorithm from generating and giving away too many sudden/easy 'quick blocks' after a long block or two have occurred, and instead forces things to be recovered more gently over time without interfering with other desirable properties of the algorithm.
const int64_t nLowTimeLimit = nRetargetTimespan * 90 / PERCENT_FACTOR;
const int64_t nFloorTimeLimit = nRetargetTimespan * 65 / PERCENT_FACTOR;
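// For example (illustrative only, assuming PERCENT_FACTOR is 100 and a hypothetical 150 second target spacing)
// nLowTimeLimit would be 135 seconds and nFloorTimeLimit 97 seconds (integer division).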
// Used for 'exception 2' (see code below), if a block has taken longer than nLongTimeLimit we perform a difficulty reduction, which increases over time based on nLongTimeStep
// NB!!! nLongTimeLimit MUST ALWAYS EXCEED THE MAXIMUM DRIFT ALLOWED (IN BOTH THE POSITIVE AND NEGATIVE DIRECTIONS)
// SO AT LEAST DRIFT X2 OR MORE - OR ELSE CLIENTS CAN FORCE LOW DIFFICULTY BLOCKS BY MESSING WITH THE BLOCK TIMES.
const int64_t nDrift = 60; //Drift in seconds
int64_t nLongTimeLimit = (3 * nDrift);
int64_t nLongTimeStep = nDrift;
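// With nDrift at 60 seconds this gives a 180 second nLongTimeLimit before 'exception 2' (see below) begins stepping
// the difficulty down, with one further step per additional 60 seconds elapsed.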
// Limit adjustment amount to try prevent jumping too far in either direction.
// min 75% of default time; 33.3% difficulty increase
unsigned int nMinimumAdjustLimit = (unsigned int)nRetargetTimespan * 75 / PERCENT_FACTOR;
// max 150% of default time; 33.3% difficulty decrease
unsigned int nMaximumAdjustLimit = (unsigned int)nRetargetTimespan * 150 / PERCENT_FACTOR;
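// The difficulty moves inversely to the timespan, so clamping the weighted timespan at 75% / 150% of the target time
// bounds the per-block difficulty change at roughly +33.3% (1/0.75) and -33.3% (1 - 1/1.5) respectively.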
// Variables used in calculation
int64_t nQBTimespan = 0;
int64_t nWeightedSum = 0;
int64_t nWeightedDiv = 0;
int64_t nWeightedTimespan = 0;
const CBlockIndex* pindexFirst = pindexLast;
// Genesis block
if (pindexLast == NULL)
return nProofOfWorkLimit;
// -- Use a fixed difficulty until we have enough blocks to work with
if (pindexLast->nHeight <= nQBFrame)
return nProofOfWorkLimit;
// -- Calculate timespan for last block window
int64_t nLBTimespan = 0;
{
int64_t nLBTimespanPoW = 0;
pindexFirst = pindexLast->pprev;
nLBTimespanPoW = pindexLast->GetBlockTime() - pindexFirst->GetBlockTime();
nLBTimespan = pindexLast->GetBlockTimePoW2Witness() - pindexFirst->GetBlockTimePoW2Witness();
// Prevent bad/negative block times - switch them for a fixed time.
if (nLBTimespan <= nBadTimeLimit)
nLBTimespan = nBadTimeReplace;
// If the last block was a 'long block' with a difficulty adjustment, treat it as a faster block at the lower difficulty
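// ('Exception 2' below steps the difficulty down once a block exceeds nLongTimeLimit; the modulo here gives roughly
// how long the block took at its final, already reduced, difficulty step so only that portion feeds the averages.)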
if (nLBTimespanPoW > (nLongTimeLimit + nLongTimeStep))
{
nLBTimespanPoW = (nLBTimespanPoW-nLongTimeLimit)%nLongTimeStep;
nLBTimespan = std::min(nLBTimespan, nLBTimespanPoW);
}
}
// -- Calculate timespan for short window
int64_t nShortTimespan = 0;
{
int64_t nDeltaTimespan = 0;
int64_t nDeltaTimespanPoW = 0;
pindexFirst = pindexLast;
for (unsigned int i = 1; pindexFirst != NULL && i <= nQBFrame; i++)
{
nDeltaTimespanPoW = pindexFirst->GetBlockTime() - pindexFirst->pprev->GetBlockTime();
nDeltaTimespan = pindexFirst->GetBlockTimePoW2Witness() - pindexFirst->pprev->GetBlockTimePoW2Witness();
// Prevent bad/negative block times - switch them for a fixed time.
if (nDeltaTimespan <= nBadTimeLimit)
nDeltaTimespan = nBadTimeReplace;
// If the last block was a 'long block' with a difficulty adjustment, treat it as a faster block at the lower difficulty
if (nDeltaTimespanPoW > (nLongTimeLimit + nLongTimeStep))
{
nDeltaTimespanPoW = (nDeltaTimespanPoW-nLongTimeLimit)%nLongTimeStep;
nDeltaTimespan = std::min(nDeltaTimespan, nDeltaTimespanPoW);
}
if (i <= nShortFrame)
nShortTimespan += nDeltaTimespan;
nQBTimespan += nDeltaTimespan;
pindexFirst = pindexFirst->pprev;
}
}
// -- Calculate time interval for middle window
int64_t nMiddleTimespan = 0;
{
int64_t nDeltaTimespan = 0;
int64_t nDeltaTimespanPoW = 0;
if (pindexLast->nHeight - (int)nFirstDeltaBlock <= (int)nMiddleFrame)
{
nMiddleWeight = nMiddleTimespan = 0;
}
else
{
pindexFirst = pindexLast;
for (unsigned int i = 1; pindexFirst != NULL && i <= nMiddleFrame; i++)
{
nDeltaTimespan = pindexFirst->GetBlockTimePoW2Witness() - pindexFirst->pprev->GetBlockTimePoW2Witness();
// Prevent bad/negative block times - switch them for a fixed time.
if (nDeltaTimespan <= nBadTimeLimit)
nDeltaTimespan = nBadTimeReplace;
// If the last block was a 'long block' with a difficulty adjustment, treat it as a faster block at the lower difficulty
if (nDeltaTimespanPoW > (nLongTimeLimit + nLongTimeStep))
{
nDeltaTimespanPoW = (nDeltaTimespanPoW-nLongTimeLimit)%nLongTimeStep;
nDeltaTimespan = std::min(nDeltaTimespan, nDeltaTimespanPoW);
}
nMiddleTimespan += nDeltaTimespan;
pindexFirst = pindexFirst->pprev;
}
}
}
// -- Calculate timespan for long window
int64_t nLongTimespan = 0;
{
// NB! No need to worry about single negative block times as they have no significant influence over this many blocks.
if ((int)pindexLast->nHeight - (int)nFirstDeltaBlock <= (int)nLongFrame)
{
nLongWeight = nLongTimespan = 0;
}
else
{
pindexFirst = pindexLast;
for (unsigned int i = 1; pindexFirst != NULL && i <= nLongFrame; i++)
{
pindexFirst = pindexFirst->pprev;
}
nLongTimespan = pindexLast->GetBlockTimePoW2Witness() - pindexFirst->GetBlockTimePoW2Witness();
}
}
// -- Combine all the timespans and weights to get a weighted timespan
nWeightedSum = (nLBTimespan * nLBWeight) + (nShortTimespan * nShortWeight);
nWeightedSum += (nMiddleTimespan * nMiddleWeight) + (nLongTimespan * nLongWeight);
nWeightedDiv = (nLastBlock * nLBWeight) + (nShortFrame * nShortWeight);
nWeightedDiv += (nMiddleFrame * nMiddleWeight) + (nLongFrame * nLongWeight);
nWeightedTimespan = nWeightedSum / nWeightedDiv;
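// Note that when the chain is too young for the middle/long windows their weights were zeroed above,
// so those windows simply drop out of both the numerator and the divisor.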
// If we are close to the target time then reduce the adjustment limits to smooth things out a little more.
{
if (std::abs(nLBTimespan - nRetargetTimespan) < nRetargetTimespan * 20 / PERCENT_FACTOR)
{
// 90% of target
// 11.1% maximum difficulty increase
// 9.1% maximum difficulty decrease.
nMinimumAdjustLimit = (unsigned int)nRetargetTimespan * 90 / PERCENT_FACTOR;
nMaximumAdjustLimit = (unsigned int)nRetargetTimespan * 110 / PERCENT_FACTOR;
}
else if (std::abs(nLBTimespan - nRetargetTimespan) < nRetargetTimespan * 30 / PERCENT_FACTOR)
{
// 80% of target - 25% maximum difficulty increase; 16.7% maximum difficulty decrease.
nMinimumAdjustLimit = (unsigned int)nRetargetTimespan * 80 / PERCENT_FACTOR;
nMaximumAdjustLimit = (unsigned int)nRetargetTimespan * 120 / PERCENT_FACTOR;
}
}
// -- Apply the adjustment limits
{
// min
if (nWeightedTimespan < nMinimumAdjustLimit)
nWeightedTimespan = nMinimumAdjustLimit;
// max
if (nWeightedTimespan > nMaximumAdjustLimit)
nWeightedTimespan = nMaximumAdjustLimit;
}
// -- Finally calculate and set the new difficulty.
arith_uint256 bnNew;
bnNew.SetCompact(pindexLast->nBits);
bnNew = bnNew * arith_uint256(nWeightedTimespan);
bnNew = bnNew / arith_uint256(nRetargetTimespan);
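// bnNew is the proof-of-work target (the inverse of difficulty): scaling it by nWeightedTimespan/nRetargetTimespan
// raises the target (lowers difficulty) when blocks arrived slower than intended, and lowers it when they arrived faster.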
// Now that we have the difficulty we run a last few 'special purpose' exception rules which have the ability to override the calculation:
// Exception 1 - Never adjust difficulty downward (human view) if previous block generation was already faster than what we wanted.
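// In target terms: bnNew > bnComp means the calculation wants a higher target (lower difficulty) than the previous
// block had, which we refuse (or slightly reverse) when the previous block already came in faster than nLowTimeLimit.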
{
nLBTimespan = pindexLast->GetBlockTimePoW2Witness() - pindexLast->pprev->GetBlockTimePoW2Witness();
arith_uint256 bnComp;
bnComp.SetCompact(pindexLast->nBits);
if (nLBTimespan > 0 && nLBTimespan < nLowTimeLimit && (bnNew > bnComp))
{
// If it is this low then we actually give it a slight nudge upwards - 5%
if (nLBTimespan < nFloorTimeLimit)
{
bnNew.SetCompact(pindexLast->nBits);
bnNew = bnNew * arith_uint256(95);
bnNew = bnNew / arith_uint256(PERCENT_FACTOR);
if (debugLogging && (nPrevHeight != pindexLast->nHeight) )
sLogInfo += strprintf("<DELTA> Last block time [%ld] was far below target but adjustment still downward, forcing difficulty up by 5%% instead\n", nLBTimespan);
}
else
{
bnNew.SetCompact(pindexLast->nBits);
if (debugLogging && (nPrevHeight != pindexLast->nHeight) )
sLogInfo += strprintf("<DELTA> Last block time [%ld] below target but adjustment still downward, blocking downward adjustment\n", nLBTimespan);
}
}
}
// Exception 2 - Reduce difficulty if the current block generation time has already exceeded the maximum time limit. (NB! nLongTimeLimit must exceed the maximum possible drift in both the positive and negative directions)
{
int64_t lastBlockTime = pindexLast->GetBlockTimePoW2Witness();
if ((block->nTime - lastBlockTime) > nLongTimeLimit)
{
// Fixed reduction for each missed step. 10% pre-SIGMA, 30% after SIGMA, 10% delta V3
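// Each missed step below multiplies the target by nDeltaDropPerStep / PERCENT_FACTOR (compounding), i.e. assuming
// PERCENT_FACTOR is 100, roughly a 10% larger target (lower difficulty) for every additional nLongTimeStep elapsed.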
int32_t nDeltaDropPerStep = 110;
int64_t nNumMissedSteps = ((block->nTime - lastBlockTime - nLongTimeLimit) / nLongTimeStep) + 1;
for (int i = 0; i < nNumMissedSteps; ++i)
{
bnNew = bnNew * arith_uint256(nDeltaDropPerStep);
bnNew = bnNew / arith_uint256(PERCENT_FACTOR);
}
if (debugLogging && (nPrevHeight != pindexLast->nHeight || bnNew.GetCompact() != nPrevDifficulty) )
sLogInfo += strprintf("<DELTA> Maximum block time hit - dropping difficulty %08x %s\n", bnNew.GetCompact(), bnNew.ToString().c_str());
}
}
// Exception 3 - Difficulty should never go below (human view) the starting difficulty, so if it has we force it back to the limit.
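// 'Below the starting difficulty' in target terms means the target exceeding the proof-of-work limit: if the
// calculated target is larger than the applicable limit it is clamped back to that limit. After the hard-coded
// timestamp a fixed limit encoded here is used for the comparison in place of the per-algorithm nProofOfWorkLimit.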
{
arith_uint256 bnComp;
if (block->nTime > 1571320800)
{
const unsigned int newProofOfWorkLimit = UintToArith256(uint256S("0x003fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")).GetCompact();
bnComp.SetCompact(newProofOfWorkLimit);
if (bnNew > bnComp)
bnNew.SetCompact(newProofOfWorkLimit);
}
else
{
bnComp.SetCompact(nProofOfWorkLimit);
if (bnNew > bnComp)
bnNew.SetCompact(nProofOfWorkLimit);
}
}
if (debugLogging)
{
if (nPrevHeight != pindexLast->nHeight || bnNew.GetCompact() != nPrevDifficulty)
{
static RecursiveMutex logCS;
LOCK(logCS);
LogPrintf("<DELTA> Height= %d\n" , pindexLast->nHeight);
LogPrintf("%s" , sLogInfo.c_str());
LogPrintf("<DELTA> nTargetTimespan = %ld nActualTimespan = %ld nWeightedTimespan = %ld \n", nRetargetTimespan, nLBTimespan, nWeightedTimespan);
LogPrintf("<DELTA> nShortTimespan/nShortFrame = %ld nMiddleTimespan/nMiddleFrame = %ld nLongTimespan/nLongFrame = %ld \n", nShortTimespan/nShortFrame, nMiddleTimespan/nMiddleFrame, nLongTimespan/nLongFrame);
LogPrintf("<DELTA> Before: %08x %s\n", pindexLast->nBits, arith_uint256().SetCompact(pindexLast->nBits).ToString().c_str());
LogPrintf("<DELTA> After: %08x %s\n", bnNew.GetCompact(), bnNew.ToString().c_str());
LogPrintf("<DELTA> Rough change percentage (human view): %lf%%\n", -( ( (bnNew.getdouble() - arith_uint256().SetCompact(pindexLast->nBits).getdouble()) / arith_uint256().SetCompact(pindexLast->nBits).getdouble()) * 100) );
}
nPrevHeight = pindexLast->nHeight;
nPrevDifficulty = bnNew.GetCompact();
}
// Difficulty is returned in compact form.
return bnNew.GetCompact();
}