-
-
Notifications
You must be signed in to change notification settings - Fork 301
/
discount_curve.py
465 lines (351 loc) · 16.7 KB
/
discount_curve.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
##############################################################################
# Copyright (C) 2018, 2019, 2020 Dominic O'Kane
##############################################################################
import numpy as np
from .interpolator import Interpolator, InterpTypes, interpolate
from ...utils.date import Date
from ...utils.error import FinError
from ...utils.global_vars import gDaysInYear, gSmall
from ...utils.frequency import annual_frequency, FrequencyTypes
from ...utils.day_count import DayCount, DayCountTypes
from ...utils.math import test_monotonicity
from ...utils.schedule import Schedule
from ...utils.helpers import check_argument_types
from ...utils.helpers import times_from_dates
from ...utils.helpers import label_to_string
###############################################################################
class DiscountCurve:
    """ Base discount curve with an internal representation of a vector of
    times and discount factors and an interpolation scheme for interpolating
    between these fixed points. Derived curve classes reuse the zero-rate,
    forward-rate and swap-rate calculations defined here. """

    ###########################################################################

    def __init__(self,
                 value_dt: Date,
                 df_dts: list,
                 df_values: np.ndarray,
                 interp_type: InterpTypes = InterpTypes.FLAT_FWD_RATES):
        """ Create the discount curve from a vector of dates and discount
        factors with an anchor date and specify an interpolation scheme. As we
        are explicitly linking dates and discount factors, we do not need to
        specify any compounding convention or day count calculation since
        discount factors are pure prices. We do however need to specify a
        convention for interpolating the discount factors in time.

        Raises FinError if the inputs are empty, have mismatched lengths, or
        if the resulting times are not strictly increasing. """

        check_argument_types(self.__init__, locals())

        # Validate curve inputs
        if len(df_dts) < 1:
            raise FinError("Times has zero length")

        if len(df_dts) != len(df_values):
            raise FinError("Times and Values are not the same")

        # The curve always anchors t=0 with df=1.0 unless the first supplied
        # date IS the valuation date, in which case its df is used instead.
        self._times = [0.0]
        self._dfs = [1.0]
        self._df_dts = df_dts

        num_points = len(df_dts)

        start_index = 0
        if num_points > 0:
            if df_dts[0] == value_dt:
                self._dfs[0] = df_values[0]
                start_index = 1

        for i in range(start_index, num_points):
            # Times are calendar-day year fractions from the anchor date
            t = (df_dts[i] - value_dt) / gDaysInYear
            self._times.append(t)
            self._dfs.append(df_values[i])

        self._times = np.array(self._times)

        if test_monotonicity(self._times) is False:
            print(self._times)  # left in to aid debugging the bad input
            raise FinError("Times are not sorted in increasing order")

        self._value_dt = value_dt
        self._dfs = np.array(self._dfs)
        self._interp_type = interp_type
        self._freq_type = FrequencyTypes.CONTINUOUS
        # This needs to be thought about - I just assign an arbitrary value
        self._dc_type = DayCountTypes.ACT_ACT_ISDA
        self._interpolator = Interpolator(self._interp_type)
        self._interpolator.fit(self._times, self._dfs)

    ###########################################################################

    def _zero_to_df(self,
                    value_dt: Date,
                    rates: (float, np.ndarray),
                    times: (float, np.ndarray),
                    freq_type: FrequencyTypes,
                    dc_type: DayCountTypes):
        """ Convert a zero with a specified compounding frequency and day
        count convention to a discount factor for a single maturity date or a
        list of dates. The times are year fractions already computed with the
        chosen day count; they are floored at gSmall to avoid division issues
        at t=0. Raises FinError for an unknown frequency type. """

        if isinstance(times, float):
            times = np.array([times])

        t = np.maximum(times, gSmall)

        f = annual_frequency(freq_type)

        if freq_type == FrequencyTypes.CONTINUOUS:
            df = np.exp(-rates * t)
        elif freq_type == FrequencyTypes.SIMPLE:
            df = 1.0 / (1.0 + rates * t)
        elif freq_type == FrequencyTypes.ANNUAL or \
                freq_type == FrequencyTypes.SEMI_ANNUAL or \
                freq_type == FrequencyTypes.QUARTERLY or \
                freq_type == FrequencyTypes.MONTHLY:
            df = 1.0 / np.power(1.0 + rates / f, f * t)
        else:
            raise FinError("Unknown Frequency type")

        return df

    ###########################################################################

    def _df_to_zero(self,
                    dfs: (float, np.ndarray),
                    maturity_dts: (Date, list),
                    freq_type: FrequencyTypes,
                    dc_type: DayCountTypes):
        """ Convert discount factors at the given maturity dates to zero
        rates with a chosen compounding frequency which may be continuous,
        simple, or compounded at a specific frequency, all of which are
        choices of FrequencyTypes. Returns a numpy array of zero rates.
        Raises FinError if the date and df lists differ in length. """

        f = annual_frequency(freq_type)

        if isinstance(maturity_dts, Date):
            date_list = [maturity_dts]
        else:
            date_list = maturity_dts

        if isinstance(dfs, float):
            df_list = [dfs]
        else:
            df_list = dfs

        if len(date_list) != len(df_list):
            raise FinError("Date list and df list do not have same length")

        num_dts = len(date_list)
        zero_rates = []

        times = times_from_dates(
            date_list, self._value_dt, dc_type)

        for i in range(0, num_dts):

            df = df_list[i]

            # Floor t at gSmall to avoid division by zero at the anchor date
            t = max(times[i], gSmall)

            if freq_type == FrequencyTypes.CONTINUOUS:
                r = -np.log(df) / t
            elif freq_type == FrequencyTypes.SIMPLE:
                r = (1.0 / df - 1.0) / t
            else:
                # Discrete compounding at frequency f per year
                r = (np.power(df, -1.0 / (t * f)) - 1.0) * f

            zero_rates.append(r)

        return np.array(zero_rates)

    ###########################################################################

    def zero_rate(self,
                  dts: (list, Date),
                  freq_type: FrequencyTypes = FrequencyTypes.CONTINUOUS,
                  dc_type: DayCountTypes = DayCountTypes.ACT_360):
        """ Calculation of zero rates with specified frequency. This
        function can return a vector of zero rates given a vector of
        dates so must use Numpy functions. Default frequency is a
        continuously compounded rate. Returns a scalar for a single Date
        input and a numpy array for a list input. """

        if isinstance(freq_type, FrequencyTypes) is False:
            raise FinError("Invalid Frequency type.")

        if isinstance(dc_type, DayCountTypes) is False:
            raise FinError("Invalid Day Count type.")

        dfs = self.df(dts)

        zero_rates = self._df_to_zero(dfs, dts, freq_type, dc_type)

        # FIX: removed unreachable `return zero_rates` that followed this
        # exhaustive if/else (dead code).
        if isinstance(dts, Date):
            return zero_rates[0]
        else:
            return np.array(zero_rates)

    ###########################################################################

    def cc_rate(self,
                dts: (list, Date),
                dc_type: DayCountTypes = DayCountTypes.SIMPLE):
        """ Calculation of zero rates with continuous compounding. This
        function can return a vector of cc rates given a vector of
        dates so must use Numpy functions. """

        cc_rates = self.zero_rate(
            dts, FrequencyTypes.CONTINUOUS, dc_type)

        return cc_rates

    ###########################################################################

    def swap_rate(self,
                  effective_dt: Date,
                  maturity_dt: (list, Date),
                  freq_type=FrequencyTypes.ANNUAL,
                  dc_type: DayCountTypes = DayCountTypes.THIRTY_E_360):
        """ Calculate the swap rate to maturity date. This is the rate paid by
        a swap that has a price of par today. This is the same as a Libor swap
        rate except that we do not do any business day adjustments. Returns a
        scalar for a single maturity Date and a numpy array for a list. """

        # Note that this function does not call the IborSwap class to
        # calculate the swap rate since that will create a circular dependency.
        # I therefore recreate the actual calculation of the swap rate here.

        if effective_dt < self._value_dt:
            raise FinError("Swap starts before the curve valuation date.")

        if isinstance(freq_type, FrequencyTypes) is False:
            raise FinError("Invalid Frequency type.")

        # FIX: the original repeated the freq_type check here; it was clearly
        # intended to validate the day count type, which was never checked.
        if isinstance(dc_type, DayCountTypes) is False:
            raise FinError("Invalid Day Count type.")

        if freq_type == FrequencyTypes.SIMPLE:
            raise FinError("Cannot calculate par rate with simple yield freq.")
        elif freq_type == FrequencyTypes.CONTINUOUS:
            raise FinError("Cannot calculate par rate with continuous freq.")

        # FIX: record whether the caller passed a single Date BEFORE the loop
        # below reuses the name `maturity_dt` as its loop variable. The
        # original tested `isinstance(maturity_dts, Date)` at the end, which
        # was always False, so a scalar input wrongly returned a 1-array.
        is_single_date = isinstance(maturity_dt, Date)

        if is_single_date:
            maturity_dts = [maturity_dt]
        else:
            maturity_dts = maturity_dt

        par_rates = []

        for maturity_dt in maturity_dts:

            if maturity_dt <= effective_dt:
                raise FinError("Maturity date is before the swap start date.")

            schedule = Schedule(effective_dt,
                                maturity_dt,
                                freq_type)

            flow_dts = schedule._generate()
            flow_dts[0] = effective_dt

            day_counter = DayCount(dc_type)
            prev_dt = flow_dts[0]
            pv01 = 0.0
            df = 1.0

            # Accumulate the pv01 (annuity) of the fixed leg
            for next_dt in flow_dts[1:]:
                df = self.df(next_dt)
                alpha = day_counter.year_frac(prev_dt, next_dt)[0]
                pv01 += alpha * df
                prev_dt = next_dt

            if abs(pv01) < gSmall:
                par_rate = 0.0
            else:
                # Par rate makes the fixed leg PV equal the float leg PV
                df_start = self.df(effective_dt)
                par_rate = (df_start - df) / pv01

            par_rates.append(par_rate)

        par_rates = np.array(par_rates)

        if is_single_date:
            return par_rates[0]
        else:
            return par_rates

    ###########################################################################

    def df(self,
           dt: (list, Date),
           day_count=DayCountTypes.ACT_ACT_ISDA):
        """ Function to calculate a discount factor from a date or a
        vector of dates. The day count determines how dates get converted to
        years. I allow this to default to ACT_ACT_ISDA unless specified. """

        times = times_from_dates(dt, self._value_dt, day_count)
        dfs = self._df(times)

        if isinstance(dfs, float):
            return dfs
        else:
            return np.array(dfs)

    ###########################################################################

    def _df(self,
            t: (float, np.ndarray)):
        """ Hidden function to calculate a discount factor from a time or a
        vector of times. Discourage usage in favour of passing in dates. """

        # These three schemes have a fast functional implementation; all
        # other schemes go through the fitted Interpolator object.
        if self._interp_type is InterpTypes.FLAT_FWD_RATES or \
                self._interp_type is InterpTypes.LINEAR_ZERO_RATES or \
                self._interp_type is InterpTypes.LINEAR_FWD_RATES:

            df = interpolate(t,
                             self._times,
                             self._dfs,
                             self._interp_type.value)
        else:
            df = self._interpolator.interpolate(t)

        return df

    ###########################################################################

    def survival_prob(self,
                      dt: Date):
        """ This returns a survival probability to a specified date based on
        the assumption that the continuously compounded rate is a default
        hazard rate in which case the survival probability is directly
        analogous to a discount factor. """

        q = self.df(dt)
        return q

    ###########################################################################

    def fwd(self,
            dts: Date):
        """ Calculate the continuously compounded forward rate at the forward
        Date provided. This is done by perturbing the time by one day only
        and measuring the change in the log of the discount factor divided by
        the time increment dt. I am assuming continuous compounding over the
        one date. """

        if isinstance(dts, Date):
            dtsPlusOneDays = [dts.add_days(1)]
        else:
            dtsPlusOneDays = []
            for dt in dts:
                dtsPlusOneDay = dt.add_days(1)
                dtsPlusOneDays.append(dtsPlusOneDay)

        df1 = self.df(dts)
        df2 = self.df(dtsPlusOneDays)

        # One-day bump expressed as a year fraction
        dt = 1.0 / gDaysInYear
        fwd = np.log(df1 / df2) / (1.0 * dt)

        if isinstance(dts, Date):
            return fwd[0]
        else:
            return np.array(fwd)

    ###########################################################################

    def _fwd(self,
             times: (np.ndarray, float)):
        """ Calculate the continuously compounded forward rate at the forward
        time provided. This is done by perturbing the time by a small amount
        and measuring the change in the log of the discount factor divided by
        the time increment dt. """

        dt = 1e-6
        # Floor times at dt so the backward bump never goes negative
        times = np.maximum(times, dt)

        # Central difference of log df for better accuracy
        df1 = self._df(times - dt)
        df2 = self._df(times + dt)
        fwd = np.log(df1 / df2) / (2.0 * dt)
        return fwd

    ###########################################################################

    def bump(self,
             bump_size: float):
        """ Adjust the continuously compounded forward rates by a perturbation
        upward equal to the bump size and return a curve object with this
        bumped curve. This is used for interest rate risk. """

        times = self._times.copy()
        values = self._dfs.copy()

        # df -> df * exp(-bump * t) shifts all cc zero rates up by bump_size
        n = len(self._times)
        for i in range(0, n):
            t = times[i]
            values[i] = values[i] * np.exp(-bump_size * t)

        discCurve = DiscountCurve(self._value_dt,
                                  times,
                                  values,
                                  self._interp_type)

        return discCurve

    ###########################################################################

    def fwd_rate(self,
                 start_dt: (list, Date),
                 date_or_tenor: (Date, str),
                 dc_type: DayCountTypes = DayCountTypes.ACT_360):
        """ Calculate the forward rate between two forward dates according to
        the specified day count convention. This defaults to Actual 360. The
        first date is specified and the second is given as a date or as a
        tenor which is added to the first date. Returns a scalar for a single
        start Date and a numpy array for a list of start dates. """

        if isinstance(start_dt, Date):
            start_dts = []
            start_dts.append(start_dt)
        elif isinstance(start_dt, list):
            start_dts = start_dt
        else:
            raise FinError("Start date and end date must be same types.")

        day_count = DayCount(dc_type)

        num_dts = len(start_dts)
        fwd_rates = []
        for i in range(0, num_dts):
            dt1 = start_dts[i]
            if isinstance(date_or_tenor, str):
                dt2 = dt1.add_tenor(date_or_tenor)
            elif isinstance(date_or_tenor, Date):
                dt2 = date_or_tenor
            elif isinstance(date_or_tenor, list):
                dt2 = date_or_tenor[i]
            else:
                # FIX: the original fell through with dt2 unbound, raising an
                # opaque UnboundLocalError for unsupported input types.
                raise FinError("Tenor must be a string, Date or list of Dates")

            year_frac = day_count.year_frac(dt1, dt2)[0]
            df1 = self.df(dt1)
            df2 = self.df(dt2)
            # Simple (money-market) forward rate over [dt1, dt2]
            fwd_rate = (df1 / df2 - 1.0) / year_frac
            fwd_rates.append(fwd_rate)

        if isinstance(start_dt, Date):
            return fwd_rates[0]
        else:
            return np.array(fwd_rates)

    ###########################################################################

    def __repr__(self):
        """ Tabular representation of the curve dates and discount factors. """

        s = label_to_string("OBJECT TYPE", type(self).__name__)
        num_points = len(self._df_dts)
        s += label_to_string("DATES", "DISCOUNT FACTORS")
        for i in range(0, num_points):
            s += label_to_string("%12s" % self._df_dts[i],
                                 "%12.8f" % self._dfs[i])

        return s

    ###########################################################################

    def _print(self):
        """ Simple print function for backward compatibility. """
        print(self)

###############################################################################