diff --git a/src/ActuaryUtilities.jl b/src/ActuaryUtilities.jl
index 2bf253c..5d212fc 100644
--- a/src/ActuaryUtilities.jl
+++ b/src/ActuaryUtilities.jl
@@ -14,9 +14,9 @@ import Distributions
 # need to define this here to extend it without conflict inside FinancialMath
 function duration() end
 
+include("utilities.jl")
 include("financial_math.jl")
 include("risk_measures.jl")
-include("utilities.jl")
diff --git a/src/financial_math.jl b/src/financial_math.jl
index a35c370..fa43b24 100644
--- a/src/financial_math.jl
+++ b/src/financial_math.jl
@@ -4,6 +4,7 @@ import ..FinanceCore
 import ..FinanceModels
 import ..ForwardDiff
 import ..ActuaryUtilities: duration
+import ..ActuaryUtilities.Utilities: _segment_reals
 
 export irr, internal_rate_of_return, spread,
     pv, present_value, price, present_values,
@@ -250,6 +251,9 @@ function duration(yield, cfs)
     times = FinanceCore.timepoint.(cfs, 1:length(cfs))
     return duration(Modified(), yield, cfs, times)
 end
+function duration(yield, cf::FinanceCore.Cashflow)
+    return duration(Modified(), yield, [cf.amount], [cf.time])
+end
 
 function duration(::DV01, yield, cfs, times)
     return duration(DV01(), yield, i -> price(i, vec(cfs), times))
@@ -421,6 +425,131 @@ function duration(keyrate::KeyRateDuration, curve, cashflows)
 end
+
+"""
+    _residual_duration(curve, cashflows, time)
+
+Return the residual duration for cashflows occurring at or after `time`, weighted by their proportional contribution to the total present value.
+
+This measure decomposes overall portfolio duration by attributing the "remaining" duration to cashflows beyond a given horizon. It is useful for cash flow duration contribution analysis, which focuses on how each cashflow's timing impacts the overall duration rather than isolating sensitivities at specific curve points.
+"""
+function _residual_duration(curve, cashflows, time)
+    fcf = filter(c -> c.time >= time, cashflows)
+    if isempty(fcf)
+        return zero(first(cashflows).amount)
+    else
+        d = duration(curve, fcf)
+        d * pv(curve, fcf) / pv(curve, cashflows)
+    end
+end
+
+"""
+    _duration_cf(curve, cashflows)
+
+For each cashflow in `cashflows`, compute its partial duration contribution. Each cashflow's duration is weighted by its proportion of the aggregate present value so that the sum of partial durations equals the overall portfolio duration.
+
+This function forms the basis for the cash flow duration contribution analysis, breaking down the overall duration into weighted pieces assigned to each cashflow.
+"""
+function _duration_cf(curve, cashflows)
+    p = FinanceCore.pv(curve, cashflows)
+    map(cashflows) do cf
+        d = duration(curve, cf)
+        p_i = FinanceCore.pv(curve, cf)
+        (partial_duration=d * p_i / p, time=cf.time)
+    end
+end
+
+abstract type WeightShape end
+struct Triangular <: WeightShape end
+struct Rectangular <: WeightShape end
+
+"""
+    duration_contributions(curve, cashflows, points, ::Rectangular)
+
+Calculate the cash flow duration contributions segmented by bands defined from `points`
+using a rectangular (uniform) weighting scheme. In each band, every cashflow whose time
+falls in the band's half-open interval `[low, high)` is given full weight, meaning its
+partial duration contribution is applied in full.
+
+The bands are determined using `_segment_reals`, which returns a named tuple for each band
+with the fields `low`, `high`, and `point` (the central reference).
+
+This function decomposes the overall portfolio duration into contributions from each band,
+facilitating an analysis of how cashflows at different maturities contribute to total duration.
+"""
+function duration_contributions(curve, cashflows, points, ::Rectangular)
+    dcf = _duration_cf(curve, cashflows)
+    bands = _segment_reals(points)
+
+    map(bands) do band
+        low, high = band.low, band.high
+
+        # Sum partial durations for cashflows within the band (an empty band contributes zero)
+        krd = sum((c.partial_duration for c in dcf if (c.time >= low) && (c.time < high)); init=0.0)
+
+        # Return the band and its corresponding KRD
+        (; band=band, krd=krd)
+    end
+end
+
+"""
+    duration_contributions(curve, cashflows, points, ::Triangular)
+
+Calculate the cash flow duration contributions segmented by bands defined from `points`
+using a triangular (linearly graded) weighting scheme. Within each band, cashflows are
+weighted by their proximity to the band's central point:
+
+  - In a middle band, weights increase linearly from the band's lower bound to the central
+    point and then decrease linearly from the central point to the band's upper bound.
+  - In the first band (where `low == -Inf`), cashflows at or before the central point receive
+    full weight; weights for cashflows after the central point decrease linearly toward the
+    upper bound.
+  - In the last band (where `high == Inf`), cashflows at or after the central point receive
+    full weight; weights for cashflows before the central point increase linearly from the
+    lower bound.
+
+The bands are determined via `_segment_reals`, which returns each band as a named tuple with
+`low`, `high`, and `point`. This function provides a refined breakdown of overall duration by
+assigning differentiated weights to cashflows according to their timing relative to the band's center.
+""" +function duration_contributions(curve, cashflows, points, ::Triangular) + dcf = _duration_cf(curve, cashflows) + bands = _segment_reals(points) + + map(bands) do band + low, high, point = band.low, band.high, band.point + krd = 0.0 + isfirst = band == first(bands) + islast = band == last(bands) + + for c in dcf + if c.time >= low && c.time < high + # Calculate weights based on proximity to the central point + if c.time <= point + weight = if isfirst + 1 + else + max(0, (c.time - low) / (point - low)) + end + else + if islast + 1 + else + weight = max(0, (high - c.time) / (high - point)) + end + end + krd += c.partial_duration * weight + end + end + + (; band=band, krd=krd) + end +end + + + + + """ spread(curve1,curve2,cashflows) diff --git a/src/utilities.jl b/src/utilities.jl index a9c6e52..13dd8bd 100644 --- a/src/utilities.jl +++ b/src/utilities.jl @@ -132,4 +132,32 @@ function accum_offset(x; op=*, init=1.0) end return xnew end + +function _segment_reals(central_points) + length(central_points) == 1 && return [(low=-Inf, high=Inf, point=only(central_points))] + # Sort central points to ensure they are in ascending order + sorted_points = unique(sort(central_points)) + + # Create bands + bounds = map(enumerate(sorted_points)) do (i, point) + if i == 1 + # First band: from 0 to the midpoint between the first two points + low = -Inf + high = (sorted_points[i] + sorted_points[i+1]) / 2.0 + elseif i == length(sorted_points) + # Last band: from the midpoint of the last two points to infinity + low = (sorted_points[i-1] + sorted_points[i]) / 2.0 + high = Inf + else + # Middle bands: between midpoints of adjacent points + low = (sorted_points[i-1] + sorted_points[i]) / 2.0 + high = (sorted_points[i] + sorted_points[i+1]) / 2.0 + end + + (; low, high, point) + end + + return bounds +end + end \ No newline at end of file diff --git a/test/runtests.jl b/test/runtests.jl index 8d73e16..025f5a5 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -319,4 +319,91 @@ end s = spread(y, y2, cfs) @test s ≈ FC.Periodic(0.01, 1) atol = 0.002 -end \ No newline at end of file +end + +@testset "segmenting times" begin + u = ActuaryUtilities.Utilities + + # Three central points + central_points = [1.0, 3.0, 5.0] + expected_output = [ + (low=-Inf, high=2.0, point=1.0), + (low=2.0, high=4.0, point=3.0), + (low=4.0, high=Inf, point=5.0) + ] + @test u._segment_reals(central_points) == expected_output + + # Single central point + central_points = [2.0] + expected_output = [ + (low=-Inf, high=Inf, point=2.0) + ] + @test u._segment_reals(central_points) == expected_output + + # Two central points + central_points = [2.0, 4.0] + expected_output = [ + (low=-Inf, high=3.0, point=2.0), + (low=3.0, high=Inf, point=4.0) + ] + @test u._segment_reals(central_points) == expected_output + + # Descending order of central points + central_points = [5.0, 3.0, 1.0] + expected_output = [ + (low=-Inf, high=2.0, point=1.0), + (low=2.0, high=4.0, point=3.0), + (low=4.0, high=Inf, point=5.0) + ] # Sorted internally + @test u._segment_reals(central_points) == expected_output + + # Central points with duplicates + central_points = [1.0, 3.0, 3.0, 5.0] + expected_output = [ + (low=-Inf, high=2.0, point=1.0), + (low=2.0, high=4.0, point=3.0), + (low=4.0, high=Inf, point=5.0) + ] + @test u._segment_reals(central_points) == expected_output + + # Evenly spaced points + central_points = collect(1:10) # [1, 2, ..., 10] + expected_output = vcat( + [(low=-Inf, high=1.5, point=1)], + [(low=i - 0.5, high=i + 0.5, point=i) for i in 2:9]..., 
+        [(low=9.5, high=Inf, point=10)]
+    )
+    @test u._segment_reals(central_points) == expected_output
+
+    # Large values in central points
+    central_points = [1e6, 1e7]
+    expected_output = [
+        (low=-Inf, high=5.5e6, point=1e6),
+        (low=5.5e6, high=Inf, point=1e7)
+    ]
+    @test u._segment_reals(central_points) == expected_output
+
+    # Small values in central points
+    central_points = [1e-6, 1e-3]
+    midpoint = (1e-6 + 1e-3) / 2
+    expected_output = [
+        (low=-Inf, high=midpoint, point=1e-6),
+        (low=midpoint, high=Inf, point=1e-3)
+    ]
+    @test u._segment_reals(central_points) == expected_output
+
+    # Empty list of central points
+    central_points = []
+    expected_output = [] # No bands can be created from an empty list
+    @test isempty(u._segment_reals(central_points))
+
+    # Negative values in input
+    central_points = [-2.0, -1.0]
+    expected_output = [
+        (low=-Inf, high=-1.5, point=-2.0),
+        (low=-1.5, high=Inf, point=-1.0)
+    ]
+    @test u._segment_reals(central_points) == expected_output
+end
+
+# Test KRDs sum up to total KRD
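+# A minimal sketch of that check, assuming the (unexported) FinancialMath submodule,
+# `duration_contributions`, and `Rectangular` are reachable as below, and that vectors
+# of `FC.Cashflow` work through `duration` and `FinanceCore.pv`; the yield, amounts,
+# and `points` here are illustrative only. With rectangular weighting every cashflow
+# falls in exactly one band, so the band contributions should sum to the portfolio's
+# modified duration.
+@testset "duration contributions sum to total duration" begin
+    FM = ActuaryUtilities.FinancialMath
+    curve = 0.05
+    cfs = [FC.Cashflow(10.0, t) for t in 1.0:10.0]
+    points = [2.0, 5.0, 8.0]
+
+    total = duration(curve, cfs)
+    rect = FM.duration_contributions(curve, cfs, points, FM.Rectangular())
+    @test sum(b.krd for b in rect) ≈ total
+end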