Skip to article frontmatterSkip to article content
Site not loading correctly?

This may be due to an incorrect BASE_URL configuration. See the MyST Documentation for reference.

Sizing a Steepener Trade

Using a 5s30s Steepener Trade as Example

Calculating it with PCA on OTR Yields (Fixed-Income Native approach)

Define the front leg (FV, 5-Year T-Note) and back leg (WN, Ultra Bond) futures contracts for the 5s30s steepener trade.

# Specific December-2025 contracts for the two legs of the steepener.
# NOTE(review): FV is the 5-Year T-Note future and WN the Ultra Bond future,
# consistent with the "5s30s" title — but the surrounding text calls the
# front leg "10Y"; confirm which is intended.
FRONT_LEG = "FVZ5 Comdty"
BACK_LEG = "WNZ5 Comdty"
# Generic (continuous front-month) tickers for the same two contracts.
FRONT_LEG_GENERIC = "FV1 Comdty"
BACK_LEG_GENERIC = "WN1 Comdty"

# Half-life in trading days for the EWM vol estimates used throughout.
HALF_LIFE = 75

Fetch cheapest-to-deliver (CTD) bond information including conversion factors, durations, and contract values.

Set up utility functions for PCA analysis and load US Treasury par curve data across all key rate durations.

# Align the par-curve columns to the KRD tenor grid, then fill interior
# gaps across maturities with a cubic-spline interpolation.
# NOTE(review): assumes Y_par's columns and KRD_BUCKET_TO_YEARS' values
# are expressed in the same units (years) — confirm upstream.
krd_tenor_grid = list(KRD_BUCKET_TO_YEARS.values())
Y_par = Y_par.reindex(krd_tenor_grid, axis=1)
Y_par = Y_par.interpolate(axis=1, method="cubicspline", limit_area="inside")

Calculate PCA-based risk model: compute key rate durations, run PCA on yield changes, and derive the DV01-neutral hedge ratio and expected volatility.

# ---------- Data on the futures ----------------
# ===============================================

# Bloomberg field that holds each future's cheapest-to-deliver (CTD) ticker.
ctd_identifier = "FUT_CTD_TICKER"
with BQuery() as bq:
    # Static reference data per leg: CTD ticker, conversion factor and
    # current contract value.  (The original field list contained a stray
    # empty-string field, removed here — it adds nothing and can make the
    # bdp request fail.)
    fut_df = (
        bq.bdp(
            [FRONT_LEG, BACK_LEG],
            [ctd_identifier, "FUT_CNVS_FACTOR", "CONTRACT_VALUE"],
        )
        .to_pandas()
        .set_index("security")
    )

    # Contract-value history of the generic front-month futures.
    # NOTE(review): this exact query is re-run and the variable overwritten
    # in a later cell; kept here for parity with the original notebook flow.
    future_history_generic_futures = (
        bq.bdh(
            [FRONT_LEG_GENERIC, BACK_LEG_GENERIC],
            ["CONTRACT_VALUE"],
            start_date=pd.Timestamp("2020-01-01"),
            end_date=pd.Timestamp.today(),
        )
        .to_pandas()
        .set_index(["security", "date"])
    )


# Map each futures ticker to its CTD bond's Bloomberg "Govt" ticker.
fut_to_ctd = dict(zip(fut_df.index, [f"{x} Govt" for x in fut_df[ctd_identifier]]))

Checks

Check of Durations and Key Rate Durations

# Check 1: the sum of key-rate DV01s per contract (CTD KR01s summed across
# tenors, scaled by the conversion factor) should match Bloomberg's own
# forward-risk number fetched below.
kr01_sum = krd_ctd_raw.sum(axis=1).div(CF_by_leg.values)  # sum across tenors
kr01_sum.index = [FRONT_LEG, BACK_LEG]
print(f"Front leg - Sum of KR01s: ${kr01_sum[FRONT_LEG]:.2f}/bp")
print(f"Back leg - Sum of KR01s: ${kr01_sum[BACK_LEG]:.2f}/bp")

# Check 2: Compare to Bloomberg's total risk field
with BQuery() as bq:
    bbg_dv01 = (
        bq.bdp([FRONT_LEG, BACK_LEG], ["CONVENTIONAL_CTD_FORWARD_FRSK"])
        .to_pandas()
        .set_index("security")
    ).squeeze()

# Labels fixed: these are Bloomberg's DV01s, not our KR01 sums.
print(f"Front leg - Bloomberg DV01: ${bbg_dv01[FRONT_LEG]:.2f}/bp")
print(f"Back leg - Bloomberg DV01: ${bbg_dv01[BACK_LEG]:.2f}/bp")

Backtests

Validate the PCA-predicted volatility by computing the realized P&L from historical yield changes multiplied by today’s KR01 sensitivities.

This isolates yield curve risk only (excluding futures basis risk and CTD switches) and validates whether our PCA decomposition accurately captures the covariance structure of yield changes - if the ratio is close to 1.0, it confirms the PCA model is well-calibrated.

# Backtest: apply today's fixed KR01 loadings to historical daily yield
# changes and compare the realized steepener-PnL vol to the PCA prediction.
Y_hist = Y_par
# Daily yield changes, converted from decimals to basis points.
dY_bp = Y_hist.diff().dropna().mul(1e4)

# Fixed per-tenor dollar sensitivities ($/bp) taken from the PCA section.
# These are held constant through the backtest (no CTD switches modeled).
K_front = KR01_FUT.loc[FRONT_LEG, tenors].values
K_back = KR01_FUT.loc[BACK_LEG, tenors].values

# DV01-neutral hedge ratio implied by each leg's total KR01.
h_fixed = K_front.sum() / K_back.sum()

# Per-day dollar PnL of each leg: sum over tenors of KR01_i * dY_i.
pnl_front = dY_bp.mul(K_front).sum(axis=1)
pnl_back = dY_bp.mul(K_back).sum(axis=1)

# Long front leg, short h_fixed units of the back leg.
pnl_steepener = pnl_front.sub(pnl_back.mul(h_fixed))

# Realized vol, daily and annualized (252 trading days).
sigma_realized_daily = pnl_steepener.std()
sigma_realized_annual = np.sqrt(252) * sigma_realized_daily

# sigma_daily is the PCA-predicted daily vol computed in an earlier cell.
print(f"PCA-predicted daily vol: ${sigma_daily:.2f}")
print(f"Realized daily vol (yields only): ${sigma_realized_daily:.2f}")
print(f"Ratio (realized/predicted): {sigma_realized_daily / sigma_daily:.3f}")

# Optional: 3-month (63-day) rolling annualized vol for stability checks.
rolling_vol = np.sqrt(252) * pnl_steepener.rolling(63).std()

Another Check: The actual dollar movement of the PNL:

Note: the PCA number should typically be lower than the historical realized number, since it excludes basis volatility and CTD-switch risk.

with BQuery() as bq:
    # Per-contract history: contract value, yield, and Bloomberg's
    # forward-risk (DV01) field for both specific contracts.
    future_history_actual_futures = (
        bq.bdh(
            [FRONT_LEG, BACK_LEG],
            ["CONTRACT_VALUE", "YLD_YTM_MID", "CONVENTIONAL_CTD_FORWARD_FRSK"],
            start_date=pd.Timestamp("2020-01-01"),
            end_date=pd.Timestamp.today(),
        )
        .to_pandas()
        .set_index(["security", "date"])
    )

# Pivot to (field, security) columns and forward-fill gaps.
future_history_actual_futures = future_history_actual_futures.unstack(
    "security"
).ffill()

print("\nAssume the correct 'hedge' ratio is applied")
print(40 * "=")
# Time-varying DV01 hedge ratio, lagged one day so the ratio is known
# before the PnL it is applied to.
risk = future_history_actual_futures.xs(
    "CONVENTIONAL_CTD_FORWARD_FRSK", level=0, axis=1
).dropna()
H = (risk[FRONT_LEG] / risk[BACK_LEG]).shift(1)

# Daily dollar change in contract value per leg; short the back leg
# scaled by the lagged hedge ratio.
PNL = (
    future_history_actual_futures.xs("CONTRACT_VALUE", level=0, axis=1).dropna().diff()
)
PNL[BACK_LEG] = -H * PNL[BACK_LEG]
PNL = PNL.dropna().sum(axis=1)
# NB: these are root-mean-square daily PnLs, i.e. a dollar volatility.
print("Average Daily PNL in dollars:", round(PNL.pow(2).mean() ** 0.5, 2))
print(
    "Average EWM Daily PNL in dollars:",
    round(PNL.pow(2).ewm(halflife=HALF_LIFE).mean().iloc[-1] ** 0.5),
)

print("\n")

print("Assume a fixed hedge ratio of 0.40")
print(40 * "=")
# Same PnL calculation with a constant 0.40 hedge ratio for comparison.
PNL = (
    future_history_actual_futures.xs("CONTRACT_VALUE", level=0, axis=1).dropna().diff()
)
PNL[BACK_LEG] = -0.40 * PNL[BACK_LEG]
PNL = PNL.dropna().sum(axis=1)
print("Average Daily PNL in dollars:", round(PNL.pow(2).mean() ** 0.5, 2))
print(
    "Average EWM Daily PNL in dollars:",
    round(PNL.pow(2).ewm(halflife=HALF_LIFE).mean().iloc[-1] ** 0.5),
)

Checks: Using the actual PNL from the generics

with BQuery() as bq:
    # Contract-value history for the generic (continuous front-month) tickers.
    future_history_generic_futures = (
        bq.bdh(
            [FRONT_LEG_GENERIC, BACK_LEG_GENERIC],
            ["CONTRACT_VALUE"],
            start_date=pd.Timestamp("2020-01-01"),
            end_date=pd.Timestamp.today(),
        )
        .to_pandas()
        .set_index(["security", "date"])
    )

# Pivot to (field, security) columns and forward-fill gaps.
future_history_generic_futures = future_history_generic_futures.unstack(
    "security"
).ffill()

Using Contract Value (Issues with rolls)

# Fixed 0.40 hedge ratio on the generics' raw contract values.
# NOTE(review): raw generic contract values jump on roll dates, so this
# PnL includes spurious roll gaps (addressed in the next cell).
PNL = (
    future_history_generic_futures.xs("CONTRACT_VALUE", level=0, axis=1).dropna().diff()
)
PNL[BACK_LEG_GENERIC] = -0.40 * PNL[BACK_LEG_GENERIC]
PNL = PNL.dropna().sum(axis=1)
# RMS daily PnL — a dollar volatility, despite the "Average" label.
print("Average Daily PNL in dollars:", round(PNL.pow(2).mean() ** 0.5, 2))
print(
    "Average EWM Daily PNL in dollars:",
    round(PNL.pow(2).ewm(halflife=HALF_LIFE).mean().iloc[-1] ** 0.5),
)

With Roll Adjustment to Prices (should account for the roll)

Note: consider reusing this roll-adjustment logic inside `calculate_excess_returns`.

with BQuery() as bq:
    # Price and contract-value history for the generic tickers.
    future_history_generic_futures = (
        bq.bdh(
            [FRONT_LEG_GENERIC, BACK_LEG_GENERIC],
            ["PX_LAST", "CONTRACT_VALUE"],
            start_date=pd.Timestamp("2020-01-01"),
            end_date=pd.Timestamp.today(),
        )
        .to_pandas()
        .set_index(["security", "date"])
    )

    # Fixed contract size per generic ticker, for converting price to dollars.
    cont_size = (
        bq.bdp(
            [FRONT_LEG_GENERIC, BACK_LEG_GENERIC],
            ["FUT_CONT_SIZE"],
        )
        .to_pandas()
        .set_index("security")["FUT_CONT_SIZE"]
    )

# Pivot to (field, security) columns and forward-fill gaps.
future_history_generic_futures = future_history_generic_futures.unstack(
    "security"
).ffill()
# Rebuild a dollar contract value from price * size / 100.
# NOTE(review): presumably PX_LAST on the generics is the roll-adjusted
# series, which is what makes this PnL free of roll gaps — confirm the
# ticker settings.
px = future_history_generic_futures.xs("PX_LAST", level=0, axis=1)
roll_adj = px * cont_size / 100
# Daily PnL with the fixed 0.40 hedge ratio, back leg short.
PNL = roll_adj.dropna().diff()
PNL[BACK_LEG_GENERIC] = -0.40 * PNL[BACK_LEG_GENERIC]
PNL = PNL.dropna().sum(axis=1)
# RMS daily PnL — a dollar volatility, despite the "Average" label.
print("Average Daily PNL in dollars:", round(PNL.pow(2).mean() ** 0.5, 2))
print(
    "Average EWM Daily PNL in dollars:",
    round(PNL.pow(2).ewm(halflife=HALF_LIFE).mean().iloc[-1] ** 0.5),
)

Brian’s check (a bit above)

# Quick cross-check using on-the-run 10Y and 30Y Treasury yields.
with BQuery() as bq:
    history = (
        bq.bdh(
            ["GT10 Govt", "GT30 Govt"],  # , "GT20 Govt"
            ["YLD_YTM_MID"],  # "PX_LAST",
            start_date=pd.Timestamp("2020-01-01"),
            end_date=pd.Timestamp.today(),
        )
        .to_pandas()
        .set_index(["security", "date"])
        .squeeze()
        .unstack(level=0)
    )


# NOTE(review): despite the name, this is the spread LEVEL (10Y minus 30Y
# yield), not a daily change — a .diff() looks like it may be missing here;
# confirm the intent before trusting the numbers below.
spread_chg = history.loc[:, "GT10 Govt"] - history.loc[:, "GT30 Govt"]
spread_chg_bps = spread_chg * 100  # percent -> basis points
# EWM root-mean-square of the series (computed but neither stored nor printed).
spread_chg_bps.pow(2).ewm(halflife=HALF_LIFE).mean().iloc[-1] ** 0.5
# moves 24 bps daily
# NOTE(review): ctd_data / FUT_CNV_RISK_FRSK come from an earlier, unseen
# cell; this scales the spread std by the future's conversion risk.
print(
    "Avg Daily Move",
    ctd_data.to_pandas()["FUT_CNV_RISK_FRSK"][0] * spread_chg_bps.std(),
)

How the risk system would account for this position:

# Replicate how the internal risk system would measure this position.
from tulip.risk.models.cov_estimators import *

# (volatility half-life, correlation half-life) in days for the fast and
# slow EWM covariance estimators.
kate_fast = (15, 15 * 2)
kate_slow = (126, 126 * 2)

# Excess-return history for the two generic futures (project helper).
excess_return = calculate_excess_returns([FRONT_LEG_GENERIC, BACK_LEG_GENERIC])
# CONTRACT_VAL_by_leg and the hedge ratio h are defined in earlier cells.
CONTRACT_VAL_front, CONTRACT_VAL_back = CONTRACT_VAL_by_leg.to_list()
# Use GROSS notional as imaginary NAV
NAV = abs(CONTRACT_VAL_front) + abs(h * CONTRACT_VAL_back)
# Weights (now both will be ~0.5 in magnitude)
weights = np.array(
    [
        CONTRACT_VAL_front / NAV,  # ~0.48
        -h * CONTRACT_VAL_back / NAV,
    ]
)  # ~-0.52

# Fast
kate_ewm_fast_cov = ewm_covariance(
    ex_ret=excess_return,
    hl_vola=kate_fast[0],
    hl_corr=kate_fast[1],
    give_last_date=True,
)

# NOTE(review): despite the "_ann" name this is a DAILY covariance — the
# estimator returns an annualized matrix and the /252 de-annualizes it
# (the "Annualized vol" print below multiplies the 252 back in).
cov_ann_fast = (kate_ewm_fast_cov["cov"]) / 252  # It comes already annualized

# Checks
print(f"Covariance diagonal (variance) front leg: {cov_ann_fast.iloc[0, 0]:.6f}")
print(f"Daily std dev front leg: {np.sqrt(cov_ann_fast.iloc[0, 0]):.6f}")
print(f"Annualized vol: {np.sqrt(cov_ann_fast.iloc[0, 0] * 252):.4f}")

print(f"Weight front: {weights[0]:.4f}")
print(f"Weight back: {weights[1]:.4f}")
print(f"Sum of abs weights: {abs(weights).sum():.4f}")  # = 1.0

# Calculate risk
# Daily portfolio vol in weight space, scaled to dollars by the gross NAV.
portfolio_vol = np.sqrt(weights @ cov_ann_fast @ weights.T)
dollar_vol = portfolio_vol * NAV
print(f"Portfolio daily vol ($): {dollar_vol:.2f}")
# Slow
# Same computation with the slow half-lives; could be factored into a
# helper, left inline to preserve the notebook's cell-by-cell output.
kate_ewm_slow_cov = ewm_covariance(
    ex_ret=excess_return,
    hl_vola=kate_slow[0],
    hl_corr=kate_slow[1],
    give_last_date=True,
)

cov_ann_slow = (kate_ewm_slow_cov["cov"]) / 252  # It comes already annualized

# Checks
print(f"Covariance diagonal (variance) front leg: {cov_ann_slow.iloc[0, 0]:.6f}")
print(f"Daily std dev front leg: {np.sqrt(cov_ann_slow.iloc[0, 0]):.6f}")
print(f"Annualized vol: {np.sqrt(cov_ann_slow.iloc[0, 0] * 252):.4f}")

print(f"Weight front: {weights[0]:.4f}")
print(f"Weight back: {weights[1]:.4f}")
print(f"Sum of abs weights: {abs(weights).sum():.4f}")  # = 1.0

# Calculate risk
portfolio_vol_slow = np.sqrt(weights @ cov_ann_slow @ weights.T)
dollar_vol_slow = portfolio_vol_slow * NAV
print(f"Portfolio daily vol ($): {dollar_vol_slow:.2f}")
# Final estimate: midpoint of the fast and slow daily dollar vols.
midpoint_daily_vol = (dollar_vol_slow + dollar_vol) / 2
print(f"Midpoint daily vol ($): {midpoint_daily_vol:.2f}")