@@ -3,8 +3,10 @@ jupytext:
   text_representation:
     extension: .md
     format_name: myst
+    format_version: 0.13
+    jupytext_version: 1.16.4
 kernelspec:
-  display_name: Python 3
+  display_name: Python 3 (ipykernel)
   language: python
   name: python3
 ---
@@ -79,15 +81,13 @@ adjust investors' subjective beliefs about mean returns in order to render more
 
 Let's start with some imports:
 
-```{code-cell} ipython
+```{code-cell} ipython3
 import numpy as np
 import scipy.stats as stat
 import matplotlib.pyplot as plt
-from ipywidgets import interact, FloatSlider
+from numba import jit
 ```
 
-
-
 ## Mean-Variance Portfolio Choice
 
 A risk-free security earns one-period net return $r_f$.
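
For reference, the portfolio rule that the cells below implement is the mean-variance first-order condition $w = (\delta \Sigma)^{-1} \mu$, the same pattern as `w_tilde = np.linalg.solve(δ * Σ_est, μ_tilde)` further down in this diff. A minimal sketch with toy numbers (the values here are illustrative, not from the lecture):

```python
import numpy as np

δ = 2.0                       # illustrative risk-aversion parameter
μ = np.array([0.03, 0.05])    # toy mean excess returns
Σ = np.array([[0.04, 0.01],
              [0.01, 0.09]])  # toy covariance matrix

# Mean-variance optimal risky-asset weights: solve (δ Σ) w = μ
w = np.linalg.solve(δ * Σ, μ)
print(w)
```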
@@ -169,7 +169,7 @@ $w$'s with **extreme long and short positions**.
 A common reaction to these outcomes is that they are so implausible that a portfolio
 manager cannot recommend them to a customer.
 
-```{code-cell} python3
+```{code-cell} ipython3
 np.random.seed(12)
 
 N = 10  # Number of assets
@@ -300,7 +300,7 @@ The starting point of the Black-Litterman portfolio choice model is thus
 a pair $(\delta_m, \mu_m)$ that tells the customer to hold the
 market portfolio.
 
-```{code-cell} python3
+```{code-cell} ipython3
 # Observed mean excess market return
 r_m = w_m @ μ_est
 
@@ -316,11 +316,12 @@ d_m = r_m / σ_m
 # Derive "view" which would induce the market portfolio
 μ_m = (d_m * Σ_est @ w_m).reshape(N, 1)
 
+x = np.arange(N) + 1
 fig, ax = plt.subplots(figsize=(8, 5))
 ax.set_title(r'Difference between $\hat{\mu}$ (estimate) and $\mu_{BL}$ (market implied)')
-ax.plot(np.arange(N)+1, μ_est, 'o', c='k', label='$\hat{\mu}$')
-ax.plot(np.arange(N)+1, μ_m, 'o', c='r', label='$\mu_{BL}$')
-ax.vlines(np.arange(N)+1, μ_m, μ_est, lw=1)
+ax.plot(x, μ_est, 'o', c='k', label='$\hat{\mu}$')
+ax.plot(x, μ_m, 'o', c='r', label='$\mu_{BL}$')
+ax.vlines(x, μ_m, μ_est, lw=1)
 ax.axhline(0, c='k', ls='--')
 ax.set_xlabel('Assets')
 ax.xaxis.set_ticks(np.arange(1, N+1, 1))
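
Because $\mu_m$ is constructed as $d_m \Sigma_{est} w_m$, inverting the first-order condition recovers the market weights exactly. A quick sanity check that can be run after the cell above, reusing its arrays:

```python
# Round-trip check: μ_m is built so that w_m is optimal at risk aversion d_m,
# so solving (d_m Σ_est) w = μ_m must return w_m up to floating-point error.
w_check = np.linalg.solve(d_m * Σ_est, μ_m).flatten()
print(np.allclose(w_check, w_m))   # True by construction
```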
@@ -384,7 +385,7 @@ If $\hat \mu$ is the maximum likelihood estimator
 and $\tau$ is chosen to weight this view heavily, then the
 customer's portfolio will involve big short-long positions.
 
-```{code-cell} python3
+```{code-cell} ipython3
 def black_litterman(λ, μ1, μ2, Σ1, Σ2):
     """
     This function calculates the Black-Litterman mixture
@@ -402,10 +403,9 @@ def black_litterman(λ, μ1, μ2, Σ1, Σ2):
 
 # The Black-Litterman recommendation for the portfolio weights
 w_tilde = np.linalg.solve(δ * Σ_est, μ_tilde)
+```
 
-τ_slider = FloatSlider(min=0.05, max=10, step=0.5, value=τ)
-
-@interact(τ=τ_slider)
+```{code-cell} ipython3
 def BL_plot(τ):
     μ_tilde = black_litterman(1, μ_m, μ_est, Σ_est, τ * Σ_est)
     w_tilde = np.linalg.solve(δ * Σ_est, μ_tilde)
@@ -439,6 +439,8 @@ def BL_plot(τ):
     ax[1].xaxis.set_ticks(np.arange(1, N+1, 1))
     ax[1].legend(numpoints=1)
     plt.show()
+
+BL_plot(τ)
 ```
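
Dropping the slider lets the notebook execute non-interactively in the docs build; readers who want the interactive version locally can wrap the same plotting function with the API deleted above (this requires `ipywidgets`, which the lecture no longer imports):

```python
# Optional, local-only: restore the interactivity removed in this change
from ipywidgets import interact, FloatSlider

τ_slider = FloatSlider(min=0.05, max=10, step=0.5, value=τ)
interact(BL_plot, τ=τ_slider)   # equivalent to the deleted @interact form
```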
 
 ## Bayesian Interpretation
@@ -607,7 +609,7 @@ $\bar d_2$ on the RHS of the constraint, by varying
 $\bar d_2$ (or $\lambda$), we can trace out the whole curve
 as the figure below illustrates.
 
-```{code-cell} python3
+```{code-cell} ipython3
 np.random.seed(1987102)
 
 N = 2  # Number of assets
@@ -641,21 +643,18 @@ r2 = np.linspace(-0.02, .15, N_r2)
 curve = np.asarray([black_litterman(λ, μ_m, μ_est, Σ_est,
                                     τ * Σ_est).flatten() for λ in λ_grid])
 
-λ_slider = FloatSlider(min=.1, max=7, step=.5, value=1)
+λ = 1
+```
 
-@interact(λ=λ_slider)
+```{code-cell} ipython3
 def decolletage(λ):
     dist_r_BL = stat.multivariate_normal(μ_m.squeeze(), Σ_est)
     dist_r_hat = stat.multivariate_normal(μ_est.squeeze(), τ * Σ_est)
 
     X, Y = np.meshgrid(r1, r2)
-    Z_BL = np.zeros((N_r1, N_r2))
-    Z_hat = np.zeros((N_r1, N_r2))
-
-    for i in range(N_r1):
-        for j in range(N_r2):
-            Z_BL[i, j] = dist_r_BL.pdf(np.hstack([X[i, j], Y[i, j]]))
-            Z_hat[i, j] = dist_r_hat.pdf(np.hstack([X[i, j], Y[i, j]]))
+    XY = np.stack((X, Y), axis=-1)
+    Z_BL = dist_r_BL.pdf(XY)
+    Z_hat = dist_r_hat.pdf(XY)
 
     μ_tilde = black_litterman(λ, μ_m, μ_est, Σ_est, τ * Σ_est).flatten()
 
@@ -676,6 +675,8 @@ def decolletage(λ):
     ax.text(μ_est[0] + 0.003, μ_est[1], r'$\hat{\mu}$')
     ax.text(μ_m[0] + 0.003, μ_m[1] + 0.005, r'$\mu_{BL}$')
     plt.show()
+
+decolletage(λ)
 ```
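
The vectorized replacement works because `scipy.stats.multivariate_normal.pdf` evaluates the density at every point of an array whose last axis holds the coordinates. A small self-contained check that it matches the deleted double loop:

```python
import numpy as np
import scipy.stats as stat

dist = stat.multivariate_normal([0., 0.], np.eye(2))
X, Y = np.meshgrid(np.linspace(-1, 1, 4), np.linspace(-1, 1, 4))
XY = np.stack((X, Y), axis=-1)          # shape (4, 4, 2); last axis = coords

Z_vec = dist.pdf(XY)                    # vectorized form used above
Z_loop = np.array([[dist.pdf([X[i, j], Y[i, j]]) for j in range(4)]
                   for i in range(4)])  # the deleted loop, in miniature
print(np.allclose(Z_vec, Z_loop))       # True
```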
 
 Note that the line that connects the two points
@@ -692,26 +693,22 @@ This leads to the
 following figure, on which the curve connecting $\hat \mu$
 and $\mu_{BL}$ is bending
 
-```{code-cell} python3
+```{code-cell} ipython3
 λ_grid = np.linspace(.001, 20000, 1000)
 curve = np.asarray([black_litterman(λ, μ_m, μ_est, Σ_est,
                                     τ * np.eye(N)).flatten() for λ in λ_grid])
+λ = 200
+```
 
-λ_slider = FloatSlider(min=5, max=1500, step=100, value=200)
-
-@interact(λ=λ_slider)
+```{code-cell} ipython3
 def decolletage(λ):
     dist_r_BL = stat.multivariate_normal(μ_m.squeeze(), Σ_est)
     dist_r_hat = stat.multivariate_normal(μ_est.squeeze(), τ * np.eye(N))
 
     X, Y = np.meshgrid(r1, r2)
-    Z_BL = np.zeros((N_r1, N_r2))
-    Z_hat = np.zeros((N_r1, N_r2))
-
-    for i in range(N_r1):
-        for j in range(N_r2):
-            Z_BL[i, j] = dist_r_BL.pdf(np.hstack([X[i, j], Y[i, j]]))
-            Z_hat[i, j] = dist_r_hat.pdf(np.hstack([X[i, j], Y[i, j]]))
+    XY = np.stack((X, Y), axis=-1)
+    Z_BL = dist_r_BL.pdf(XY)
+    Z_hat = dist_r_hat.pdf(XY)
 
     μ_tilde = black_litterman(λ, μ_m, μ_est, Σ_est, τ * np.eye(N)).flatten()
 
@@ -733,6 +730,8 @@ def decolletage(λ):
     ax.text(μ_est[0] + 0.003, μ_est[1], r'$\hat{\mu}$')
     ax.text(μ_m[0] + 0.003, μ_m[1] + 0.005, r'$\mu_{BL}$')
     plt.show()
+
+decolletage(λ)
 ```
 
 ## Black-Litterman Recommendation as Regularization
@@ -1247,7 +1246,7 @@ observations is related to the sampling frequency
 
 - Moreover, for a fixed lag length, $n$, the dependence vanishes as the time between observations, $h$, grows. In fact, letting $h$ go to $\infty$ gives back the case of IID data.
 
-```{code-cell} python3
+```{code-cell} ipython3
 μ = .0
 κ = .1
 σ = .5
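
To make the bullet above concrete: the exactly discretized process is an AR(1) with persistence $\rho = e^{-\kappa h}$ (as `sample_generator` in the next hunk makes explicit), so the lag-$n$ autocorrelation is $\rho^n = e^{-\kappa n h}$, which dies out as $h$ grows. A minimal check using the lecture's $\kappa$:

```python
import numpy as np

κ = .1
for h in (0.1, 1.0, 10.0, 80.0):
    # lag-1 autocorrelation at sampling gap h; tends to 0 as h grows (IID limit)
    print(f"h = {h:5.1f}:  ρ = {np.exp(-κ * h):.4f}")
```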
@@ -1346,7 +1345,8 @@ thus getting an idea about how the asymptotic relative MSEs change with
 the sampling frequency $h$, relative to the IID case that we
 compute in closed form.
 
-```{code-cell} python3
+```{code-cell} ipython3
+@jit
 def sample_generator(h, N, M):
     ϕ = (1 - np.exp(-κ * h)) * μ
     ρ = np.exp(-κ * h)
@@ -1355,18 +1355,18 @@ def sample_generator(h, N, M):
     mean_uncond = μ
     std_uncond = np.sqrt(σ**2 / (2 * κ))
 
-    ε_path = stat.norm(0, np.sqrt(s)).rvs((M, N))
+    ε_path = np.random.normal(0, np.sqrt(s), (M, N))
 
     y_path = np.zeros((M, N + 1))
-    y_path[:, 0] = stat.norm(mean_uncond, std_uncond).rvs(M)
+    y_path[:, 0] = np.random.normal(mean_uncond, std_uncond, M)
 
     for i in range(N):
         y_path[:, i + 1] = ϕ + ρ * y_path[:, i] + ε_path[:, i]
 
     return y_path
 ```
 
-```{code-cell} python3
+```{code-cell} ipython3
 # Generate large sample for different frequencies
 N_app, M_app = 1000, 30000  # Sample size, number of simulations
 h_grid = np.linspace(.1, 80, 30)
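
For reference, one call to the now-jitted generator produces `M` simulated paths of length `N + 1` at sampling gap `h`; switching the draws from `scipy.stats` to `np.random` is presumably what allows the `@jit` decoration, since numba compiles `np.random` calls but not `scipy.stats` (the first call also pays the compilation cost). A small usage sketch with toy sizes:

```python
# Toy sizes, for illustration only: 50 paths, each with 101 observations
paths = sample_generator(10.0, 100, 50)   # h=10.0, N=100, M=50
print(paths.shape)                        # (50, 101)
```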