
Commit 9657433

Minor Fixes to Calvo Machine Learning Lecture (#168)
* fix a minor tag
* remove redundant parameters
1 parent 38da9fb commit 9657433

File tree

1 file changed: +13 additions, -13 deletions


lectures/calvo_machine_learn.md

Lines changed: 13 additions & 13 deletions
````diff
@@ -414,7 +414,7 @@ First, because we'll want to compare the results we obtain here with those obtai
 We hide the cell that copies the class, but readers can find details of the class in this quantecon lecture {doc}`calvo`.
 
 ```{code-cell} ipython3
-:tags: [hide-output]
+:tags: [hide-input]
 
 class ChangLQ:
     """
````
````diff
@@ -558,14 +558,20 @@ def compute_θ(μ, α=1):
     θ = jnp.append(θ, μbar)
 
     return θ
+
+@jit
+def compute_hs(u0, u1, u2, α):
+    h0 = u0
+    h1 = -u1 * α
+    h2 = -0.5 * u2 * α**2
+
+    return h0, h1, h2
 
 @jit
 def compute_V(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     θ = compute_θ(μ, α)
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     T = len(μ) - 1
     t = np.arange(T)
````
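This hunk is the core of the cleanup: the mapping from the utility parameters `u0`, `u1`, `u2` and `α` to the coefficients `h0`, `h1`, `h2` now lives in a single `@jit`-compiled helper instead of being repeated inline in `compute_V`, `compute_J`, `compute_μ`, and `compute_grad`. A minimal, self-contained sketch of the helper, assuming the lecture's `@jit` refers to `jax.jit`:

```python
# Standalone sketch of the helper introduced in this commit; not the full lecture code.
from jax import jit

@jit
def compute_hs(u0, u1, u2, α):
    h0 = u0
    h1 = -u1 * α
    h2 = -0.5 * u2 * α**2
    return h0, h1, h2

# With the lecture's defaults u0=1, u1=0.5, u2=3 and α=1 this gives
# (1.0, -0.5, -1.5), the same values the removed inline lines produced.
print(compute_hs(1.0, 0.5, 3.0, 1.0))
```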
````diff
@@ -890,9 +896,7 @@ With the more structured approach, we can update our gradient descent exercise w
 def compute_J(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     T = len(μ) - 1
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
     λ = α / (1 + α)
 
     _, B = construct_B(α, T+1)
````
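The hunk header refers to the lecture's gradient-descent exercise built around the criterion returned by `compute_J`. As a generic illustration only (the optimizer and the actual criterion are outside this diff), here is a sketch of gradient ascent with `jax.grad` on a stand-in objective:

```python
# Illustrative only: gradient ascent with jax.grad on a placeholder objective,
# not the lecture's compute_J.
import jax
import jax.numpy as jnp

def toy_objective(μ):
    # stand-in concave objective in the policy vector μ
    return -jnp.sum((μ - 0.05)**2)

grad_fn = jax.grad(toy_objective)

μ = jnp.zeros(5)   # initial guess for the policy sequence
lr = 0.1           # step size
for _ in range(200):
    μ = μ + lr * grad_fn(μ)   # ascend the objective

print(μ)  # each component approaches 0.05
```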
````diff
@@ -944,9 +948,7 @@ We can also derive a closed-form solution for $\vec \mu$
 
 ```{code-cell} ipython3
 def compute_μ(β, c, T, α=1, u0=1, u1=0.5, u2=3):
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     _, B = construct_B(α, T+1)
 
````
````diff
@@ -981,9 +983,7 @@ We can check the gradient of the analytical solution against the `JAX` computed
 def compute_grad(μ, β, c, α=1, u0=1, u1=0.5, u2=3):
     T = len(μ) - 1
 
-    h0 = u0
-    h1 = -u1 * α
-    h2 = -0.5 * u2 * α**2
+    h0, h1, h2 = compute_hs(u0, u1, u2, α)
 
     _, B = construct_B(α, T+1)
 
````
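The surrounding text says the analytical gradient is checked against the gradient that `JAX` computes. As a hedged illustration of that kind of check, using a stand-in scalar function rather than the lecture's criterion:

```python
# Compare a hand-derived gradient with jax.grad on a stand-in scalar function.
import jax
import jax.numpy as jnp

def f(x):
    return jnp.sum(0.5 * x**2 - 3.0 * x)

def analytical_grad(x):
    return x - 3.0   # elementwise derivative of 0.5*x^2 - 3x

x = jnp.linspace(0.0, 1.0, 4)
print(jnp.allclose(jax.grad(f)(x), analytical_grad(x)))  # True
```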
