Merge branch 'master' into Recreate1479
mnwhite authored Feb 10, 2025
2 parents 37c8377 + f9396dc commit 36b570b
Showing 9 changed files with 393 additions and 96 deletions.
10 changes: 6 additions & 4 deletions Documentation/CHANGELOG.md
@@ -23,10 +23,10 @@ Release Date: TBD

#### Minor Changes

-- Fixes bug in `AgentPopulation` that caused discretization of distributions to not work. [1275](https://github.com/econ-ark/HARK/pull/1275)
-- Adds support for distributions, booleans, and callables as parameters in the `Parameters` class. [1387](https://github.com/econ-ark/HARK/pull/1387)
-- Removes a specific way of accounting for ``employment'' in the idiosyncratic-shocks income process. [1473](https://github.com/econ-ark/HARK/pull/1473)
-- Adds income process constructor for the discrete Markov state consumption-saving model. [1484](https://github.com/econ-ark/HARK/pull/1484)
+- Fixes bug in `AgentPopulation` that caused discretization of distributions to not work. [#1275](https://github.com/econ-ark/HARK/pull/1275)
+- Adds support for distributions, booleans, and callables as parameters in the `Parameters` class. [#1387](https://github.com/econ-ark/HARK/pull/1387)
+- Removes a specific way of accounting for ``employment'' in the idiosyncratic-shocks income process. [#1473](https://github.com/econ-ark/HARK/pull/1473)
+- Adds income process constructor for the discrete Markov state consumption-saving model. [#1484](https://github.com/econ-ark/HARK/pull/1484)
- Changes the behavior of make_lognormal_RiskyDstn so that the standard deviation represents the standard deviation of log(returns)
- Adds detailed parameter and latex documentation to most models.
- Add PermGroFac constructor that explicitly combines idiosyncratic and aggregate sources of growth. [1489](https://github.com/econ-ark/HARK/pull/1489)
@@ -35,6 +35,8 @@ Release Date: TBD
- Fixes typos in IdentityFunction interpolator class. [1492](https://github.com/econ-ark/HARK/pull/1492)
- Expands functionality of Cobb-Douglas aggregator for CRRA utility. [1363](https://github.com/econ-ark/HARK/pull/1363)
- Improved tracking of the bounds of support for distributions, and (some) solvers now respect those bounds when computing the "worst outcome". [1524](https://github.com/econ-ark/HARK/pull/1524)
+- Adds a new function for using Tauchen's method to approximate an AR1 process. [#1521](https://github.com/econ-ark/HARK/pull/1521)
+- Adds additional functionality to the CubicHermiteInterp class, imported from scipy.interpolate. [#1020](https://github.com/econ-ark/HARK/pull/1020/)

### 0.15.1

7 changes: 3 additions & 4 deletions HARK/ConsumptionSaving/ConsIndShockModel.py
@@ -39,13 +39,13 @@
    expected,
)
from HARK.interpolation import (
-    CubicInterp,
    LinearInterp,
    LowerEnvelope,
    MargMargValueFuncCRRA,
    MargValueFuncCRRA,
    ValueFuncCRRA,
)
+from HARK.interpolation import CubicHermiteInterp as CubicInterp
from HARK.metric import MetricObject
from HARK.rewards import (
    CRRAutility,
@@ -881,14 +881,13 @@ def solve_one_period_ConsKinkedR(

    # Construct the assets grid by adjusting aXtra by the natural borrowing constraint
    aNrmNow = np.sort(
-        np.hstack((np.asarray(aXtraGrid) + mNrmMinNow, np.array([0.0, 0.0]))),
+        np.hstack((np.asarray(aXtraGrid) + mNrmMinNow, np.array([0.0, 1e-15]))),
    )

    # Make a 1D array of the interest factor at each asset gridpoint
    Rfree = Rsave * np.ones_like(aNrmNow)
-    Rfree[aNrmNow < 0] = Rboro
+    Rfree[aNrmNow <= 0] = Rboro
    i_kink = np.argwhere(aNrmNow == 0.0)[0][0]
-    Rfree[i_kink] = Rboro

    # Calculate end-of-period marginal value of assets at each gridpoint
    vPfacEff = DiscFacEff * Rfree * PermGroFac ** (-CRRA)
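For intuition, the kinked-R grid trick can be sketched in isolation. The values below are hypothetical, chosen only to illustrate how the two nearly coincident gridpoints at a = 0 let the interest factor jump from Rboro to Rsave exactly at the kink:

```python
import numpy as np

# Hypothetical inputs for illustration; not values from the solver
aXtraGrid = np.array([0.1, 0.5, 1.0, 2.0])
mNrmMinNow = -0.4            # natural borrowing constraint
Rboro, Rsave = 1.20, 1.02    # borrowing factor exceeds saving factor

# Two nearly coincident gridpoints straddle the kink at a = 0
aNrmNow = np.sort(np.hstack((aXtraGrid + mNrmMinNow, np.array([0.0, 1e-15]))))

Rfree = Rsave * np.ones_like(aNrmNow)
Rfree[aNrmNow <= 0] = Rboro  # Rboro applies at and below the kink

print(np.column_stack((aNrmNow, Rfree)))
```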
2 changes: 2 additions & 0 deletions HARK/distributions/__init__.py
@@ -22,6 +22,7 @@
"Uniform",
"MarkovProcess",
"add_discrete_outcome_constant_mean",
"make_tauchen_ar1",
]

from HARK.distributions.base import (
@@ -53,4 +54,5 @@
    combine_indep_dstns,
    distr_of_function,
    expected,
+    make_tauchen_ar1,
)
39 changes: 27 additions & 12 deletions HARK/distributions/utils.py
@@ -181,10 +181,10 @@ def make_markov_approx_to_normal_by_monte_carlo(x_grid, mu, sigma, N_draws=10000
    return p_vec


-def make_tauchen_ar1(N, sigma=1.0, ar_1=0.9, bound=3.0):
+def make_tauchen_ar1(N, sigma=1.0, ar_1=0.9, bound=3.0, inflendpoint=True):
    """
    Function to return a discretized version of an AR1 process.
-    See https://www.fperri.net/TEACHING/macrotheory08/numerical.pdf for details
+    See http://www.fperri.net/TEACHING/macrotheory08/numerical.pdf for details
    Parameters
    ----------
@@ -197,6 +197,12 @@ def make_tauchen_ar1(N, sigma=1.0, ar_1=0.9, bound=3.0):
    bound: float
        The highest (lowest) grid point will be bound (-bound) multiplied by the unconditional
        standard deviation of the process
+    inflendpoint: bool
+        If True: implement the standard method as in Tauchen (1986):
+            assign the probability of jumping to a point outside the grid to the closest endpoint
+        If False: implement an alternative method:
+            discard the probability of jumping to a point outside the grid, effectively
+            reassigning it to the remaining points in proportion to their probability of being reached
    Returns
    -------
@@ -209,16 +215,25 @@
    y = np.linspace(-yN, yN, N)
    d = y[1] - y[0]
    trans_matrix = np.ones((N, N))
-    for j in range(N):
-        for k_1 in range(N - 2):
-            k = k_1 + 1
-            trans_matrix[j, k] = stats.norm.cdf(
-                (y[k] + d / 2.0 - ar_1 * y[j]) / sigma
-            ) - stats.norm.cdf((y[k] - d / 2.0 - ar_1 * y[j]) / sigma)
-        trans_matrix[j, 0] = stats.norm.cdf((y[0] + d / 2.0 - ar_1 * y[j]) / sigma)
-        trans_matrix[j, N - 1] = 1.0 - stats.norm.cdf(
-            (y[N - 1] - d / 2.0 - ar_1 * y[j]) / sigma
-        )
+    if inflendpoint:
+        for j in range(N):
+            for k_1 in range(N - 2):
+                k = k_1 + 1
+                trans_matrix[j, k] = stats.norm.cdf(
+                    (y[k] + d / 2.0 - ar_1 * y[j]) / sigma
+                ) - stats.norm.cdf((y[k] - d / 2.0 - ar_1 * y[j]) / sigma)
+            trans_matrix[j, 0] = stats.norm.cdf((y[0] + d / 2.0 - ar_1 * y[j]) / sigma)
+            trans_matrix[j, N - 1] = 1.0 - stats.norm.cdf(
+                (y[N - 1] - d / 2.0 - ar_1 * y[j]) / sigma
+            )
+    else:
+        for j in range(N):
+            for k in range(N):
+                trans_matrix[j, k] = stats.norm.cdf(
+                    (y[k] + d / 2.0 - ar_1 * y[j]) / sigma
+                ) - stats.norm.cdf((y[k] - d / 2.0 - ar_1 * y[j]) / sigma)
+        ## normalize: each row sums to 1
+        trans_matrix = trans_matrix / trans_matrix.sum(axis=1)[:, np.newaxis]

    return y, trans_matrix

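A quick usage sketch of the new constructor. Interior transition probabilities follow Tauchen's rule P[j, k] = Phi((y_k + d/2 - ar_1*y_j)/sigma) - Phi((y_k - d/2 - ar_1*y_j)/sigma), as implemented above; the two endpoint treatments differ only in what happens to out-of-grid probability mass:

```python
import numpy as np

from HARK.distributions import make_tauchen_ar1

# Discretize y' = 0.9 * y + eps, eps ~ N(0, 1), on a 5-point grid
y_grid, trans = make_tauchen_ar1(N=5, sigma=1.0, ar_1=0.9, bound=3.0)

assert trans.shape == (5, 5)
assert np.allclose(trans.sum(axis=1), 1.0)  # each row is a distribution

# Alternative endpoint treatment: discard out-of-grid mass and renormalize
y_alt, trans_alt = make_tauchen_ar1(5, 1.0, 0.9, 3.0, inflendpoint=False)
assert np.allclose(trans_alt.sum(axis=1), 1.0)
```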
51 changes: 48 additions & 3 deletions HARK/interpolation.py
Expand Up @@ -12,7 +12,6 @@

import numpy as np
from scipy.interpolate import CubicHermiteSpline
-
from HARK.metric import MetricObject
from HARK.rewards import CRRAutility, CRRAutilityP, CRRAutilityPP

@@ -1231,7 +1230,7 @@ def __init__(
        _check_grid_dimensions(1, self.y_list, self.x_list)
        _check_grid_dimensions(1, self.dydx_list, self.x_list)

-        self.n = self.x_list.size
+        self.n = len(x_list)

        self._chs = CubicHermiteSpline(
            self.x_list, self.y_list, self.dydx_list, extrapolate=None
@@ -1330,6 +1329,53 @@ def _evalAndDer(self, x):
        dydx = self._der_helper(x, out_bot, out_top)
        return y, dydx

+    def der_interp(self, nu=1):
+        """
+        Construct a new piecewise polynomial representing the derivative.
+
+        See `scipy` for additional documentation:
+        https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.CubicHermiteSpline.html
+        """
+        return self._chs.derivative(nu)
+
+    def antider_interp(self, nu=1):
+        """
+        Construct a new piecewise polynomial representing the antiderivative.
+        The antiderivative is the indefinite integral of the function;
+        differentiation is its inverse operation.
+
+        See `scipy` for additional documentation:
+        https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.CubicHermiteSpline.html
+        """
+        return self._chs.antiderivative(nu)
+
+    def integrate(self, a, b, extrapolate=False):
+        """
+        Compute a definite integral over a piecewise polynomial.
+
+        See `scipy` for additional documentation:
+        https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.CubicHermiteSpline.html
+        """
+        return self._chs.integrate(a, b, extrapolate)
+
+    def roots(self, discontinuity=True, extrapolate=False):
+        """
+        Find real roots of the piecewise polynomial.
+
+        See `scipy` for additional documentation:
+        https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.CubicHermiteSpline.html
+        """
+        return self._chs.roots(discontinuity, extrapolate)
+
+    def solve(self, y=0.0, discontinuity=True, extrapolate=False):
+        """
+        Find real solutions of the equation ``pp(x) == y``.
+
+        See `scipy` for additional documentation:
+        https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.CubicHermiteSpline.html
+        """
+        return self._chs.solve(y, discontinuity, extrapolate)


class BilinearInterp(HARKinterpolator2D):
    """
@@ -4716,5 +4762,4 @@ def __call__(self, *cFuncArgs):
"cFunc does not have a 'derivativeX' attribute. Can't compute"
+ "marginal marginal value."
)

return MPC * CRRAutilityPP(c, rho=self.CRRA)
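The new methods are thin pass-throughs to the wrapped scipy.interpolate.CubicHermiteSpline, so they inherit its semantics. A minimal sketch, assuming the constructor signature CubicHermiteInterp(x_list, y_list, dydx_list) suggested by the __init__ hunk above:

```python
import numpy as np

from HARK.interpolation import CubicHermiteInterp

# Hermite data for f(x) = x**2; cubic Hermite splines reproduce quadratics exactly
x = np.linspace(0.0, 2.0, 5)
f = CubicHermiteInterp(x, x**2, 2.0 * x)

df = f.der_interp()            # piecewise polynomial for f'(x) = 2x
area = f.integrate(0.0, 2.0)   # definite integral, approximately 8/3
hits = f.solve(y=1.0)          # x in [0, 2] with f(x) == 1, approximately [1.0]

print(df(1.5), float(area), hits)
```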
36 changes: 36 additions & 0 deletions HARK/tests/test_distribution.py
@@ -22,6 +22,7 @@
    combine_indep_dstns,
    distr_of_function,
    expected,
+    make_tauchen_ar1,
)
from HARK.tests import HARK_PRECISION

@@ -657,3 +658,38 @@ def transition(shocks, state):

assert np.all(exp1["m"] == exp2["m"]).item()
assert np.all(exp1["n"] == exp2["n"]).item()


+class TestTauchenAR1(unittest.TestCase):
+    def test_tauchen(self):
+        # Test with a simple AR(1) process
+        N = 5
+        sigma = 1.0
+        ar_1 = 0.9
+        bound = 3.0
+
+        # By default, inflendpoint = True
+        standard = make_tauchen_ar1(N, sigma, ar_1, bound)
+        alternative = make_tauchen_ar1(N, sigma, ar_1, bound, inflendpoint=False)
+
+        # Check that the grid points of the two methods are identical
+        self.assertTrue(np.all(np.equal(standard[0], alternative[0])))
+
+        # Check the shape of the transition matrix
+        self.assertEqual(standard[1].shape, (N, N))
+        self.assertEqual(alternative[1].shape, (N, N))
+
+        # Check that the sum of each row in the transition matrix is 1
+        self.assertTrue(np.allclose(np.sum(standard[1], axis=1), np.ones(N)))
+        self.assertTrue(np.allclose(np.sum(alternative[1], axis=1), np.ones(N)))
+
+        # Check, via cross-multiplication, that the ratio of the k-th to the
+        # (k-1)-th column is the same for both methods on interior columns.
+        # Note: the first and last columns of the 'standard' transition matrix
+        # are inflated, so only interior columns are compared.
+        if N > 3:
+            for i in range(2, N - 1):
+                self.assertTrue(
+                    np.allclose(
+                        standard[1][:, i] * alternative[1][:, i - 1],
+                        alternative[1][:, i] * standard[1][:, i - 1],
+                    )
+                )
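Why the cross-multiplication check should pass: on interior columns, the 'alternative' matrix equals the raw Tauchen probabilities divided by a row-specific normalizing constant, so ratios of interior columns are unchanged by the normalization. A quick sketch of that invariance:

```python
import numpy as np

from HARK.distributions import make_tauchen_ar1

y_std, std = make_tauchen_ar1(5, 1.0, 0.9, 3.0)
y_alt, alt = make_tauchen_ar1(5, 1.0, 0.9, 3.0, inflendpoint=False)

# Columns 1 and 2 are interior for N = 5, so their ratio is method-invariant
assert np.allclose(std[:, 2] * alt[:, 1], alt[:, 2] * std[:, 1])
```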
16 changes: 10 additions & 6 deletions HARK/tests/test_interpolation.py
@@ -2,14 +2,18 @@
This file implements unit tests for interpolation methods
"""

-from HARK.interpolation import BilinearInterp
-from HARK.interpolation import CubicHermiteInterp as CubicInterp
-from HARK.interpolation import LinearInterp, QuadlinearInterp, TrilinearInterp
-from HARK.interpolation import IdentityFunction
-import unittest
-import numpy as np
+import unittest
+from HARK.interpolation import (
+    IdentityFunction,
+    LinearInterp,
+    BilinearInterp,
+    TrilinearInterp,
+    QuadlinearInterp,
+)
+from HARK.interpolation import CubicHermiteInterp as CubicInterp
+
+import numpy as np


class testsLinearInterp(unittest.TestCase):