reconnect moved files to git repo

root committed 2025-08-01 04:33:03 -04:00
commit 5d3c35492d
23190 changed files with 4750716 additions and 0 deletions


@@ -0,0 +1,3 @@
from statsmodels.tools._test_runner import PytestTester
test = PytestTester()
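
For context, a minimal illustration (not part of the committed file) of what this tiny __init__ enables: the PytestTester instance makes the subpackage's test suite directly callable.

# Illustrative only -- not part of this commit.
import statsmodels.tsa.regime_switching as regime_switching

# Runs the subpackage's pytest suite via the PytestTester instance above.
regime_switching.test()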


@@ -0,0 +1,493 @@
"""
Markov switching autoregression models
Author: Chad Fulton
License: BSD-3
"""
import numpy as np
import statsmodels.base.wrapper as wrap
from statsmodels.tsa.tsatools import lagmat
from statsmodels.tsa.regime_switching import (
markov_switching, markov_regression)
from statsmodels.tsa.statespace.tools import (
constrain_stationary_univariate, unconstrain_stationary_univariate)
class MarkovAutoregression(markov_regression.MarkovRegression):
r"""
Markov switching autoregression model
Parameters
----------
endog : array_like
The endogenous variable.
k_regimes : int
The number of regimes.
order : int
The order of the autoregressive lag polynomial.
trend : {'n', 'c', 't', 'ct'}
Whether or not to include a trend. To include a constant, time trend,
or both, set `trend='c'`, `trend='t'`, or `trend='ct'`. For no trend,
set `trend='n'`. Default is a constant.
exog : array_like, optional
Array of exogenous regressors, shaped nobs x k.
exog_tvtp : array_like, optional
Array of exogenous or lagged variables to use in calculating
time-varying transition probabilities (TVTP). TVTP is only used if this
variable is provided. If an intercept is desired, a column of ones must
be explicitly included in this array.
switching_ar : bool or iterable, optional
If a boolean, sets whether or not all autoregressive coefficients are
switching across regimes. If an iterable, should be of length equal
to `order`, where each element is a boolean describing whether the
corresponding coefficient is switching. Default is True.
switching_trend : bool or iterable, optional
If a boolean, sets whether or not all trend coefficients are
switching across regimes. If an iterable, should be of length equal
to the number of trend variables, where each element is
a boolean describing whether the corresponding coefficient is
switching. Default is True.
switching_exog : bool or iterable, optional
If a boolean, sets whether or not all regression coefficients are
switching across regimes. If an iterable, should be of length equal
to the number of exogenous variables, where each element is
a boolean describing whether the corresponding coefficient is
switching. Default is True.
switching_variance : bool, optional
Whether or not there is regime-specific heteroskedasticity, i.e.
whether or not the error term has a switching variance. Default is
False.
Notes
-----
This model is new and API stability is not guaranteed, although changes
will be made in a backwards compatible way if possible.
The model can be written as:
.. math::
y_t = a_{S_t} + x_t' \beta_{S_t} + \phi_{1, S_t}
(y_{t-1} - a_{S_{t-1}} - x_{t-1}' \beta_{S_{t-1}}) + \dots +
\phi_{p, S_t} (y_{t-p} - a_{S_{t-p}} - x_{t-p}' \beta_{S_{t-p}}) +
\varepsilon_t \\
\varepsilon_t \sim N(0, \sigma_{S_t}^2)
i.e. the model is an autoregression where the autoregressive
coefficients, the mean of the process (possibly including trend or
regression effects) and the variance of the error term may be switching
across regimes.
The `trend` is accommodated by prepending columns to the `exog` array. Thus
if `trend='c'`, the passed `exog` array should not already have a column of
ones.
See the notebook `Markov switching autoregression
<../examples/notebooks/generated/markov_autoregression.html>`__
for an overview.
References
----------
Kim, Chang-Jin, and Charles R. Nelson. 1999.
"State-Space Models with Regime Switching:
Classical and Gibbs-Sampling Approaches with Applications".
MIT Press Books. The MIT Press.
"""
def __init__(self, endog, k_regimes, order, trend='c', exog=None,
exog_tvtp=None, switching_ar=True, switching_trend=True,
switching_exog=False, switching_variance=False,
dates=None, freq=None, missing='none'):
# Properties
self.switching_ar = switching_ar
# Switching options
if self.switching_ar is True or self.switching_ar is False:
self.switching_ar = [self.switching_ar] * order
elif not len(self.switching_ar) == order:
raise ValueError('Invalid iterable passed to `switching_ar`.')
# Initialize the base model
super().__init__(
endog, k_regimes, trend=trend, exog=exog, order=order,
exog_tvtp=exog_tvtp, switching_trend=switching_trend,
switching_exog=switching_exog,
switching_variance=switching_variance, dates=dates, freq=freq,
missing=missing)
# Sanity checks
if self.nobs <= self.order:
raise ValueError('Must have more observations than the order of'
' the autoregression.')
# Autoregressive exog
self.exog_ar = lagmat(endog, self.order)[self.order:]
# Reshape other datasets
self.nobs -= self.order
self.orig_endog = self.endog
self.endog = self.endog[self.order:]
if self._k_exog > 0:
self.orig_exog = self.exog
self.exog = self.exog[self.order:]
# Reset the ModelData datasets
self.data.endog, self.data.exog = (
self.data._convert_endog_exog(self.endog, self.exog))
# Reset indexes, if provided
if self.data.row_labels is not None:
self.data._cache['row_labels'] = (
self.data.row_labels[self.order:])
if self._index is not None:
if self._index_generated:
self._index = self._index[:-self.order]
else:
self._index = self._index[self.order:]
# Parameters
self.parameters['autoregressive'] = self.switching_ar
# Cache an array for holding slices
self._predict_slices = [slice(None, None, None)] * (self.order + 1)
def predict_conditional(self, params):
"""
In-sample prediction, conditional on the current and previous regime
Parameters
----------
params : array_like
Array of parameters at which to create predictions.
Returns
-------
predict : array_like
Array of predictions conditional on current, and possibly past,
regimes
"""
params = np.array(params, ndmin=1)
# Prediction is based on:
# y_t = x_t beta^{(S_t)} +
# \phi_1^{(S_t)} (y_{t-1} - x_{t-1} beta^{(S_t-1)}) + ...
# \phi_p^{(S_t)} (y_{t-p} - x_{t-p} beta^{(S_t-p)}) + eps_t
if self._k_exog > 0:
xb = []
for i in range(self.k_regimes):
coeffs = params[self.parameters[i, 'exog']]
xb.append(np.dot(self.orig_exog, coeffs))
predict = np.zeros(
(self.k_regimes,) * (self.order + 1) + (self.nobs,),
dtype=np.promote_types(np.float64, params.dtype))
# Iterate over S_{t} = i
for i in range(self.k_regimes):
ar_coeffs = params[self.parameters[i, 'autoregressive']]
# y_t - x_t beta^{(S_t)}
ix = self._predict_slices[:]
ix[0] = i
ix = tuple(ix)
if self._k_exog > 0:
predict[ix] += xb[i][self.order:]
# Iterate over j = 1, ..., p
for j in range(1, self.order + 1):
for k in range(self.k_regimes):
# This gets a specific time-period / regime slice:
# S_{t} = i, S_{t-j} = k, across all other time-period /
# regime slices.
ix = self._predict_slices[:]
ix[0] = i
ix[j] = k
ix = tuple(ix)
start = self.order - j
end = -j
if self._k_exog > 0:
predict[ix] += ar_coeffs[j-1] * (
self.orig_endog[start:end] - xb[k][start:end])
else:
predict[ix] += ar_coeffs[j-1] * (
self.orig_endog[start:end])
return predict
def _resid(self, params):
return self.endog - self.predict_conditional(params)
def _conditional_loglikelihoods(self, params):
"""
Compute loglikelihoods conditional on the current period's regime and
the last `self.order` regimes.
"""
# Get the residuals
resid = self._resid(params)
# Compute the conditional likelihoods
variance = params[self.parameters['variance']].squeeze()
if self.switching_variance:
variance = np.reshape(variance, (self.k_regimes, 1, 1))
conditional_loglikelihoods = (
-0.5 * resid**2 / variance - 0.5 * np.log(2 * np.pi * variance))
return conditional_loglikelihoods
@property
def _res_classes(self):
return {'fit': (MarkovAutoregressionResults,
MarkovAutoregressionResultsWrapper)}
def _em_iteration(self, params0):
"""
EM iteration
"""
# Inherited parameters
result, params1 = markov_switching.MarkovSwitching._em_iteration(
self, params0)
tmp = np.sqrt(result.smoothed_marginal_probabilities)
# Regression coefficients
coeffs = None
if self._k_exog > 0:
coeffs = self._em_exog(result, self.endog, self.exog,
self.parameters.switching['exog'], tmp)
for i in range(self.k_regimes):
params1[self.parameters[i, 'exog']] = coeffs[i]
# Autoregressive
if self.order > 0:
if self._k_exog > 0:
ar_coeffs, variance = self._em_autoregressive(
result, coeffs)
else:
ar_coeffs = self._em_exog(
result, self.endog, self.exog_ar,
self.parameters.switching['autoregressive'])
variance = self._em_variance(
result, self.endog, self.exog_ar, ar_coeffs, tmp)
for i in range(self.k_regimes):
params1[self.parameters[i, 'autoregressive']] = ar_coeffs[i]
params1[self.parameters['variance']] = variance
return result, params1
def _em_autoregressive(self, result, betas, tmp=None):
"""
EM step for autoregressive coefficients and variances
"""
if tmp is None:
tmp = np.sqrt(result.smoothed_marginal_probabilities)
resid = np.zeros((self.k_regimes, self.nobs + self.order))
resid[:] = self.orig_endog
if self._k_exog > 0:
for i in range(self.k_regimes):
resid[i] -= np.dot(self.orig_exog, betas[i])
# The difference between this and `_em_exog` is that here we have a
# different endog and exog for each regime
coeffs = np.zeros((self.k_regimes,) + (self.order,))
variance = np.zeros((self.k_regimes,))
exog = np.zeros((self.nobs, self.order))
for i in range(self.k_regimes):
endog = resid[i, self.order:]
exog = lagmat(resid[i], self.order)[self.order:]
tmp_endog = tmp[i] * endog
tmp_exog = tmp[i][:, None] * exog
coeffs[i] = np.dot(np.linalg.pinv(tmp_exog), tmp_endog)
if self.switching_variance:
tmp_resid = endog - np.dot(exog, coeffs[i])
variance[i] = (np.sum(
tmp_resid**2 * result.smoothed_marginal_probabilities[i]) /
np.sum(result.smoothed_marginal_probabilities[i]))
else:
tmp_resid = tmp_endog - np.dot(tmp_exog, coeffs[i])
variance[i] = np.sum(tmp_resid**2)
# Variances
if not self.switching_variance:
variance = variance.sum() / self.nobs
return coeffs, variance
@property
def start_params(self):
"""
(array) Starting parameters for maximum likelihood estimation.
"""
# Inherited parameters
params = markov_switching.MarkovSwitching.start_params.fget(self)
# OLS for starting parameters
endog = self.endog.copy()
if self._k_exog > 0 and self.order > 0:
exog = np.c_[self.exog, self.exog_ar]
elif self._k_exog > 0:
exog = self.exog
elif self.order > 0:
exog = self.exog_ar
if self._k_exog > 0 or self.order > 0:
beta = np.dot(np.linalg.pinv(exog), endog)
variance = np.var(endog - np.dot(exog, beta))
else:
variance = np.var(endog)
# Regression coefficients
if self._k_exog > 0:
if np.any(self.switching_coeffs):
for i in range(self.k_regimes):
params[self.parameters[i, 'exog']] = (
beta[:self._k_exog] * (i / self.k_regimes))
else:
params[self.parameters['exog']] = beta[:self._k_exog]
# Autoregressive
if self.order > 0:
if np.any(self.switching_ar):
for i in range(self.k_regimes):
params[self.parameters[i, 'autoregressive']] = (
beta[self._k_exog:] * (i / self.k_regimes))
else:
params[self.parameters['autoregressive']] = beta[self._k_exog:]
# Variance
if self.switching_variance:
params[self.parameters['variance']] = (
np.linspace(variance / 10., variance, num=self.k_regimes))
else:
params[self.parameters['variance']] = variance
return params
@property
def param_names(self):
"""
(list of str) List of human readable parameter names (for parameters
actually included in the model).
"""
# Inherited parameters
param_names = np.array(
markov_regression.MarkovRegression.param_names.fget(self),
dtype=object)
# Autoregressive
if np.any(self.switching_ar):
for i in range(self.k_regimes):
param_names[self.parameters[i, 'autoregressive']] = [
'ar.L%d[%d]' % (j+1, i) for j in range(self.order)]
else:
param_names[self.parameters['autoregressive']] = [
'ar.L%d' % (j+1) for j in range(self.order)]
return param_names.tolist()
def transform_params(self, unconstrained):
"""
Transform unconstrained parameters used by the optimizer to constrained
parameters used in likelihood evaluation
Parameters
----------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer, to be
transformed.
Returns
-------
constrained : array_like
Array of constrained parameters which may be used in likelihood
evaluation.
"""
# Inherited parameters
constrained = super().transform_params(
unconstrained)
# Autoregressive
# TODO may provide unexpected results when some coefficients are not
# switching
for i in range(self.k_regimes):
s = self.parameters[i, 'autoregressive']
constrained[s] = constrain_stationary_univariate(
unconstrained[s])
return constrained
def untransform_params(self, constrained):
"""
Transform constrained parameters used in likelihood evaluation
to unconstrained parameters used by the optimizer
Parameters
----------
constrained : array_like
Array of constrained parameters used in likelihood evaluation, to
be transformed.
Returns
-------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer.
"""
# Inherited parameters
unconstrained = super().untransform_params(
constrained)
# Autoregressive
# TODO may provide unexpected results when some coefficients are not
# switching
for i in range(self.k_regimes):
s = self.parameters[i, 'autoregressive']
unconstrained[s] = unconstrain_stationary_univariate(
constrained[s])
return unconstrained
class MarkovAutoregressionResults(markov_regression.MarkovRegressionResults):
r"""
Class to hold results from fitting a Markov switching autoregression model
Parameters
----------
model : MarkovAutoregression instance
The fitted model instance
params : ndarray
Fitted parameters
filter_results : HamiltonFilterResults or KimSmootherResults instance
The underlying filter and, optionally, smoother output
cov_type : str
The type of covariance matrix estimator to use. Can be one of 'approx',
'opg', 'robust', or 'none'.
Attributes
----------
model : Model instance
A reference to the model that was fit.
filter_results : HamiltonFilterResults or KimSmootherResults instance
The underlying filter and, optionally, smoother output
nobs : float
The number of observations used to fit the model.
params : ndarray
The parameters of the model.
scale : float
This is currently set to 1.0 and not used by the model or its results.
"""
pass
class MarkovAutoregressionResultsWrapper(
markov_regression.MarkovRegressionResultsWrapper):
pass
wrap.populate_wrapper(MarkovAutoregressionResultsWrapper, # noqa:E305
MarkovAutoregressionResults)
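
A minimal usage sketch for the class defined above (illustrative only, not part of the committed file). It assumes a one-dimensional series of growth rates, here stubbed with random data, and fits a two-regime, fourth-order autoregression with a switching mean but non-switching AR coefficients, in the spirit of the notebook referenced in the docstring.

# Illustrative only -- not part of this commit.
import numpy as np
import statsmodels.api as sm

# Placeholder data; in practice this would be something like real GNP growth.
rng = np.random.default_rng(0)
growth = rng.normal(size=200)

mod = sm.tsa.MarkovAutoregression(
    growth, k_regimes=2, order=4, switching_ar=False)
res = mod.fit()

print(res.summary())
# Smoothed probabilities of each regime (layout depends on the input type).
probs = res.smoothed_marginal_probabilities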


@@ -0,0 +1,456 @@
"""
Markov switching regression models
Author: Chad Fulton
License: BSD-3
"""
import numpy as np
import statsmodels.base.wrapper as wrap
from statsmodels.tsa.regime_switching import markov_switching
class MarkovRegression(markov_switching.MarkovSwitching):
r"""
First-order k-regime Markov switching regression model
Parameters
----------
endog : array_like
The endogenous variable.
k_regimes : int
The number of regimes.
trend : {'n', 'c', 't', 'ct'}
Whether or not to include a trend. To include an intercept, time trend,
or both, set `trend='c'`, `trend='t'`, or `trend='ct'`. For no trend,
set `trend='n'`. Default is an intercept.
exog : array_like, optional
Array of exogenous regressors, shaped nobs x k.
order : int, optional
The order of the model describes the dependence of the likelihood on
previous regimes. This depends on the model in question and should be
set appropriately by subclasses.
exog_tvtp : array_like, optional
Array of exogenous or lagged variables to use in calculating
time-varying transition probabilities (TVTP). TVTP is only used if this
variable is provided. If an intercept is desired, a column of ones must
be explicitly included in this array.
switching_trend : bool or iterable, optional
If a boolean, sets whether or not all trend coefficients are
switching across regimes. If an iterable, should be of length equal
to the number of trend variables, where each element is
a boolean describing whether the corresponding coefficient is
switching. Default is True.
switching_exog : bool or iterable, optional
If a boolean, sets whether or not all regression coefficients are
switching across regimes. If an iterable, should be of length equal
to the number of exogenous variables, where each element is
a boolean describing whether the corresponding coefficient is
switching. Default is True.
switching_variance : bool, optional
Whether or not there is regime-specific heteroskedasticity, i.e.
whether or not the error term has a switching variance. Default is
False.
Notes
-----
This model is new and API stability is not guaranteed, although changes
will be made in a backwards compatible way if possible.
The model can be written as:
.. math::
y_t = a_{S_t} + x_t' \beta_{S_t} + \varepsilon_t \\
\varepsilon_t \sim N(0, \sigma_{S_t}^2)
i.e. the model is a dynamic linear regression where the coefficients and
the variance of the error term may be switching across regimes.
The `trend` is accommodated by prepending columns to the `exog` array. Thus
if `trend='c'`, the passed `exog` array should not already have a column of
ones.
See the notebook `Markov switching dynamic regression
<../examples/notebooks/generated/markov_regression.html>`__ for an
overview.
References
----------
Kim, Chang-Jin, and Charles R. Nelson. 1999.
"State-Space Models with Regime Switching:
Classical and Gibbs-Sampling Approaches with Applications".
MIT Press Books. The MIT Press.
"""
def __init__(self, endog, k_regimes, trend='c', exog=None, order=0,
exog_tvtp=None, switching_trend=True, switching_exog=True,
switching_variance=False, dates=None, freq=None,
missing='none'):
# Properties
from statsmodels.tools.validation import string_like
self.trend = string_like(trend, "trend", options=("n", "c", "ct", "t"))
self.switching_trend = switching_trend
self.switching_exog = switching_exog
self.switching_variance = switching_variance
# Exogenous data
self.k_exog, exog = markov_switching.prepare_exog(exog)
# Trend
nobs = len(endog)
self.k_trend = 0
self._k_exog = self.k_exog
trend_exog = None
if trend == 'c':
trend_exog = np.ones((nobs, 1))
self.k_trend = 1
elif trend == 't':
trend_exog = (np.arange(nobs) + 1)[:, np.newaxis]
self.k_trend = 1
elif trend == 'ct':
trend_exog = np.c_[np.ones((nobs, 1)),
(np.arange(nobs) + 1)[:, np.newaxis]]
self.k_trend = 2
if trend_exog is not None:
exog = trend_exog if exog is None else np.c_[trend_exog, exog]
self._k_exog += self.k_trend
# Initialize the base model
super().__init__(
endog, k_regimes, order=order, exog_tvtp=exog_tvtp, exog=exog,
dates=dates, freq=freq, missing=missing)
# Switching options
if self.switching_trend is True or self.switching_trend is False:
self.switching_trend = [self.switching_trend] * self.k_trend
elif not len(self.switching_trend) == self.k_trend:
raise ValueError('Invalid iterable passed to `switching_trend`.')
if self.switching_exog is True or self.switching_exog is False:
self.switching_exog = [self.switching_exog] * self.k_exog
elif not len(self.switching_exog) == self.k_exog:
raise ValueError('Invalid iterable passed to `switching_exog`.')
self.switching_coeffs = (
np.r_[self.switching_trend,
self.switching_exog].astype(bool).tolist())
# Parameters
self.parameters['exog'] = self.switching_coeffs
self.parameters['variance'] = [1] if self.switching_variance else [0]
def predict_conditional(self, params):
"""
In-sample prediction, conditional on the current regime
Parameters
----------
params : array_like
Array of parameters at which to perform prediction.
Returns
-------
predict : array_like
Array of predictions conditional on current, and possibly past,
regimes
"""
params = np.array(params, ndmin=1)
# Since in the base model the values are the same across columns, we
# only compute a single column, and then expand it below.
predict = np.zeros((self.k_regimes, self.nobs), dtype=params.dtype)
for i in range(self.k_regimes):
# Predict
if self._k_exog > 0:
coeffs = params[self.parameters[i, 'exog']]
predict[i] = np.dot(self.exog, coeffs)
return predict[:, None, :]
def _resid(self, params):
predict = np.repeat(self.predict_conditional(params),
self.k_regimes, axis=1)
return self.endog - predict
def _conditional_loglikelihoods(self, params):
"""
Compute loglikelihoods conditional on the current period's regime
"""
# Get residuals
resid = self._resid(params)
# Compute the conditional likelihoods
variance = params[self.parameters['variance']].squeeze()
if self.switching_variance:
variance = np.reshape(variance, (self.k_regimes, 1, 1))
conditional_loglikelihoods = (
-0.5 * resid**2 / variance - 0.5 * np.log(2 * np.pi * variance))
return conditional_loglikelihoods
@property
def _res_classes(self):
return {'fit': (MarkovRegressionResults,
MarkovRegressionResultsWrapper)}
def _em_iteration(self, params0):
"""
EM iteration
Notes
-----
This uses the inherited _em_iteration method for computing the
non-TVTP transition probabilities and then performs the EM step for
regression coefficients and variances.
"""
# Inherited parameters
result, params1 = super()._em_iteration(params0)
tmp = np.sqrt(result.smoothed_marginal_probabilities)
# Regression coefficients
coeffs = None
if self._k_exog > 0:
coeffs = self._em_exog(result, self.endog, self.exog,
self.parameters.switching['exog'], tmp)
for i in range(self.k_regimes):
params1[self.parameters[i, 'exog']] = coeffs[i]
# Variances
params1[self.parameters['variance']] = self._em_variance(
result, self.endog, self.exog, coeffs, tmp)
# params1[self.parameters['variance']] = 0.33282116
return result, params1
def _em_exog(self, result, endog, exog, switching, tmp=None):
"""
EM step for regression coefficients
"""
k_exog = exog.shape[1]
coeffs = np.zeros((self.k_regimes, k_exog))
# First, estimate non-switching coefficients
if not np.all(switching):
nonswitching_exog = exog[:, ~switching]
nonswitching_coeffs = (
np.dot(np.linalg.pinv(nonswitching_exog), endog))
coeffs[:, ~switching] = nonswitching_coeffs
endog = endog - np.dot(nonswitching_exog, nonswitching_coeffs)
# Next, get switching coefficients
if np.any(switching):
switching_exog = exog[:, switching]
if tmp is None:
tmp = np.sqrt(result.smoothed_marginal_probabilities)
for i in range(self.k_regimes):
tmp_endog = tmp[i] * endog
tmp_exog = tmp[i][:, np.newaxis] * switching_exog
coeffs[i, switching] = (
np.dot(np.linalg.pinv(tmp_exog), tmp_endog))
return coeffs
def _em_variance(self, result, endog, exog, betas, tmp=None):
"""
EM step for variances
"""
k_exog = 0 if exog is None else exog.shape[1]
if self.switching_variance:
variance = np.zeros(self.k_regimes)
for i in range(self.k_regimes):
if k_exog > 0:
resid = endog - np.dot(exog, betas[i])
else:
resid = endog
variance[i] = (
np.sum(resid**2 *
result.smoothed_marginal_probabilities[i]) /
np.sum(result.smoothed_marginal_probabilities[i]))
else:
variance = 0
if tmp is None:
tmp = np.sqrt(result.smoothed_marginal_probabilities)
for i in range(self.k_regimes):
tmp_endog = tmp[i] * endog
if k_exog > 0:
tmp_exog = tmp[i][:, np.newaxis] * exog
resid = tmp_endog - np.dot(tmp_exog, betas[i])
else:
resid = tmp_endog
variance += np.sum(resid**2)
variance /= self.nobs
return variance
@property
def start_params(self):
"""
(array) Starting parameters for maximum likelihood estimation.
Notes
-----
These are not very sophisticated and / or good. We set equal transition
probabilities and interpolate regression coefficients between zero and
the OLS estimates, where the interpolation is based on the regime
number. We rely heavily on the EM algorithm to quickly find much better
starting parameters, which are then used by the typical scoring
approach.
"""
# Inherited parameters
params = markov_switching.MarkovSwitching.start_params.fget(self)
# Regression coefficients
if self._k_exog > 0:
beta = np.dot(np.linalg.pinv(self.exog), self.endog)
variance = np.var(self.endog - np.dot(self.exog, beta))
if np.any(self.switching_coeffs):
for i in range(self.k_regimes):
params[self.parameters[i, 'exog']] = (
beta * (i / self.k_regimes))
else:
params[self.parameters['exog']] = beta
else:
variance = np.var(self.endog)
# Variances
if self.switching_variance:
params[self.parameters['variance']] = (
np.linspace(variance / 10., variance, num=self.k_regimes))
else:
params[self.parameters['variance']] = variance
return params
@property
def param_names(self):
"""
(list of str) List of human readable parameter names (for parameters
actually included in the model).
"""
# Inherited parameters
param_names = np.array(
markov_switching.MarkovSwitching.param_names.fget(self),
dtype=object)
# Regression coefficients
if np.any(self.switching_coeffs):
for i in range(self.k_regimes):
param_names[self.parameters[i, 'exog']] = [
'%s[%d]' % (exog_name, i) for exog_name in self.exog_names]
else:
param_names[self.parameters['exog']] = self.exog_names
# Variances
if self.switching_variance:
for i in range(self.k_regimes):
param_names[self.parameters[i, 'variance']] = 'sigma2[%d]' % i
else:
param_names[self.parameters['variance']] = 'sigma2'
return param_names.tolist()
def transform_params(self, unconstrained):
"""
Transform unconstrained parameters used by the optimizer to constrained
parameters used in likelihood evaluation
Parameters
----------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer, to be
transformed.
Returns
-------
constrained : array_like
Array of constrained parameters which may be used in likelihood
evaluation.
"""
# Inherited parameters
constrained = super().transform_params(
unconstrained)
# Nothing to do for regression coefficients
constrained[self.parameters['exog']] = (
unconstrained[self.parameters['exog']])
# Force variances to be positive
constrained[self.parameters['variance']] = (
unconstrained[self.parameters['variance']]**2)
return constrained
def untransform_params(self, constrained):
"""
Transform constrained parameters used in likelihood evaluation
to unconstrained parameters used by the optimizer
Parameters
----------
constrained : array_like
Array of constrained parameters used in likelihood evaluation, to
be transformed.
Returns
-------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer.
"""
# Inherited parameters
unconstrained = super().untransform_params(
constrained)
# Nothing to do for regression coefficients
unconstrained[self.parameters['exog']] = (
constrained[self.parameters['exog']])
# Force variances to be positive
unconstrained[self.parameters['variance']] = (
constrained[self.parameters['variance']]**0.5)
return unconstrained
class MarkovRegressionResults(markov_switching.MarkovSwitchingResults):
r"""
Class to hold results from fitting a Markov switching regression model
Parameters
----------
model : MarkovRegression instance
The fitted model instance
params : ndarray
Fitted parameters
filter_results : HamiltonFilterResults or KimSmootherResults instance
The underlying filter and, optionally, smoother output
cov_type : str
The type of covariance matrix estimator to use. Can be one of 'approx',
'opg', 'robust', or 'none'.
Attributes
----------
model : Model instance
A reference to the model that was fit.
filter_results : HamiltonFilterResults or KimSmootherResults instance
The underlying filter and, optionally, smoother output
nobs : float
The number of observations used to fit the model.
params : ndarray
The parameters of the model.
scale : float
This is currently set to 1.0 and not used by the model or its results.
"""
pass
class MarkovRegressionResultsWrapper(
markov_switching.MarkovSwitchingResultsWrapper):
pass
wrap.populate_wrapper(MarkovRegressionResultsWrapper, # noqa:E305
MarkovRegressionResults)
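
As with the autoregression above, a brief usage sketch for MarkovRegression (illustrative only, not part of the committed file): a two-regime model with a switching intercept and regime-specific variance, fit to placeholder data.

# Illustrative only -- not part of this commit.
import numpy as np
import statsmodels.api as sm

# Placeholder data; in practice e.g. an interest-rate or inflation series.
rng = np.random.default_rng(1)
y = rng.normal(size=300)

mod = sm.tsa.MarkovRegression(
    y, k_regimes=2, trend='c', switching_variance=True)
res = mod.fit()

print(res.summary())
# Expected duration of each regime implied by the fitted transition probabilities.
print(res.expected_durations)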

File diff suppressed because one or more lines are too long


@@ -0,0 +1,227 @@
const_p1,const_p2,const_f1,const_f2,const_sm1,const_sm2,const_yhat1,const_yhat2,const_pyhat,const_fyhat,const_syhat,constL1exog_syhat,constL1exog_syhat1,constL1exog_syhat2
.7376958,.2623042,.9997776,.0002225,.9999886,.0000114,3.70877,9.556793,5.242731,3.710071,3.708837,,,
.9818866,.0181134,.999989,.000011,.9999995,5.62e-07,3.70877,9.556793,3.814698,3.708834,3.708773,,,
.9820836,.0179164,.9999828,.0000172,.9999991,8.81e-07,3.70877,9.556793,3.813545,3.70887,3.708775,,,
.9820778,.0179221,.9999788,.0000212,.9999989,1.09e-06,3.70877,9.556793,3.813579,3.708894,3.708776,,,
.9820741,.0179259,.9999622,.0000378,.999998,1.95e-06,3.70877,9.556793,3.813601,3.708991,3.708781,1.831308,2.472625,1.392144
.9820586,.0179414,.9999341,.0000659,.9999966,3.39e-06,3.70877,9.556793,3.813692,3.709155,3.70879,2.268597,2.839436,1.934797
.9820325,.0179675,.9999228,.0000772,.999996,3.98e-06,3.70877,9.556793,3.813844,3.709222,3.708793,2.504288,3.019767,2.2552
.9820219,.0179781,.9998981,.0001019,.9999948,5.26e-06,3.70877,9.556793,3.813906,3.709366,3.708801,2.707525,3.046899,2.523101
.9819989,.0180011,.9998805,.0001195,.9999938,6.17e-06,3.70877,9.556793,3.814041,3.709469,3.708806,2.969676,3.084172,2.896406
.9819825,.0180175,.9998599,.0001401,.9999928,7.24e-06,3.70877,9.556793,3.814137,3.709589,3.708812,3.18926,3.243036,3.146579
.9819633,.0180367,.9998598,.0001402,.9999927,7.25e-06,3.70877,9.556793,3.814249,3.70959,3.708812,3.376379,3.322518,3.43591
.9819632,.0180368,.9998462,.0001538,.999992,7.97e-06,3.70877,9.556793,3.81425,3.709669,3.708817,3.273995,3.161087,3.438036
.9819506,.0180494,.9997917,.0002083,.9999892,.0000108,3.70877,9.556793,3.814323,3.709988,3.708833,3.302091,3.209093,3.468282
.9818998,.0181002,.9997855,.0002144,.999989,.000011,3.70877,9.556793,3.81462,3.710024,3.708834,3.190691,3.082165,3.518581
.9818941,.0181059,.9999656,.0000344,.9999982,1.77e-06,3.70877,9.556793,3.814654,3.708971,3.70878,2.676074,2.657601,3.506871
.9820618,.0179382,.9999899,.0000102,.9999995,5.21e-07,3.70877,9.556793,3.813673,3.708829,3.708773,1.505545,1.462715,2.16521
.9820844,.0179156,.9999833,.0000167,.9999992,8.60e-07,3.70877,9.556793,3.813541,3.708868,3.708775,1.018261,.958331,1.162562
.9820783,.0179217,.9999495,.0000506,.9999974,2.60e-06,3.70877,9.556793,3.813577,3.709066,3.708785,1.511617,1.521546,1.493146
.9820467,.0179533,.9999131,.0000869,.9999955,4.50e-06,3.70877,9.556793,3.813761,3.709278,3.708796,2.263951,2.353165,2.083339
.9820129,.0179871,.9998296,.0001704,.9999911,8.89e-06,3.70877,9.556793,3.813959,3.709766,3.708822,2.748969,2.898975,2.406418
.9819351,.0180649,.9996696,.0003304,.9999825,.0000175,3.70877,9.556793,3.814414,3.710702,3.708872,3.084917,3.156353,2.964966
.981786,.018214,.9994285,.0005715,.9999698,.0000301,3.70877,9.556793,3.815286,3.712112,3.708946,3.503891,3.489995,3.520062
.9815614,.0184387,.9994652,.0005348,.999972,.000028,3.70877,9.556793,3.8166,3.711897,3.708934,3.986069,4.035326,3.934834
.9815956,.0184044,.9996057,.0003944,.9999796,.0000204,3.70877,9.556793,3.8164,3.711076,3.708889,3.845686,3.776379,3.920848
.9817264,.0182736,.999856,.000144,.9999926,7.41e-06,3.70877,9.556793,3.815634,3.709612,3.708813,3.53088,3.489225,3.580799
.9819597,.0180403,.9999388,.0000612,.9999968,3.14e-06,3.70877,9.556793,3.81427,3.709128,3.708788,2.659254,2.546042,2.803689
.9820368,.0179632,.9999589,.000041,.9999979,2.11e-06,3.70877,9.556793,3.813819,3.70901,3.708782,2.095736,1.980844,2.224948
.9820556,.0179444,.9999713,.0000287,.9999985,1.48e-06,3.70877,9.556793,3.813709,3.708938,3.708779,1.852,1.863727,1.839663
.982067,.0179329,.9999731,.0000269,.9999986,1.38e-06,3.70877,9.556793,3.813642,3.708927,3.708778,1.705161,1.723561,1.684164
.9820688,.0179312,.9999306,.0000694,.9999964,3.57e-06,3.70877,9.556793,3.813632,3.709176,3.708791,1.730321,1.840032,1.567684
.9820292,.0179708,.9999248,.0000752,.9999961,3.88e-06,3.70877,9.556793,3.813864,3.70921,3.708793,2.431717,2.542717,2.304694
.9820237,.0179763,.9999083,.0000917,.9999953,4.73e-06,3.70877,9.556793,3.813895,3.709306,3.708798,2.513138,2.57788,2.44728
.9820084,.0179916,.9998741,.0001259,.9999935,6.50e-06,3.70877,9.556793,3.813985,3.709506,3.708808,2.62614,2.693316,2.561214
.9819766,.0180234,.9998617,.0001383,.9999928,7.15e-06,3.70877,9.556793,3.814172,3.709579,3.708812,2.791865,2.801997,2.782812
.981965,.018035,.9998522,.0001478,.9999924,7.64e-06,3.70877,9.556793,3.814239,3.709634,3.708815,2.851919,2.871364,2.834998
.9819561,.0180438,.9998541,.0001459,.9999924,7.58e-06,3.70877,9.556793,3.814291,3.709624,3.708814,2.906022,2.966903,2.852718
.9819579,.0180421,.9997625,.0002375,.9999877,.0000124,3.70877,9.556793,3.814281,3.710159,3.708842,3.009227,3.079246,2.945725
.9818726,.0181274,.9997206,.0002794,.9999855,.0000145,3.70877,9.556793,3.81478,3.710404,3.708855,3.324776,3.365401,3.290619
.9818335,.0181665,.9997162,.0002838,.9999852,.0000148,3.70877,9.556793,3.815008,3.710429,3.708857,3.525028,3.609638,3.459325
.9818295,.0181705,.9997047,.0002953,.9999846,.0000154,3.70877,9.556793,3.815032,3.710497,3.70886,3.538539,3.633008,3.467517
.9818187,.0181813,.999716,.000284,.9999852,.0000148,3.70877,9.556793,3.815094,3.710431,3.708857,3.540229,3.700191,3.420547
.9818292,.0181708,.9996676,.0003324,.9999824,.0000176,3.70877,9.556793,3.815033,3.710714,3.708873,3.489318,3.58355,3.411572
.9817842,.0182159,.9994432,.0005567,.9999704,.0000296,3.70877,9.556793,3.815297,3.712026,3.708943,3.71,3.888904,3.552691
.9815751,.0184249,.9993491,.000651,.9999654,.0000345,3.70877,9.556793,3.816519,3.712577,3.708972,4.116197,4.243034,4.028508
.9814873,.0185127,.9993544,.0006456,.9999655,.0000345,3.70877,9.556793,3.817033,3.712545,3.708972,4.275736,4.45837,4.177522
.9814923,.0185077,.9992639,.0007361,.9999595,.0000405,3.70877,9.556793,3.817003,3.713075,3.709007,4.358746,4.631372,4.229281
.9814079,.018592,.9987648,.0012353,.9999287,.0000713,3.70877,9.556793,3.817497,3.715994,3.709187,4.642481,4.897738,4.507643
.9809429,.0190571,.9979933,.0020067,.9998719,.0001281,3.70877,9.556793,3.820216,3.720505,3.709519,4.978095,5.104101,4.899562
.9802241,.0197758,.9959831,.0040169,.9997402,.0002598,3.70877,9.556793,3.824419,3.732261,3.710289,5.337822,5.337117,5.338327
.9783511,.0216489,.9946395,.0053605,.9996985,.0003016,3.70877,9.556793,3.835373,3.740119,3.710534,5.794573,5.714682,5.857948
.9770993,.0229007,.9978499,.0021501,.9998862,.0001139,3.70877,9.556793,3.842694,3.721344,3.709436,5.836755,5.830125,5.841867
.9800906,.0199095,.9993742,.0006258,.999967,.000033,3.70877,9.556793,3.825201,3.71243,3.708963,5.059279,5.070888,5.051047
.9815108,.0184892,.9994912,.0005088,.999971,.000029,3.70877,9.556793,3.816895,3.711745,3.70894,4.323678,4.35304,4.303565
.9816198,.0183802,.999269,.000731,.9998878,.0001122,3.70877,9.556793,3.816258,3.713045,3.709426,4.248055,4.227117,4.262413
.9814128,.0185872,.998329,.0016709,.9979967,.0020033,3.70877,9.556793,3.817469,3.718542,3.720485,4.661473,4.576157,4.716111
.980537,.019463,.9916658,.0083341,.9764566,.0235434,3.70877,9.556793,3.82259,3.757508,3.846452,5.306712,5.161865,5.393076
.9743286,.0256714,.9895275,.0104725,.9249268,.0750732,3.70877,9.556793,3.858897,3.770013,4.1478,6.332373,6.099176,6.564503
.9723363,.0276637,.9889915,.0110085,.7923684,.2076316,3.70877,9.556793,3.870548,3.773148,4.923005,6.284777,5.982467,6.53833
.9718368,.0281632,.974013,.0259869,.4413242,.5586758,3.70877,9.556793,3.873469,3.860742,6.975919,6.478172,6.03827,6.593009
.9578809,.0421191,.7088267,.2911734,.0466012,.9533988,3.70877,9.556793,3.955084,5.411559,9.284268,7.296552,6.460438,7.30485
.7107974,.2892026,.1005571,.8994429,.0029009,.997099,3.70877,9.556793,5.400033,8.968733,9.539828,8.867719,7.869485,8.93118
.1440513,.8559487,.0080047,.9919953,.0011666,.9988334,3.70877,9.556793,8.714377,9.509981,9.549972,9.045574,8.212791,9.553485
.057817,.942183,.0047662,.9952338,.0074692,.9925308,3.70877,9.556793,9.218678,9.52892,9.513113,8.329976,8.020488,9.562139
.0547995,.9452005,.0110985,.9889015,.0864779,.9135221,3.70877,9.556793,9.236324,9.491889,9.051068,7.654822,7.611776,9.151626
.0606996,.9393004,.0558457,.9441543,.4809731,.5190269,3.70877,9.556793,9.20182,9.230206,6.744051,7.049016,7.046554,8.435221
.1023921,.8976079,.3161151,.6838849,.8977214,.1022786,3.70877,9.556793,8.958002,7.708145,4.306898,5.81704,5.814537,7.256321
.3448943,.6551057,.9529809,.0470191,.9972818,.0027182,3.70877,9.556793,7.539844,3.983739,3.724666,5.139013,5.13464,6.098828
.9382845,.0617155,.9957243,.0042758,.9997301,.0002699,3.70877,9.556793,4.069684,3.733775,3.710349,4.142889,3.688474,4.394758
.97811,.02189,.9951816,.0048184,.9997313,.0002687,3.70877,9.556793,3.836783,3.736948,3.710341,4.85277,4.267033,5.034659
.9776044,.0223956,.998083,.001917,.9998995,.0001005,3.70877,9.556793,3.83974,3.71998,3.709358,5.155045,4.973974,5.706321
.9803077,.0196923,.9996578,.0003423,.9999782,.0000218,3.70877,9.556793,3.823931,3.710772,3.708898,4.571352,4.492369,5.060409
.9817749,.0182251,.9991401,.0008599,.99976,.00024,3.70877,9.556793,3.815351,3.713799,3.710174,3.808026,3.696524,3.927749
.9812927,.0187073,.9984251,.0015749,.995508,.004492,3.70877,9.556793,3.818171,3.71798,3.735039,4.508434,4.352981,4.597115
.9806264,.0193736,.9972397,.0027603,.9428005,.0571995,3.70877,9.556793,3.822067,3.724912,4.043274,5.038599,4.840997,5.103775
.979522,.020478,.9818339,.0181661,.5537605,.4462395,3.70877,9.556793,3.828526,3.815006,6.318389,5.661628,5.355406,5.692415
.9651679,.0348321,.8530456,.1469544,.0991841,.9008158,3.70877,9.556793,3.912469,4.568163,8.976763,7.301357,6.493879,7.315054
.8451713,.1548287,.0300818,.9699182,.0005918,.9994081,3.70877,9.556793,4.614212,9.380874,9.553332,8.715515,7.326397,8.71555
.0783869,.921613,.0010091,.9989909,.0000196,.9999804,3.70877,9.556793,9.098385,9.550892,9.556679,9.655275,9.598218,11.60682
.0512989,.9487011,.0015696,.9984304,.0000297,.9999703,3.70877,9.556793,9.256796,9.547614,9.55662,8.840707,8.830189,11.3388
.0518211,.9481789,.0001252,.9998748,2.39e-06,.9999976,3.70877,9.556793,9.253742,9.556061,9.55678,10.8225,8.173305,10.8225
.0504753,.9495247,.0000403,.9999597,.000013,.999987,3.70877,9.556793,9.261613,9.556558,9.556717,12.73945,9.477875,12.73965
.0503962,.9496038,.001481,.998519,.0156336,.9843664,3.70877,9.556793,9.262075,9.548132,9.465368,9.974022,9.974022,13.5986
.0517386,.9482614,.0779672,.9220328,.5556827,.4443173,3.70877,9.556793,9.254225,9.100839,6.307148,7.448603,7.448603,10.78955
.1230035,.8769965,.4091304,.5908696,.8923706,.1076294,3.70877,9.556793,8.837466,7.164189,4.338189,4.952647,4.949996,7.614517
.4315598,.5684401,.5858818,.4141181,.9578715,.0421285,3.70877,9.556793,7.033021,6.130542,3.955138,6.450385,4.34717,6.640697
.5962454,.4037546,.8807697,.1192303,.9923835,.0076165,3.70877,9.556793,6.069936,4.406032,3.753312,5.116845,5.066675,7.070253
.8710028,.1289972,.9863916,.0136084,.9991698,.0008302,3.70877,9.556793,4.463149,3.788352,3.713625,4.726859,4.65509,6.198095
.9694145,.0305856,.9952391,.0047609,.9997077,.0002923,3.70877,9.556793,3.887635,3.736612,3.710479,5.011682,4.179649,5.589425
.9776579,.0223421,.996165,.003835,.9997671,.0002329,3.70877,9.556793,3.839427,3.731197,3.710132,5.226906,4.46443,5.833974
.9785206,.0214794,.9978493,.0021507,.9997901,.0002099,3.70877,9.556793,3.834382,3.721348,3.709998,4.948143,4.548041,5.824133
.98009,.0199101,.9984894,.0015106,.9990297,.0009703,3.70877,9.556793,3.825205,3.717604,3.714444,4.763768,4.231219,5.60673
.9806864,.0193136,.9971752,.0028248,.9879678,.0120322,3.70877,9.556793,3.821716,3.72529,3.779135,5.394799,4.155723,5.632583
.9794618,.0205381,.9928595,.0071404,.9088904,.0911096,3.70877,9.556793,3.828877,3.750527,4.241581,6.027369,4.673698,6.074389
.9754409,.0245591,.9790298,.0209702,.6709118,.3290882,3.70877,9.556793,3.852392,3.831404,5.633285,6.663375,5.167326,6.678365
.9625551,.0374449,.9560277,.0439722,.3820007,.6179993,3.70877,9.556793,3.927748,3.965921,7.322845,7.225809,5.643235,7.268343
.9411234,.0588766,.8720841,.1279159,.129078,.870922,3.70877,9.556793,4.053082,4.456825,8.801942,7.673487,6.209391,7.713082
.8629101,.1370899,.4770057,.5229943,.0172653,.9827347,3.70877,9.556793,4.510475,6.767253,9.455826,8.397498,6.606598,8.405678
.4948017,.5051983,.0198198,.9801801,.0003854,.9996146,3.70877,9.556793,6.663181,9.440886,9.55454,9.375547,7.32544,9.375654
.0688255,.9311745,.0007999,.9992001,.0000152,.9999847,3.70877,9.556793,9.1543,9.552115,9.556705,10.88931,8.429197,10.90646
.051104,.9488961,.0005044,.9994956,9.55e-06,.9999905,3.70877,9.556793,9.257936,9.553843,9.556738,11.3769,8.714792,11.54397
.0508286,.9491714,.000182,.999818,3.43e-06,.9999965,3.70877,9.556793,9.259546,9.555729,9.556773,11.85097,8.780759,11.85222
.0505283,.9494717,5.67e-06,.9999943,1.07e-07,.9999999,3.70877,9.556793,9.261303,9.55676,9.556793,12.74731,9.333604,12.74731
.050364,.949636,8.16e-07,.9999992,1.54e-08,1,3.70877,9.556793,9.262263,9.556788,9.556793,15.51025,11.42102,15.51025
.0503594,.9496406,.0000182,.9999818,3.49e-07,.9999996,3.70877,9.556793,9.26229,9.556686,9.556791,12.29176,12.29176,16.83475
.0503757,.9496243,.0007771,.9992229,.0000147,.9999853,3.70877,9.556793,9.262195,9.552249,9.556707,10.28245,10.28245,14.30258
.0510828,.9489172,2.89e-07,.9999997,5.45e-09,1,3.70877,9.556793,9.25806,9.556791,9.556793,11.61238,8.090569,11.61238
.050359,.949641,1.10e-07,.9999999,2.08e-09,1,3.70877,9.556793,9.262293,9.556792,9.556793,16.97341,13.3116,16.97341
.0503588,.9496412,2.24e-08,1,4.23e-10,1,3.70877,9.556793,9.262294,9.556793,9.556793,17.30668,13.78634,17.30668
.0503587,.9496413,2.92e-08,1,5.50e-10,1,3.70877,9.556793,9.262294,9.556793,9.556793,18.65449,14.82626,18.65471
.0503587,.9496413,5.58e-06,.9999944,1.05e-07,.9999999,3.70877,9.556793,9.262295,9.556761,9.556793,14.46502,14.46502,18.14064
.0503639,.9496361,2.40e-06,.9999976,4.53e-08,.9999999,3.70877,9.556793,9.262264,9.556779,9.556793,13.93092,10.89948,13.93092
.0503609,.9496391,1.66e-06,.9999983,3.14e-08,.9999999,3.70877,9.556793,9.262281,9.556784,9.556793,14.37497,11.42169,14.37497
.0503602,.9496398,.0001666,.9998334,3.25e-06,.9999968,3.70877,9.556793,9.262285,9.555819,9.556774,11.54737,11.54737,14.36861
.0505139,.9494861,.0016064,.9983935,.0000327,.9999673,3.70877,9.556793,9.261387,9.547399,9.556602,8.734435,8.595025,10.80423
.0518555,.9481446,.0038268,.9961731,.0000769,.9999232,3.70877,9.556793,9.253541,9.534413,9.556344,8.943102,7.254905,9.04239
.0539243,.9460757,.0032753,.9967247,.0000635,.9999365,3.70877,9.556793,9.241443,9.53764,9.556422,8.432573,6.924034,8.433858
.0534104,.9465896,.0013624,.9986376,.0000264,.9999736,3.70877,9.556793,9.244448,9.548825,9.556639,8.448098,7.225063,8.448396
.0516281,.9483719,.0013674,.9986326,.0000263,.9999737,3.70877,9.556793,9.25487,9.548797,9.55664,9.250766,7.924766,9.255078
.0516328,.9483672,.0009715,.9990284,.0000184,.9999816,3.70877,9.556793,9.254844,9.551111,9.556685,9.561737,8.023977,9.564042
.0512639,.9487361,.0003069,.9996931,5.80e-06,.9999942,3.70877,9.556793,9.257001,9.554998,9.55676,9.78944,8.368082,9.789679
.0506446,.9493554,.0001016,.9998984,1.98e-06,.999998,3.70877,9.556793,9.260622,9.556199,9.556782,10.57913,9.117296,10.58065
.0504534,.9495466,.0016472,.9983528,.0000347,.9999654,3.70877,9.556793,9.261741,9.54716,9.556591,9.828331,9.815725,11.32301
.0518934,.9481066,.0047859,.9952142,.0001138,.9998862,3.70877,9.556793,9.25332,9.528806,9.556128,8.326496,8.084562,9.248471
.0548178,.9451821,.0105388,.9894612,.0002706,.9997294,3.70877,9.556793,9.236217,9.495162,9.555211,7.968068,7.444566,8.510942
.0601781,.9398219,.011929,.988071,.0004063,.9995937,3.70877,9.556793,9.20487,9.487032,9.554418,7.797318,7.070516,7.95793
.0614734,.9385266,.0094017,.9905983,.0009389,.999061,3.70877,9.556793,9.197295,9.501812,9.551303,7.886533,7.040267,7.971292
.0591185,.9408814,.0128242,.9871758,.0048718,.9951282,3.70877,9.556793,9.211066,9.481797,9.528303,7.887259,7.227557,8.072835
.0623075,.9376925,.043543,.956457,.0229177,.9770823,3.70877,9.556793,9.192418,9.302153,9.422771,7.30066,6.993566,7.506404
.0909292,.9090708,.1485886,.8514114,.0469927,.9530073,3.70877,9.556793,9.025037,8.687843,9.281979,6.502852,6.249644,6.661716
.1888039,.8111961,.2728584,.7271417,.0567973,.9432027,3.70877,9.556793,8.452663,7.961112,9.224641,5.848752,5.619293,5.921072
.3045904,.6954096,.429946,.570054,.0570281,.9429719,3.70877,9.556793,7.775541,7.042459,9.223292,6.052836,5.651648,6.12429
.4509545,.5490455,.4453482,.5546518,.0466283,.9533718,3.70877,9.556793,6.919601,6.952386,9.28411,6.394635,5.607582,6.44189
.4653052,.5346947,.3984752,.6015248,.0342835,.9657165,3.70877,9.556793,6.835677,7.226501,9.356303,6.87506,5.96464,6.943046
.421632,.578368,.333094,.666906,.0239641,.9760359,3.70877,9.556793,7.09108,7.608852,9.416651,7.080949,6.236509,7.20149
.3607141,.6392859,.3524925,.6475075,.0163044,.9836956,3.70877,9.556793,7.447329,7.495409,9.461445,7.020358,6.286122,7.17503
.3787883,.6212117,.2334698,.7665302,.0067983,.9932017,3.70877,9.556793,7.34163,8.191457,9.517036,6.914561,6.138781,6.954099
.2678907,.7321093,.0584654,.9415346,.0012797,.9987203,3.70877,9.556793,7.990162,9.214887,9.54931,7.437253,6.519526,7.445452
.1048329,.8951671,.010318,.989682,.0002017,.9997983,3.70877,9.556793,8.943727,9.496453,9.555614,8.247514,7.26423,8.259493
.0599723,.9400277,.0015812,.9984187,.0000304,.9999696,3.70877,9.556793,9.206074,9.547546,9.556616,8.800888,7.705005,8.804131
.051832,.948168,.0009255,.9990745,.0000183,.9999818,3.70877,9.556793,9.253678,9.551381,9.556686,9.74052,8.499221,9.808734
.051221,.948779,.0021482,.9978518,.0000448,.9999552,3.70877,9.556793,9.257252,9.54423,9.556531,9.318309,8.7498,9.981037
.0523602,.9476398,.0040742,.9959258,.0001052,.9998948,3.70877,9.556793,9.250589,9.532967,9.556178,8.615889,8.142057,9.342804
.0541547,.9458452,.0067634,.9932365,.0003805,.9996195,3.70877,9.556793,9.240095,9.517241,9.554568,8.310145,7.779929,9.046017
.0566604,.9433396,.0071861,.9928138,.0021532,.9978468,3.70877,9.556793,9.225442,9.514769,9.544202,8.122429,7.450436,8.561507
.0570543,.9429457,.0080363,.9919637,.0163216,.9836783,3.70877,9.556793,9.223139,9.509797,9.461344,8.06747,7.318541,8.736112
.0578464,.9421536,.0140897,.9859103,.1186408,.8813592,3.70877,9.556793,9.218506,9.474396,8.862979,7.368597,7.017911,8.758828
.0634865,.9365135,.0813341,.9186659,.543765,.456235,3.70877,9.556793,9.185523,9.081149,6.376843,6.552896,6.540946,8.113037
.1261405,.8738595,.285347,.714653,.8583246,.1416753,3.70877,9.556793,8.81912,7.888078,4.537291,5.594151,5.475195,6.811069
.3162265,.6837735,.630861,.369139,.968286,.031714,3.70877,9.556793,7.707493,5.867503,3.894234,5.252515,5.002752,6.063491
.6381541,.3618459,.9504541,.0495459,.9972499,.0027501,3.70877,9.556793,5.824853,3.998516,3.724853,4.929419,4.813092,5.662596
.9359302,.0640698,.9978096,.0021903,.9998849,.0001151,3.70877,9.556793,4.083452,3.721579,3.709443,4.286259,4.194264,4.900965
.9800531,.019947,.9995306,.0004694,.9999756,.0000244,3.70877,9.556793,3.82542,3.711515,3.708912,3.717198,3.578583,4.209394
.9816565,.0183435,.9997798,.0002203,.9999886,.0000114,3.70877,9.556793,3.816043,3.710058,3.708837,3.541503,3.408145,3.986915
.9818887,.0181114,.9998373,.0001627,.9999916,8.42e-06,3.70877,9.556793,3.814686,3.709722,3.708819,3.189564,3.02046,3.532578
.9819422,.0180578,.9998378,.0001623,.9999916,8.39e-06,3.70877,9.556793,3.814373,3.709719,3.708819,3.015388,2.761958,3.320222
.9819427,.0180573,.999846,.0001539,.999992,7.96e-06,3.70877,9.556793,3.81437,3.70967,3.708817,3.042857,2.747537,3.306076
.9819504,.0180496,.9998335,.0001665,.9999914,8.61e-06,3.70877,9.556793,3.814324,3.709744,3.70882,3.002553,2.703691,3.198682
.9819387,.0180613,.9998481,.000152,.9999921,7.88e-06,3.70877,9.556793,3.814393,3.709659,3.708816,3.10698,2.828977,3.263055
.9819523,.0180477,.9997972,.0002028,.9999893,.0000107,3.70877,9.556793,3.814313,3.709956,3.708833,3.064441,2.806232,3.156415
.9819049,.0180951,.9994684,.0005316,.9999708,.0000292,3.70877,9.556793,3.814591,3.711879,3.708941,3.304724,3.079071,3.349157
.9815986,.0184014,.9988852,.0011149,.9999304,.0000695,3.70877,9.556793,3.816382,3.71529,3.709177,4.077081,3.660009,4.124638
.9810551,.0189449,.9971934,.0028066,.9997794,.0002206,3.70877,9.556793,3.81956,3.725183,3.71006,4.566038,4.175121,4.595146
.9794788,.0205212,.9929582,.0070418,.9994116,.0005883,3.70877,9.556793,3.828778,3.749951,3.712211,5.235425,4.676962,5.263006
.9755328,.0244672,.9889305,.0110695,.9991763,.0008237,3.70877,9.556793,3.851855,3.773505,3.713587,5.838032,5.1478,5.8982
.9717799,.02822,.9903922,.0096078,.9993276,.0006724,3.70877,9.556793,3.873801,3.764957,3.712702,5.88869,5.353448,6.006532
.9731419,.0268581,.9917701,.0082299,.9994836,.0005164,3.70877,9.556793,3.865837,3.756899,3.71179,5.650352,5.166267,5.79265
.9744257,.0255742,.9951116,.0048884,.9997014,.0002986,3.70877,9.556793,3.858329,3.737358,3.710516,5.538863,5.083153,5.748745
.9775391,.0224609,.9963413,.0036587,.9997736,.0002264,3.70877,9.556793,3.840122,3.730166,3.710094,5.286134,4.909994,5.455621
.9786849,.0213151,.9961977,.0038023,.9997658,.0002342,3.70877,9.556793,3.833421,3.731006,3.71014,5.203722,4.803016,5.358268
.9785511,.0214489,.9963211,.0036789,.9997711,.000229,3.70877,9.556793,3.834203,3.730284,3.710109,5.326283,4.897751,5.505361
.9786661,.0213339,.9963412,.0036588,.9997575,.0002425,3.70877,9.556793,3.833531,3.730167,3.710188,5.265781,4.892343,5.419424
.9786848,.0213152,.9949928,.0050072,.9996672,.0003328,3.70877,9.556793,3.833422,3.738052,3.710716,5.208025,4.976932,5.299777
.9774284,.0225715,.9946225,.0053775,.9996445,.0003555,3.70877,9.556793,3.840769,3.740218,3.710849,5.434537,5.248524,5.524192
.9770834,.0229166,.99468,.00532,.999648,.0003519,3.70877,9.556793,3.842787,3.739882,3.710828,5.3884,5.257504,5.460289
.977137,.022863,.9946227,.0053773,.9996476,.0003524,3.70877,9.556793,3.842473,3.740216,3.710831,5.328261,5.274662,5.36072
.9770836,.0229164,.9947492,.0052507,.999662,.000338,3.70877,9.556793,3.842785,3.739476,3.710747,5.35511,5.293343,5.394659
.9772016,.0227984,.9945675,.0054325,.9996915,.0003085,3.70877,9.556793,3.842096,3.740539,3.710574,5.372501,5.338941,5.394736
.9770322,.0229678,.9977272,.0022729,.9998731,.0001269,3.70877,9.556793,3.843086,3.722062,3.709512,5.452638,5.477922,5.435889
.9799761,.0200239,.9983342,.0016658,.9999064,.0000936,3.70877,9.556793,3.82587,3.718512,3.709317,4.882108,4.928061,4.850593
.9805418,.0194582,.998339,.001661,.9999008,.0000992,3.70877,9.556793,3.822562,3.718484,3.70935,4.806666,4.792923,4.816381
.9805462,.0194538,.9974042,.0025958,.9998279,.0001721,3.70877,9.556793,3.822536,3.72395,3.709776,4.882067,4.85832,4.898543
.9796753,.0203247,.9963774,.0036226,.9996811,.0003189,3.70877,9.556793,3.82763,3.729955,3.710635,5.278541,5.245199,5.3009
.9787186,.0212814,.9938359,.0061641,.9991794,.0008206,3.70877,9.556793,3.833225,3.744818,3.713569,5.510527,5.330189,5.620605
.9763505,.0236495,.9851985,.0148015,.9979665,.0020335,3.70877,9.556793,3.847073,3.79533,3.720662,5.91604,5.774785,6.006871
.9683027,.0316973,.9725566,.0274434,.9971454,.0028546,3.70877,9.556793,3.894136,3.86926,3.725464,6.369084,6.152766,6.565149
.9565238,.0434762,.9646139,.0353861,.9976425,.0023576,3.70877,9.556793,3.96302,3.915709,3.722557,6.472348,6.309919,6.779056
.9491234,.0508766,.9866005,.0133995,.9992712,.0007288,3.70877,9.556793,4.006298,3.787131,3.713032,6.166741,6.103334,6.68342
.969609,.030391,.9984906,.0015094,.9999213,.0000787,3.70877,9.556793,3.886497,3.717597,3.70923,5.35135,5.324054,5.835004
.9806875,.0193125,.9996817,.0003183,.9999836,.0000164,3.70877,9.556793,3.82171,3.710632,3.708866,4.179661,4.132563,4.48576
.9817972,.0182028,.9999506,.0000494,.9999974,2.54e-06,3.70877,9.556793,3.81522,3.709059,3.708785,3.414703,3.377915,3.52192
.9820479,.0179521,.9999713,.0000287,.9999985,1.48e-06,3.70877,9.556793,3.813755,3.708938,3.708779,2.203474,2.271626,2.115997
.982067,.0179329,.9999705,.0000295,.9999985,1.51e-06,3.70877,9.556793,3.813642,3.708942,3.708779,1.823223,1.890859,1.750906
.9820664,.0179336,.9999709,.0000291,.9999985,1.49e-06,3.70877,9.556793,3.813646,3.70894,3.708779,1.834497,1.854651,1.812876
.9820668,.0179333,.9999804,.0000196,.999999,1.01e-06,3.70877,9.556793,3.813644,3.708885,3.708776,1.814991,1.729833,1.922723
.9820756,.0179244,.9999847,.0000153,.9999992,7.83e-07,3.70877,9.556793,3.813592,3.708859,3.708775,1.578217,1.423067,1.788079
.9820796,.0179204,.9999847,.0000153,.9999992,7.83e-07,3.70877,9.556793,3.813569,3.708859,3.708775,1.363374,1.317874,1.4119
.9820796,.0179204,.9999887,.0000113,.9999994,5.78e-07,3.70877,9.556793,3.813569,3.708836,3.708773,1.466598,1.441834,1.489571
.9820833,.0179166,.999989,.000011,.9999995,5.63e-07,3.70877,9.556793,3.813547,3.708834,3.708773,1.274937,1.313173,1.244443
.9820836,.0179164,.999989,.000011,.9999995,5.63e-07,3.70877,9.556793,3.813545,3.708834,3.708773,1.233032,1.29632,1.185423
.9820836,.0179164,.9999889,.0000111,.9999994,5.71e-07,3.70877,9.556793,3.813545,3.708835,3.708773,1.351381,1.293064,1.397081
.9820835,.0179165,.9999807,.0000193,.999999,9.93e-07,3.70877,9.556793,3.813546,3.708883,3.708776,1.368077,1.335848,1.390817
.9820758,.0179242,.9999617,.0000383,.999998,1.97e-06,3.70877,9.556793,3.813591,3.708994,3.708781,1.848841,1.699826,1.940641
.9820581,.0179419,.9999239,.0000761,.9999961,3.93e-06,3.70877,9.556793,3.813694,3.709215,3.708793,2.310586,2.209126,2.366568
.9820229,.0179771,.9998584,.0001416,.9999926,7.37e-06,3.70877,9.556793,3.8139,3.709598,3.708813,2.764744,2.638925,2.824465
.9819619,.0180381,.9997182,.0002817,.9999851,.0000149,3.70877,9.556793,3.814257,3.710418,3.708857,3.343914,3.036805,3.459788
.9818314,.0181686,.9994374,.0005626,.9999694,.0000306,3.70877,9.556793,3.815021,3.71206,3.708949,3.806304,3.468405,3.910976
.9815696,.0184304,.9989265,.0010735,.9999384,.0000616,3.70877,9.556793,3.816551,3.715048,3.709131,4.322728,3.985987,4.419353
.9810937,.0189063,.9980094,.0019905,.9998788,.0001212,3.70877,9.556793,3.819335,3.720411,3.709479,4.785995,4.336953,4.904437
.9802392,.0197608,.9967461,.0032539,.9998016,.0001984,3.70877,9.556793,3.824332,3.727799,3.70993,5.059835,4.657316,5.180828
.9790621,.0209379,.9965488,.0034512,.9997894,.0002107,3.70877,9.556793,3.831215,3.728953,3.710002,5.143878,5.000844,5.21044
.9788783,.0211217,.996472,.003528,.9997864,.0002136,3.70877,9.556793,3.83229,3.729402,3.710019,5.148761,4.916379,5.291348
.9788067,.0211933,.996506,.003494,.9997967,.0002032,3.70877,9.556793,3.832709,3.729203,3.709959,5.154384,4.936847,5.35499
.9788383,.0211617,.9972454,.0027546,.99985,.00015,3.70877,9.556793,3.832524,3.724879,3.709647,5.064756,4.948065,5.281224
.9795272,.0204728,.9987407,.0012593,.9999347,.0000653,3.70877,9.556793,3.828495,3.716134,3.709152,4.797365,4.723593,5.464302
.9809206,.0190795,.9997936,.0002063,.9999894,.0000106,3.70877,9.556793,3.820347,3.709977,3.708832,4.08531,4.078696,4.914553
.9819016,.0180984,.9999534,.0000466,.9999976,2.39e-06,3.70877,9.556793,3.81461,3.709042,3.708784,2.981632,2.97441,3.724269
.9820505,.0179495,.9999621,.0000379,.999998,1.94e-06,3.70877,9.556793,3.813739,3.708992,3.708781,1.956489,1.906075,2.877074
.9820585,.0179415,.9999942,5.77e-06,.9999997,2.96e-07,3.70877,9.556793,3.813692,3.708804,3.708772,1.564194,1.527188,1.870849
.9820885,.0179115,.9999962,3.73e-06,.9999998,1.91e-07,3.70877,9.556793,3.813517,3.708792,3.708771,.1384992,.1517437,.1041634
.9820904,.0179096,.9999962,3.73e-06,.9999998,1.91e-07,3.70877,9.556793,3.813506,3.708792,3.708771,-.2369698,-.1676456,-.3802484
.9820904,.0179096,.9999964,3.63e-06,.9999998,1.86e-07,3.70877,9.556793,3.813506,3.708791,3.708771,-.2928468,-.1591626,-.5239687
.9820905,.0179095,.9999965,3.44e-06,.9999998,1.77e-07,3.70877,9.556793,3.813505,3.70879,3.708771,-.0218886,-.186655,.133808
.9820907,.0179094,.9999965,3.49e-06,.9999998,1.79e-07,3.70877,9.556793,3.813504,3.70879,3.708771,.0668349,-.2345839,.2793285
.9820906,.0179094,.9999962,3.78e-06,.9999998,1.94e-07,3.70877,9.556793,3.813505,3.708792,3.708771,.0643252,-.1345215,.1909246
.9820904,.0179097,.9999962,3.78e-06,.9999998,1.94e-07,3.70877,9.556793,3.813506,3.708792,3.708771,.0745394,-.0243442,.1396657
.9820904,.0179097,.9999962,3.78e-06,.9999962,3.78e-06,3.70877,9.556793,3.813506,3.708792,3.708792,.0966984,.0219892,.1484894
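
The CSV above appears to be a benchmark results file: the column names suggest predicted, filtered, and smoothed regime probabilities (const_p*, const_f*, const_sm*) and the corresponding fitted values. Below is a hedged sketch of how such a file is typically consumed in a regression test; the file name and the model specification are assumptions, not shown in this diff.

# Hypothetical test sketch -- file name and model specification are assumed.
import pandas as pd
from numpy.testing import assert_allclose

bench = pd.read_csv("results_predict.csv")  # hypothetical path

# Given a fitted two-regime model `res` (e.g. a MarkovRegression with a
# constant), the smoothed probabilities could be checked against the
# const_sm1/const_sm2 columns:
# assert_allclose(res.smoothed_marginal_probabilities,
#                 bench[["const_sm1", "const_sm2"]].to_numpy(), atol=1e-6)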

View File

@ -0,0 +1,137 @@
switchar2_p1,switchar2_p2,switchar2_f1,switchar2_f2,switchar2_sm1,switchar2_sm2,switchar2_yhat1,switchar2_yhat2,switchar2_pyhat,switchar2_fyhat,switchar2_syhat
,,,,,,,,,,
,,,,,,,,,,
,,,,,,,,,,
.3655099,.6344901,.1908335,.8091666,.2039875,.7960125,.6668599,.8056132,.7548975,.7791345,1.175943
.3611798,.6388202,.3588357,.6411644,.4560583,.5439417,.2757178,.4150288,.3647124,.365039,.6393359
.3653444,.6346556,.8787717,.1212283,.861632,.138368,-.0489651,.8798761,.5405291,.0636367,-.0475077
.3782332,.6217668,.2201452,.7798548,.3592777,.6407223,.0795501,.2984534,.2156569,.2502629,.7076135
.3619064,.6380936,.0060957,.9939043,.0082053,.9917946,-.2810024,1.033839,.5579897,1.025824,1.37325
.3566003,.6433997,.11223,.88777,.0884842,.9115158,.1003189,1.192435,.8029861,1.069867,1.505547
.3592313,.6407687,.3765296,.6234704,.0499525,.9500476,.3861393,.8377848,.6755396,.6677269,1.195445
.365783,.634217,.7021681,.2978319,.4766508,.5233492,.2789021,.6074251,.4872569,.3767467,.5998272
.3738553,.6261446,.8631248,.1368752,.9677929,.0322071,-.0134841,.280744,.1707453,.0267884,-.5156572
.3778453,.6221547,.9939212,.0060788,.9937111,.0062889,-.3697357,.4270407,.1259825,-.3648922,-1.069221
.3810876,.6189124,.6417809,.3582191,.6563745,.3436256,-.4973104,.2940543,-.007525,-.2138285,-.4417256
.3723584,.6276416,.0010628,.9989372,.0006934,.9993066,-.6216475,.7895434,.2640746,.7880436,1.270005
.3564756,.6435244,.012374,.987626,.0291472,.9708528,-.349462,1.204387,.650478,1.18516,1.405585
.356756,.643244,.0119621,.9880379,.0092829,.9907171,.0968139,.9555007,.649159,.9452289,1.321816
.3567457,.6432543,.3283875,.6716126,.4892569,.510743,.2540295,1.095943,.7955943,.8194695,1.050847
.3645897,.6354104,.3161621,.6838379,.1268433,.8731567,.3827603,.5847964,.5111361,.5209202,1.080783
.3642866,.6357134,.3383332,.6616668,.3105439,.6894561,.1474826,.8656664,.6040417,.622681,.978343
.3648362,.6351638,.6774012,.3225988,.8668257,.1331743,.1532161,.6805145,.4881369,.3233219,.2800821
.3732414,.6267586,.3011953,.6988047,.4282137,.5717863,-.0017978,.3782444,.2363969,.2637775,.556136
.3639156,.6360844,.5726407,.4273593,.7200522,.2799478,-.3046013,.8760991,.4464239,.199982,.1164198
.3706445,.6293555,.0987641,.9012359,.1196774,.8803226,-.1050289,.5673033,.3181071,.500901,1.043566
.3588975,.6411025,.2103667,.7896333,.1060183,.8939817,-.210565,1.05934,.6035746,.7921946,1.173387
.361664,.638336,.6925763,.3074237,.2164227,.7835773,.0497307,.7404512,.4906424,.2620745,.7977074
.3736176,.6263824,.2575079,.7424921,.005824,.994176,.0085827,.426209,.2701765,.318667,.8089374
.3628326,.6371674,.997604,.002396,.9961206,.0038793,-.264759,.896194,.4749624,-.2619774,-.920441
.3811789,.6188211,.9977646,.0022354,.99639,.00361,-.1612,-.0384718,-.0852532,-.1609257,-1.00337
.3811829,.6188171,.0010599,.9989401,.0011916,.9988084,-.7774606,.2547778,-.1386938,.2536838,.7459083
.3564755,.6435245,1.58e-08,1,1.49e-08,1,-.8607886,1.348258,.560787,1.348258,1.54865
.3564492,.6435508,.001224,.998776,.0006004,.9993997,.0446378,1.350136,.8847919,1.348538,1.932688
.3564796,.6435204,.3604717,.6395283,.2442461,.7557539,.5060323,.9844004,.8138719,.8119622,1.324627
.365385,.634615,.33878,.66122,.1291146,.8708854,.4691985,.5743639,.535938,.5387359,.9758657
.3648473,.6351528,.7996124,.2003876,.8402012,.1597988,.267849,.996059,.7303736,.4137732,.5672621
.3762709,.6237291,.3155236,.6844764,.228146,.771854,.2605178,.0665302,.1395221,.1277379,.5768158
.3642708,.6357293,.0098121,.9901879,.0029106,.9970894,-.3134682,1.041803,.5481176,1.028505,1.364321
.3566925,.6433076,.886528,.113472,.846061,.153939,.0416518,1.097726,.7210326,.1614867,.2476345
.3784254,.6215746,.5235677,.4764323,.4480896,.5519103,.2633849,.1340759,.1830097,.2017779,.5294089
.369428,.630572,.9513308,.0486692,.9706135,.0293865,-.3070059,.7367491,.3511568,-.2562071,-.4980835
.3800319,.6199682,.0439671,.9560329,.0420237,.9579763,-.2055761,.2772512,.0937615,.2560227,.9187043
.3575391,.6424609,.0117721,.988228,.011024,.9889759,-.4756168,1.242086,.6279398,1.221865,1.406804
.356741,.643259,.0498644,.9501356,.1314053,.8685947,.0266372,.923333,.6034449,.8786198,1.105651
.3576853,.6423147,.0157562,.9842438,.0117993,.9882007,.1151268,.926867,.6365194,.9140771,1.344204
.3568398,.6431602,.2576703,.7423297,.2752726,.7247275,.2603368,1.133504,.8219233,.9085149,1.321995
.3628366,.6371633,.3590391,.6409609,.2117085,.7882915,.4166265,.6422516,.5603865,.5612433,1.022684
.3653495,.6346505,.2964521,.703548,.189856,.810144,.1760955,.7276233,.5261229,.5641218,.9811047
.363798,.636202,.7142375,.2857625,.8386366,.1613634,.0733605,.7645192,.513077,.2708678,.1629116
.3741545,.6258455,.0818044,.9181957,.0717512,.9282488,.0274522,.3683135,.2407787,.3404296,1.003676
.3584771,.6415229,.0551477,.9448522,.0749342,.9250659,-.2439651,1.182559,.6711826,1.103889,1.311399
.3578163,.6421837,.0780104,.9219896,.0646097,.9353903,.1381912,.890511,.6213188,.8318223,1.205812
.358383,.641617,.4395099,.5604901,.8584533,.1415467,.2015714,.983662,.703374,.6399255,.5431973
.3673443,.6326557,.1202812,.8797188,.0779433,.9220567,.2680204,.5275288,.4321998,.4963148,1.241017
.3594309,.6405691,.4637097,.5362903,.5746583,.4253416,.1109875,1.24799,.8393159,.7207507,1.105171
.3679442,.6320558,.3577688,.6422312,.2465385,.7534615,.4165906,.4647978,.4470603,.4475508,.9700944
.365318,.634682,.4642533,.5357467,.8108636,.1891364,.0563841,.8096159,.5344468,.4599256,.4282598
.3679577,.6320423,.0641185,.9358815,.0669424,.9330576,.0497228,.5854846,.388347,.5511324,1.238735
.3580387,.6419613,.1949094,.8050906,.2332901,.7667099,.0105471,1.30052,.8386597,1.049092,1.393703
.3612809,.6387191,.300744,.699256,.459328,.540672,.3948515,.7132688,.5982307,.6175067,1.031192
.3639044,.6360956,.1455622,.8544378,.1226258,.8773742,.2664956,.8734233,.6525596,.7850776,1.452815
.3600576,.6399424,.2920372,.7079628,.1179143,.8820857,.3485572,1.085151,.8199348,.8700382,1.473615
.3636886,.6363115,.3610975,.6389025,.6701577,.3298423,.5294808,.8166577,.7122148,.7129588,1.117233
.3654005,.6345995,.309507,.690493,.3870208,.6129792,.2398921,.3660286,.3199383,.3269885,.7566582
.3641216,.6358784,.4339314,.5660686,.6008897,.3991103,-.1082219,.9467762,.5626286,.4889795,.5728013
.367206,.632794,.3232115,.6767884,.3871084,.6128916,.0531779,.6076186,.4040247,.428417,.772496
.3644613,.6355386,.2838635,.7161365,.5072133,.4927867,-.1068446,.763018,.4459867,.5160958,.6046163
.3634859,.6365141,.068095,.931905,.0823548,.9176452,-.0959154,.7668481,.4532457,.7080982,1.19073
.3581372,.6418628,.421362,.578638,.4438438,.5561562,-.0416897,1.056154,.6629754,.5935645,.8270106
.3668944,.6331056,.1854353,.8145647,.2989025,.7010975,.1643479,.5480497,.4072717,.4768978,.8094192
.361046,.638954,.0738302,.9261699,.0187918,.9812082,-.0512716,.9540159,.5910608,.8797954,1.360345
.3582794,.6417206,.3792882,.6207117,.2526048,.7473952,.1375967,1.016455,.701578,.6831142,1.095962
.3658514,.6341486,.6069332,.3930668,.8806944,.1193056,.2456208,.568009,.4500628,.3723409,.3789999
.3714945,.6285055,.0663378,.9336622,.0777551,.922245,-.0352519,.4446202,.2663504,.4127866,1.077137
.3580937,.6419063,.7386758,.2613242,.7365384,.2634616,-.2241057,1.187275,.6818687,.1447223,.3215021
.3747603,.6252397,.3813017,.6186984,.2480231,.7519769,.1966081,.3553618,.2958672,.2948288,.7948864
.3659014,.6340986,.8316975,.1683026,.9079095,.0920905,-.180831,.8179061,.4524668,-.012741,-.1486508
.3770663,.6229337,.87519,.12481,.8067881,.1932119,-.0632746,.3479188,.1928716,-.0119535,-.2175115
.3781444,.6218556,.6100608,.3899392,.4258435,.5741565,-.3230339,.4675073,.1685686,-.0147709,.2901475
.3715721,.6284279,.0172509,.9827491,.0017807,.9982194,-.3892398,.7295551,.3138422,.7102549,1.106248
.3568769,.6431231,.9889793,.0110207,.9996961,.0003039,-.2622162,1.142509,.641195,-.2467351,-.538968
.3809651,.6190349,.0025518,.9974482,.002083,.997917,.0853241,.028174,.0499461,.0283198,.7719029
.3565125,.6434875,.9905953,.0094047,.9964426,.0035574,-.2815244,1.734997,1.016082,-.2625596,-.0889179
.3810052,.6189948,.3292068,.6707932,.1744831,.8255169,.5766323,-.0012819,.2189064,.1889714,.7750577
.36461,.63539,.6318375,.3681625,.8349881,.165012,-.2396213,.8475749,.4511723,.1606435,.0031042
.3721119,.6278881,.0111827,.9888173,.0097676,.9902325,-.0823655,.5193925,.2954712,.5126633,1.249954
.3567264,.6432736,.0320511,.9679489,.0413362,.9586638,-.1129587,1.408029,.8654523,1.359279,1.659853
.3572437,.6427563,.3648334,.6351666,.4673626,.5326374,.4163951,.8898959,.7207407,.7171471,1.113082
.3654931,.6345069,.1939078,.8060921,.3887317,.6112683,.337547,.5993049,.5036342,.548548,.9766124
.361256,.638744,.1323635,.8676365,.0406093,.9593907,.167944,1.062599,.7393993,.9441791,1.577669
.3597304,.6402696,.498467,.501533,.3508248,.6491752,.4220645,1.044722,.8207333,.7343479,1.181616
.3688058,.6311942,.356284,.643716,.3692181,.6307819,.3744644,.2590503,.3016157,.3001705,.5756877
.3652812,.6347188,.0709767,.9290233,.089928,.910072,-.2463651,.6087282,.2963787,.5480365,.8741023
.3582087,.6417913,.956079,.043921,.9115888,.0884112,-.291689,1.0384,.5619506,-.2332702,-.527754
.3801495,.6198505,.3854555,.6145445,.2328662,.7671338,.0405197,.2029729,.1412164,.1403544,.6426073
.3660043,.6339957,.994001,.005999,.9913173,.0086827,-.3939995,.8796137,.4134658,-.3863591,-.6873238
.3810896,.6189104,.9126762,.0873239,.8453703,.1546297,-.205447,.0969485,-.0182913,-.1790407,-.7267979
.3790736,.6209264,.9998629,.0001371,.9999191,.0000809,-.5811837,.5661494,.1312257,-.5810264,-1.32168
.3812349,.6187651,.0012389,.9987611,.0011699,.9988301,-.5924893,.1384216,-.1402272,.137516,.6240655
.3564799,.6435201,9.31e-06,.9999907,7.15e-06,.9999928,-.7767551,1.472514,.6706945,1.472493,1.74309
.3564495,.6435506,.0706543,.9293457,.1225256,.8774744,.077838,1.061299,.7107452,.9918137,1.303984
.3582007,.6417993,.0964938,.9035062,.0505851,.9494149,.230602,.8202751,.6090538,.7633753,1.156554
.3588412,.6411588,.6011773,.3988228,.6002312,.3997688,.2267366,1.015415,.7324044,.5412793,.8228542
.3713519,.6286482,.4333998,.5666002,.4540614,.5459386,.3145667,.3836636,.3580043,.353717,.718225
.3671928,.6328072,.1700871,.8299129,.1971657,.8028343,-.1043802,.7024823,.4062082,.5652454,.9236737
.3606655,.6393345,.0853925,.9146075,.0797246,.9202754,-.1158232,.940254,.5593634,.8500729,1.215137
.358566,.641434,.0824606,.9175394,.1073687,.8926314,.0558167,.951513,.6303467,.8776534,1.244283
.3584934,.6415067,.0798316,.9201683,.0236819,.9763181,.1818834,.9462167,.6722083,.8851987,1.348312
.3584282,.6415718,.8117195,.1882805,.7113619,.2886381,.2943652,1.012657,.7552012,.4296057,.8013623
.376571,.623429,.3287594,.6712406,.7905903,.2094096,.3177593,.1023483,.1834659,.1731667,.4115401
.3645989,.6354011,.0001937,.9998063,.0000534,.9999465,-.2536942,1.005844,.5466175,1.005599,1.679178
.356454,.643546,.5861492,.4138509,.6666598,.3333402,.2368061,1.507272,1.054409,.7625895,1.348948
.3709793,.6290207,.415696,.584304,.1972527,.8027472,.6850777,.2529412,.4132549,.4325786,.9398638
.3667539,.6332461,.6525174,.3474827,.6578275,.3421725,.1057528,.8636656,.5856981,.3691144,.5881053
.3726245,.6273755,.5579455,.4420545,.7796769,.2203231,.1100055,.3696976,.27293,.2248036,.1860365
.3702802,.6297198,.1001561,.8998439,.0293324,.9706676,-.2476939,.6137009,.2947435,.527427,1.125174
.358932,.641068,.8126831,.1873169,.1429511,.8570489,-.2804277,1.033013,.5615768,-.0343982,.9548979
.3765949,.6234051,.1139573,.8860427,.0077561,.9922439,.0479682,.3432032,.2320192,.309559,.7096683
.3592741,.6407259,.9999623,.0000377,.9999784,.0000215,-.2872946,1.078295,.5876743,-.2872432,-.9256884
.3812374,.6187626,.1415462,.8584538,.0547459,.9452541,-.1671722,-.3660731,-.2902446,-.3379194,.0901736
.359958,.640042,8.65e-06,.9999914,7.12e-06,.9999928,-.9680839,1.24652,.4493558,1.246501,1.469094
.3564494,.6435506,.0013372,.9986628,.0005885,.9994115,-.2213253,1.130314,.6485232,1.128507,1.56611
.3564824,.6435176,.94006,.0599401,.845143,.154857,.1735296,1.078947,.7561818,.2278004,.4941151
.3797525,.6202475,.4100294,.5899706,.0490023,.9509977,.3287447,.0638878,.1644679,.1724869,.7057356
.3666135,.6333866,.9941261,.0058739,.9961222,.0038778,-.3074672,.8746909,.4412958,-.3005233,-.6183509
.3810927,.6189073,.9865972,.0134027,.996933,.003067,-.1776888,.0368088,-.0449347,-.1748139,-1.010209
.3809061,.619094,.0203599,.9796401,.0650344,.9349656,-.6692971,.3808981,-.0191276,.3595163,.7818835
.3569539,.6430461,.9790046,.0209954,.9652825,.0347175,-.751592,1.166428,.4817834,-.7113224,-1.280465
.3807178,.6192821,.3201608,.6798392,.1632341,.8367659,-.1369031,.2466296,.1006118,.1238374,.7574543
.3643857,.6356143,.0228407,.9771593,.0520427,.9479573,-.4943302,.9059532,.3957099,.8739698,.9687712
.3570154,.6429846,.0027989,.9972011,.0035125,.9964876,-.2296742,1.026684,.5781449,1.023168,1.395377
.3565186,.6434814,.1375207,.8624793,.1369265,.8630735,.132511,1.19744,.8177727,1.05099,1.492269
.3598582,.6401418,.2749309,.7250692,.6349947,.3650052,.3607773,.7020003,.5792084,.6081876,.8663656
.3632645,.6367355,.0747665,.9252335,.04608,.9539199,.2392728,.9091664,.6658179,.8590808,1.638283
.3583026,.6416974,.3430003,.6569996,.2439876,.7560124,.3945461,1.222072,.9255675,.9382305,1.486441
.3649519,.6350481,.2784336,.7215664,.2026928,.7973073,.5596761,.5065153,.5259164,.521317,.917065
.3633513,.6366487,.4820935,.5179065,.4820935,.5179065,.0647946,.6038795,.4080022,.3439901,.4402459

View File

@ -0,0 +1,905 @@
"""
Tests for Markov Autoregression models
Author: Chad Fulton
License: BSD-3
"""
import warnings
import os

import numpy as np
from numpy.testing import assert_equal, assert_allclose
import pandas as pd
import pytest

from statsmodels.tools import add_constant
from statsmodels.tsa.regime_switching import markov_autoregression

current_path = os.path.dirname(os.path.abspath(__file__))
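
# Quarterly real GNP growth rates (Hamilton's 1989 dataset), used as the
# endogenous series in the tests below.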
rgnp = [2.59316421, 2.20217133, 0.45827562, 0.9687438,
-0.24130757, 0.89647478, 2.05393219, 1.73353648,
0.93871289, -0.46477833, -0.80983406, -1.39763689,
-0.39886093, 1.1918416, 1.45620048, 2.11808228,
1.08957863, 1.32390273, 0.87296367, -0.19773273,
0.45420215, 0.07221876, 1.1030364, 0.82097489,
-0.05795795, 0.58447772, -1.56192672, -2.05041027,
0.53637183, 2.33676839, 2.34014559, 1.2339263,
1.8869648, -0.45920792, 0.84940469, 1.70139849,
-0.28756312, 0.09594627, -0.86080289, 1.03447127,
1.23685944, 1.42004502, 2.22410631, 1.30210173,
1.03517699, 0.9253425, -0.16559951, 1.3444382,
1.37500131, 1.73222184, 0.71605635, 2.21032143,
0.85333031, 1.00238776, 0.42725441, 2.14368343,
1.43789184, 1.57959926, 2.27469826, 1.95962656,
0.25992399, 1.01946914, 0.49016398, 0.5636338,
0.5959546, 1.43082857, 0.56230122, 1.15388393,
1.68722844, 0.77438205, -0.09647045, 1.39600146,
0.13646798, 0.55223715, -0.39944872, -0.61671102,
-0.08722561, 1.2101835, -0.90729755, 2.64916158,
-0.0080694, 0.51111895, -0.00401437, 2.16821432,
1.92586732, 1.03504717, 1.85897219, 2.32004929,
0.25570789, -0.09855274, 0.89073682, -0.55896485,
0.28350255, -1.31155407, -0.88278776, -1.97454941,
1.01275265, 1.68264723, 1.38271284, 1.86073637,
0.4447377, 0.41449001, 0.99202275, 1.36283576,
1.59970522, 1.98845816, -0.25684232, 0.87786949,
3.1095655, 0.85324478, 1.23337317, 0.00314302,
-0.09433369, 0.89883322, -0.19036628, 0.99772376,
-2.39120054, 0.06649673, 1.26136017, 1.91637838,
-0.3348029, 0.44207108, -1.40664911, -1.52129889,
0.29919869, -0.80197448, 0.15204792, 0.98585027,
2.13034606, 1.34397924, 1.61550522, 2.70930099,
1.24461412, 0.50835466, 0.14802167]
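
# Binary (0/1) recession-indicator series aligned with the rgnp data.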
rec = [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]
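
# test_predict builds small MarkovAutoregression models and checks the private
# _resid helper, which returns one residual series per combination of current
# and lagged regimes.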
def test_predict():
# AR(1) without mean, k_regimes=2
endog = np.ones(10)
    mod = markov_autoregression.MarkovAutoregression(
        endog, k_regimes=2, order=1, trend='n')
assert_equal(mod.nobs, 9)
assert_equal(mod.endog, np.ones(9))
params = np.r_[0.5, 0.5, 1., 0.1, 0.5]
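    # Assumed parameter ordering, inferred from the residual checks below:
    # two regime-transition parameters, the (non-switching) error variance,
    # then one AR(1) coefficient per regime (0.1 for regime 0, 0.5 for
    # regime 1).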
mod_resid = mod._resid(params)
resids = np.zeros((2, 2, mod.nobs))
# Resids when: S_{t} = 0
resids[0, :, :] = np.ones(9) - 0.1 * np.ones(9)
assert_allclose(mod_resid[0, :, :], resids[0, :, :])
# Resids when: S_{t} = 1
resids[1, :, :] = np.ones(9) - 0.5 * np.ones(9)
assert_allclose(mod_resid[1, :, :], resids[1, :, :])
# AR(1) with mean, k_regimes=2
endog = np.arange(10)
mod = markov_autoregression.MarkovAutoregression(
endog, k_regimes=2, order=1)
assert_equal(mod.nobs, 9)
assert_equal(mod.endog, np.arange(1, 10))
params = np.r_[0.5, 0.5, 2., 3., 1., 0.1, 0.5]
mod_resid = mod._resid(params)
resids = np.zeros((2, 2, mod.nobs))
# Resids when: S_t = 0, S_{t-1} = 0
resids[0, 0, :] = (np.arange(1, 10) - 2.) - 0.1 * (np.arange(9) - 2.)
assert_allclose(mod_resid[0, 0, :], resids[0, 0, :])
# Resids when: S_t = 0, S_{t-1} = 1
resids[0, 1, :] = (np.arange(1, 10) - 2.) - 0.1 * (np.arange(9) - 3.)
assert_allclose(mod_resid[0, 1, :], resids[0, 1, :])
# Resids when: S_t = 1, S_{t-1} = 0
resids[1, 0, :] = (np.arange(1, 10) - 3.) - 0.5 * (np.arange(9) - 2.)
assert_allclose(mod_resid[1, 0, :], resids[1, 0, :])
# Resids when: S_t = 1, S_{t-1} = 1
resids[1, 1, :] = (np.arange(1, 10) - 3.) - 0.5 * (np.arange(9) - 3.)
assert_allclose(mod_resid[1, 1, :], resids[1, 1, :])
# AR(2) with mean, k_regimes=3
endog = np.arange(10)
mod = markov_autoregression.MarkovAutoregression(
endog, k_regimes=3, order=2)
assert_equal(mod.nobs, 8)
assert_equal(mod.endog, np.arange(2, 10))
params = np.r_[[0.3] * 6, 2., 3., 4, 1., 0.1, 0.5, 0.8, -0.05, -0.25, -0.4]
mod_resid = mod._resid(params)
resids = np.zeros((3, 3, 3, mod.nobs))
# Resids when: S_t = 0, S_{t-1} = 0, S_{t-2} = 0
resids[0, 0, 0, :] = (
(np.arange(2, 10) - 2.) -
0.1 * (np.arange(1, 9) - 2.) -
(-0.05) * (np.arange(8) - 2.))
assert_allclose(mod_resid[0, 0, 0, :], resids[0, 0, 0, :])
# Resids when: S_t = 1, S_{t-1} = 0, S_{t-2} = 0
resids[1, 0, 0, :] = (
(np.arange(2, 10) - 3.) -
0.5 * (np.arange(1, 9) - 2.) -
(-0.25) * (np.arange(8) - 2.))
assert_allclose(mod_resid[1, 0, 0, :], resids[1, 0, 0, :])
# Resids when: S_t = 0, S_{t-1} = 2, S_{t-2} = 1
resids[0, 2, 1, :] = (
(np.arange(2, 10) - 2.) -
0.1 * (np.arange(1, 9) - 4.) -
(-0.05) * (np.arange(8) - 3.))
assert_allclose(mod_resid[0, 2, 1, :], resids[0, 2, 1, :])
# AR(1) with mean + non-switching exog
endog = np.arange(10)
exog = np.r_[0.4, 5, 0.2, 1.2, -0.3, 2.5, 0.2, -0.7, 2., -1.1]
mod = markov_autoregression.MarkovAutoregression(
endog, k_regimes=2, order=1, exog=exog)
assert_equal(mod.nobs, 9)
assert_equal(mod.endog, np.arange(1, 10))
params = np.r_[0.5, 0.5, 2., 3., 1.5, 1., 0.1, 0.5]
mod_resid = mod._resid(params)
resids = np.zeros((2, 2, mod.nobs))
# Resids when: S_t = 0, S_{t-1} = 0
resids[0, 0, :] = (
(np.arange(1, 10) - 2. - 1.5 * exog[1:]) -
0.1 * (np.arange(9) - 2. - 1.5 * exog[:-1]))
assert_allclose(mod_resid[0, 0, :], resids[0, 0, :])
# Resids when: S_t = 0, S_{t-1} = 1
resids[0, 1, :] = (
(np.arange(1, 10) - 2. - 1.5 * exog[1:]) -
0.1 * (np.arange(9) - 3. - 1.5 * exog[:-1]))
assert_allclose(mod_resid[0, 1, :], resids[0, 1, :])
# Resids when: S_t = 1, S_{t-1} = 0
resids[1, 0, :] = (
(np.arange(1, 10) - 3. - 1.5 * exog[1:]) -
0.5 * (np.arange(9) - 2. - 1.5 * exog[:-1]))
assert_allclose(mod_resid[1, 0, :], resids[1, 0, :])
# Resids when: S_t = 1, S_{t-1} = 1
resids[1, 1, :] = (
(np.arange(1, 10) - 3. - 1.5 * exog[1:]) -
0.5 * (np.arange(9) - 3. - 1.5 * exog[:-1]))
assert_allclose(mod_resid[1, 1, :], resids[1, 1, :])
def test_conditional_loglikelihoods():
    # AR(1) with mean, k_regimes=2, non-switching variance
endog = np.ones(10)
mod = markov_autoregression.MarkovAutoregression(
endog, k_regimes=2, order=1)
assert_equal(mod.nobs, 9)
assert_equal(mod.endog, np.ones(9))
params = np.r_[0.5, 0.5, 2., 3., 2., 0.1, 0.5]
resid = mod._resid(params)
conditional_likelihoods = (
np.exp(-0.5 * resid**2 / 2) / np.sqrt(2 * np.pi * 2))
assert_allclose(mod._conditional_loglikelihoods(params),
np.log(conditional_likelihoods))
    # AR(1) with mean, k_regimes=3, switching variance
endog = np.ones(10)
mod = markov_autoregression.MarkovAutoregression(
endog, k_regimes=3, order=1, switching_variance=True)
assert_equal(mod.nobs, 9)
assert_equal(mod.endog, np.ones(9))
params = np.r_[[0.3]*6, 2., 3., 4., 1.5, 3., 4.5, 0.1, 0.5, 0.8]
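    # Assumed layout, consistent with the per-regime variance checks below:
    # six transition parameters, three regime means, three regime variances
    # (1.5, 3., 4.5), and three AR(1) coefficients.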
mod_conditional_loglikelihoods = mod._conditional_loglikelihoods(params)
conditional_likelihoods = mod._resid(params)
# S_t = 0
conditional_likelihoods[0, :, :] = (
np.exp(-0.5 * conditional_likelihoods[0, :, :]**2 / 1.5) /
np.sqrt(2 * np.pi * 1.5))
assert_allclose(mod_conditional_loglikelihoods[0, :, :],
np.log(conditional_likelihoods[0, :, :]))
# S_t = 1
conditional_likelihoods[1, :, :] = (
np.exp(-0.5 * conditional_likelihoods[1, :, :]**2 / 3.) /
np.sqrt(2 * np.pi * 3.))
assert_allclose(mod_conditional_loglikelihoods[1, :, :],
np.log(conditional_likelihoods[1, :, :]))
# S_t = 2
conditional_likelihoods[2, :, :] = (
np.exp(-0.5 * conditional_likelihoods[2, :, :]**2 / 4.5) /
np.sqrt(2 * np.pi * 4.5))
assert_allclose(mod_conditional_loglikelihoods[2, :, :],
np.log(conditional_likelihoods[2, :, :]))
class MarkovAutoregression:
@classmethod
def setup_class(cls, true, endog, atol=1e-5, rtol=1e-7, **kwargs):
cls.model = markov_autoregression.MarkovAutoregression(endog, **kwargs)
cls.true = true
cls.result = cls.model.smooth(cls.true['params'])
cls.atol = atol
cls.rtol = rtol
def test_llf(self):
assert_allclose(self.result.llf, self.true['llf'], atol=self.atol,
rtol=self.rtol)
def test_fit(self, **kwargs):
# Test fitting against Stata
with warnings.catch_warnings():
warnings.simplefilter("ignore")
res = self.model.fit(disp=False, **kwargs)
assert_allclose(res.llf, self.true['llf_fit'], atol=self.atol,
rtol=self.rtol)
@pytest.mark.smoke
def test_fit_em(self, **kwargs):
# Test EM fitting (smoke test)
res_em = self.model._fit_em(**kwargs)
assert_allclose(res_em.llf, self.true['llf_fit_em'], atol=self.atol,
rtol=self.rtol)
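# Note: the regression-test arrays below appear to be indexed as
# [S_t, S_{t-1}, S_{t-2}, t] for the filtered/smoothed joint probabilities
# (with one extra leading lag dimension for the predicted joint
# probabilities), over the 8 in-sample periods of the short AR(2) example.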
hamilton_ar2_short_filtered_joint_probabilities = np.array([
[[[4.99506987e-02, 6.44048275e-04, 6.22227140e-05,
4.45756755e-06, 5.26645567e-07, 7.99846146e-07,
1.19425705e-05, 6.87762063e-03],
[1.95930395e-02, 3.25884335e-04, 1.12955091e-04,
3.38537103e-04, 9.81927968e-06, 2.71696750e-05,
5.83828290e-03, 7.64261509e-02]],
[[1.97113193e-03, 9.50372207e-05, 1.98390978e-04,
1.88188953e-06, 4.83449400e-07, 1.14872860e-05,
4.02918239e-06, 4.35015431e-04],
[2.24870443e-02, 1.27331172e-03, 9.62155856e-03,
4.04178695e-03, 2.75516282e-04, 1.18179572e-02,
5.99778157e-02, 1.48149567e-01]]],
[[[6.70912859e-02, 1.84223872e-02, 2.55621792e-04,
4.48500688e-05, 7.80481515e-05, 2.73734559e-06,
7.59835896e-06, 1.42930726e-03],
[2.10053328e-02, 7.44036383e-03, 3.70388879e-04,
2.71878370e-03, 1.16152088e-03, 7.42182691e-05,
2.96490192e-03, 1.26774695e-02]],
[[8.09335679e-02, 8.31016518e-02, 2.49149080e-02,
5.78825626e-04, 2.19019941e-03, 1.20179130e-03,
7.83659430e-05, 2.76363377e-03],
[7.36967899e-01, 8.88697316e-01, 9.64463954e-01,
9.92270877e-01, 9.96283886e-01, 9.86863839e-01,
9.31117063e-01, 7.51241236e-01]]]])
hamilton_ar2_short_predicted_joint_probabilities = np.array([[
[[[1.20809334e-01, 3.76964436e-02, 4.86045844e-04,
4.69578023e-05, 3.36400588e-06, 3.97445190e-07,
6.03622290e-07, 9.01273552e-06],
[3.92723623e-02, 1.47863379e-02, 2.45936108e-04,
8.52441571e-05, 2.55484811e-04, 7.41034525e-06,
2.05042201e-05, 4.40599447e-03]],
[[4.99131230e-03, 1.48756005e-03, 7.17220245e-05,
1.49720314e-04, 1.42021122e-06, 3.64846209e-07,
8.66914462e-06, 3.04071516e-06],
[4.70476003e-02, 1.69703652e-02, 9.60933974e-04,
7.26113047e-03, 3.05022748e-03, 2.07924699e-04,
8.91869322e-03, 4.52636381e-02]]],
[[[4.99131230e-03, 6.43506069e-03, 1.76698327e-03,
2.45179642e-05, 4.30179435e-06, 7.48598845e-06,
2.62552503e-07, 7.28796600e-07],
[1.62256192e-03, 2.01472650e-03, 7.13642497e-04,
3.55258493e-05, 2.60772139e-04, 1.11407276e-04,
7.11864528e-06, 2.84378568e-04]],
[[5.97950448e-03, 7.76274317e-03, 7.97069493e-03,
2.38971340e-03, 5.55180599e-05, 2.10072977e-04,
1.15269812e-04, 7.51646942e-06],
[5.63621989e-02, 7.06862760e-02, 8.52394030e-02,
9.25065601e-02, 9.51736612e-02, 9.55585689e-02,
9.46550451e-02, 8.93080931e-02]]]],
[[[[3.92723623e-02, 1.22542551e-02, 1.58002431e-04,
1.52649118e-05, 1.09356167e-06, 1.29200377e-07,
1.96223855e-07, 2.92983500e-06],
[1.27665503e-02, 4.80670161e-03, 7.99482261e-05,
2.77109335e-05, 8.30522919e-05, 2.40893443e-06,
6.66545485e-06, 1.43228843e-03]],
[[1.62256192e-03, 4.83571884e-04, 2.33151963e-05,
4.86706634e-05, 4.61678312e-07, 1.18603191e-07,
2.81814142e-06, 9.88467229e-07],
[1.52941031e-02, 5.51667911e-03, 3.12377744e-04,
2.36042810e-03, 9.91559466e-04, 6.75915830e-05,
2.89926399e-03, 1.47141776e-02]]],
[[[4.70476003e-02, 6.06562252e-02, 1.66554040e-02,
2.31103828e-04, 4.05482745e-05, 7.05621631e-05,
2.47479309e-06, 6.86956236e-06],
[1.52941031e-02, 1.89906063e-02, 6.72672133e-03,
3.34863029e-04, 2.45801156e-03, 1.05011361e-03,
6.70996238e-05, 2.68052335e-03]],
[[5.63621989e-02, 7.31708248e-02, 7.51309569e-02,
2.25251946e-02, 5.23307566e-04, 1.98012644e-03,
1.08652148e-03, 7.08494735e-05],
[5.31264334e-01, 6.66281623e-01, 8.03457913e-01,
8.71957394e-01, 8.97097216e-01, 9.00725317e-01,
8.92208794e-01, 8.41808970e-01]]]]])
hamilton_ar2_short_smoothed_joint_probabilities = np.array([
[[[1.29898189e-02, 1.66298475e-04, 1.29822987e-05,
9.95268382e-07, 1.84473346e-07, 7.18761267e-07,
1.69576494e-05, 6.87762063e-03],
[5.09522472e-03, 8.41459714e-05, 2.35672254e-05,
7.55872505e-05, 3.43949612e-06, 2.44153330e-05,
8.28997024e-03, 7.64261509e-02]],
[[5.90021731e-04, 2.55342733e-05, 4.50698224e-05,
5.30734135e-07, 1.80741761e-07, 1.11483792e-05,
5.98539007e-06, 4.35015431e-04],
[6.73107901e-03, 3.42109009e-04, 2.18579464e-03,
1.13987259e-03, 1.03004157e-04, 1.14692946e-02,
8.90976350e-02, 1.48149567e-01]]],
[[[6.34648123e-02, 1.79187451e-02, 2.37462147e-04,
3.55542558e-05, 7.63980455e-05, 2.90520820e-06,
8.17644492e-06, 1.42930726e-03],
[1.98699352e-02, 7.23695477e-03, 3.44076057e-04,
2.15527721e-03, 1.13696383e-03, 7.87695658e-05,
3.19047276e-03, 1.26774695e-02]],
[[8.81925054e-02, 8.33092133e-02, 2.51106301e-02,
5.81007470e-04, 2.19065072e-03, 1.20221350e-03,
7.56893839e-05, 2.76363377e-03],
[8.03066603e-01, 8.90916999e-01, 9.72040418e-01,
9.96011175e-01, 9.96489179e-01, 9.87210535e-01,
8.99315113e-01, 7.51241236e-01]]]])
class TestHamiltonAR2Short(MarkovAutoregression):
# This is just a set of regression tests
@classmethod
def setup_class(cls):
true = {
'params': np.r_[0.754673, 0.095915, -0.358811, 1.163516,
np.exp(-0.262658)**2, 0.013486, -0.057521],
'llf': -10.14066,
'llf_fit': -4.0523073,
'llf_fit_em': -8.885836
}
super().setup_class(
true, rgnp[-10:], k_regimes=2, order=2, switching_ar=False)
def test_fit_em(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
super().test_fit_em()
def test_filter_output(self, **kwargs):
res = self.result
# Filtered
assert_allclose(res.filtered_joint_probabilities,
hamilton_ar2_short_filtered_joint_probabilities)
# Predicted
desired = hamilton_ar2_short_predicted_joint_probabilities
if desired.ndim > res.predicted_joint_probabilities.ndim:
desired = desired.sum(axis=-2)
assert_allclose(res.predicted_joint_probabilities, desired)
def test_smoother_output(self, **kwargs):
res = self.result
# Filtered
assert_allclose(res.filtered_joint_probabilities,
hamilton_ar2_short_filtered_joint_probabilities)
# Predicted
desired = hamilton_ar2_short_predicted_joint_probabilities
if desired.ndim > res.predicted_joint_probabilities.ndim:
desired = desired.sum(axis=-2)
assert_allclose(res.predicted_joint_probabilities, desired)
# Smoothed, entry-by-entry
assert_allclose(
res.smoothed_joint_probabilities[..., -1],
hamilton_ar2_short_smoothed_joint_probabilities[..., -1])
assert_allclose(
res.smoothed_joint_probabilities[..., -2],
hamilton_ar2_short_smoothed_joint_probabilities[..., -2])
assert_allclose(
res.smoothed_joint_probabilities[..., -3],
hamilton_ar2_short_smoothed_joint_probabilities[..., -3])
assert_allclose(
res.smoothed_joint_probabilities[..., :-3],
hamilton_ar2_short_smoothed_joint_probabilities[..., :-3])
hamilton_ar4_filtered = [
0.776712, 0.949192, 0.996320, 0.990258, 0.940111, 0.537442,
0.140001, 0.008942, 0.048480, 0.614097, 0.910889, 0.995463,
0.979465, 0.992324, 0.984561, 0.751038, 0.776268, 0.522048,
0.814956, 0.821786, 0.472729, 0.673567, 0.029031, 0.001556,
0.433276, 0.985463, 0.995025, 0.966067, 0.998445, 0.801467,
0.960997, 0.996431, 0.461365, 0.199357, 0.027398, 0.703626,
0.946388, 0.985321, 0.998244, 0.989567, 0.984510, 0.986811,
0.793788, 0.973675, 0.984848, 0.990418, 0.918427, 0.998769,
0.977647, 0.978742, 0.927635, 0.998691, 0.988934, 0.991654,
0.999288, 0.999073, 0.918636, 0.987710, 0.966876, 0.910015,
0.826150, 0.969451, 0.844049, 0.941525, 0.993363, 0.949978,
0.615206, 0.970915, 0.787585, 0.707818, 0.200476, 0.050835,
0.140723, 0.809850, 0.086422, 0.990344, 0.785963, 0.817425,
0.659152, 0.996578, 0.992860, 0.948501, 0.996883, 0.999712,
0.906694, 0.725013, 0.963690, 0.386960, 0.241302, 0.009078,
0.015789, 0.000896, 0.541530, 0.928686, 0.953704, 0.992741,
0.935877, 0.918958, 0.977316, 0.987941, 0.987300, 0.996769,
0.645469, 0.921285, 0.999917, 0.949335, 0.968914, 0.886025,
0.777141, 0.904381, 0.368277, 0.607429, 0.002491, 0.227610,
0.871284, 0.987717, 0.288705, 0.512124, 0.030329, 0.005177,
0.256183, 0.020955, 0.051620, 0.549009, 0.991715, 0.987892,
0.995377, 0.999833, 0.993756, 0.956164, 0.927714]
hamilton_ar4_smoothed = [
0.968096, 0.991071, 0.998559, 0.958534, 0.540652, 0.072784,
0.010999, 0.006228, 0.172144, 0.898574, 0.989054, 0.998293,
0.986434, 0.993248, 0.976868, 0.858521, 0.847452, 0.675670,
0.596294, 0.165407, 0.035270, 0.127967, 0.007414, 0.004944,
0.815829, 0.998128, 0.998091, 0.993227, 0.999283, 0.921100,
0.977171, 0.971757, 0.124680, 0.063710, 0.114570, 0.954701,
0.994852, 0.997302, 0.999345, 0.995817, 0.996218, 0.994580,
0.933990, 0.996054, 0.998151, 0.996976, 0.971489, 0.999786,
0.997362, 0.996755, 0.993053, 0.999947, 0.998469, 0.997987,
0.999830, 0.999360, 0.953176, 0.992673, 0.975235, 0.938121,
0.946784, 0.986897, 0.905792, 0.969755, 0.995379, 0.914480,
0.772814, 0.931385, 0.541742, 0.394596, 0.063428, 0.027829,
0.124527, 0.286105, 0.069362, 0.995950, 0.961153, 0.962449,
0.945022, 0.999855, 0.998943, 0.980041, 0.999028, 0.999838,
0.863305, 0.607421, 0.575983, 0.013300, 0.007562, 0.000635,
0.001806, 0.002196, 0.803550, 0.972056, 0.984503, 0.998059,
0.985211, 0.988486, 0.994452, 0.994498, 0.998873, 0.999192,
0.870482, 0.976282, 0.999961, 0.984283, 0.973045, 0.786176,
0.403673, 0.275418, 0.115199, 0.257560, 0.004735, 0.493936,
0.907360, 0.873199, 0.052959, 0.076008, 0.001653, 0.000847,
0.062027, 0.021257, 0.219547, 0.955654, 0.999851, 0.997685,
0.998324, 0.999939, 0.996858, 0.969209, 0.927714]
class TestHamiltonAR4(MarkovAutoregression):
@classmethod
def setup_class(cls):
# Results from E-views:
# Dependent variable followed by a list of switching regressors:
# rgnp c
# List of non-switching regressors:
# ar(1) ar(2) ar(3) ar(4)
# Do not check "Regime specific error variances"
# Switching type: Markov
# Number of Regimes: 2
# Probability regressors:
# c
# Method SWITCHREG
# Sample 1951q1 1984q4
true = {
'params': np.r_[0.754673, 0.095915, -0.358811, 1.163516,
np.exp(-0.262658)**2, 0.013486, -0.057521,
-0.246983, -0.212923],
'llf': -181.26339,
'llf_fit': -181.26339,
'llf_fit_em': -183.85444,
'bse_oim': np.r_[.0965189, .0377362, .2645396, .0745187, np.nan,
.1199942, .137663, .1069103, .1105311, ]
}
super().setup_class(
true, rgnp, k_regimes=2, order=4, switching_ar=False)
def test_filtered_regimes(self):
res = self.result
assert_equal(len(res.filtered_marginal_probabilities[:, 1]),
self.model.nobs)
assert_allclose(res.filtered_marginal_probabilities[:, 1],
hamilton_ar4_filtered, atol=1e-5)
def test_smoothed_regimes(self):
res = self.result
assert_equal(len(res.smoothed_marginal_probabilities[:, 1]),
self.model.nobs)
assert_allclose(res.smoothed_marginal_probabilities[:, 1],
hamilton_ar4_smoothed, atol=1e-5)
def test_bse(self):
# Cannot compare middle element of bse because we estimate sigma^2
# rather than sigma
bse = self.result.cov_params_approx.diagonal()**0.5
assert_allclose(bse[:4], self.true['bse_oim'][:4], atol=1e-6)
assert_allclose(bse[6:], self.true['bse_oim'][6:], atol=1e-6)
class TestHamiltonAR2Switch(MarkovAutoregression):
# Results from Stata, see http://www.stata.com/manuals14/tsmswitch.pdf
@classmethod
def setup_class(cls):
path = os.path.join(current_path, 'results',
'results_predict_rgnp.csv')
results = pd.read_csv(path)
true = {
'params': np.r_[.3812383, .3564492, -.0055216, 1.195482,
.6677098**2, .3710719, .4621503, .7002937,
-.3206652],
'llf': -179.32354,
'llf_fit': -179.38684,
'llf_fit_em': -184.99606,
'bse_oim': np.r_[.1424841, .0994742, .2057086, .1225987, np.nan,
.1754383, .1652473, .187409, .1295937],
'smoothed0': results.iloc[3:]['switchar2_sm1'],
'smoothed1': results.iloc[3:]['switchar2_sm2'],
'predict0': results.iloc[3:]['switchar2_yhat1'],
'predict1': results.iloc[3:]['switchar2_yhat2'],
'predict_predicted': results.iloc[3:]['switchar2_pyhat'],
'predict_filtered': results.iloc[3:]['switchar2_fyhat'],
'predict_smoothed': results.iloc[3:]['switchar2_syhat'],
}
super().setup_class(
true, rgnp, k_regimes=2, order=2)
def test_smoothed_marginal_probabilities(self):
assert_allclose(self.result.smoothed_marginal_probabilities[:, 0],
self.true['smoothed0'], atol=1e-6)
assert_allclose(self.result.smoothed_marginal_probabilities[:, 1],
self.true['smoothed1'], atol=1e-6)
def test_predict(self):
# Smoothed
actual = self.model.predict(
self.true['params'], probabilities='smoothed')
assert_allclose(actual, self.true['predict_smoothed'], atol=1e-6)
actual = self.model.predict(
self.true['params'], probabilities=None)
assert_allclose(actual, self.true['predict_smoothed'], atol=1e-6)
actual = self.result.predict(probabilities='smoothed')
assert_allclose(actual, self.true['predict_smoothed'], atol=1e-6)
actual = self.result.predict(probabilities=None)
assert_allclose(actual, self.true['predict_smoothed'], atol=1e-6)
def test_bse(self):
# Cannot compare middle element of bse because we estimate sigma^2
# rather than sigma
bse = self.result.cov_params_approx.diagonal()**0.5
assert_allclose(bse[:4], self.true['bse_oim'][:4], atol=1e-7)
assert_allclose(bse[6:], self.true['bse_oim'][6:], atol=1e-7)
hamilton_ar1_switch_filtered = [
0.840288, 0.730337, 0.900234, 0.596492, 0.921618, 0.983828,
0.959039, 0.898366, 0.477335, 0.251089, 0.049367, 0.386782,
0.942868, 0.965632, 0.982857, 0.897603, 0.946986, 0.916413,
0.640912, 0.849296, 0.778371, 0.954420, 0.929906, 0.723930,
0.891196, 0.061163, 0.004806, 0.977369, 0.997871, 0.977950,
0.896580, 0.963246, 0.430539, 0.906586, 0.974589, 0.514506,
0.683457, 0.276571, 0.956475, 0.966993, 0.971618, 0.987019,
0.916670, 0.921652, 0.930265, 0.655554, 0.965858, 0.964981,
0.976790, 0.868267, 0.983240, 0.852052, 0.919150, 0.854467,
0.987868, 0.935840, 0.958138, 0.979535, 0.956541, 0.716322,
0.919035, 0.866437, 0.899609, 0.914667, 0.976448, 0.867252,
0.953075, 0.977850, 0.884242, 0.688299, 0.968461, 0.737517,
0.870674, 0.559413, 0.380339, 0.582813, 0.941311, 0.240020,
0.999349, 0.619258, 0.828343, 0.729726, 0.991009, 0.966291,
0.899148, 0.970798, 0.977684, 0.695877, 0.637555, 0.915824,
0.434600, 0.771277, 0.113756, 0.144002, 0.008466, 0.994860,
0.993173, 0.961722, 0.978555, 0.789225, 0.836283, 0.940383,
0.968368, 0.974473, 0.980248, 0.518125, 0.904086, 0.993023,
0.802936, 0.920906, 0.685445, 0.666524, 0.923285, 0.643861,
0.938184, 0.008862, 0.945406, 0.990061, 0.991500, 0.486669,
0.805039, 0.089036, 0.025067, 0.863309, 0.352784, 0.733295,
0.928710, 0.984257, 0.926597, 0.959887, 0.984051, 0.872682,
0.824375, 0.780157]
hamilton_ar1_switch_smoothed = [
0.900074, 0.758232, 0.914068, 0.637248, 0.901951, 0.979905,
0.958935, 0.888641, 0.261602, 0.148761, 0.056919, 0.424396,
0.932184, 0.954962, 0.983958, 0.895595, 0.949519, 0.923473,
0.678898, 0.848793, 0.807294, 0.958868, 0.942936, 0.809137,
0.960892, 0.032947, 0.007127, 0.967967, 0.996551, 0.979278,
0.896181, 0.987462, 0.498965, 0.908803, 0.986893, 0.488720,
0.640492, 0.325552, 0.951996, 0.959703, 0.960914, 0.986989,
0.916779, 0.924570, 0.935348, 0.677118, 0.960749, 0.958966,
0.976974, 0.838045, 0.986562, 0.847774, 0.908866, 0.821110,
0.984965, 0.915302, 0.938196, 0.976518, 0.973780, 0.744159,
0.922006, 0.873292, 0.904035, 0.917547, 0.978559, 0.870915,
0.948420, 0.979747, 0.884791, 0.711085, 0.973235, 0.726311,
0.828305, 0.446642, 0.411135, 0.639357, 0.973151, 0.141707,
0.999805, 0.618207, 0.783239, 0.672193, 0.987618, 0.964655,
0.877390, 0.962437, 0.989002, 0.692689, 0.699370, 0.937934,
0.522535, 0.824567, 0.058746, 0.146549, 0.009864, 0.994072,
0.992084, 0.956945, 0.984297, 0.795926, 0.845698, 0.935364,
0.963285, 0.972767, 0.992168, 0.528278, 0.826349, 0.996574,
0.811431, 0.930873, 0.680756, 0.721072, 0.937977, 0.731879,
0.996745, 0.016121, 0.951187, 0.989820, 0.996968, 0.592477,
0.889144, 0.036015, 0.040084, 0.858128, 0.418984, 0.746265,
0.907990, 0.980984, 0.900449, 0.934741, 0.986807, 0.872818,
0.812080, 0.780157]
class TestHamiltonAR1Switch(MarkovAutoregression):
@classmethod
def setup_class(cls):
# Results from E-views:
# Dependent variable followed by a list of switching regressors:
# rgnp c ar(1)
# List of non-switching regressors: <blank>
# Do not check "Regime specific error variances"
# Switching type: Markov
# Number of Regimes: 2
# Probability regressors:
# c
# Method SWITCHREG
# Sample 1951q1 1984q4
true = {
'params': np.r_[0.85472458, 0.53662099, 1.041419, -0.479157,
np.exp(-0.231404)**2, 0.243128, 0.713029],
'llf': -186.7575,
'llf_fit': -186.7575,
'llf_fit_em': -189.25446
}
super().setup_class(
true, rgnp, k_regimes=2, order=1)
def test_filtered_regimes(self):
assert_allclose(self.result.filtered_marginal_probabilities[:, 0],
hamilton_ar1_switch_filtered, atol=1e-5)
def test_smoothed_regimes(self):
assert_allclose(self.result.smoothed_marginal_probabilities[:, 0],
hamilton_ar1_switch_smoothed, atol=1e-5)
def test_expected_durations(self):
expected_durations = [6.883477, 1.863513]
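        # Consistency check with the expected-duration formula 1 / (1 - p_ii):
        # reading the first two parameters above as P(S_t=0|S_{t-1}=0) and
        # P(S_t=0|S_{t-1}=1) gives 1 / (1 - 0.85472458) ~= 6.8835 and
        # 1 / 0.53662099 ~= 1.8635, matching the values here.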
assert_allclose(self.result.expected_durations, expected_durations,
atol=1e-5)
hamilton_ar1_switch_tvtp_filtered = [
0.999996, 0.999211, 0.999849, 0.996007, 0.999825, 0.999991,
0.999981, 0.999819, 0.041745, 0.001116, 1.74e-05, 0.000155,
0.999976, 0.999958, 0.999993, 0.999878, 0.999940, 0.999791,
0.996553, 0.999486, 0.998485, 0.999894, 0.999765, 0.997657,
0.999619, 0.002853, 1.09e-05, 0.999884, 0.999996, 0.999997,
0.999919, 0.999987, 0.989762, 0.999807, 0.999978, 0.050734,
0.010660, 0.000217, 0.006174, 0.999977, 0.999954, 0.999995,
0.999934, 0.999867, 0.999824, 0.996783, 0.999941, 0.999948,
0.999981, 0.999658, 0.999994, 0.999753, 0.999859, 0.999330,
0.999993, 0.999956, 0.999970, 0.999996, 0.999991, 0.998674,
0.999869, 0.999432, 0.999570, 0.999600, 0.999954, 0.999499,
0.999906, 0.999978, 0.999712, 0.997441, 0.999948, 0.998379,
0.999578, 0.994745, 0.045936, 0.006816, 0.027384, 0.000278,
1.000000, 0.996382, 0.999541, 0.998130, 0.999992, 0.999990,
0.999860, 0.999986, 0.999997, 0.998520, 0.997777, 0.999821,
0.033353, 0.011629, 6.95e-05, 4.52e-05, 2.04e-06, 0.999963,
0.999977, 0.999949, 0.999986, 0.999240, 0.999373, 0.999858,
0.999946, 0.999972, 0.999991, 0.994039, 0.999817, 0.999999,
0.999715, 0.999924, 0.997763, 0.997944, 0.999825, 0.996592,
0.695147, 0.000161, 0.999665, 0.999928, 0.999988, 0.992742,
0.374214, 0.001569, 2.16e-05, 0.000941, 4.32e-05, 0.000556,
0.999955, 0.999993, 0.999942, 0.999973, 0.999999, 0.999919,
0.999438, 0.998738]
hamilton_ar1_switch_tvtp_smoothed = [
0.999997, 0.999246, 0.999918, 0.996118, 0.999740, 0.999990,
0.999984, 0.999783, 0.035454, 0.000958, 1.53e-05, 0.000139,
0.999973, 0.999939, 0.999994, 0.999870, 0.999948, 0.999884,
0.997243, 0.999668, 0.998424, 0.999909, 0.999860, 0.998037,
0.999559, 0.002533, 1.16e-05, 0.999801, 0.999993, 0.999997,
0.999891, 0.999994, 0.990096, 0.999753, 0.999974, 0.048495,
0.009289, 0.000542, 0.005991, 0.999974, 0.999929, 0.999995,
0.999939, 0.999880, 0.999901, 0.996221, 0.999937, 0.999935,
0.999985, 0.999450, 0.999995, 0.999768, 0.999897, 0.998930,
0.999992, 0.999949, 0.999954, 0.999995, 0.999994, 0.998687,
0.999902, 0.999547, 0.999653, 0.999538, 0.999966, 0.999485,
0.999883, 0.999982, 0.999831, 0.996940, 0.999968, 0.998678,
0.999780, 0.993895, 0.055372, 0.020421, 0.022913, 0.000127,
1.000000, 0.997072, 0.999715, 0.996893, 0.999990, 0.999991,
0.999811, 0.999978, 0.999998, 0.999100, 0.997866, 0.999787,
0.034912, 0.009932, 5.91e-05, 3.99e-05, 1.77e-06, 0.999954,
0.999976, 0.999932, 0.999991, 0.999429, 0.999393, 0.999845,
0.999936, 0.999961, 0.999995, 0.994246, 0.999570, 1.000000,
0.999702, 0.999955, 0.998611, 0.998019, 0.999902, 0.998486,
0.673991, 0.000205, 0.999627, 0.999902, 0.999994, 0.993707,
0.338707, 0.001359, 2.36e-05, 0.000792, 4.47e-05, 0.000565,
0.999932, 0.999993, 0.999931, 0.999950, 0.999999, 0.999940,
0.999626, 0.998738]
expected_durations = [
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [1.223309, 1864.084],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [1.223309, 1864.084], [1.223309, 1864.084],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [1.223309, 1864.084],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [1.223309, 1864.084],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[1.223309, 1864.084], [1.223309, 1864.084], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[1.223309, 1864.084], [1.223309, 1864.084], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[1.223309, 1864.084], [1.223309, 1864.084], [1.223309, 1864.084],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391], [710.7573, 1.000391],
[710.7573, 1.000391], [710.7573, 1.000391]]
class TestHamiltonAR1SwitchTVTP(MarkovAutoregression):
@classmethod
def setup_class(cls):
# Results from E-views:
# Dependent variable followed by a list of switching regressors:
# rgnp c ar(1)
# List of non-switching regressors: <blank>
# Do not check "Regime specific error variances"
# Switching type: Markov
# Number of Regimes: 2
# Probability regressors:
# c recession
# Method SWITCHREG
# Sample 1951q1 1984q4
true = {
'params': np.r_[6.564923, 7.846371, -8.064123, -15.37636,
1.027190, -0.719760,
np.exp(-0.217003)**2, 0.161489, 0.022536],
'llf': -163.914049,
'llf_fit': -161.786477,
'llf_fit_em': -163.914049
}
exog_tvtp = np.c_[np.ones(len(rgnp)), rec]
super().setup_class(
true, rgnp, k_regimes=2, order=1, exog_tvtp=exog_tvtp)
@pytest.mark.skip # TODO(ChadFulton): give reason for skip
def test_fit_em(self):
pass
def test_filtered_regimes(self):
assert_allclose(self.result.filtered_marginal_probabilities[:, 0],
hamilton_ar1_switch_tvtp_filtered, atol=1e-5)
def test_smoothed_regimes(self):
assert_allclose(self.result.smoothed_marginal_probabilities[:, 0],
hamilton_ar1_switch_tvtp_smoothed, atol=1e-5)
def test_expected_durations(self):
assert_allclose(self.result.expected_durations, expected_durations,
rtol=1e-5, atol=1e-7)
class TestFilardo(MarkovAutoregression):
@classmethod
def setup_class(cls):
path = os.path.join(current_path, 'results', 'mar_filardo.csv')
cls.mar_filardo = pd.read_csv(path)
true = {
'params': np.r_[4.35941747, -1.6493936, 1.7702123, 0.9945672,
0.517298, -0.865888,
np.exp(-0.362469)**2,
0.189474, 0.079344, 0.110944, 0.122251],
'llf': -586.5718,
'llf_fit': -586.5718,
'llf_fit_em': -586.5718
}
endog = cls.mar_filardo['dlip'].iloc[1:].values
exog_tvtp = add_constant(
cls.mar_filardo['dmdlleading'].iloc[:-1].values)
super().setup_class(
true, endog, k_regimes=2, order=4, switching_ar=False,
exog_tvtp=exog_tvtp)
@pytest.mark.skip # TODO(ChadFulton): give reason for skip
def test_fit(self, **kwargs):
pass
@pytest.mark.skip # TODO(ChadFulton): give reason for skip
def test_fit_em(self):
pass
def test_filtered_regimes(self):
assert_allclose(self.result.filtered_marginal_probabilities[:, 0],
self.mar_filardo['filtered_0'].iloc[5:], atol=1e-5)
def test_smoothed_regimes(self):
assert_allclose(self.result.smoothed_marginal_probabilities[:, 0],
self.mar_filardo['smoothed_0'].iloc[5:], atol=1e-5)
def test_expected_durations(self):
assert_allclose(self.result.expected_durations,
self.mar_filardo[['duration0', 'duration1']].iloc[5:],
rtol=1e-5, atol=1e-7)
class TestFilardoPandas(MarkovAutoregression):
@classmethod
def setup_class(cls):
path = os.path.join(current_path, 'results', 'mar_filardo.csv')
cls.mar_filardo = pd.read_csv(path)
cls.mar_filardo.index = pd.date_range('1948-02-01', '1991-04-01',
freq='MS')
true = {
'params': np.r_[4.35941747, -1.6493936, 1.7702123, 0.9945672,
0.517298, -0.865888,
np.exp(-0.362469)**2,
0.189474, 0.079344, 0.110944, 0.122251],
'llf': -586.5718,
'llf_fit': -586.5718,
'llf_fit_em': -586.5718
}
endog = cls.mar_filardo['dlip'].iloc[1:]
exog_tvtp = add_constant(
cls.mar_filardo['dmdlleading'].iloc[:-1])
super().setup_class(
true, endog, k_regimes=2, order=4, switching_ar=False,
exog_tvtp=exog_tvtp)
@pytest.mark.skip # TODO(ChadFulton): give reason for skip
def test_fit(self, **kwargs):
pass
@pytest.mark.skip # TODO(ChadFulton): give reason for skip
def test_fit_em(self):
pass
def test_filtered_regimes(self):
assert_allclose(self.result.filtered_marginal_probabilities[0],
self.mar_filardo['filtered_0'].iloc[5:], atol=1e-5)
def test_smoothed_regimes(self):
assert_allclose(self.result.smoothed_marginal_probabilities[0],
self.mar_filardo['smoothed_0'].iloc[5:], atol=1e-5)
def test_expected_durations(self):
assert_allclose(self.result.expected_durations,
self.mar_filardo[['duration0', 'duration1']].iloc[5:],
rtol=1e-5, atol=1e-7)

View File

@ -0,0 +1,317 @@
"""
General tests for Markov switching models
Author: Chad Fulton
License: BSD-3
"""
import numpy as np
from numpy.testing import assert_equal, assert_allclose, assert_raises
import pandas as pd
from statsmodels.tools.numdiff import approx_fprime_cs
from statsmodels.tsa.regime_switching import markov_switching
def test_params():
    def check_transition_2(params):
assert_equal(params['regime_transition'], np.s_[0:2])
assert_equal(params[0, 'regime_transition'], [0])
assert_equal(params[1, 'regime_transition'], [1])
assert_equal(params['regime_transition', 0], [0])
assert_equal(params['regime_transition', 1], [1])
def check_transition_3(params):
assert_equal(params['regime_transition'], np.s_[0:6])
assert_equal(params[0, 'regime_transition'], [0, 3])
assert_equal(params[1, 'regime_transition'], [1, 4])
assert_equal(params[2, 'regime_transition'], [2, 5])
assert_equal(params['regime_transition', 0], [0, 3])
assert_equal(params['regime_transition', 1], [1, 4])
assert_equal(params['regime_transition', 2], [2, 5])
params = markov_switching.MarkovSwitchingParams(k_regimes=2)
params['regime_transition'] = [1]
assert_equal(params.k_params, 1 * 2)
assert_equal(params[0], [0])
assert_equal(params[1], [1])
    check_transition_2(params)
params['exog'] = [0, 1]
assert_equal(params.k_params, 1 * 2 + 1 + 1 * 2)
assert_equal(params[0], [0, 2, 3])
assert_equal(params[1], [1, 2, 4])
    check_transition_2(params)
assert_equal(params['exog'], np.s_[2:5])
assert_equal(params[0, 'exog'], [2, 3])
assert_equal(params[1, 'exog'], [2, 4])
assert_equal(params['exog', 0], [2, 3])
assert_equal(params['exog', 1], [2, 4])
params = markov_switching.MarkovSwitchingParams(k_regimes=3)
params['regime_transition'] = [1, 1]
assert_equal(params.k_params, 2 * 3)
assert_equal(params[0], [0, 3])
assert_equal(params[1], [1, 4])
assert_equal(params[2], [2, 5])
check_transition_3(params)
# Test for invalid parameter setting
assert_raises(IndexError, params.__setitem__, None, [1, 1])
# Test for invalid parameter selection
assert_raises(IndexError, params.__getitem__, None)
assert_raises(IndexError, params.__getitem__, (0, 0))
assert_raises(IndexError, params.__getitem__, ('exog', 'exog'))
assert_raises(IndexError, params.__getitem__, ('exog', 0, 1))
def test_init_endog():
index = pd.date_range(start='1950-01-01', periods=10, freq='D')
endog = [
np.ones(10), pd.Series(np.ones(10), index=index), np.ones((10, 1)),
pd.DataFrame(np.ones((10, 1)), index=index)
]
for _endog in endog:
mod = markov_switching.MarkovSwitching(_endog, k_regimes=2)
assert_equal(mod.nobs, 10)
assert_equal(mod.endog, _endog.squeeze())
assert_equal(mod.k_regimes, 2)
assert_equal(mod.tvtp, False)
assert_equal(mod.k_tvtp, 0)
assert_equal(mod.k_params, 2)
# Invalid: k_regimes < 2
endog = np.ones(10)
assert_raises(ValueError, markov_switching.MarkovSwitching, endog,
k_regimes=1)
# Invalid: multiple endog columns
endog = np.ones((10, 2))
assert_raises(ValueError, markov_switching.MarkovSwitching, endog,
k_regimes=2)
def test_init_exog_tvtp():
endog = np.ones(10)
exog_tvtp = np.c_[np.ones((10, 1)), (np.arange(10) + 1)[:, np.newaxis]]
mod = markov_switching.MarkovSwitching(endog, k_regimes=2,
exog_tvtp=exog_tvtp)
assert_equal(mod.tvtp, True)
assert_equal(mod.k_tvtp, 2)
# Invalid exog_tvtp (too many obs)
exog_tvtp = np.c_[np.ones((11, 1)), (np.arange(11) + 1)[:, np.newaxis]]
assert_raises(ValueError, markov_switching.MarkovSwitching, endog,
k_regimes=2, exog_tvtp=exog_tvtp)
def test_transition_matrix():
# k_regimes = 2
endog = np.ones(10)
mod = markov_switching.MarkovSwitching(endog, k_regimes=2)
params = np.r_[0., 0., 1.]
transition_matrix = np.zeros((2, 2, 1))
transition_matrix[1, :] = 1.
assert_allclose(mod.regime_transition_matrix(params), transition_matrix)
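    # With both transition parameters zero, the probability of moving into
    # regime 0 is zero from either regime, so regime 1 (the remainder) gets
    # probability one; element [i, j] is read as P(S_t = i | S_{t-1} = j).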
# k_regimes = 3
endog = np.ones(10)
mod = markov_switching.MarkovSwitching(endog, k_regimes=3)
params = np.r_[[0]*3, [0.2]*3, 1.]
transition_matrix = np.zeros((3, 3, 1))
transition_matrix[1, :, 0] = 0.2
transition_matrix[2, :, 0] = 0.8
assert_allclose(mod.regime_transition_matrix(params), transition_matrix)
# k_regimes = 2, tvtp
endog = np.ones(10)
exog_tvtp = np.c_[np.ones((10, 1)), (np.arange(10) + 1)[:, np.newaxis]]
mod = markov_switching.MarkovSwitching(endog, k_regimes=2,
exog_tvtp=exog_tvtp)
# If all TVTP regression coefficients are zero, then the logit transform
# results in exp(0) / (1 + exp(0)) = 0.5 for all parameters; since it's
# k_regimes=2 the remainder calculation is also 0.5.
params = np.r_[0, 0, 0, 0]
assert_allclose(mod.regime_transition_matrix(params), 0.5)
# Manually compute the TVTP coefficients
params = np.r_[1, 2, 1, 2]
transition_matrix = np.zeros((2, 2, 10))
coeffs0 = np.sum(exog_tvtp, axis=1)
p11 = np.exp(coeffs0) / (1 + np.exp(coeffs0))
transition_matrix[0, 0, :] = p11
transition_matrix[1, 0, :] = 1 - p11
coeffs1 = np.sum(2 * exog_tvtp, axis=1)
p21 = np.exp(coeffs1) / (1 + np.exp(coeffs1))
transition_matrix[0, 1, :] = p21
transition_matrix[1, 1, :] = 1 - p21
assert_allclose(mod.regime_transition_matrix(params), transition_matrix,
atol=1e-10)
# k_regimes = 3, tvtp
endog = np.ones(10)
exog_tvtp = np.c_[np.ones((10, 1)), (np.arange(10) + 1)[:, np.newaxis]]
mod = markov_switching.MarkovSwitching(
endog, k_regimes=3, exog_tvtp=exog_tvtp)
# If all TVTP regression coefficients are zero, then the logit transform
# results in exp(0) / (1 + exp(0) + exp(0)) = 1/3 for all parameters;
# since it's k_regimes=3 the remainder calculation is also 1/3.
params = np.r_[[0]*12]
assert_allclose(mod.regime_transition_matrix(params), 1 / 3)
# Manually compute the TVTP coefficients for the first column
params = np.r_[[0]*6, [2]*6]
transition_matrix = np.zeros((3, 3, 10))
p11 = np.zeros(10)
p12 = 2 * np.sum(exog_tvtp, axis=1)
tmp = np.exp(np.c_[p11, p12]).T
transition_matrix[:2, 0, :] = tmp / (1 + np.sum(tmp, axis=0))
transition_matrix[2, 0, :] = (
1 - np.sum(transition_matrix[:2, 0, :], axis=0))
assert_allclose(mod.regime_transition_matrix(params)[:, 0, :],
transition_matrix[:, 0, :], atol=1e-10)
def test_initial_probabilities():
endog = np.ones(10)
mod = markov_switching.MarkovSwitching(endog, k_regimes=2)
params = np.r_[0.5, 0.5, 1.]
# Valid known initial probabilities
mod.initialize_known([0.2, 0.8])
assert_allclose(mod.initial_probabilities(params), [0.2, 0.8])
# Invalid known initial probabilities (too many elements)
assert_raises(ValueError, mod.initialize_known, [0.2, 0.2, 0.6])
# Invalid known initial probabilities (does not sum to 1)
assert_raises(ValueError, mod.initialize_known, [0.2, 0.2])
# Valid steady-state probabilities
mod.initialize_steady_state()
assert_allclose(mod.initial_probabilities(params), [0.5, 0.5])
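    # Sanity check: the steady state solves pi = P pi with sum(pi) = 1; for
    # the transition matrix implied by these parameters (all entries 0.5)
    # that is simply [0.5, 0.5].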
# Invalid steady-state probabilities (when mod has tvtp)
endog = np.ones(10)
mod = markov_switching.MarkovSwitching(endog, k_regimes=2, exog_tvtp=endog)
assert_raises(ValueError, mod.initialize_steady_state)
def test_logistic():
logistic = markov_switching._logistic
# For a number, logistic(x) = np.exp(x) / (1 + np.exp(x))
cases = [0, 10., -4]
for x in cases:
        # Have to use allclose because logistic() actually uses logsumexp, so
        # the two expressions are not exactly equal
assert_allclose(logistic(x), np.exp(x) / (1 + np.exp(x)))
# For a vector, logistic(x) returns
# np.exp(x[i]) / (1 + np.sum(np.exp(x[:]))) for each i
# but squeezed
cases = [[1.], [0, 1.], [-2, 3., 1.2, -30.]]
for x in cases:
actual = logistic(x)
desired = [np.exp(i) / (1 + np.sum(np.exp(x))) for i in x]
assert_allclose(actual, desired)
# For a 2-dim, logistic(x) returns
# np.exp(x[i, t]) / (1 + np.sum(np.exp(x[:, t]))) for each i, each t
# but squeezed
case = [[1.]]
actual = logistic(case)
assert_equal(actual.shape, (1, 1))
assert_allclose(actual, np.exp(1) / (1 + np.exp(1)))
# Here, np.array(case) is 2x1, so it is interpreted as i=0, 1 and t=0
case = [[0], [1.]]
actual = logistic(case)
desired = [np.exp(i) / (1 + np.sum(np.exp(case))) for i in case]
assert_allclose(actual, desired)
# Here, np.array(case) is 1x2, so it is interpreted as i=0 and t=0, 1
case = [[0, 1.]]
actual = logistic(case)
desired = np.exp(case) / (1 + np.exp(case))
assert_allclose(actual, desired)
# For a 3-dim, logistic(x) returns
# np.exp(x[i, j, t]) / (1 + np.sum(np.exp(x[:, j, t])))
# for each i, each j, each t
case = np.arange(2*3*4).reshape(2, 3, 4)
actual = logistic(case)
for j in range(3):
assert_allclose(actual[:, j, :], logistic(case[:, j, :]))
def test_partials_logistic():
# Here we compare to analytic derivatives and to finite-difference
# approximations
logistic = markov_switching._logistic
partials_logistic = markov_switching._partials_logistic
# For a number, logistic(x) = np.exp(x) / (1 + np.exp(x))
    # Then d/dx logistic(x) = logistic(x) - logistic(x)**2
cases = [0, 10., -4]
for x in cases:
assert_allclose(partials_logistic(x), logistic(x) - logistic(x)**2)
assert_allclose(partials_logistic(x), approx_fprime_cs([x], logistic))
# For a vector, logistic(x) returns
# np.exp(x[i]) / (1 + np.sum(np.exp(x[:]))) for each i
    # Then d logistic(x[i]) / dx[i] = (logistic(x) - logistic(x)**2)[i]
    # And d logistic(x[i]) / dx[j] = -(logistic(x[i]) * logistic(x[j]))
cases = [[1.], [0, 1.], [-2, 3., 1.2, -30.]]
for x in cases:
evaluated = np.atleast_1d(logistic(x))
partials = np.diag(evaluated - evaluated**2)
for i in range(len(x)):
for j in range(i):
partials[i, j] = partials[j, i] = -evaluated[i] * evaluated[j]
assert_allclose(partials_logistic(x), partials)
assert_allclose(partials_logistic(x), approx_fprime_cs(x, logistic))
# For a 2-dim, logistic(x) returns
# np.exp(x[i, t]) / (1 + np.sum(np.exp(x[:, t]))) for each i, each t
# but squeezed
case = [[1.]]
evaluated = logistic(case)
partial = [evaluated - evaluated**2]
assert_allclose(partials_logistic(case), partial)
assert_allclose(partials_logistic(case), approx_fprime_cs(case, logistic))
    # Here, np.array(case) is 2x1, so it is interpreted as i=0, 1 and t=0
case = [[0], [1.]]
evaluated = logistic(case)[:, 0]
partials = np.diag(evaluated - evaluated**2)
partials[0, 1] = partials[1, 0] = -np.multiply(*evaluated)
assert_allclose(partials_logistic(case)[:, :, 0], partials)
assert_allclose(partials_logistic(case),
approx_fprime_cs(np.squeeze(case), logistic)[..., None])
# Here, np.array(case) is 1x2, so it is interpreted as i=0 and t=0, 1
case = [[0, 1.]]
evaluated = logistic(case)
partials = (evaluated - evaluated**2)[None, ...]
assert_allclose(partials_logistic(case), partials)
assert_allclose(partials_logistic(case),
approx_fprime_cs(case, logistic).T)
# For a 3-dim, logistic(x) returns
# np.exp(x[i, j, t]) / (1 + np.sum(np.exp(x[:, j, t])))
# for each i, each j, each t
case = np.arange(2*3*4).reshape(2, 3, 4)
evaluated = logistic(case)
partials = partials_logistic(case)
for t in range(4):
for j in range(3):
desired = np.diag(evaluated[:, j, t] - evaluated[:, j, t]**2)
desired[0, 1] = desired[1, 0] = -np.multiply(*evaluated[:, j, t])
assert_allclose(partials[..., j, t], desired)