# [non-code residue, kept as a comment] Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
pr = np.r_[pr, 1 - pr.sum()]
ax.plot(self.model.endog_values, pr, 'o-')
ax.set_xlabel("Response value")
ax.set_ylabel("Probability")
ax.set_xticks(self.model.endog_values)
ax.set_xticklabels(self.model.endog_values)
ax.set_ylim(0, 1)
return fig
class NominalGEEResultsWrapper(GEEResultsWrapper):
    """Results wrapper for nominal GEE models.

    Adds nothing beyond ``GEEResultsWrapper``; exists so that
    ``populate_wrapper`` can register a distinct wrapper type for
    ``NominalGEEResults``.
    """
wrap.populate_wrapper(NominalGEEResultsWrapper, NominalGEEResults)  # noqa:E305
class _MultinomialLogit(Link):
"""
The multinomial logit transform, only for use with GEE.
Notes
-----
The data are assumed coded as binary indicators, where each
observed multinomial value y is coded as I(y == S[0]), ..., I(y ==
S[-1]), where S is the set of possible response labels, excluding
the largest one. Thererefore functions in this class should only
be called using vector argument whose length is a multiple of |S|
= ncut, which is an argument to be provided when initializing the
class.
stubs=stubs)
smry.tables.append(roots_table)
return smry
class AutoRegResultsWrapper(wrap.ResultsWrapper):
    """Results wrapper for AutoReg models.

    Merges the time-series wrapper's attribute/method maps with this
    class's (empty) extensions.
    """
    _attrs = {}
    _methods = {}
    _wrap_attrs = wrap.union_dicts(
        tsa_model.TimeSeriesResultsWrapper._wrap_attrs, _attrs)
    _wrap_methods = wrap.union_dicts(
        tsa_model.TimeSeriesResultsWrapper._wrap_methods, _methods)
wrap.populate_wrapper(AutoRegResultsWrapper, AutoRegResults)
# Reuse AutoReg's docstring to build a shared parameter description that is
# injected into ar_select_order via @Substitution, keeping the two docstrings
# in sync automatically.
doc = Docstring(AutoReg.__doc__)
# Extract only the listed parameter entries; the trailing 4 presumably sets
# the re-indentation level of the extracted text — confirm against
# Docstring.extract_parameters.
_auto_reg_params = doc.extract_parameters(['trend', 'seasonal', 'exog',
                                           'hold_back', 'period', 'missing'],
                                          4)
@Substitution(auto_reg_params=_auto_reg_params)
def ar_select_order(endog, maxlag, ic='bic', glob=False, trend='c',
seasonal=False, exog=None, hold_back=None, period=None,
missing='none'):
"""
Autoregressive AR-X(p) model order selection.
Parameters
----------
@cache_readonly
def gcv(self):
    """Generalized cross-validation criterion.

    Computed as ``scale / (1 - tr(H)/nobs)**2`` where ``tr(H)`` is the
    hat-matrix trace of the fitted smoother.
    """
    shrink = 1. - self.hat_matrix_trace / self.nobs
    return self.scale / shrink ** 2
@cache_readonly
def cv(self):
    """Leave-one-out cross-validation score.

    Averages the squared leverage-adjusted Pearson residuals,
    ``(resid_pearson / (1 - h_ii))**2``, over the ``nobs`` observations.
    """
    loo_resid = self.resid_pearson / (1. - self.hat_matrix_diag)
    return (loo_resid ** 2).sum() / self.nobs
class GLMGamResultsWrapper(GLMResultsWrapper):
    """Results wrapper for GLMGam models.

    No additions over ``GLMResultsWrapper``; provides a distinct wrapper
    type registered for ``GLMGamResults``.
    """
wrap.populate_wrapper(GLMGamResultsWrapper, GLMGamResults)
class GLMGam(PenalizedMixin, GLM):
"""
Generalized Additive Models (GAM)
This inherits from `GLM`.
Warning: Not all inherited methods might take correctly account of the
penalization. Not all options including offset and exposure have been
verified yet.
Parameters
----------
endog : array_like
The response variable.
filter_results : HamiltonFilterResults or KimSmootherResults instance
The underlying filter and, optionally, smoother output
nobs : float
The number of observations used to fit the model.
params : array
The parameters of the model.
scale : float
This is currently set to 1.0 and not used by the model or its results.
"""
pass
class MarkovAutoregressionResultsWrapper(
        markov_regression.MarkovRegressionResultsWrapper):
    """Results wrapper for Markov-switching autoregression models.

    Inherits everything from the Markov-regression wrapper; registered
    below for ``MarkovAutoregressionResults``.
    """
wrap.populate_wrapper(MarkovAutoregressionResultsWrapper,  # noqa:E305
                      MarkovAutoregressionResults)
for i, r in enumerate(y):
if r != 0:
grad[:, r - 1] += self.exog[ii[i], :]
grad -= denomg / denom
return grad.flatten()
class ConditionalResultsWrapper(lm.RegressionResultsWrapper):
    """Results wrapper for conditional (fixed-effects) models.

    Adds nothing beyond the regression wrapper; registered below for
    ``ConditionalResults``.
    """
wrap.populate_wrapper(ConditionalResultsWrapper, ConditionalResults)
param_header,
params_stubs,
txt_fmt=fmt_params)
summary.tables.insert(-1, initial_state_table)
return summary
class ExponentialSmoothingResultsWrapper(MLEResultsWrapper):
    """Results wrapper for exponential smoothing state-space models.

    Merges the MLE wrapper's attribute/method maps with this class's
    (empty) extensions.
    """
    _attrs = {}
    _methods = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs, _attrs)
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(ExponentialSmoothingResultsWrapper,  # noqa:E305
                      ExponentialSmoothingResults)
ax.legend(loc='best')
return fig
class ARMAResultsWrapper(wrap.ResultsWrapper):
    """Results wrapper for ARMA models.

    Merges the time-series wrapper's attribute/method maps with this
    class's (empty) extensions.
    """
    _attrs = {}
    _methods = {}
    _wrap_attrs = wrap.union_dicts(
        tsa_model.TimeSeriesResultsWrapper._wrap_attrs, _attrs)
    _wrap_methods = wrap.union_dicts(
        tsa_model.TimeSeriesResultsWrapper._wrap_methods, _methods)
wrap.populate_wrapper(ARMAResultsWrapper, ARMAResults)  # noqa:E305
class ARIMAResults(ARMAResults):
    # Results for ARIMA models; extends ARMAResults with handling of the
    # integration order (self.k_diff) in forecast-error computation.

    @Appender(_arima_results_predict)
    def predict(self, start=None, end=None, exog=None, typ='linear',
                dynamic=False):
        # Thin delegation to the model's predict; `typ` presumably selects
        # between predictions in differences vs. levels — semantics live in
        # the model's predict, not visible here.
        return self.model.predict(self.params, start, end, exog, typ, dynamic)

    def _forecast_error(self, steps):
        """Standard errors of the 1..`steps`-ahead forecasts.

        Builds the MA representation of the ARMA part via ``arma2ma``,
        then accumulates squared psi-weights scaled by the residual
        variance ``sigma2`` and takes the square root.
        ``cumsum_n(ma_rep, self.k_diff)`` appears to re-integrate the
        psi-weights ``k_diff`` times to undo the differencing —
        confirm against ``cumsum_n``'s definition.
        """
        sigma2 = self.sigma2
        ma_rep = arma2ma(np.r_[1, -self.arparams],
                         np.r_[1, self.maparams], lags=steps)
        fcerr = np.sqrt(np.cumsum(cumsum_n(ma_rep, self.k_diff) ** 2) * sigma2)
        return fcerr
dismalpy.ssm.mlemodel.MLEResults
dismalpy.ssm.kalman_smoother.SmootherResults
dismalpy.ssm.kalman_filter.FilterResults
dismalpy.ssm.representation.FrozenRepresentation
"""
pass
class VARMAXResultsWrapper(mlemodel.MLEResultsWrapper):
    """Results wrapper for VARMAX state-space models.

    Merges the MLE wrapper's attribute/method maps with this class's
    (empty) extensions.
    """
    _attrs = {}
    _methods = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(VARMAXResultsWrapper, VARMAXResults)
dismalpy.ssm.mlemodel.MLEResults
dismalpy.ssm.kalman_smoother.SmootherResults
dismalpy.ssm.kalman_filter.FilterResults
dismalpy.ssm.representation.FrozenRepresentation
"""
pass
class DynamicFactorResultsWrapper(mlemodel.MLEResultsWrapper):
    """Results wrapper for dynamic factor models.

    Merges the MLE wrapper's attribute/method maps with this class's
    (empty) extensions.
    """
    _attrs = {}
    _methods = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(DynamicFactorResultsWrapper, DynamicFactorResults)