Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
:return: self
"""
self.fit_callback_function((data_inputs, expected_outputs), *self.more_arguments)
def _callback(self, data):
    """
    Invoke the configured callback with the data being processed.

    The callback receives ``data`` followed by the extra arguments that were
    configured on this step. Purely observational: nothing is returned and
    the data itself is not modified here.

    :param data: the data being processed, forwarded to the callback
    :return: None
    """
    callback = self.callback_function
    callback(data, *self.more_arguments)
class FitCallbackStep(NonTransformableMixin, BaseCallbackStep):
    """Step that triggers its callback whenever ``fit`` is called."""

    def fit(self, data_inputs, expected_outputs=None) -> 'FitCallbackStep':
        """
        Forward the fitted data to the registered callback and return self.

        The callback receives a single tuple ``(data_inputs, expected_outputs)``
        plus any extra arguments configured on the step. The step itself learns
        nothing and leaves the data untouched.

        :param data_inputs: the data being fitted
        :param expected_outputs: the matching expected outputs, if any
        :return: self
        """
        fit_payload = (data_inputs, expected_outputs)
        self._callback(fit_payload)
        return self
"""
import hashlib
import os
import pickle
import shutil
from abc import abstractmethod, ABC
from typing import Iterable, Any
from neuraxle.base import MetaStepMixin, BaseStep, NonFittableMixin, NonTransformableMixin, \
ExecutionContext
from neuraxle.data_container import DataContainer
from neuraxle.pipeline import DEFAULT_CACHE_FOLDER
from neuraxle.steps.misc import VALUE_CACHING
class ValueCachingWrapper(MetaStepMixin, NonFittableMixin, NonTransformableMixin, BaseStep):
    """
    Value caching wrapper wraps a step to cache the values.

    :param wrapped: the step whose computed values should be cached
    :param cache_folder: folder where cached values are persisted
        (defaults to ``DEFAULT_CACHE_FOLDER``)
    :param value_hasher: strategy used to hash values into cache keys;
        defaults to :class:`Md5Hasher` when None
    """

    def __init__(
            self,
            wrapped: BaseStep,
            cache_folder: str = DEFAULT_CACHE_FOLDER,
            value_hasher: 'BaseValueHasher' = None,
    ):
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)

        # BUG FIX: cache_folder was accepted but never stored, so any custom
        # cache location passed by the caller was silently ignored.
        self.cache_folder = cache_folder

        # Default to MD5-based hashing when no hasher is provided.
        self.value_hasher = value_hasher if value_hasher is not None else Md5Hasher()
"""
Returns True if a step can be resumed with the given the data container, and execution context.
See Checkpoint class documentation for more details on how a resumable checkpoint works.
:param data_container: data container to resume from
:param context: execution context to resume from
:return: if we can resume
:rtype: bool
"""
raise NotImplementedError()
def __str__(self):
    """Use the same text as ``repr`` for the informal string form."""
    return repr(self)
class Identity(NonTransformableMixin, NonFittableMixin, BaseStep):
    """
    A pipeline step that has no effect at all but to return the same data without changes.

    This can be useful to concatenate new features to existing features, such as what AddFeatures do.

    Identity inherits from ``NonTransformableMixin`` and from ``NonFittableMixin`` which makes it a class that has no
    effect in the pipeline: it doesn't require fitting, and at transform-time, it returns the same data it received.

    .. seealso::
        * :class:`NonTransformableMixin`
        * :class:`NonFittableMixin`
        * :class:`BaseStep`
    """

    def __init__(self, savers=None, name=None):
        """
        :param savers: optional list of savers for this step
        :param name: optional step name
        """
        NonTransformableMixin.__init__(self)
        NonFittableMixin.__init__(self)
        # BUG FIX: savers and name were accepted but never forwarded, and the
        # NonFittableMixin / BaseStep initializers were never run (the sibling
        # initializer with this exact signature elsewhere in this file runs all
        # three and forwards both arguments).
        BaseStep.__init__(self, name=name, savers=savers)
def filter(self, status: TRIAL_STATUS) -> 'Trials':
    """
    Return a new ``Trials`` collection restricted to trials in the given status.

    :param status: the trial status to keep
    :return: a fresh ``Trials`` containing only the matching trials
    """
    matching = Trials()
    for candidate in self.trials:
        if candidate.status == status:
            matching.append(candidate)
    return matching
def __getitem__(self, item):
    """Index or slice directly into the underlying list of trials."""
    return self.trials.__getitem__(item)
def __len__(self):
    """Number of trials currently stored."""
    trial_count = len(self.trials)
    return trial_count
class AutoMLSequentialWrapper(NonTransformableMixin, MetaStepMixin, BaseStep):
"""
A step to execute any Automatic Machine Learning Algorithms.
Example usage :
.. code-block:: python
auto_ml: AutoMLSequentialWrapper = AutoMLSequentialWrapper(
step=ForecastingPipeline(),
auto_ml_algorithm=AutoMLAlgorithm(
hyperparameter_optimizer=RandomSearchHyperparameterOptimizer(),
validation_technique=KFoldCrossValidationWrapper(),
higher_score_is_better=True
),
hyperparams_repository=HyperparamsJSONRepository(),
        n_iters=100
    )

"""
def __init__(self, wrapped: BaseStep):
    """
    Initialize every mixin, then the base step itself.

    :param wrapped: the step to wrap
    """
    NonTransformableMixin.__init__(self)
    NonFittableMixin.__init__(self)
    MetaStepMixin.__init__(self, wrapped=wrapped)
    BaseStep.__init__(self)
def __init__(
        self,
        wrapped: BaseStep,
        auto_ml_algorithm: AutoMLAlgorithm,
        hyperparams_repository: HyperparamsRepository = None,
        n_iters: int = 100,
        refit=True
):
    """
    :param wrapped: the step whose hyperparameters are searched
    :param auto_ml_algorithm: the AutoML algorithm driving the search
    :param hyperparams_repository: where trial results are stored; defaults
        to an :class:`InMemoryHyperparamsRepository` when None
    :param n_iters: number of search iterations to run
    :param refit: whether to refit with the best hyperparameters afterward
    """
    NonTransformableMixin.__init__(self)
    self.refit = refit

    # The AutoML algorithm wraps the searched step; this wrapper then wraps
    # the algorithm (not the raw step) as its inner step.
    auto_ml_algorithm = auto_ml_algorithm.set_step(wrapped)
    MetaStepMixin.__init__(self, auto_ml_algorithm)

    # CONSISTENCY FIX: every other initializer in this file runs
    # BaseStep.__init__(self); it was missing here, leaving the BaseStep
    # attributes uninitialized.
    BaseStep.__init__(self)

    if hyperparams_repository is None:
        hyperparams_repository = InMemoryHyperparamsRepository()
    self.hyperparams_repository = hyperparams_repository

    self.n_iters = n_iters
def __init__(self, savers=None, name=None):
    """
    Initialize both mixins, then the base step with its name and savers.

    :param savers: optional list of savers for this step
    :param name: optional step name
    """
    NonTransformableMixin.__init__(self)
    NonFittableMixin.__init__(self)
    BaseStep.__init__(self, name=name, savers=savers)
def __init__(self, wrapped: BaseStep):
    """
    Initialize every mixin, then the base step itself.

    :param wrapped: the step to wrap
    """
    NonTransformableMixin.__init__(self)
    NonFittableMixin.__init__(self)
    MetaStepMixin.__init__(self, wrapped=wrapped)
    BaseStep.__init__(self)
import pickle
from abc import abstractmethod, ABC
from enum import Enum
from typing import List, Tuple, Any
from neuraxle.base import ResumableStepMixin, BaseStep, ExecutionContext, \
ExecutionMode, NonTransformableMixin, NonFittableMixin, Identity
from neuraxle.data_container import DataContainer, ListDataContainer
class DataCheckpointType(Enum):
    """Kind of content stored by a data checkpoint, used as a short file tag."""
    DATA_INPUT = 'di'  # checkpoint holds the data inputs
    EXPECTED_OUTPUT = 'eo'  # checkpoint holds the expected outputs
class BaseCheckpointer(NonFittableMixin, NonTransformableMixin, BaseStep):
    """
    Base class to implement a step checkpoint or data container checkpoint.

    :class:`Checkpoint` uses many BaseCheckpointer to checkpoint both data container checkpoints, and step checkpoints.

    BaseCheckpointer has an execution mode so there could be different checkpoints for each execution mode (fit, fit_transform or transform).

    .. seealso::
        * :class:`Checkpoint`
    """

    def __init__(
            self,
            execution_mode: ExecutionMode
    ):
        """
        :param execution_mode: the execution mode this checkpointer handles
        """
        BaseStep.__init__(self)
        # BUG FIX: execution_mode was accepted but never stored, although the
        # class contract (see docstring) is that each checkpointer carries an
        # execution mode; subclasses could not tell which mode they serve.
        self.execution_mode = execution_mode
from neuraxle.base import MetaStepMixin, BaseStep, ExecutionContext, DataContainer, NonTransformableMixin, \
ExecutionMode, NonFittableMixin, ForceHandleMixin
class TransformOnlyWrapper(
NonTransformableMixin,
NonFittableMixin,
MetaStepMixin,
BaseStep
):
"""
A wrapper step that makes its wrapped step only executes in the transform execution mode.
.. seealso:: :class:`ExecutionMode`,
:class:`neuraxle.base.DataContainer`,
:class:`neuraxle.base.NonTransformableMixin`,
:class:`neuraxle.base.NonFittableMixin`,
:class:`neuraxle.base.MetaStepMixin`,
:class:`neuraxle.base.BaseStep`
"""
def __init__(self, wrapped: BaseStep):