# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_local_search_acquisition_optimizer_with_context(simple_square_acquisition):
    """Local search honours a fixed context: the 'source' feature is pinned
    while the categorical 'x' is optimized."""
    space = ParameterSpace([
        CategoricalParameter('x', OrdinalEncoding(np.arange(0, 100))),
        InformationSourceParameter(10),
    ])
    optimizer = LocalSearchAcquisitionOptimizer(space, 1000, 3)

    # Pin the information source through the context dictionary.
    source_encoding = 1
    best_x, best_value = optimizer.optimize(simple_square_acquisition,
                                            {'source': source_encoding})

    # Optimum of the simple square acquisition is at x == 1 for this source.
    np.testing.assert_array_equal(best_x, np.array([[1., source_encoding]]))
    np.testing.assert_array_equal(best_value, np.array([[0. + source_encoding]]))
def test_categorical_variables_bayesian_optimization():
    """Bayesian optimization loop over a mixed continuous/one-hot space.

    NOTE(review): this test was previously also named
    ``test_categorical_variables`` and was shadowed (never collected) by a
    later definition of the same name; renamed so pytest runs both. The
    one-hot assertion previously checked only columns 1:3 of the three
    one-hot columns (1:4); and the loop was constructed but never run.
    """
    np.random.seed(123)

    def objective(x):
        # Row-wise sum keeps the objective cheap and deterministic.
        return np.array(np.sum(x, axis=1).reshape(-1, 1))

    carol_spirits = ['past', 'present', 'yet to come']
    encoding = OneHotEncoding(carol_spirits)
    parameter_space = ParameterSpace([
        ContinuousParameter('real_param', 0.0, 1.0),
        CategoricalParameter('categorical_param', encoding)
    ])

    x_init = parameter_space.sample_uniform(10)
    # 1 continuous column + 3 one-hot columns.
    assert x_init.shape == (10, 4)
    # BUG FIX: cover all three one-hot columns (was x_init[:, 1:3]).
    assert np.all(np.logical_or(x_init[:, 1:4] == 0.0, x_init[:, 1:4] == 1.0))

    y_init = objective(x_init)
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    gpy_model.Gaussian_noise.fix(1)
    model = GPyModelWrapper(gpy_model)
    acquisition = ExpectedImprovement(model)
    loop = BayesianOptimizationLoop(parameter_space, model, acquisition)
    # Actually exercise the loop; previously it was built but never run.
    loop.run_loop(objective, 5)
def test_local_search_acquisition_optimizer_neighbours_including_empty():
    """One-exchange neighbourhoods for every parameter type, including a
    single-value discrete parameter that has no neighbours.

    NOTE(review): previously named
    ``test_local_search_acquisition_optimizer_neighbours`` and shadowed by a
    later duplicate of that name which drops the empty-neighbourhood
    assertion; renamed so pytest collects both tests.
    """
    np.random.seed(0)
    space = ParameterSpace([
        CategoricalParameter('a', OneHotEncoding([1, 2, 3])),
        CategoricalParameter('b', OrdinalEncoding([0.1, 1, 2])),
        CategoricalParameter('c', OrdinalEncoding([0.1, 1, 2])),
        DiscreteParameter('d', [0.1, 1.2, 2.3]),
        ContinuousParameter('e', 0, 100),
        DiscreteParameter('no_neighbours', [1]),
        DiscreteParameter('f', [0.1, 1.2, 2.3]),
    ])
    x = np.array([1, 0, 0, 1.6, 2.9, 0.1, 50, 1.2, 1.])
    optimizer = LocalSearchAcquisitionOptimizer(space, 1000, 3, num_continuous=1)

    neighbourhood = optimizer._neighbours_per_parameter(x, space.parameters)

    # One-hot 'a': the two categories other than the current one.
    assert_equal(np.array([[0, 1, 0], [0, 0, 1]]), neighbourhood[0])
    # Ordinal 'b': left and right neighbours of the rounded encoding.
    assert_equal(np.array([[1], [3]]), neighbourhood[1])
    # Ordinal 'c' at the top of its range: only the left neighbour.
    assert_equal(np.array([[2]]), neighbourhood[2])
    # Discrete 'd': the adjacent domain value.
    assert_equal(np.array([[1.2]]), neighbourhood[3])
    # Continuous 'e': one seeded Gaussian perturbation.
    assert_almost_equal(np.array([[53.5281047]]), neighbourhood[4])
    # Single-value discrete parameter yields an empty neighbourhood.
    assert_equal(np.empty((0, 1)), neighbourhood[5])
def test_categorical_variables():
    """Experimental design loop over a mixed continuous/one-hot space using
    Latin hypercube initial samples.

    BUG FIX: the one-hot assertion previously sliced ``x_init[:, 1:3]`` but
    the 3-category one-hot encoding occupies columns 1:4, so the last
    one-hot column was never checked.
    """
    np.random.seed(123)

    def objective(x):
        # Row-wise sum keeps the objective cheap and deterministic.
        return np.array(np.sum(x, axis=1).reshape(-1, 1))

    carol_spirits = ['past', 'present', 'yet to come']
    encoding = OneHotEncoding(carol_spirits)
    parameter_space = ParameterSpace([
        ContinuousParameter('real_param', 0.0, 1.0),
        CategoricalParameter('categorical_param', encoding)
    ])

    random_design = LatinDesign(parameter_space)
    x_init = random_design.get_samples(10)
    # 1 continuous column + 3 one-hot columns.
    assert x_init.shape == (10, 4)
    # All one-hot columns must contain exactly 0 or 1.
    assert np.all(np.logical_or(x_init[:, 1:4] == 0.0, x_init[:, 1:4] == 1.0))

    y_init = objective(x_init)
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    gpy_model.Gaussian_noise.fix(1)
    model = GPyModelWrapper(gpy_model)
    loop = ExperimentalDesignLoop(parameter_space, model)
    loop.run_loop(objective, 5)
# NOTE(review): this chunk appears to be the interior of a test function -
# ``space``, ``optimizer`` and ``simple_square_acquisition`` are defined
# outside the visible lines. Indentation restored; no code changed.

class UnknownParameter(Parameter):
    # Minimal Parameter subclass the local-search optimizer does not know
    # how to handle; used to trigger its TypeError path.
    def __init__(self, name: str):
        self.name = name

    def sample_uniform(num_points):
        # NOTE(review): ``self`` is missing from this signature - this only
        # works because the optimizer raises TypeError before ever calling
        # it. Confirm whether that is intentional.
        return np.random.randint(0, 1, (num_points, 1))

# Unknown parameter type must be rejected by the optimizer.
space.parameters.append(UnknownParameter('y'))
with pytest.raises(TypeError):
    optimizer.optimize(simple_square_acquisition)
space.parameters.pop()

class UnknownEncoding(Encoding):
    # Encoding subclass not covered by the optimizer's encoding dispatch.
    def __init__(self):
        super().__init__([1], [[1]])

# Unknown categorical encoding must be rejected as well.
space.parameters.append(CategoricalParameter('y', UnknownEncoding()))
with pytest.raises(TypeError):
    optimizer.optimize(simple_square_acquisition)
space.parameters.pop()
def test_categorical_parameter_rounding(encoding):
    """CategoricalParameter.round must delegate to its encoding's round()."""
    rounded = np.array([[1, 2, 4], [2, 3, 5]])
    encoding.round = mock.MagicMock(return_value=rounded)

    parameter = CategoricalParameter('v', encoding)

    assert_array_equal(parameter.round(np.ones((2, 3))), rounded)
def test_local_search_acquisition_optimizer_neighbours():
    """Checks the one-exchange neighbourhoods produced for a mixed space."""
    np.random.seed(0)
    parameters = [
        CategoricalParameter('a', OneHotEncoding([1, 2, 3])),
        CategoricalParameter('b', OrdinalEncoding([0.1, 1, 2])),
        CategoricalParameter('c', OrdinalEncoding([0.1, 1, 2])),
        DiscreteParameter('d', [0.1, 1.2, 2.3]),
        ContinuousParameter('e', 0, 100),
        DiscreteParameter('no_neighbours', [1]),
        DiscreteParameter('f', [0.1, 1.2, 2.3]),
    ]
    space = ParameterSpace(parameters)
    point = np.array([1, 0, 0, 1.6, 2.9, 0.1, 50, 1.2, 1.])
    optimizer = LocalSearchAcquisitionOptimizer(space, 1000, 3, num_continuous=1)

    neighbourhood = optimizer._neighbours_per_parameter(point, space.parameters)

    exact_expectations = [
        np.array([[0, 1, 0], [0, 0, 1]]),  # one-hot: the other categories
        np.array([[1], [3]]),              # ordinal: left/right neighbours
        np.array([[2]]),                   # ordinal at the range boundary
        np.array([[1.2]]),                 # discrete: adjacent domain value
    ]
    for expected, actual in zip(exact_expectations, neighbourhood):
        assert_equal(expected, actual)
    # Continuous parameter: one seeded Gaussian perturbation.
    assert_almost_equal(np.array([[53.5281047]]), neighbourhood[4])
def test_categorical_parameter_check_in_domain(encoding):
    """check_in_domain accepts valid encodings and validates input shape."""
    param = CategoricalParameter('v', encoding)

    valid_rows = np.array([[1, 0, 0], [0, 0.5, 0]])
    assert param.check_in_domain(valid_rows)

    out_of_domain_rows = np.array([[2, 0, 0], [0, 0.5, 0]])
    assert not param.check_in_domain(out_of_domain_rows)

    # Wrong encoding dimension must raise.
    with pytest.raises(ValueError):
        param.check_in_domain(np.array([[1, 0], [0, 0.5]]))

    # A 1-d array (not a 2-d batch) must raise as well.
    with pytest.raises(ValueError):
        param.check_in_domain(np.array([1, 0, 0]))
def _neighbours_per_parameter(self, all_features: np.ndarray, parameters: Sequence[Parameter]) -> List[np.ndarray]:
    """ Generates parameter encodings for one-exchange neighbours of
    parameters encoded in parameter feature vector

    :param all_features: The encoded parameter point (1d-array)
    :param parameters: Parameters in the order their features appear in all_features
    :return: List of numpy arrays. Each array contains all one-exchange encodings of a parameter
    """
    neighbours = []
    current_feature = 0
    for parameter in parameters:
        # Round this parameter's slice of the feature vector onto its
        # domain before computing neighbours.
        features = parameter.round(
            all_features[current_feature:(current_feature + parameter.dimension)]
            .reshape(1, -1)).ravel()
        if isinstance(parameter, CategoricalParameter):
            if isinstance(parameter.encoding, OrdinalEncoding):
                # Ordinal: previous/next category, deduplicated and with the
                # current value excluded (handles both range boundaries).
                left_right = np.unique([parameter.encoding.round_row(features - 1),
                                        parameter.encoding.round_row(features + 1)])
                neighbours.append(left_right[left_right != features].reshape(-1, 1))
            elif isinstance(parameter.encoding, OneHotEncoding):
                # All categories apart from current one are valid neighbours with one hot encoding
                neighbours.append(parameter.encodings[
                    (parameter.encodings != features).any(axis=1)])
            else:
                raise TypeError("{} not a supported parameter encoding."
                                .format(type(parameter.encoding)))
        elif isinstance(parameter, DiscreteParameter):
            # Find current position in domain while being robust to numerical precision problems
            # NOTE(review): np.asscalar was removed in NumPy 1.23; prefer
            # features.item() when this code is next touched.
            current_index = np.argmin(np.abs(
                np.subtract(parameter.domain, np.asscalar(features))))
            this_neighbours = []
            # NOTE(review): this chunk is truncated here - the rest of the
            # discrete/continuous neighbour handling is not visible.
"""
Converts this ParameterSpace to a GPyOpt DesignSpace object
"""
gpyopt_parameters = []
for parameter in self.parameters:
if isinstance(parameter, ContinuousParameter):
gpyopt_param = {'name': parameter.name, 'type': 'continuous', 'domain': (parameter.min, parameter.max),
'dimensionality': 1}
gpyopt_parameters.append(gpyopt_param)
elif isinstance(parameter, DiscreteParameter):
gpyopt_param = {'name': parameter.name, 'type': 'discrete', 'domain': parameter.domain,
'dimensionality': 1}
gpyopt_parameters.append(gpyopt_param)
elif isinstance(parameter, CategoricalParameter):
for i, cat_sub_param in enumerate(parameter.model_parameters):
gpyopt_param = {'name': parameter.name + '_' + str(i),
'type': 'continuous',
'domain': (cat_sub_param.min, cat_sub_param.max),
'dimensionality': 1}
gpyopt_parameters.append(gpyopt_param)
else:
raise NotImplementedError("Only continuous, discrete and categorical parameters are supported"
", received " + type(parameter))
return GPyOpt.core.task.space.Design_space(gpyopt_parameters)