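# The snippets below appear to assume imports along the following lines.
# This is a sketch, not taken from the original files; `mnist`, `kerastuner`,
# and `pytest` are inferred from the names used in the code.
import numpy as np
import pytest
import tensorflow as tf
import kerastuner
import autokeras as ak
from tensorflow.keras.datasets import mnist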
def test_merge(tmp_dir):
    x_train = np.random.rand(100, 33)
    y_train = np.random.rand(100, 1)

    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead()(output_node)

    graph = ak.GraphAutoModel([input_node1, input_node2],
                              output_node,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit([x_train, x_train], y_train,
              epochs=1,
              batch_size=100,
              verbose=False,
              validation_split=0.5)
    result = graph.predict([x_train, x_train])

    assert result.shape == (100, 1)
def test_preprocessing(_, tmp_dir):
    # The leading `_` absorbs a mock injected by a patch decorator that is not
    # shown in this snippet.
    x_train = np.random.rand(100, 33)
    y_train = np.random.rand(100, 1)

    input_node1 = ak.Input()
    temp_node1 = ak.Normalization()(input_node1)
    output_node1 = ak.DenseBlock()(temp_node1)

    output_node3 = ak.Normalization()(temp_node1)
    output_node3 = ak.DenseBlock()(output_node3)

    input_node2 = ak.Input()
    output_node2 = ak.Normalization()(input_node2)
    output_node2 = ak.DenseBlock()(output_node2)

    output_node = ak.Merge()([output_node1, output_node2, output_node3])
    output_node = ak.RegressionHead()(output_node)

    graph = ak.GraphAutoModel([input_node1, input_node2],
                              output_node,
                              directory=tmp_dir,
                              max_trials=1)
    # In tf.keras, validation_data overrides validation_split when both are
    # passed.
    graph.fit([x_train, x_train], y_train,
              epochs=1,
              batch_size=100,
              validation_data=([x_train, x_train], y_train),
              validation_split=0.5,
              verbose=False)
def test_input_missing():
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead()(output_node)

    with pytest.raises(ValueError) as info:
        ak.hypermodel.graph.GraphHyperModel(input_node1, output_node)
    assert 'A required input is missing for HyperModel' in str(info.value)
def test_input_output_disconnect():
    input_node1 = ak.Input()
    output_node = input_node1
    _ = ak.DenseBlock()(output_node)

    input_node = ak.Input()
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead()(output_node)

    with pytest.raises(ValueError) as info:
        ak.hypermodel.graph.GraphHyperModel(input_node1, output_node)
    assert 'Inputs and outputs not connected.' in str(info.value)
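# For contrast, a minimal sketch (not part of the original tests) of a properly
# connected graph: both inputs are passed and the head is reachable from them,
# so GraphHyperModel should build without raising. The function name is
# illustrative only; it follows the same wiring pattern as test_merge above.
def connected_graph_sketch():
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead()(output_node)
    return ak.hypermodel.graph.GraphHyperModel([input_node1, input_node2],
                                               output_node)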
# Excerpt from a tuner test body; `get_trials` and `run_trial` are mocks
# injected by patch decorators, and `tmp_dir` is a fixture, none of which are
# shown in this snippet. `common` refers to the test utilities module.
trial = kerastuner.engine.trial.Trial()
trial.hyperparameters = kerastuner.HyperParameters()
get_trials.return_value = [trial]

input_shape = (32,)
num_instances = 100
num_classes = 10
x = common.generate_data(num_instances=num_instances,
                         shape=input_shape,
                         dtype='dataset')
y = common.generate_one_hot_labels(num_instances=num_instances,
                                    num_classes=num_classes,
                                    dtype='dataset')

input_node = ak.Input(shape=input_shape)
output_node = input_node
output_node = ak.DenseBlock()(output_node)
output_node = ak.ClassificationHead(output_shape=(num_classes,))(output_node)

hypermodel = ak.hypermodel.graph.HyperBuiltGraphHyperModel(input_node,
                                                           output_node)
tuner = ak.tuner.RandomSearch(
    hypermodel=hypermodel,
    objective='val_loss',
    max_trials=1,
    directory=tmp_dir,
    seed=common.SEED)
tuner.search(x=tf.data.Dataset.zip((x, y)),
             validation_data=(x, y),
             epochs=20,
             callbacks=[])

_, kwargs = run_trial.call_args_list[0]
callbacks = kwargs['callbacks']
def functional_api():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    input_node = ak.ImageInput()
    output_node = input_node
    output_node = ak.Normalization()(output_node)
    output_node = ak.ConvBlock()(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)

    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
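# Hypothetical entry point (not in the original snippet) for running the
# functional-API example above as a script and printing its evaluation result.
if __name__ == '__main__':
    print(functional_api())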
# Excerpt from a multi-input, multi-output example; the data loading and the
# earlier definitions of `data_slice`, `x_train`, `y_test`, and
# `y_classification` are not shown in this snippet.
y_test = y_test[:data_slice]
x_image = x_train.reshape(x_train.shape + (1,))
x_test = x_test.reshape(x_test.shape + (1,))

x_structured = np.random.rand(x_train.shape[0], 100)
y_regression = np.random.rand(x_train.shape[0], 1)
y_classification = y_classification.reshape(-1, 1)

# Build model and train.
inputs = ak.ImageInput(shape=(28, 28, 1))
outputs1 = ak.ResNetBlock(version='next')(inputs)
outputs2 = ak.XceptionBlock()(inputs)
image_outputs = ak.Merge()((outputs1, outputs2))

structured_inputs = ak.StructuredDataInput()
structured_outputs = ak.DenseBlock()(structured_inputs)
merged_outputs = ak.Merge()((structured_outputs, image_outputs))

classification_outputs = ak.ClassificationHead()(merged_outputs)
regression_outputs = ak.RegressionHead()(merged_outputs)
automodel = ak.GraphAutoModel(inputs=[inputs, structured_inputs],
                              outputs=[regression_outputs,
                                       classification_outputs])

automodel.fit((x_image, x_structured),
              (y_regression, y_classification),
              # trials=100,
              validation_split=0.2,
              epochs=200,
              callbacks=[tf.keras.callbacks.EarlyStopping()])
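# A follow-up sketch (not part of the original example), assuming predict()
# returns one array per output head, in the same order as `outputs` above.
predicted_regression, predicted_classification = automodel.predict(
    (x_image, x_structured))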