os.environ["SIGOPT_API_TOKEN"] = "YOUR_API_TOKEN"
os.environ["SIGOPT_PROJECT"] = "run-examples"
class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        # single hidden-layer feedforward classifier for 28x28 MNIST images
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model
    def get_keras_nn_model(self):
        return self.model
    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)
    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict
def load_data_train_model(run):
    run.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()

    # set model training, architecture parameters and hyperparameters
    run.params.num_epochs = 2

    # build the model from the run's suggested hyperparameters and log its architecture
    keras_nn_model = KerasNNModel(
        hidden_layer_size=run.params.hidden_layer_size, activation_fn=run.params.activation_function
    )
    run.log_model("Keras NN Model with 1 Hidden layer")

    # train the model and record the optimizer choice as run metadata
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], run.params.num_epochs)
    run.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)

    # log performance metrics
    run.log_metric("holdout_accuracy", metrics_dict["accuracy"])
if __name__ == "__main__":
    experiment = sigopt.create_experiment(
        name="Single metric optimization",
        parameters=[
            dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
            dict(name="activation_function", type="categorical", categorical_values=["relu", "tanh"]),
        ],
        metrics=[dict(name="holdout_accuracy", strategy="optimize", objective="maximize")],
        budget=30,  # number of optimization runs the loop will execute; adjust as needed
    )
    for run in experiment.loop():
        with run:
            load_data_train_model(run=run)
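    # Optional sketch, separate from the loop above: once the budget is exhausted,
    # the best-performing configuration can be inspected. This assumes the sigopt
    # client exposes `experiment.get_best_runs()` returning the run(s) with the
    # best observed "holdout_accuracy", each carrying its parameter assignments.
    for best_run in experiment.get_best_runs():
        print(best_run.assignments)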