Optimization Experiment
An optimization experiment leverages SigOpt's proprietary ensemble of global optimization algorithms to learn the relationship between parameters and metrics over the course of the experiment. SigOpt lets you search for a single high-performing parameter configuration, or for a set of high-performing configurations with SigOpt's Multimetric Optimization.
Experiments can be created either in a script, with calls from a SigOpt client library, or by defining the Experiment in a YAML file that is passed to the SigOpt CLI.
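As a quick preview of the script-based approach, the experiment definition itself is just a name, a parameter space, one or more metrics, and a budget. The minimal sketch below mirrors the sigopt.create_experiment call used in the full examples later on this page:

import sigopt

# Minimal experiment definition; the complete examples below wrap this in
# model training, logging, and the experiment.loop() optimization loop.
experiment = sigopt.create_experiment(
    name="Single metric optimization",
    type="offline",
    parameters=[
        dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
        dict(name="activation_function", type="categorical", categorical_values=["relu", "tanh"]),
    ],
    metrics=[dict(name="holdout_accuracy", strategy="optimize", objective="maximize")],
    budget=30,
)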

Executing in Jupyter and Colab

See this notebook for a demonstration of how easy intelligent optimization is with SigOpt.

For notebook instructions and tutorials, check out our GitHub notebook tutorials repo, or open the SigOpt Run notebook tutorial in Google Colab.

Executing with the SigOpt CLI and Python

Here is how to execute a SigOpt Experiment using Python and the SigOpt CLI.
Copy and paste the code below into a file named sigopt_bo_experiment.py:
import tensorflow as tf
import sigopt
import os

os.environ["SIGOPT_API_TOKEN"] = "YOUR_API_TOKEN"
os.environ["SIGOPT_PROJECT"] = "run-examples"


class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model

    def get_keras_nn_model(self):
        return self.model

    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)

    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict


def load_data_train_model():
    sigopt.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()

    # set model training, architecture parameters and hyperparameters
    sigopt.params.num_epochs = 2

    # create the model
    keras_nn_model = KerasNNModel(
        hidden_layer_size=sigopt.params.hidden_layer_size, activation_fn=sigopt.params.activation_function
    )
    sigopt.log_model("Keras NN Model with 1 Hidden layer")

    # train the model
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], sigopt.params.num_epochs)
    sigopt.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)

    # log performance metrics
    sigopt.log_metric("holdout_accuracy", metrics_dict["accuracy"])


if __name__ == "__main__":
    load_data_train_model()
Copy and paste the YAML below into a file named experiment.yml:
name: Single metric optimization
type: offline
parameters:
  - name: hidden_layer_size
    type: int
    bounds:
      min: 32
      max: 512
  - name: activation_function
    type: categorical
    categorical_values:
      - relu
      - tanh
metrics:
  - name: holdout_accuracy
    strategy: optimize
    objective: maximize
parallel_bandwidth: 1
budget: 30
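When the script is launched with the sigopt optimize command below, the optimizer picks a value for each parameter declared in experiment.yml and exposes it through sigopt.params, which is how the training code above reads hidden_layer_size and activation_function. A minimal sketch of that interaction:

import sigopt

# Inside a run started by `sigopt optimize`, the values suggested for the
# parameters declared in experiment.yml are available on sigopt.params.
hidden_layer_size = sigopt.params.hidden_layer_size      # int between 32 and 512
activation_function = sigopt.params.activation_function  # "relu" or "tanh"

# Assigning to sigopt.params (as the script does with num_epochs) records a
# fixed value that is tracked with the run but not tuned by the optimizer.
sigopt.params.num_epochs = 2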
Execute the CLI command below to start your Experiment:
$ sigopt optimize -e experiment.yml python sigopt_bo_experiment.py
And that’s it! Navigate to the SigOpt web application to keep an eye on your Experiment and draw insights from your results!
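Because the CLI creates the Experiment for you, you can also inspect it programmatically once it finishes, using the experiment ID reported by the CLI and shown on the web dashboard. A hedged sketch, assuming the sigopt.get_experiment and get_best_runs helpers available in recent versions of the sigopt Python client; EXPERIMENT_ID is a placeholder:

import sigopt

# Placeholder ID: use the value reported by the CLI or shown on the experiment page.
experiment = sigopt.get_experiment("EXPERIMENT_ID")

# Iterate over the best run(s) found during optimization.
for best_run in experiment.get_best_runs():
    print(best_run.id, best_run.assignments, best_run.values)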

Executing a Python File without the SigOpt CLI

Copy and paste the code below into a file named sigopt_bo_experiment_nocli.py:
import tensorflow as tf
import sigopt
import os

os.environ["SIGOPT_API_TOKEN"] = "YOUR_API_TOKEN"
os.environ["SIGOPT_PROJECT"] = "run-examples"


class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model

    def get_keras_nn_model(self):
        return self.model

    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)

    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict


def load_data_train_model(run):
    run.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()

    # set model training, architecture parameters and hyperparameters
    run.params.num_epochs = 2

    # create the model
    keras_nn_model = KerasNNModel(
        hidden_layer_size=run.params.hidden_layer_size, activation_fn=run.params.activation_function
    )
    run.log_model("Keras NN Model with 1 Hidden layer")

    # train the model
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], run.params.num_epochs)
    run.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)

    # log performance metrics
    run.log_metric("holdout_accuracy", metrics_dict["accuracy"])


if __name__ == "__main__":
    experiment = sigopt.create_experiment(
        name="Single metric optimization",
        type="offline",
        parameters=[
            dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
            dict(name="activation_function", type="categorical", categorical_values=["relu", "tanh"]),
        ],
        metrics=[dict(name="holdout_accuracy", strategy="optimize", objective="maximize")],
        parallel_bandwidth=1,
        budget=30,
    )
    for run in experiment.loop():
        with run:
            load_data_train_model(run=run)
Execute the command below to start your Experiment:
$ python sigopt_bo_experiment_nocli.py
And that’s it! Navigate to the SigOpt web application to keep an eye on your Experiment and draw insights from your results!
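If you want to pull the winning configuration back into your script once the loop finishes, the experiment object returned by sigopt.create_experiment can report its best run(s). A brief sketch, assuming the get_best_runs method available in recent versions of the sigopt client:

# After the optimization loop exhausts the budget, ask the experiment for its
# best run(s) and print their parameter assignments and metric values.
for best_run in experiment.get_best_runs():
    print("best run:", best_run.id)
    print("assignments:", best_run.assignments)
    print("metrics:", best_run.values)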