An optimization experiment leverages SigOpt's proprietary ensemble of global optimization algorithms to learn the relationship between parameters and metrics over the experiment lifecycle. SigOpt lets you search for a single high-performing parameter configuration, or for a set of high-performing configurations with SigOpt's Multimetric Optimization.
AI Experiments can be created either in a script, with calls to a SigOpt client library, or by defining the experiment in a YAML file that is passed to the SigOpt CLI.
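For reference, a Multimetric Optimization experiment is defined the same way as a single-metric one, except that more than one metric uses the optimize strategy; SigOpt then searches for a frontier of trade-off configurations rather than a single best point. The sketch below is illustrative only, and the second metric name, inference_time, is an assumption rather than part of the walkthrough that follows.

import sigopt

# Illustrative multimetric setup: both metrics are optimized, so SigOpt explores
# the trade-off between them instead of a single objective.
experiment = sigopt.create_experiment(
    name="Multimetric optimization (illustrative)",
    type="offline",
    parameters=[
        dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
    ],
    metrics=[
        dict(name="holdout_accuracy", strategy="optimize", objective="maximize"),
        dict(name="inference_time", strategy="optimize", objective="minimize"),
    ],
    budget=30,
)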
Executing in Jupyter and Colab
See this notebook for a demonstration of how easy intelligent optimization is with SigOpt.
Executing with the SigOpt CLI and Python
Here is how to execute a SigOpt AI Experiment using Python and the SigOpt CLI.
Copy and paste the code below into a file named sigopt_bo_experiment.py:
import tensorflow as tf
import sigopt
import os

os.environ["SIGOPT_API_TOKEN"] = "YOUR_API_TOKEN"
os.environ["SIGOPT_PROJECT"] = "run-examples"


class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model

    def get_keras_nn_model(self):
        return self.model

    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)

    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict


def load_data_train_model():
    sigopt.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()
    # set model training, architecture parameters and hyperparameters
    sigopt.params.num_epochs = 2
    # create the model
    keras_nn_model = KerasNNModel(
        hidden_layer_size=sigopt.params.hidden_layer_size, activation_fn=sigopt.params.activation_function
    )
    sigopt.log_model("Keras NN Model with 1 Hidden layer")
    # train the model
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], sigopt.params.num_epochs)
    sigopt.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)
    # log performance metrics
    sigopt.log_metric("holdout_accuracy", metrics_dict["accuracy"])


if __name__ == "__main__":
    load_data_train_model()
Copy and paste the YAML below into a file named experiment.yml:
name: Single metric optimization
type: offline
parameters:
  - name: hidden_layer_size
    type: int
    bounds:
      min: 32
      max: 512
  - name: activation_function
    type: categorical
    categorical_values:
      - relu
      - tanh
metrics:
  - name: holdout_accuracy
    strategy: optimize
    objective: maximize
parallel_bandwidth: 1
budget: 30
Execute the CLI command below to start your AI Experiment:
$ sigopt optimize -e experiment.yml python sigopt_bo_experiment.py
And that’s it! Navigate to the SigOpt web application to keep an eye on your AI Experiment and draw insights from your results!
Executing a Python File without the SigOpt CLI
Copy and paste the code below into a file named sigopt_bo_experiment_nocli.py:
import tensorflow as tf
import sigopt
import os

os.environ["SIGOPT_API_TOKEN"] = "YOUR_API_TOKEN"
os.environ["SIGOPT_PROJECT"] = "run-examples"


class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model

    def get_keras_nn_model(self):
        return self.model

    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)

    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict


def load_data_train_model(run):
    run.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()
    # set model training, architecture parameters and hyperparameters
    run.params.num_epochs = 2
    # create the model
    keras_nn_model = KerasNNModel(
        hidden_layer_size=run.params.hidden_layer_size, activation_fn=run.params.activation_function
    )
    run.log_model("Keras NN Model with 1 Hidden layer")
    # train the model
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], run.params.num_epochs)
    run.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)
    # log performance metrics
    run.log_metric("holdout_accuracy", metrics_dict["accuracy"])


if __name__ == "__main__":
    experiment = sigopt.create_experiment(
        name="Single metric optimization",
        type="offline",
        parameters=[
            dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
            dict(name="activation_function", type="categorical", categorical_values=["relu", "tanh"]),
        ],
        metrics=[dict(name="holdout_accuracy", strategy="optimize", objective="maximize")],
        parallel_bandwidth=1,
        budget=30,
    )
    for run in experiment.loop():
        with run:
            load_data_train_model(run=run)
Execute the command below to start your AI Experiment:
$ python sigopt_bo_experiment_nocli.py
And that’s it! Navigate to the SigOpt web application to keep an eye on your AI Experiment and draw insights from your results!
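If you also want to pull results programmatically, the experiment object returned by sigopt.create_experiment exposes helpers for this. The short sketch below, which would be appended to sigopt_bo_experiment_nocli.py after the optimization loop, assumes the get_best_runs method is available in your installed version of the sigopt client.

# Minimal sketch, assuming the client provides experiment.get_best_runs();
# prints the parameter assignments of the best run(s) found so far.
for best_run in experiment.get_best_runs():
    print(best_run.assignments)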