Record SigOpt Runs with a Python IDE

If the CLI or Jupyter Notebook integration isn't right for your use case, you can create Runs and AI Experiments directly with the SigOpt Python Client. To create a Run, add the following to your code:
run = sigopt.create_run()
With this Run object, you can get and set parameter values and log attributes in much the same way as you would with the CLI:
run.params.learning_rate = 0.1
accuracy = train_my_model(learning_rate=run.params.learning_rate)
run.log_metric("accuracy", accuracy)
Finally, end the Run:
run.end()
For convenience, you can use a Python context manager to end the Run automatically, including when your code raises an exception:
with sigopt.create_run() as run:
    run.params.learning_rate = 0.1
    accuracy = train_my_model(learning_rate=run.params.learning_rate)
    run.log_metric("accuracy", accuracy)
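You can also give a Run a descriptive name so it is easier to find on the SigOpt dashboard. As a minimal sketch (this assumes your version of the client accepts a name argument to create_run; the name itself is just a placeholder):
run = sigopt.create_run(name="keras-mnist-baseline")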
With SigOpt installed and your Python environment set up, let's take a look at how to record a SigOpt Run in a Python IDE.

Instrument the Run

import tensorflow as tf
import sigopt
import os

os.environ["SIGOPT_API_TOKEN"] = "<SIGOPT-API-TOKEN>"  # replace with your SigOpt API token
os.environ["SIGOPT_PROJECT"] = "run-examples"

class KerasNNModel:
    def __init__(self, hidden_layer_size, activation_fn):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(hidden_layer_size, activation=activation_fn),
                tf.keras.layers.Dense(10),
            ]
        )
        self.model = model

    def get_keras_nn_model(self):
        return self.model

    def train_model(self, train_images, train_labels, optimizer_type, metrics_list, num_epochs):
        self.model.compile(
            optimizer=optimizer_type,
            loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
            metrics=metrics_list,
        )
        self.model.fit(train_images, train_labels, epochs=num_epochs)

    def evaluate_model(self, test_images, test_labels):
        metrics_dict = self.model.evaluate(test_images, test_labels, verbose=2, return_dict=True)
        return metrics_dict

def load_data_train_model(sigopt_run):
    sigopt_run.log_dataset(name="mnist")
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()
    # set model training, architecture parameters and hyperparameters
    sigopt_run.params.num_epochs = 2
    sigopt_run.params.hidden_layer_size = 200
    sigopt_run.params.activation_fn = "relu"
    # create the model
    keras_nn_model = KerasNNModel(
        hidden_layer_size=sigopt_run.params.hidden_layer_size, activation_fn=sigopt_run.params.activation_fn
    )
    sigopt_run.log_model("Keras NN Model with 1 Hidden layer")
    # train the model
    keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], sigopt_run.params.num_epochs)
    sigopt_run.log_metadata("sgd optimizer", "adam")
    metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)
    # log performance metrics
    sigopt_run.log_metric("accuracy", metrics_dict["accuracy"])
    sigopt_run.log_metric("loss", metrics_dict["loss"])

if __name__ == "__main__":
    with sigopt.create_run() as run:
        load_data_train_model(sigopt_run=run)

Run the Code

$ python keras_model.py
Run started, view it on the SigOpt dashboard at https://app.sigopt.com/run/1234
Epoch 1/2
1875/1875 [==============================] - 5s 2ms/step - loss: 2.7513 - accuracy: 0.8826
Epoch 2/2
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3313 - accuracy: 0.9265
313/313 - 0s - loss: 0.2941 - accuracy: 0.9478
Run finished, view it on the SigOpt dashboard at https://app.sigopt.com/run/1234
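
The introduction mentioned that the Python Client can also create AI Experiments, which let SigOpt suggest parameter values across many Runs instead of logging fixed ones. The following is a minimal sketch rather than a drop-in script: it assumes the experiment API of recent client versions (sigopt.create_experiment and experiment.loop()), and the parameter bounds, metric definition, and budget are placeholder values chosen for the Keras model above.
(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()

experiment = sigopt.create_experiment(
    name="Keras NN hyperparameter search",
    type="offline",
    parameters=[
        dict(name="hidden_layer_size", type="int", bounds=dict(min=32, max=512)),
        dict(name="activation_fn", type="categorical", categorical_values=["relu", "tanh"]),
    ],
    metrics=[dict(name="accuracy", objective="maximize")],
    budget=10,
)

for run in experiment.loop():
    with run:
        # run.params now holds values suggested by SigOpt rather than fixed ones
        keras_nn_model = KerasNNModel(
            hidden_layer_size=run.params.hidden_layer_size,
            activation_fn=run.params.activation_fn,
        )
        keras_nn_model.train_model(train_images, train_labels, "adam", ["accuracy"], num_epochs=2)
        metrics_dict = keras_nn_model.evaluate_model(test_images, test_labels)
        run.log_metric("accuracy", metrics_dict["accuracy"])
Each Run in the loop reports its accuracy back to SigOpt, which uses it to choose the parameter values for the next Run.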