# Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
# In[19]:
# Metric-logging cell for "24_run_notebook_on_azureml.ipynb": when this notebook
# is driven by papermill/scrapbook on AzureML, glue the training curves and the
# final accuracy into the notebook output. Comment this cell out for direct runs.
training_losses = [loss.numpy().ravel()[0] for loss in learn.recorder.losses]
training_accuracy = [m[0].numpy().ravel()[0] for m in learn.recorder.metrics]
# pm.record may get deprecated and completely replaced by sb.glue:
# https://github.com/nteract/scrapbook#papermills-deprecated-record-feature
try:
    sb.glue("training_loss", training_losses)
    sb.glue("training_accuracy", training_accuracy)
    sb.glue("Accuracy on validation set:", 100 * float(metric))
except Exception:
    # Fall back to the legacy papermill recording API when scrapbook glue fails.
    pm.record("training_loss", training_losses)
    pm.record("training_accuracy", training_accuracy)
    pm.record("Accuracy on validation set:", 100 * float(metric))
# NOTE(review): fragment of a dagstermill context method — the enclosing `def` is
# outside this view and the original indentation appears to have been stripped.
# Validate the event is one of the supported dagster event types.
check.inst_param(
    dagster_event, 'dagster_event', (Materialization, ExpectationResult, TypeCheck, Failure)
)
# Outside a pipeline run there is nothing to marshal; hand the event back unchanged.
if not self.in_pipeline:
return dagster_event
# deferred import for perf
import scrapbook
# Unique per-event file name so concurrent events never collide on disk.
event_id = 'event-{event_uuid}'.format(event_uuid=str(uuid.uuid4()))
out_file_path = os.path.join(self.marshal_dir, event_id)
# Pickle the event to the marshal directory; scrapbook records only the file path.
# NOTE(review): pickled data is only safe to load from trusted sources — confirm
# the host process controls this directory.
with open(out_file_path, 'wb') as fd:
fd.write(pickle.dumps(dagster_event, PICKLE_PROTOCOL))
scrapbook.glue(event_id, out_file_path)
# Show the 9 validation samples with the highest loss — presumably `interp` is a
# fastai ClassificationInterpretation; verify against the cell that creates it.
interp.plot_top_losses(9, figsize=(15, 11))
# In[19]:
# Metric-logging cell for "24_run_notebook_on_azureml.ipynb": glue loss/accuracy
# history and the final validation accuracy into the notebook output when running
# under papermill/scrapbook on AzureML. Comment out when running interactively.
training_losses = [recorded.numpy().ravel()[0] for recorded in learn.recorder.losses]
training_accuracy = [row[0].numpy().ravel()[0] for row in learn.recorder.metrics]
# pm.record may get deprecated and completely replaced by sb.glue:
# https://github.com/nteract/scrapbook#papermills-deprecated-record-feature
try:
    sb.glue("training_loss", training_losses)
    sb.glue("training_accuracy", training_accuracy)
    sb.glue("Accuracy on validation set:", 100 * float(metric))
except Exception:
    # scrapbook unavailable or failed — use papermill's older record API instead.
    pm.record("training_loss", training_losses)
    pm.record("training_accuracy", training_accuracy)
    pm.record("Accuracy on validation set:", 100 * float(metric))
# NOTE(review): fragment of dagstermill's yield_result — the enclosing `def` and
# the preceding `if not self.in_pipeline:` guard sit outside this view, and the
# original indentation appears to have been stripped.
return value
# deferred import for perf
import scrapbook
# Fail fast when the solid does not declare the requested output name.
if not self.solid_def.has_output(output_name):
raise DagstermillError(
'Solid {solid_name} does not have output named {output_name}'.format(
solid_name=self.solid_def.name, output_name=output_name
)
)
# Marshal the value to a per-output file and glue the serialized result so the
# host process can pick it up from the executed notebook.
runtime_type = self.solid_def.output_def_named(output_name).runtime_type
out_file = os.path.join(self.marshal_dir, 'output-{}'.format(output_name))
scrapbook.glue(output_name, write_value(runtime_type, value, out_file))
# In[19]:
# Metric-logging cell for "24_run_notebook_on_azureml.ipynb": export training
# losses, per-epoch accuracy, and the final validation accuracy to the executed
# notebook via scrapbook (papermill fallback). Comment out for direct runs.
training_losses = [item.numpy().ravel()[0] for item in learn.recorder.losses]
training_accuracy = [entry[0].numpy().ravel()[0] for entry in learn.recorder.metrics]
# pm.record may get deprecated and completely replaced by sb.glue:
# https://github.com/nteract/scrapbook#papermills-deprecated-record-feature
try:
    sb.glue("training_loss", training_losses)
    sb.glue("training_accuracy", training_accuracy)
    sb.glue("Accuracy on validation set:", 100 * float(metric))
except Exception:
    # If scrapbook glue fails, record via the deprecated papermill API instead.
    pm.record("training_loss", training_losses)
    pm.record("training_accuracy", training_accuracy)
    pm.record("Accuracy on validation set:", 100 * float(metric))