    # (end of an early-exit guard; this fragment starts mid-method)
    return
for key in self.logs:
    train_sats = []
    val_sats = []
    for i, layer_name in enumerate(self.logs[key]):
        if layer_name in self.ignore_layer_names:
            continue
        if self.logs[key][layer_name]._cov_mtx is None:
            raise ValueError("Attempting to compute intrinsic "
                             "dimensionality when covariance "
                             "is not initialized")
        cov_mat = self.logs[key][layer_name].fix()
        log_values = {}
        for stat in self.stats:
            # map the covariance key to the requested statistic's key
            if stat == 'lsat':
                log_values[key.replace(STATMAP['cov'], STATMAP['lsat']) + '_' + layer_name] = compute_saturation(cov_mat, thresh=self.threshold)
            elif stat == 'idim':
                log_values[key.replace(STATMAP['cov'], STATMAP['idim']) + '_' + layer_name] = compute_intrinsic_dimensionality(cov_mat, thresh=self.threshold)
            elif stat == 'cov':
                log_values[key + '_' + layer_name] = cov_mat.cpu().numpy()
            elif stat == 'det':
                log_values[key.replace(STATMAP['cov'], STATMAP['det']) + '_' + layer_name] = compute_cov_determinant(cov_mat)
            elif stat == 'trc':
                log_values[key.replace(STATMAP['cov'], STATMAP['trc']) + '_' + layer_name] = compute_cov_trace(cov_mat)
            elif stat == 'dtrc':
                log_values[key.replace(STATMAP['cov'], STATMAP['dtrc']) + '_' + layer_name] = compute_diag_trace(cov_mat)
        # reset sample counts and, optionally, the running covariance
        self.seen_samples[key.split('-')[0]][layer_name] = 0
        if self.reset_covariance:
            self.logs[key][layer_name]._cov_mtx = None
        if self.layerwise_sat:
            self.writer.add_scalars(prefix='', value_dict=log_values)
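# Standalone sketch of the two core statistics logged above, assuming
# delve's published definitions: the intrinsic dimensionality is the
# number of leading eigenvalues of the covariance matrix needed to
# explain `thresh` of the variance, and saturation is that count
# divided by the layer width. Names here are illustrative, not the
# library's own implementation.
import torch


def _intrinsic_dim(cov_mat: torch.Tensor, thresh: float = 0.99) -> int:
    eig_vals = torch.linalg.eigvalsh(cov_mat)   # ascending order
    eig_vals = torch.flip(eig_vals, dims=[0])   # largest first
    explained = torch.cumsum(eig_vals, dim=0) / eig_vals.sum()
    idx = int(torch.searchsorted(explained, thresh))
    return min(idx + 1, eig_vals.numel())


def _saturation(cov_mat: torch.Tensor, thresh: float = 0.99) -> float:
    return _intrinsic_dim(cov_mat, thresh) / cov_mat.shape[0]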
# Imports assumed by this example (the delve callback import path is
# an assumption; adjust to your installed version)
import numpy as np
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from delve.kerascallback import CustomTensorBoard, SaturationLogger

# Generate dummy data; the training split is assumed here, since the
# snippet uses x_train/y_train below but only defined the test split
x_train = np.random.random((1000, 20))
y_train = keras.utils.to_categorical(
    np.random.randint(10, size=(1000, 1)), num_classes=10)
x_test = np.random.random((100, 20))
y_test = keras.utils.to_categorical(
    np.random.randint(10, size=(100, 1)), num_classes=10)
# Build model
model = Sequential()
model.add(Dense(64, input_dim=20, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(10))
model.add(Activation('softmax'))
# Delve-specific
tbCallBack = CustomTensorBoard(log_dir='./runs', user_defined_freq=1)
saturation_logger = SaturationLogger(
model, input_data=x_train[:2], print_freq=1)
# Train and evaluate model
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model = Model(model.get_input_at(0), outputs=model.output)
model.compile(
loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
# # Optional - save to csv
# csv_logger = keras.callbacks.CSVLogger('1.log')
model.fit(
    x_train,
    y_train,
    epochs=100,
    batch_size=128,
    # wiring in the callbacks defined above (assumed; the original
    # snippet is truncated before the callbacks argument)
    callbacks=[saturation_logger, tbCallBack])
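# Closing the example with an evaluation pass over the dummy test
# split defined above (an addition; the original snippet is truncated
# after model.fit):
score = model.evaluate(x_test, y_test, batch_size=128)
print(score)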
def get_prop(layer: torch.nn.Module, prop: Any):
    """Low-level function for getting `prop` from `layer`."""
    training_state = get_training_state(layer)
    if prop in ('train_eig_vals', 'eval_eig_vals'):
        layer_history = get_layer_prop(layer,
                                       f'{training_state}_layer_history')
        # calculate eigenvalues
        if hasattr(layer, 'conv_method'):
            eig_vals = latent_iterative_pca(layer,
                                            layer_history,
                                            conv_method=layer.conv_method)
        else:
            eig_vals = latent_iterative_pca(layer, layer_history)
        return eig_vals
    elif prop == 'param_eig_vals':
        layer_svd = get_layer_prop(layer, 'layer_svd')
        return layer_svd
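# Conceptual sketch of what the eigenvalue branch above computes
# (function and variable names here are illustrative, not the
# library's): PCA eigenvalues of the feature covariance estimated
# from the recorded layer history.
import torch


def _eig_vals_from_history(history: torch.Tensor) -> torch.Tensor:
    # history: (n_samples, n_features) matrix of recorded activations
    centered = history - history.mean(dim=0, keepdim=True)
    cov = centered.T @ centered / (history.shape[0] - 1)
    return torch.linalg.eigvalsh(cov)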
    def _get_writer(self, save_to, writers_args) -> \
            delve.writers.AbstractWriter:
        """Create a writer to log history to `writer_dir`."""
        if issubclass(type(save_to), delve.writers.AbstractWriter):
            return save_to
        if isinstance(save_to, list):
            all_writers = []
            for saver in save_to:
                all_writers.append(self._get_writer(save_to=saver,
                                                    writers_args=writers_args))
            return CompositWriter(all_writers)
        if hasattr(delve, save_to):
            writer = getattr(delve, save_to)(**writers_args)
        else:
            raise ValueError(
                'Illegal argument for save_to "{}"'.format(save_to))
        return writer
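# Usage sketch of the string branch above: it assumes the named writer
# class (here 'CSVWriter') is exposed as a top-level attribute of the
# delve package, matching the getattr lookup; the path is hypothetical.
import delve

writer = getattr(delve, 'CSVWriter')(savepath='./logs/run1')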
    # Fan-out methods of the composite writer: every call is forwarded
    # to each wrapped writer. The head of add_scalar is restored here;
    # the snippet began mid-method.
    def add_scalar(self, name, value, **kwargs):
        for w in self.writers:
            w.add_scalar(name, value, **kwargs)

    def add_scalars(self, prefix, value_dict, **kwargs):
        for w in self.writers:
            w.add_scalars(prefix, value_dict, **kwargs)

    def save(self):
        for w in self.writers:
            w.save()

    def close(self):
        for w in self.writers:
            w.close()
class CSVWriter(AbstractWriter):
    def __init__(self, savepath: str, **kwargs):
        """
        This writer produces a csv file with all saturation values.
        The csv-file is overwritten with an updated version every time
        save() is called.
        :param savepath: CSV file path
        """
        super(CSVWriter, self).__init__()
        self.value_dict = {}
        self.savepath = savepath

    def resume_from_saved_state(self, initial_epoch: int):
        self.epoch_counter = initial_epoch
        if self._check_savestate_ok(self.savepath + '.csv'):
            self.value_dict = pd.read_csv(self.savepath + '.csv',
                                          sep=';',
                                          index_col=0).to_dict('list')
    # Abstract interface of AbstractWriter; the decorator and signature
    # of the first method are restored, since the snippet began
    # mid-method.
    @abstractmethod
    def add_scalar(self, name, value, **kwargs):
        raise NotImplementedError()

    @abstractmethod
    def add_scalars(self, prefix, value_dict, global_step, **kwargs):
        raise NotImplementedError()

    @abstractmethod
    def save(self):
        pass

    @abstractmethod
    def close(self):
        pass
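# Minimal sketch of a custom writer implementing this interface
# (hypothetical, not part of delve): it collects every scalar in a
# dict so tests can inspect what was logged.
class InMemoryWriter(AbstractWriter):
    def __init__(self, **kwargs):
        super(InMemoryWriter, self).__init__()
        self.history = {}

    def add_scalar(self, name, value, **kwargs):
        self.history.setdefault(name, []).append(value)

    def add_scalars(self, prefix, value_dict, **kwargs):
        for name, value in value_dict.items():
            self.add_scalar(prefix + '_' + name, value)

    def save(self):
        pass  # nothing to persist

    def close(self):
        pass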
class CompositWriter(AbstractWriter):
    def __init__(self, writers: List[AbstractWriter]):
        """
        This writer combines multiple writers.
        :param writers: List of writers. Each writer is called when
            the CompositWriter is invoked.
        """
        super(CompositWriter, self).__init__()
        self.writers = writers

    def resume_from_saved_state(self, initial_epoch: int):
        for w in self.writers:
            try:
                w.resume_from_saved_state(initial_epoch)
            except NotImplementedError:
                warnings.warn(
                    f'Writer {w.__class__.__name__} raised a '
                    'NotImplementedError when attempting to resume '
                    'training. This may result in corrupted or '
                    'overwritten data.')
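# Usage sketch (hypothetical path and values): fan a single logging
# call out to several writers at once. CSVWriter and PrintWriter are
# the classes shown in this section; PrintWriter's save/close are
# assumed to be no-ops, as they are not shown in the snippet.
composite = CompositWriter([CSVWriter(savepath='./logs/run1'),
                            PrintWriter()])
composite.add_scalar('train_sat_fc1', 0.42)  # forwarded to both writers
composite.save()
composite.close()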
    # Note: this save() fragment references self.savepath, self.zip and
    # self.epoch_counter, which belong to the NPYWriter defined below,
    # not to CompositWriter.
    def save(self):
        with open(os.path.join(self.savepath, 'epoch_counter.pkl'),
                  'wb') as f:
            pkl.dump(self.epoch_counter, f)
        if self.zip:
            make_archive(base_name=os.path.basename(self.savepath),
                         format='zip',
                         root_dir=os.path.dirname(self.savepath),
                         verbose=True)

    def close(self):
        pass
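# Standalone illustration of the make_archive call used above: zip the
# contents of ./runs (path hypothetical) into runs.zip next to the
# script.
from shutil import make_archive

make_archive(base_name='runs', format='zip', root_dir='./runs')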
class PrintWriter(AbstractWriter):
    def __init__(self, **kwargs):
        """
        Prints output to the console.
        """
        super(PrintWriter, self).__init__()

    def resume_from_saved_state(self, initial_epoch: int):
        pass

    def add_scalar(self, name, value, **kwargs):
        print(name, ':', value)

    def add_scalars(self, prefix, value_dict, **kwargs):
        for key in value_dict.keys():
            self.add_scalar(prefix + '_' + key, value_dict[key])
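# Usage sketch (hypothetical values): PrintWriter echoes each scalar
# to stdout, so the call below prints "train_sat_fc1 : 0.42".
pw = PrintWriter()
pw.add_scalars('train', {'sat_fc1': 0.42})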
    # Remainder of CSVWriter: the head of add_scalar is restored here,
    # since the snippet began mid-method (the append branch for already
    # known names is an assumption). Each name accumulates a list with
    # one value per logging step.
    def add_scalar(self, name, value, **kwargs):
        if name in self.value_dict:
            self.value_dict[name].append(value)
        else:
            self.value_dict[name] = [value]
        return

    def add_scalars(self, prefix, value_dict, **kwargs):
        for name in value_dict.keys():
            self.add_scalar(name, value_dict[name])

    def save(self):
        pd.DataFrame.from_dict(self.value_dict).to_csv(
            self.savepath + '.csv', sep=';')

    def close(self):
        pass
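# Usage sketch (hypothetical path and values): after two add_scalar
# calls for the same name, save() writes a CSV column with two rows,
# overwriting any previous file.
cw = CSVWriter(savepath='./logs/run1')
cw.add_scalar('train_sat_fc1', 0.40)
cw.add_scalar('train_sat_fc1', 0.47)
cw.save()  # writes ./logs/run1.csv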
class NPYWriter(AbstractWriter):
    def __init__(self, savepath: str, zip: bool = False, **kwargs):
        """
        The NPYWriter creates a folder containing one subfolder for
        each stat. Each subfolder contains a npy-file with the
        saturation value for each epoch. This writer saves non-scalar
        values and can thus be used to save the covariance matrix.
        :param savepath: The root folder to save the folder structure to
        :param zip: Whether to zip the output folder after every
            invocation
        """
        super(NPYWriter, self).__init__()
        self.savepath = savepath
        self.epoch_counter = {}
        self.zip = zip

    def resume_from_saved_state(self, initial_epoch: int):