for epoch in range(args.epochs):
    print('epoch=%d | train' % epoch, file=sys.stderr)
    _ = train_epoch(model, train_iter, opt)
    print('epoch=%d | test' % epoch, file=sys.stderr)
    preds = pd.DataFrame(list(test(model, test_iter)))
    print('acc=%f\n--' % (preds.act == preds.pred).mean(), file=sys.stderr)
# --
# Save model
print("saving model to %s" % args.outpath, file=sys.stderr)
torch.save(model.state_dict(), args.outpath + '.pt')
with open(args.outpath + '.json', 'w') as f:
    json.dump((args.attention, model_config), f)
with open(args.outpath + '.vocab', 'wb') as f:  # pickle requires binary mode, not 'w'
    pickle.dump(dataset['vocabs'], f)
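# A minimal loading sketch for the three artifacts written above (assumes the
# same args.outpath, and that `model` was already constructed from model_config):
with open(args.outpath + '.json') as f:
    attention, model_config = json.load(f)
with open(args.outpath + '.vocab', 'rb') as f:
    vocabs = pickle.load(f)
model.load_state_dict(torch.load(args.outpath + '.pt'))
model.eval()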
niters = np.array([item[1] for item in iter_counts])
out = ' Mean number of iterations: %4.2f' % np.mean(niters)
print(out)
out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
print(out)
out = ' Position of max/min number of iterations: %2i -- %2i' % \
    (int(np.argmax(niters)), int(np.argmin(niters)))
print(out)
out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(np.std(niters)), float(np.var(niters)))
print(out)
assert np.mean(niters) <= maxmeaniter, 'Mean number of iterations is too high, got %s' % np.mean(niters)
fname = 'data/' + prob + '.dat'
with open(fname, 'wb') as f:
    dill.dump(stats, f)
assert os.path.isfile(fname), 'Run for %s did not create stats file' % prob
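# Round-trip sketch: the stats file written above can be restored with dill.load,
# e.g. before re-running the post-processing:
with open(fname, 'rb') as f:
    stats = dill.load(f)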
for song in album_songs:
    song_path = os.path.join(album_path, song)
    # Create mel spectrogram and convert it to the log scale
    y, sr = librosa.load(song_path, sr=sr)
    S = librosa.feature.melspectrogram(y=y, sr=sr, n_mels=n_mels,
                                       n_fft=n_fft,
                                       hop_length=hop_length)
    log_S = librosa.power_to_db(S, ref=1.0)  # logamplitude() was removed from librosa; power_to_db is its replacement
    data = (artist, log_S, song)
    # Save each song; '_%%-%%_' is a separator unlikely to appear in names
    save_name = artist + '_%%-%%_' + album + '_%%-%%_' + song
    with open(os.path.join(save_folder, save_name), 'wb') as fp:
        dill.dump(data, fp)
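# Matching loader sketch: dill.load restores the (artist, log_S, song) tuple, and
# splitting on the '_%%-%%_' separator recovers the metadata from the filename
# alone (assumes the files were produced by the loop above):
with open(os.path.join(save_folder, save_name), 'rb') as fp:
    artist, log_S, song = dill.load(fp)
artist, album, song = save_name.split('_%%-%%_')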
func = module.FUNC
with open(argfilename, 'rb') as f:
    args, kwds = pickle.load(f)
if world.rank == 0:
    log.info('funcname: %s' % funcname) # sys.argv[1]
    log.info('argfilename: %s' % argfilename) # sys.argv[2]
    log.info('outfilename: %s' % outfilename) # sys.argv[3]
    log.info('workdir: %s' % workdir) # sys.argv[4]
    log.info('func: %s' % func)
    log.info('args: %s' % str(args))
    log.info('kwds: %s' % str(kwds))
res = parallel_map(func, *args, **kwds) #XXX: called on ALL nodes ?
if world.rank == 0:
    log.info('res: %s' % str(res))
    with open(outfilename, 'wb') as f:
        pickle.dump(res, f)
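# For reference, a sketch of the submitting side that this worker script expects:
# the caller pickles an (args, kwds) pair into argfilename before launching the
# job (the two-element tuple matches the parallel_map(func, *args, **kwds) call above):
with open(argfilename, 'wb') as f:
    pickle.dump((args, kwds), f)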
print('Binarizing with threshold value of ' + str(binary_threshold))
inv_binary = cv2.bitwise_not(clean.binarize(gray, threshold=binary_threshold))
binary = clean.binarize(gray, threshold=binary_threshold)
segmented_image = seg.segment_image(gray)
segmented_image = segmented_image[:,:,2]
components = cc.get_connected_components(segmented_image)
cc.draw_bounding_boxes(img, components, color=(255, 0, 0), line_size=2)
imsave(outfile, img)
blurbs = ocr.ocr_on_bounding_boxes(img, components)
long_blurb = max(blurbs, key=lambda b: len(b.text))
translated = translate.translate_blurb(long_blurb)
with open("tts.pkl", "wb") as f:  # pickle requires binary mode, not mode="w"
    pickle.dump(to_typeset, f)
with open("blurb.pkl", "wb") as f:
    pickle.dump(translated, f)
typeset.typeset_blurb(to_typeset, translated)
to_typeset.show()
def dump_session(sess):
    temp = utils.tmp_dir()
    try:
        with open("{}/session.pkl".format(temp), "wb") as f:
            dill.dump(sess, f)
    except Exception:  # avoid a bare except, which would also swallow KeyboardInterrupt
        utils.log_err("Dump session failed")
def to_dill(self, filename):
    import dill
    rubi = self.load()
    with open(filename, "wb") as fout:
        dill.dump(rubi, fout)
print("Distributed dual subgradient")
_, dds_seq = dds.run(iterations=num_iterations, stepsize=step_gen, verbose=True)
if agent_id == 0:
print("Distributed primal decomposition")
dpd_seq, _ = dpd.run(iterations=num_iterations, stepsize=step_gen, M=30.0, verbose=True)
# save information
if agent_id == 0:
    with open('info.pkl', 'wb') as output:
        pickle.dump({'N': NN, 'iterations': num_iterations, 'n_coupling': TT}, output, pickle.HIGHEST_PROTOCOL)
with open('agent_{}_objective_func.pkl'.format(agent_id), 'wb') as output:
    pickle.dump(obj_func, output, pickle.HIGHEST_PROTOCOL)
with open('agent_{}_coupling_func.pkl'.format(agent_id), 'wb') as output:
    pickle.dump(coupling_func, output, pickle.HIGHEST_PROTOCOL)
with open('agent_{}_local_constr.pkl'.format(agent_id), 'wb') as output:
    pickle.dump(constr, output, pickle.HIGHEST_PROTOCOL)
np.save("agent_{}_seq_dds.npy".format(agent_id), dds_seq)
np.save("agent_{}_seq_dpd.npy".format(agent_id), dpd_seq)
def cache_bvp(self, problem, filename=None):
    r"""
    \brief Saves the BVP object into a file on disk.

    Arguments:
        problem : Problem object
        filename: Full path to cache file (optional;
                  default value: problem.name + '_bvp.dat')

    \date 01/27/2016
    """
    if filename is None:
        filename = problem.name + '_bvp.dat'
    try:
        with open(filename, 'wb') as f:
            logging.info('Caching BVP information to file')
            dill.dump(self.bvp, f)  # dill.dump returns None; nothing useful to assign
        return True
    except Exception:  # narrowed from a bare except, which would also catch KeyboardInterrupt
        logging.warning('Failed to save BVP to ' + filename)  # logging.warn is deprecated
        return False
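# A matching load_bvp sketch (hypothetical method name, mirroring cache_bvp):
def load_bvp(self, problem, filename=None):
    """Loads a cached BVP object from disk; returns None on failure."""
    if filename is None:
        filename = problem.name + '_bvp.dat'
    try:
        with open(filename, 'rb') as f:
            return dill.load(f)
    except Exception:
        logging.warning('Failed to load BVP from ' + filename)
        return None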
# filter statistics by type (number of iterations)
filtered_stats = filter_stats(stats, type='niter')
# convert filtered statistics to list of iteration counts, sorted by time
iter_counts = sort_stats(filtered_stats, sortby='time')
niters[i] = np.mean(np.array([item[1] for item in iter_counts]))
# print('Worked on k = %s, took %s iterations' % (k, results[i]))
results[qd] = niters
fname = 'data/harmonic_k.dat'
with open(fname, 'wb') as f:
    dill.dump(results, f)
assert os.path.isfile(fname), 'Run did not create stats file %s' % fname