# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Store Meta Files
# Build per-column metadata for the simulated samples. Both columns are
# treated as continuous; the declared [min, max] range is padded by 1 so
# every observed value falls strictly inside it.
meta = []
for i in range(2):
    col = samples[:, i].astype('float')
    meta.append({
        "name": str(i),
        "type": "continuous",
        "min": int(np.min(col)) - 1,
        "max": int(np.max(col)) + 1
    })
# Store simulated data: metadata as pretty-printed JSON, samples split
# 50/50 into train/test, then cross-checked by utils.verify.
json_path = "{}/{}.json".format(output_dir, dist)
npz_path = "{}/{}.npz".format(output_dir, dist)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
half = len(samples) // 2
np.savez(npz_path, train=samples[:half], test=samples[half:])
utils.verify(npz_path, json_path)
# NOTE(review): orphaned tail of a meta.append({...}) call — the opening
# lines (the append itself and the other dict entries) are missing from
# this chunk; restore them before running. "i2s" presumably maps integer
# codes 0-9 back to string labels — TODO confirm against the full file.
"i2s": [str(x) for x in range(10)]
})
# Persist the dataset: metadata JSON, then shuffled int8 train/test arrays.
json_path = "{}/{}.json".format(output_dir, name)
npz_path = "{}/{}.npz".format(output_dir, name)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
np.random.shuffle(t_train)
# int8 keeps the .npz small; values appear to be binarized pixels + label.
t_train = t_train.astype('int8')
t_test = t_test.astype('int8')
np.savez(npz_path, train=t_train, test=t_test)
verify(npz_path, json_path)
## Sample: dump the first few training rows as PNGs for visual inspection.
for i in range(5):
    # Each row is a flattened wh x wh image followed by its label in the
    # final position; scale 0/1 pixels up to 0/255 for viewing.
    img = t_train[i][:-1].reshape([wh, wh]) * 255
    lb = t_train[i][-1]
    cv2.imwrite('{}/{}_{}_{}.png'.format(temp_dir, name, i, lb), img)
# NOTE(review): stray closing of a call whose opening lines are missing
# from this chunk — the fragment above it was lost in extraction.
})
# --- "news" dataset: cast to float32, shuffle deterministically, split off
# the last 8000 rows as the test set, and persist metadata + arrays. ---
name = "news"
tdata = df.values.astype('float32')
np.random.seed(0)  # fixed seed so the shuffle (and thus the split) is reproducible
np.random.shuffle(tdata)
cut = -8000
t_train, t_test = tdata[:cut], tdata[cut:]
json_path = "{}/{}.json".format(output_dir, name)
npz_path = "{}/{}.npz".format(output_dir, name)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
np.savez(npz_path, train=t_train, test=t_test)
verify(npz_path, json_path)
# --- "intrusion" dataset: project the dataframe through the metadata,
# shuffle with a fixed seed, hold out the last 100000 rows, persist. ---
name = "intrusion"
tdata = project_table(df, meta)
np.random.seed(0)  # deterministic shuffle -> reproducible train/test split
np.random.shuffle(tdata)
cut = -100000
t_train, t_test = tdata[:cut], tdata[cut:]
json_path = "{}/{}.json".format(output_dir, name)
npz_path = "{}/{}.npz".format(output_dir, name)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
np.savez(npz_path, train=t_train, test=t_test)
verify(npz_path, json_path)
# NOTE(review): orphaned tail of a meta.append({...}) call for what looks
# like a categorical column (size + i2s mapper) — the opening lines are
# missing from this chunk; restore them before running.
"name": info[0],
"type": info[1],
"size": len(mapper),
"i2s": mapper
})
# --- "census" dataset: the train/test split is already given, so just
# project both tables through the metadata and persist. ---
name = "census"
t_train = project_table(trainset, meta)
t_test = project_table(testset, meta)
json_path = "{}/{}.json".format(output_dir, name)
npz_path = "{}/{}.npz".format(output_dir, name)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
np.savez(npz_path, train=t_train, test=t_test)
verify(npz_path, json_path)
output_dir = "data/simulated"
# Best-effort directory creation: ignore OS-level failures such as a race
# where another process creates the directory between the exists() check
# and mkdir(). Was a bare `except:`; narrowed so real bugs still surface.
if not os.path.exists(output_dir):
    try:
        os.mkdir(output_dir)
    except OSError:
        pass
# Store simulated data: column metadata, the generator's model structure,
# and the samples split 50/50 into train/test, then cross-checked.
json_path = "{}/{}.json".format(output_dir, dist)
npz_path = "{}/{}.npz".format(output_dir, dist)
with open(json_path, 'w') as f:
    json.dump(maker.meta, f, sort_keys=True, indent=4, separators=(',', ': '))
with open("{}/{}_structure.json".format(output_dir, dist), 'w') as f:
    f.write(maker.model.to_json())
half = len(samples) // 2
np.savez(npz_path, train=samples[:half], test=samples[half:])
utils.verify(npz_path, json_path)
# --- "adult" dataset: project the dataframe through the metadata,
# shuffle with a fixed seed, hold out the last 10000 rows, persist. ---
name = "adult"
tdata = project_table(df, meta)
np.random.seed(0)  # deterministic shuffle -> reproducible train/test split
np.random.shuffle(tdata)
cut = -10000
t_train, t_test = tdata[:cut], tdata[cut:]
json_path = "{}/{}.json".format(output_dir, name)
npz_path = "{}/{}.npz".format(output_dir, name)
with open(json_path, 'w') as f:
    json.dump(meta, f, sort_keys=True, indent=4, separators=(',', ': '))
np.savez(npz_path, train=t_train, test=t_test)
verify(npz_path, json_path)