# Imports these test snippets rely on
import itertools
import numpy as np
import pytest
from sklearn import datasets
from ripser import ripser

def test_input_warnings(self):
    data = np.random.random((3, 10))
    with pytest.warns(UserWarning, match='has more columns than rows') as w:
        ripser(data)
    data = np.random.random((3, 3))
    with pytest.warns(UserWarning, match='input matrix is square, but the distance_matrix') as w:
        ripser(data)
def test_thresh(self):
    np.random.seed(3100)
    data = np.random.random((100, 3))
    dgms0 = ripser(data, thresh=0.1)['dgms']
    dgms1 = ripser(data)['dgms']
    # With a low threshold the filtration is truncated, so the H_1
    # barcode should (usually) have fewer points
    assert len(dgms0[1]) < len(dgms1[1]), "Usually"
def test_coeff(self):
    np.random.seed(3100)
    data = np.random.random((100, 3))
    dgm3 = ripser(data, coeff=3)['dgms']
    dgm2 = ripser(data)['dgms']
    assert dgm2 is not dgm3, "This is a vacuous assertion, we only care that the above operations did not throw errors"
def test_maxdim(self):
    np.random.seed(3100)
    data = np.random.random((100, 3))
    # maxdim is the largest homology dimension H_p computed;
    # all lower dimensions are returned as well
    dgms0 = ripser(data, maxdim=0)['dgms']
    assert len(dgms0) == 1
    dgms1 = ripser(data)['dgms']
    assert len(dgms1) == 2
    dgms2 = ripser(data, maxdim=2)['dgms']
    assert len(dgms2) == 3
def test_full_nonzerobirths(self):
    # Nonzero entries on the diagonal of a dense distance matrix are
    # treated as birth times of the corresponding points
    D = np.array([[1.0, 3.0], [3.0, 2.0]])
    h0 = ripser(D, distance_matrix=True, maxdim=0)['dgms'][0]
    h0 = h0[np.argsort(h0[:, 0]), :]
    assert h0[0, 0] == 1
    assert np.isinf(h0[0, 1])
    assert h0[1, 0] == 2
    assert h0[1, 1] == 3
def test_sparse(self):
    np.random.seed(10)
    thresh = 1.1
    # Do dense filtration with threshold
    data = datasets.make_circles(n_samples=100)[0] + 5 * datasets.make_circles(n_samples=100)[0]
    res0 = ripser(data, thresh=thresh)
    # Convert to sparse matrix first based on threshold,
    # then do full filtration
    D = makeSparseDM(data, thresh)
    res1 = ripser(D, distance_matrix=True)
    # The same number of edges should have been added
    assert res0['num_edges'] == res1['num_edges']
    dgms0 = res0['dgms']
    dgms1 = res1['dgms']
    I10 = dgms0[1]
    I11 = dgms1[1]
    idx = np.argsort(I10[:, 0])
    I10 = I10[idx, :]
    idx = np.argsort(I11[:, 0])
    I11 = I11[idx, :]
    assert np.allclose(I10, I11)
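# makeSparseDM is not defined in these snippets. Below is a minimal sketch of
# such a helper, assuming it keeps only pairwise Euclidean distances at or
# below `thresh` in a scipy.sparse matrix; the real helper may differ.
import numpy as np
from scipy import sparse
from sklearn.metrics import pairwise_distances

def makeSparseDM(X, thresh):
    # Dense pairwise distances, then drop every entry above the threshold
    N = X.shape[0]
    D = pairwise_distances(X, metric='euclidean')
    I, J = np.meshgrid(np.arange(N), np.arange(N))
    mask = D <= thresh
    return sparse.coo_matrix((D[mask], (I[mask], J[mask])), shape=(N, N)).tocsr()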
def test_sphere_sparse_H2(self):
    # Sample the boundary of the unit 3-cube (a topological 2-sphere)
    # with a grid on each pair of opposite faces
    n = 3
    segment = [np.linspace(0, 1, 5)]
    endpoints = [np.linspace(0, 1, 2)]
    face = segment * (n - 1) + endpoints
    vertices = []
    for k in range(n):
        vertices.extend(itertools.product(*(face[k:] + face[:k])))
    coords = np.array(vertices)
    thresh = 1.5
    D = makeSparseDM(coords, thresh)
    rips = ripser(D, distance_matrix=True, maxdim=2, thresh=thresh)
    # A 2-sphere should contribute exactly one H_2 class
    I2 = rips['dgms'][2]
    assert I2.shape[0] == 1
    assert np.allclose(1.0, I2[0, 1])
# Snippet from a larger script: AllSongs, winFac, winsPerBlock, K,
# getFusedSimilarity and doImageSublevelsetFiltration are defined elsewhere.
import os
import time
import numpy as np
import scipy.io as sio
import matplotlib.pyplot as plt
from ripser import ripser

for i, filePrefix in enumerate(AllSongs):
    matfilename = "%s_DGMs_Raw.mat" % filePrefix
    if os.path.exists(matfilename):
        print("Skipping %i" % i)
        continue
    tic = time.time()
    print("Computing features for %i of %i..." % (i, len(AllSongs)))
    print("filePrefix = %s" % filePrefix)
    # Load precomputed MFCC and HPCP (chroma) features for this song
    X = sio.loadmat("%s_MFCC.mat" % filePrefix)
    XMFCC = X['XMFCC']
    X = sio.loadmat("%s_HPCP.mat" % filePrefix)
    XChroma = X['XHPCP']
    # Fuse the two feature sets into a single self-similarity matrix
    W = getFusedSimilarity(XMFCC, XChroma, winFac, winsPerBlock, K)
    #W = promoteDiagonal(W, bias)
    np.fill_diagonal(W, 0)
    # Rips persistence on the negated similarity matrix
    # (high similarity = small "distance")
    IRips = ripser(-W, distance_matrix=True, maxdim=1)['dgms'][1]
    # Keep only one triangle of W before the image filtration
    [X, Y] = np.meshgrid(np.arange(W.shape[0]), np.arange(W.shape[1]))
    W[X < Y] = 0
    IMorse = doImageSublevelsetFiltration(-W)
    toc = time.time()
    print("Elapsed Time: %.3g" % (toc - tic))
    sio.savemat(matfilename, {"IRips": IRips, "IMorse": IMorse})
    plt.clf()
    plt.subplot(131)
    plt.imshow(np.log(W + 5e-2), cmap='afmhot')
    plt.subplot(132)
    plt.scatter(IRips[:, 0], IRips[:, 1])
    plt.title("Rips (%i points)" % (IRips.shape[0]))
    plt.subplot(133)
    plt.scatter(IMorse[:, 0], IMorse[:, 1])
    plt.title("Superlevelset Filtration (%i points)" % IMorse.shape[0])