def fit(self, counts, lengths=None):
"""
"""
# Zero out NaNs while counts is still dense, then convert to COO format.
if not sparse.issparse(counts):
    counts[np.isnan(counts)] = 0
if not sparse.isspmatrix_coo(counts):
    counts = sparse.coo_matrix(counts)
if self.init == "MDS2":
if self.verbose:
print("Initialing with MDS2")
X = mds.estimate_X(counts, alpha=self.alpha,
beta=self.beta,
bias=self.bias,
random_state=self.random_state,
maxiter=self.max_iter,
verbose=self.verbose)
else:
X = self.init
X = estimate_X(counts,
               alpha=self.alpha,
               beta=self.beta,
               ini=X)
def _fit_binary(self, X, y):
p = np.asarray(self.alpha + X[y == 1].sum(axis=0)).flatten()
q = np.asarray(self.alpha + X[y == 0].sum(axis=0)).flatten()
# Deal with NaNs: p and q are 1-D after flatten(), so replace any NaN
# entries with the mean of the remaining finite entries.
p[np.isnan(p)] = np.nanmean(p)
q[np.isnan(q)] = np.nanmean(q)
r = np.log(p/np.abs(p).sum()) - np.log(q/np.abs(q).sum())
b = np.log((y == 1).sum()) - np.log((y == 0).sum())
if isinstance(X, spmatrix):
indices = np.arange(len(r))
r_sparse = coo_matrix(
(r, (indices, indices)),
shape=(len(r), len(r))
)
X_scaled = X * r_sparse
else:
X_scaled = X * r
lsvc = LinearSVC(
C=self.C,
fit_intercept=self.fit_intercept,
max_iter=1000
).fit(X_scaled, y)
mean_mag = np.abs(lsvc.coef_).mean()
coef_ = (1 - self.beta) * mean_mag * r + self.beta * lsvc.coef_
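# A minimal, self-contained sketch of the same naive-Bayes log-count-ratio
# scaling; X, y, and alpha below are toy stand-ins, not the class attributes
# used in _fit_binary above.
import numpy as np
from scipy import sparse
from scipy.sparse import coo_matrix

rng = np.random.default_rng(0)
X = sparse.random(20, 5, density=0.3, random_state=0, format="csr")  # toy doc-term matrix
y = rng.integers(0, 2, size=20)                                      # toy binary labels

alpha = 1.0  # smoothing, analogous to self.alpha
p = alpha + np.asarray(X[y == 1].sum(axis=0)).ravel()
q = alpha + np.asarray(X[y == 0].sum(axis=0)).ravel()
r = np.log(p / p.sum()) - np.log(q / q.sum())  # per-feature log-count ratio

# Scale each column of X by r through a sparse diagonal built with coo_matrix.
idx = np.arange(len(r))
r_sparse = coo_matrix((r, (idx, idx)), shape=(len(r), len(r)))
X_scaled = X @ r_sparse
print(X_scaled.shape)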
self.Pp = Pp
self.Pw = Pw
self.Pr = Pr
self.r_cov = r_cov
self.L_cov = L
self.L_sca = [np.sqrt(t/(self.T-1.)) for t in range(self.T)] # variance grows linearly
self.Ip = eye(self.num_p,format='coo')
self.Iy = eye(self.num_y,format='coo')
self.Iz = eye(self.num_z,format='coo')
self.Ow = coo_matrix((self.num_w,self.num_w))
self.Os = coo_matrix((self.num_s,self.num_s))
self.Oy = coo_matrix((self.num_y,self.num_y))
self.Oz = coo_matrix((self.num_z,self.num_z))
self.oq = np.zeros(self.num_q)
self.ow = np.zeros(self.num_w)
self.os = np.zeros(self.num_s)
self.oy = np.zeros(self.num_y)
self.oz = np.zeros(self.num_z)
self.x_prev = np.hstack((self.p_prev,self.oq,self.ow,self.os,self.oy,self.oz)) # stage vars
self.d_forecast = []
self.r_forecast = []
for t in range(self.T):
for load in net.loads:
load.P = forecast['load'][load.index][t]
for gen in net.var_generators:
return M
# Product of differences (8x8)
AA = A.T.dot(A)
AB = A.T.dot(B)
AC = A.T.dot(C)
BB = B.T.dot(B)
BC = B.T.dot(C)
CC = C.T.dot(C)
values = (term(AA, 5.) + term(AB + AB.T, 4.) + term(AC + AC.T + BB, 3.)
+ term(BC + BC.T, 2.) + term(CC, 1.))
ijs = numpy.array(list(itertools.product((p00, p10, p01, p11), repeat=2)))
E = scipy.sparse.coo_matrix(
(values.ravel(), ijs.reshape(-1, 2).T), shape=(nPixels, nPixels))
return E
def _create_D_inverse(self, graph):
"""
Creating a sparse inverse degree matrix.
Arg types:
* **graph** *(NetworkX graph)* - The graph to be embedded.
Return types:
* **D_inverse** *(Scipy array)* - Diagonal inverse degree matrix.
"""
index = np.arange(graph.number_of_nodes())
values = np.array([1.0/graph.degree[node] for node in range(graph.number_of_nodes())])
shape = (graph.number_of_nodes(), graph.number_of_nodes())
D_inverse = sparse.coo_matrix((values, (index, index)), shape=shape)
return D_inverse
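# Usage sketch of the same construction on a small NetworkX graph (the
# method's self is not needed for the computation itself).
import numpy as np
import networkx as nx
from scipy import sparse

graph = nx.path_graph(4)  # nodes 0..3 with degrees [1, 2, 2, 1]
index = np.arange(graph.number_of_nodes())
values = np.array([1.0 / graph.degree[node] for node in range(graph.number_of_nodes())])
shape = (graph.number_of_nodes(), graph.number_of_nodes())
D_inverse = sparse.coo_matrix((values, (index, index)), shape=shape)
print(D_inverse.toarray())  # diagonal matrix with entries 1.0, 0.5, 0.5, 1.0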
Dxl = spdiags(x-l,0,n,n)
In = eye(n)
K = bmat([[H,-A.T,In,-In],
[A,None,None,None],
[-Dmu,None,Dux,None],
[Dpi,None,None,Dxl]],
format='coo')
KT = K.T
Ibar = eye(self.num_p,K.shape[0])
Onp = coo_matrix((n,self.num_p))
bp = bmat([[-self.G],
[coo_matrix((self.num_br,self.num_p))]],
format='coo')
up = -eye(n,self.num_p)
lp = -eye(n,self.num_p)
eta_p = bmat([[Onp],
[bp],
[-Dmu*up],
[Dpi*lp]],
format='coo')
linsolver = new_linsolver('mumps','unsymmetric')
linsolver.analyze(KT)
linsolver.factorize(KT)
dqdpT = LinearOperator((self.num_p,self.num_p),
lambda y : eta_p.T*linsolver.solve(Ibar.T*y))
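# The same adjoint-solve pattern, sketched with SciPy's built-in sparse LU in
# place of the MUMPS linear solver; K, Ibar, and eta_p below are small
# stand-in matrices, not the ones assembled above.
import numpy as np
from scipy import sparse
from scipy.sparse.linalg import splu, LinearOperator

n, num_p = 6, 3
K = (sparse.eye(n) + 0.1 * sparse.random(n, n, density=0.5, random_state=0)).tocsc()
Ibar = sparse.eye(num_p, n, format="csr")    # selects the first num_p components
eta_p = sparse.random(n, num_p, density=0.5, random_state=1, format="csr")

lu = splu(K.T.tocsc())                       # factorize K^T once, reuse for every matvec
dqdpT = LinearOperator((num_p, num_p),
                       matvec=lambda y: eta_p.T @ lu.solve(Ibar.T @ y))
print(dqdpT.matvec(np.ones(num_p)))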
song_topics = pd.DataFrame(vt.transpose()[:, ::-1])
song_topics.columns = ['song_component_%d'%i for i in range(n_component)]
song_topics['song_id'] = range(song_cnt)
song = song.merge(song_topics, on='song_id', how='right')
## svd for user-artist pairs
n_component = 16
concat = concat.merge(song[['song_id', 'artist_name']], on='song_id', how='left')
concat = concat[concat['artist_name'] >= 0]
msno = concat['msno'].values
artist = concat['artist_name'].values.astype(int)
print(len(concat))
data = np.ones(len(concat))
rating_tmp = sparse.coo_matrix((data, (msno, artist))).tocsr()
rating = rating_tmp.log1p() * 0.3 + (rating_tmp > 0) * 1.0  # CSR supports elementwise log1p and comparison
[u, s, vt] = svds(rating, k=n_component)
print(s[::-1])
s_artist = np.diag(s[::-1])
members_topics = pd.DataFrame(u[:, ::-1])
members_topics.columns = ['member_artist_component_%d'%i for i in range(n_component)]
members_topics['msno'] = range(member_cnt)
member = member.merge(members_topics, on='msno', how='left')
artist_topics = pd.DataFrame(vt.transpose()[:, ::-1])
artist_topics.columns = ['artist_component_%d'%i for i in range(n_component)]
artist_topics['artist_name'] = range(artist_cnt)
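# The coo_matrix-plus-svds pattern used above, condensed into a toy example
# with made-up interaction data.
import numpy as np
from scipy import sparse
from scipy.sparse.linalg import svds

rows = np.array([0, 0, 1, 2, 3, 4])   # toy matrix: 5 users x 4 artists
cols = np.array([0, 1, 1, 2, 3, 0])
rating = sparse.coo_matrix((np.ones(len(rows)), (rows, cols)), shape=(5, 4))

u, s, vt = svds(rating.tocsr(), k=2)
user_factors = u[:, ::-1]                  # reorder so the largest component comes first
artist_factors = vt.transpose()[:, ::-1]
print(user_factors.shape, artist_factors.shape)   # (5, 2) (4, 2)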
edges : sparse.coo_matrix
A COO matrix where (i, j) indicate neighboring labels and the
corresponding data element is the linear index of the edge pixel
in the labels array.
"""
conn = ndi.generate_binary_structure(labels.ndim, connectivity)
eroded = ndi.grey_erosion(labels, footprint=conn).ravel()
dilated = ndi.grey_dilation(labels, footprint=conn).ravel()
labels = labels.ravel()
boundaries0 = np.flatnonzero(eroded != labels)
boundaries1 = np.flatnonzero(dilated != labels)
labels_small = np.concatenate((eroded[boundaries0], labels[boundaries1]))
labels_large = np.concatenate((labels[boundaries0], dilated[boundaries1]))
n = np.max(labels_large) + 1
data = np.concatenate((boundaries0, boundaries1))
sparse_graph = sparse.coo_matrix((data, (labels_small, labels_large)),
dtype=np.int_, shape=(n, n))
return sparse_graph
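# To illustrate the return value described in the docstring, a hand-built
# edges matrix and how its entries are read back (the numbers are made up).
import numpy as np
from scipy import sparse

edges = sparse.coo_matrix(
    (np.array([4, 7]),                        # linear indices of edge pixels
     (np.array([1, 2]), np.array([2, 3]))),   # neighboring label pairs (i, j)
    shape=(4, 4))

for i, j, pixel in zip(edges.row, edges.col, edges.data):
    print(f"labels {i} and {j} touch at flat pixel index {pixel}")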
def create_adjacency_matrix(graph):
"""
Creating a sparse adjacency matrix.
:param graph: NetworkX object.
:return A: Adjacency matrix.
"""
index_1 = [edge[0] for edge in graph.edges()] + [edge[1] for edge in graph.edges()]
index_2 = [edge[1] for edge in graph.edges()] + [edge[0] for edge in graph.edges()]
values = [1 for index in index_1]
node_count = max(max(index_1)+1, max(index_2)+1)
A = sparse.coo_matrix((values, (index_1, index_2)),
shape=(node_count, node_count),
dtype=np.float32)
return A
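# Quick usage check, assuming create_adjacency_matrix is importable as defined above.
import networkx as nx

graph = nx.path_graph(4)   # edges (0, 1), (1, 2), (2, 3)
A = create_adjacency_matrix(graph)
print(A.toarray())         # symmetric 4 x 4 matrix with 1.0 for each adjacent pair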
# n_features, expected_n_features))
# # *= doesn't work
# X = X * self._bns_diag
# else:
for word in self.vocab:
wordIndex = self.vocab[word]
words = X[:, wordIndex].view(np.ndarray)
if not self.is_word_feature(word):
bns_score = 0
else:
bns_score = self.compute_bns(words)
words *= bns_score
return sp.coo_matrix(X, dtype=np.float64)
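# compute_bns is not shown above; as a rough sketch (an assumption, not the
# repo's implementation), the bi-normal separation score of Forman (2003) for
# one word column could look like this, with labels an assumed array of
# binary document labels.
import numpy as np
from scipy.stats import norm

def bns_score(word_column, labels, eps=0.0005):
    """Bi-normal separation: |F^-1(tpr) - F^-1(fpr)|, where F^-1 is the inverse
    standard-normal CDF and a word counts as present when its value is > 0."""
    present = np.ravel(word_column) > 0
    tpr = np.clip(present[labels == 1].mean(), eps, 1 - eps)
    fpr = np.clip(present[labels == 0].mean(), eps, 1 - eps)
    return abs(norm.ppf(tpr) - norm.ppf(fpr))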