if not column[g2]: continue
consistency_graph.add_node(g1)
consistency_graph.add_node(g2)
if column[g1] != column[g2]:
    if options.loglevel >= 6:
        options.stdlog.write("# column %i: inconsistency: %s - %i <---> %s - %i\n" %
                             (c, identifiers[g1], column[g1], identifiers[g2], column[g2]))
    ic.add((identifiers[g1],) + tuple(identifiers[g1].split(options.separator)))
    ic.add((identifiers[g2],) + tuple(identifiers[g2].split(options.separator)))
    is_inconsistent = True
else:
    consistency_graph.add_edge(g1, g2)

components = list(networkx.connected_components(consistency_graph))

if options.loglevel >= 6:
    if is_inconsistent:
        options.stdlog.write("# column %i: inconsistency for gene %s - %s\n" %
                             (c, str(gene), str(components)))

component_sizes.append(len(components))

# count maximum transcripts per gene
if not ic: continue
max_counts = max(component_sizes)
inconsistent_columns.append((c, max_counts, ic))

if options.loglevel >= 1:
    options.stdlog.write("# found %i inconsistent columns.\n" % len(inconsistent_columns))
for transcript, ids in reciprocal_best.items():
    subgraph.add_node(transcript)
    this_id = map_transcript2id(transcript)
    for other_id, best in ids.items():
        if this_id == other_id: continue
        best_weight, best_transcript = best
        if reciprocal_best[best_transcript][this_id][1] == transcript:
            subgraph.add_edge(transcript, best_transcript)

## compute components
return networkx.connected_components(subgraph)
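# For context, reciprocal-best-hit clustering links two transcripts only when
# each is the other's best match, then reads clusters off the connected
# components. A sketch with a hypothetical reciprocal-best table:
import networkx as nx

best = {
    "a1": {"B": (0.9, "b1")},
    "b1": {"A": (0.9, "a1")},
    "a2": {"B": (0.4, "b1")},  # not reciprocal: b1's best in A is a1
}
g = nx.Graph()
g.add_nodes_from(best)
for t, hits in best.items():
    for other_id, (weight, partner) in hits.items():
        if partner in best and any(b == t for _, b in best[partner].values()):
            g.add_edge(t, partner)
print(list(nx.connected_components(g)))  # [{'a1', 'b1'}, {'a2'}]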
final_clusters[str(i) + '.0'].append(list(component.nodes())[0])
# Enforce that final populations are cliques when there is an obvious
# node to cut out (i.e., a single maximum clique)
cluster_dict = {}
for clust, strains in final_clusters.items():
    for clonal_complex in strains:
        cluster_dict[clonal_complex] = clust

remove = []
for u, v in G_unclust.edges():
    if cluster_dict[u] != cluster_dict[v]:
        remove.append((u, v))
for u, v in remove:
    G_unclust.remove_edge(u, v)
clusters = G_unclust

max_cluster = int(max([float(clust) for clust in final_clusters.keys()]))
for c in nx.connected_components(clusters):
    G = clusters.subgraph(c)
    cliques = [(len(cli), cli) for cli in nx.find_cliques(G)]
    if len(cliques) != 1:
        max_clique_size = max(cliques)[0]
        exclude = []
        for size, clique in cliques:
            if len(clique) == max_clique_size:
                exclude.append(set(G.nodes()) - set(clique))
        if len(exclude) == 1:
            for node in exclude[0]:
                clust = cluster_dict[node]
                final_clusters[clust].remove(node)
                final_clusters[str(float(max_cluster + 1))] = [node]
                max_cluster += 1
                print(node)
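# The "single maximum clique" test above is easy to exercise in isolation; a
# minimal sketch (hypothetical component: a triangle plus one dangling node):
import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (1, 3), (3, 4)])
cliques = [(len(cli), cli) for cli in nx.find_cliques(g)]  # {1,2,3} and {3,4}
max_clique_size = max(cliques)[0]
exclude = [set(g.nodes()) - set(cli) for size, cli in cliques
           if size == max_clique_size]
if len(exclude) == 1:  # exactly one maximum clique: node 4 is the obvious cut
    print(exclude[0])  # {4}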
def stitch_skel_nx(skel_nx):
    no_of_seg = nx.number_connected_components(skel_nx)

    skel_nx_nodes = [ii['position'] for ix, ii in skel_nx.node.items()]
    new_nodes = np.array([skel_nx.node[ix]['position'] for ix in skel_nx.nodes()],
                         dtype=np.uint32)

    while no_of_seg != 1:
        rest_nodes = []
        current_set_of_nodes = []

        list_of_comp = np.array([c for c in sorted(nx.connected_components(skel_nx),
                                                   key=len, reverse=True)])

        for single_rest_graph in list_of_comp[1:]:
            rest_nodes = rest_nodes + [skel_nx_nodes[int(ix)] for ix in single_rest_graph]

        for single_rest_graph in list_of_comp[:1]:
            current_set_of_nodes = current_set_of_nodes + \
                [skel_nx_nodes[int(ix)] for ix in single_rest_graph]

        tree = spatial.cKDTree(rest_nodes, 1)
        thread_lengths, indices = tree.query(current_set_of_nodes)

        start_thread_index = np.argmin(thread_lengths)
        stop_thread_index = indices[start_thread_index]

        start_thread_node = \
            np.where(np.sum(np.subtract(new_nodes, current_set_of_nodes[start_thread_index]), axis=1) == 0)[0][0]
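# The stitching step pairs the two closest nodes across components with a
# KD-tree; the nearest cross-set pair can be found the same way in isolation
# (hypothetical coordinates):
import numpy as np
from scipy import spatial

current_set_of_nodes = np.array([[0, 0, 0], [10, 10, 10]])
rest_nodes = np.array([[1, 0, 0], [50, 50, 50]])
tree = spatial.cKDTree(rest_nodes)
thread_lengths, indices = tree.query(current_set_of_nodes)
start = int(np.argmin(thread_lengths))  # index into current_set_of_nodes
stop = int(indices[start])              # index into rest_nodes
print(current_set_of_nodes[start], rest_nodes[stop])  # [0 0 0] [1 0 0]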
def largest_component(G):
    # drop self-loops, then return the subgraph induced by the biggest component
    selfLoops = [(u, v) for u, v in G.edges() if u == v]
    G.remove_edges_from(selfLoops)
    return G.subgraph(sorted(nx.connected_components(G), key=len, reverse=True)[0])
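# Quick usage check for largest_component (toy graph with two components and
# a self-loop; note the function mutates G by dropping the self-loops):
import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (4, 5), (6, 6)])
print(sorted(largest_component(g).nodes()))  # [1, 2, 3]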
diagram.node["0"]["formula"] = "TRUE"
else:
igraph = PyBoolNet.InteractionGraphs.primes2igraph(Primes)
outdags = PyBoolNet.InteractionGraphs.find_outdag(igraph)
attractor_nodes = [x for A in Subspaces for x in A]
critical_nodes = PyBoolNet.Utility.DiGraphs.ancestors(igraph, attractor_nodes)
outdags = [x for x in outdags if not x in critical_nodes]
igraph.remove_nodes_from(outdags)
if not Silent:
print(" excluding the non-critical out-dag nodes %s"%outdags)
components = networkx.connected_components(igraph.to_undirected())
components = [list(x) for x in components]
if not Silent:
print(" working on %i connected component(s)"%len(components))
counter_mc = 0
diagrams = []
for component in components:
subprimes = PyBoolNet.PrimeImplicants.copy(Primes)
PyBoolNet.PrimeImplicants.remove_all_variables_except(subprimes, component)
attrs_projected = project_attractors(Subspaces, component)
diagram, count = _compute_diagram_component(subprimes, Update, attrs_projected, EdgeData, Silent)
counter_mc+=count
diagrams.append(diagram)
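# Splitting a directed graph into independent components by ignoring edge
# direction, as done above, is plain networkx; a minimal sketch:
import networkx

dg = networkx.DiGraph([("v1", "v2"), ("v2", "v1"), ("v3", "v4")])
components = [list(c) for c in networkx.connected_components(dg.to_undirected())]
print(components)  # e.g. [['v1', 'v2'], ['v3', 'v4']]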
def csd(el):
    """Component size distribution: count components of each size."""
    c = Counter()
    G = el_to_nx(el)
    cc = list(nx.connected_components(G))
    for x in cc:
        c[len(x)] += 1
    return c
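# The same size-distribution tally without the (undefined here) el_to_nx
# helper, on a toy graph:
from collections import Counter
import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (4, 5), (6, 7)])
print(Counter(len(cc) for cc in nx.connected_components(g)))
# Counter({2: 2, 3: 1})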
def addcut(cut_edges):
    """addcut: add constraint to eliminate infeasible solutions
    Parameters:
        - cut_edges: list of edges in the current solution, except connections to depot
    Returns True if a cut was added, False otherwise
    """
    G = networkx.Graph()
    G.add_edges_from(cut_edges)
    Components = networkx.connected_components(G)
    cut = False
    for S in Components:
        S_card = len(S)
        q_sum = sum(q[i] for i in S)
        NS = int(math.ceil(float(q_sum) / Q))
        S_edges = [(i, j) for i in S for j in S if i < j and (i, j) in x]
        if S_card >= 3 and (len(S_edges) >= S_card or NS > 1):
            add = model.addConstr(quicksum(x[i, j] for i in S for j in S if j > i) <= S_card - NS)
            model.update()
            cut = True
    return cut
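# The separation logic itself is solver-free graph work: components of the
# support graph give candidate sets S, and NS = ceil(sum(q_i)/Q) is the
# vehicle lower bound used in the cut. A sketch with hypothetical demands q
# and capacity Q:
import math
import networkx

q = {1: 4, 2: 5, 3: 6, 4: 2, 5: 2}
Q = 10
cut_edges = [(1, 2), (2, 3), (4, 5)]  # solution edges, depot edges excluded
G = networkx.Graph()
G.add_edges_from(cut_edges)
for S in networkx.connected_components(G):
    NS = int(math.ceil(float(sum(q[i] for i in S)) / Q))
    print(sorted(S), "needs at least", NS, "vehicle(s)")  # [1,2,3]: 2, [4,5]: 1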
if not np.all(A == A.T):  # ensure matrix is undirected
    raise BCTParamError('get_components can only be computed for undirected'
                        ' matrices. If your matrix is noisy, correct it with np.around')

A = binarize(A, copy=True)
n = len(A)
np.fill_diagonal(A, 1)

try:
    if no_depend:
        raise ImportError()
    else:
        import networkx as nx
    net = nx.from_numpy_matrix(A)
    cpts = list(nx.connected_components(net))

    cptvec = np.zeros((n,))
    cptsizes = np.zeros(len(cpts))
    for i, cpt in enumerate(cpts):
        cptsizes[i] = len(cpt)
        for node in cpt:
            cptvec[node] = i + 1

except ImportError:
    # if networkx is not available use less efficient breadth first search
    cptvec = np.zeros((n,))
    r, _ = breadthdist(A)
    for node, reach in enumerate(r):
        if cptvec[node] > 0:
            continue
        else:
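# The networkx branch above is easy to check in isolation. Note that
# from_numpy_matrix was removed in networkx 3; from_numpy_array is the
# replacement. A sketch producing the same 1-based component labels:
import numpy as np
import networkx as nx

A = np.array([[1, 1, 0, 0],
              [1, 1, 0, 0],
              [0, 0, 1, 1],
              [0, 0, 1, 1]])
net = nx.from_numpy_array(A)
cptvec = np.zeros(len(A))
for i, cpt in enumerate(nx.connected_components(net)):
    cptvec[list(cpt)] = i + 1
print(cptvec)  # [1. 1. 2. 2.]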
    H = G.subgraph(sub)
    coms.append(H)

print("Detected", len(coms), "communities.")
print("--- --- 1 pass event match: %s seconds ---" % (time.time() - start))

# Further division
newPar = coms
level = 0
while level < 1:
    oldPar = copy.deepcopy(newPar)
    newRes = []
    for c in oldPar:
        start = time.time()
        refine_sub_cluster(c, db_graph, tau)
        print("--- --- cluster refine: %s seconds ---" % (time.time() - start))
        sublist = nx.connected_components(c)
        newRes.extend(sublist)
    newPar = []
    for sub in newRes:
        H = G.subgraph(sub)
        newPar.append(H)
    level += 1
    if len(oldPar) == len(newPar): break

print("Detected", len(newRes), "communities in total on further sub partition")
return newRes
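# The refinement loop's core move, replacing each cluster by whatever
# components remain after edges are removed, can be exercised without the
# (undefined here) refine_sub_cluster; a hypothetical sketch:
import networkx as nx

G = nx.Graph([(1, 2), (2, 3), (3, 4), (5, 6)])
coms = [G.subgraph(c).copy() for c in nx.connected_components(G)]
coms[0].remove_edge(2, 3)  # pretend refinement cut a weak intra-cluster edge
newPar = [G.subgraph(sub) for c in coms for sub in nx.connected_components(c)]
print([sorted(h.nodes()) for h in newPar])  # [[1, 2], [3, 4], [5, 6]]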