Wheat trade (2009) network made with Cytoscape using a community clustering algorithm.

Wheat trade (2009) network clustered after Europe and the U.S. are combined into one node (large blue).

Balancing authorities across the US.

Below is code I wrote in Python. The program finds and counts the triangles in a randomly generated set of nodes and edges, and then does the same for a perturbed graph derived from that same data.

import itertools
import random

import matplotlib.pyplot as plt
import networkx as nx

def create_random_graph(ground_truth, fp, fn):
    '''
    Return a perturbed copy of the ground-truth graph.

    * INPUT *
    ground_truth: networkx graph; ground truth graph
    fp: int; desired number of false positives (edge additions)
    fn: int; desired number of false negatives (edge deletions)
    ---------------------------------------------------------
    * OUTPUT *
    graph: networkx graph; perturbed graph with the same node set
    '''
    # Reseed from system entropy, matching the original behavior.
    random.seed(a=None)

    nodes = list(ground_truth.nodes())
    edges = list(ground_truth.edges())
    gt_edge_set = set(edges)

    # Candidate false positives: all node pairs absent from the ground truth.
    # Check both orientations so an edge stored as (v, u) is not treated as missing.
    not_in_gr_th = [
        pair for pair in itertools.combinations(nodes, 2)
        if pair not in gt_edge_set and (pair[1], pair[0]) not in gt_edge_set
    ]

    # Silently clamp infeasible requests to the feasible range.
    # (Bug fix: the original capped fn at len(edges) - 1 and fp one below the
    # true maximum; removing every edge / adding every non-edge is legal.)
    fn = max(0, min(fn, len(edges)))
    fp = max(0, min(fp, len(not_in_gr_th)))

    # Delete fn random true edges (false negatives).
    for item in random.sample(edges, fn):
        edges.remove(item)
    # Add fp random non-edges (false positives).
    edges.extend(random.sample(not_in_gr_th, fp))
    edges.sort()

    graph = nx.Graph()
    graph.add_nodes_from(nodes)
    graph.add_edges_from(edges)
    return graph

# Assigns a random weight to each edge in the graph.
def assign_egdes_weight(edges):
    """Assign a uniform random weight in [0, 1) to every edge.

    edges: iterable of (u, v, data) triples (e.g. graph.edges(data=True));
           each data dict is mutated in place with a 'weight' key.
    Returns the last (u, v, data) triple processed, or None if the iterable
    is empty.  (Bug fix: the original raised UnboundLocalError on an empty
    iterable because it returned the loop variables after the loop.)
    """
    last = None
    for (u, v, w) in edges:
        w['weight'] = random.uniform(0, 1)
        print(u, v, w)
        last = (u, v, w)
    return last

# Adds up the weights of the three edges in each triangle.
def add_edge_weight(tris):
    """Return the summed edge weight of each triangle in ``tris``.

    tris: iterable of [a, b, c] node triples, each a triangle in the
          module-level perturbed graph ``graph`` (read, not modified).
    Returns a list of per-triangle weight sums, in input order.

    NOTE(review): relies on the module-level ``graph`` having 'weight'
    set on every edge (see assign_egdes_weight).
    """
    weight = nx.get_edge_attributes(graph, name='weight')

    def _edge_weight(u, v):
        # Attribute keys follow the graph's storage order, so an edge may
        # appear as (u, v) or (v, u); try both orientations.
        return weight[(u, v)] if (u, v) in weight else weight[(v, u)]

    graph_tri_weight_lst = []
    for a, b, c in tris:
        graph_tri_weight_lst.append(
            _edge_weight(a, b) + _edge_weight(a, c) + _edge_weight(b, c)
        )
    return graph_tri_weight_lst

def G_GT_weight(triangles):
    """Give every ground-truth triangle the maximum confidence weight of 3.

    triangles: iterable of triangles from the ground-truth graph.
    Returns a list with one entry of 3 per triangle, in input order.
    """
    return [3 for _ in triangles]

def organize(dictionary):
    """Keep the weights of perturbed-graph triangles also in the ground truth.

    dictionary: mapping {triangle-as-tuple: summed edge weight} built from
                the perturbed graph.
    Returns the weights (insertion order) of the triangles that also appear
    in the module-level ``G_GT_triangles`` ground-truth list.

    NOTE(review): triangles must match node-for-node in the same order as
    cycle_basis produced them for both graphs — confirm this holds.
    """
    # Set membership is O(1) per lookup; the original scanned a list, which
    # was O(n) per triangle.  (Also renamed: the old G_GT_tri_dictionary
    # actually held a list, not a dict.)
    gt_tri_set = set(map(tuple, G_GT_triangles))
    return [value for key, value in dictionary.items() if key in gt_tri_set]

# --- ground truth graph --------------------------------------------------
n = 8
p = random.uniform(0, 1)  # random edge probability for the G(n, p) model
G_GT = nx.fast_gnp_random_graph(n, p)

"""# c. elegans data (alternative ground truth)
p = 0.5
G_GT = nx.read_graphml('C:\\Users\\hemincs1\\Downloads\\c.elegans_neural.male_1.graphml')
G_GT = nx.to_undirected(G_GT)
n = nx.number_of_nodes(G_GT)  # n = 272
"""

# Find triangles in the G_GT graph and assign a weight of 3 to each triangle.
G_GT_triangles = [c for c in nx.cycle_basis(G_GT) if len(c) == 3]
G_GT_weight_lst = G_GT_weight(G_GT_triangles)


# Create a perturbed graph (10 false positives, 10 false negatives) and
# find the triangles in the perturbed graph.
graph = create_random_graph(G_GT, 10, 10)
assign_egdes_weight(graph.edges(data=True))  # mutates edge data in place
graph_triangles = [c for c in nx.cycle_basis(graph) if len(c) == 3]
graph_tri_weight_lst = add_edge_weight(graph_triangles)
print(graph_tri_weight_lst)


# Turn the perturbed-graph triangles and their weights into a dictionary,
# then keep only the triangles that also exist in the ground truth.
graph_triangles_lst = list(map(tuple, graph_triangles))
graph_dictionary = dict(zip(graph_triangles_lst, graph_tri_weight_lst))
graph_G_GT_tri_weight_lst = organize(graph_dictionary)


# Histogram of number of triangles vs. confidence.
# Bug fix: the original bin list repeated every interior edge
# ([0, .5, .5, 1, 1, ...]), creating zero-width bins that silently
# split counts; bin edges must be strictly increasing.
bins = [0, 0.5, 1, 1.5, 2, 2.5, 3]
plt.hist(G_GT_weight_lst, bins=bins, color='green',
         label='triangles in ground truth graph- ' + str(len(G_GT_weight_lst)),
         alpha=1)
plt.hist(graph_tri_weight_lst, bins=bins, color='red',
         label='triangles in perturbed graph- ' + str(len(graph_triangles)),
         alpha=1)
plt.hist(graph_G_GT_tri_weight_lst, bins=bins, color='blue',
         label='triangles in perturbed and ground truth graphs- ' + str(len(graph_G_GT_tri_weight_lst)),
         alpha=1)
plt.grid(True)
plt.axis([0, 3, 0, len(G_GT_triangles)])
plt.xlabel('confidence of triangle')
plt.ylabel('number of triangles')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.show()