From 98dc91bf7f7b7be6084390ca03d0e3d53be60a83 Mon Sep 17 00:00:00 2001
From: Francois
Date: Thu, 25 Jul 2024 13:06:26 +0000
Subject: [PATCH 1/3] iloc bug

---
 hypernetx/algorithms/hypergraph_modularity.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/hypernetx/algorithms/hypergraph_modularity.py b/hypernetx/algorithms/hypergraph_modularity.py
index e91f2db3..36bc4bbd 100644
--- a/hypernetx/algorithms/hypergraph_modularity.py
+++ b/hypernetx/algorithms/hypergraph_modularity.py
@@ -183,7 +183,7 @@ def modularity(HG, A, wdc=linear):
     _df = pd.DataFrame(zip(_keys, _vals), columns=["key", "val"])
     _df = _df.groupby(by="key").sum()
     EC = sum(
-        [wdc(k[1], k[0]) * v[0] for (k, v) in _df.iterrows() if k[0] > k[1] / 2]
+        [wdc(k[1], k[0]) * v.iloc[0] for (k, v) in _df.iterrows() if k[0] > k[1] / 2]
     )
 
     ## Degree Tax
@@ -402,7 +402,7 @@ def _last_step_weighted(H, A, wdc, delta=0.01, verbose=False):
             _df = _df.groupby(by="key").sum()
             ec = sum(
                 [
-                    wdc(k[1], k[0]) * val[0]
+                    wdc(k[1], k[0]) * val.iloc[0]
                     for (k, val) in _df.iterrows()
                     if k[0] > k[1] / 2
                 ]
@@ -430,7 +430,7 @@ def _last_step_weighted(H, A, wdc, delta=0.01, verbose=False):
             _df = _df.groupby(by="key").sum()
             ecp = sum(
                 [
-                    wdc(k[1], k[0]) * val[0]
+                    wdc(k[1], k[0]) * val.iloc[0]
                     for (k, val) in _df.iterrows()
                     if k[0] > k[1] / 2
                 ]

From baf0cda917d1c9146d5bbcf53b342fd6fc93f41a Mon Sep 17 00:00:00 2001
From: Francois
Date: Thu, 25 Jul 2024 13:44:13 +0000
Subject: [PATCH 2/3] last step bug fix

---
 hypernetx/algorithms/hypergraph_modularity.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/hypernetx/algorithms/hypergraph_modularity.py b/hypernetx/algorithms/hypergraph_modularity.py
index 36bc4bbd..f9b79623 100644
--- a/hypernetx/algorithms/hypergraph_modularity.py
+++ b/hypernetx/algorithms/hypergraph_modularity.py
@@ -392,12 +392,12 @@ def _last_step_weighted(H, A, wdc, delta=0.01, verbose=False):
         n_moves = 0
         for v in list(np.random.permutation(list(H.nodes))):
             dct_A_v = dct_A[v]
-            H_id = [H.incidence_dict[x] for x in H.nodes[v].memberships]
+            H_id = [H.incidence_dict[x] for x in H.nodes[v]]
             L = [[dct_A[i] for i in x] for x in H_id]
 
             ## ec portion before move
             _keys = [(Counter(l).most_common(1)[0][1], len(l)) for l in L]
-            _vals = [H.edges[x].weight for x in H.nodes[v].memberships]
+            _vals = [H.edges[x].weight for x in H.nodes[v]]
             _df = pd.DataFrame(zip(_keys, _vals), columns=["key", "val"])
             _df = _df.groupby(by="key").sum()
             ec = sum(
@@ -425,7 +425,7 @@ def _last_step_weighted(H, A, wdc, delta=0.01, verbose=False):
             L = [[dct_A[i] for i in x] for x in H_id]
             ## EC
             _keys = [(Counter(l).most_common(1)[0][1], len(l)) for l in L]
-            _vals = [H.edges[x].weight for x in H.nodes[v].memberships]
+            _vals = [H.edges[x].weight for x in H.nodes[v]]
             _df = pd.DataFrame(zip(_keys, _vals), columns=["key", "val"])
             _df = _df.groupby(by="key").sum()
             ecp = sum(
@@ -491,7 +491,7 @@ def _last_step_unweighted(H, A, wdc, delta=0.01, verbose=False):
         n_moves = 0
         for v in list(np.random.permutation(list(H.nodes))):
            dct_A_v = dct_A[v]
-            H_id = [H.incidence_dict[x] for x in H.nodes[v].memberships]
+            H_id = [H.incidence_dict[x] for x in H.nodes[v]]
             L = [[dct_A[i] for i in x] for x in H_id]
 
             deg_v = H.degree(v)
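Reviewer note on PATCH 1/3 and PATCH 2/3: val[0] on a row returned by DataFrame.iterrows()
relies on pandas treating an integer key as a position, which has been deprecated since
pandas 2.1; val.iloc[0] requests position 0 explicitly and returns the same summed weight.
PATCH 2/3 then drops the .memberships attribute and iterates H.nodes[v] directly for the
incident edge ids, tracking the HyperNetX node-view API targeted by this branch (the exact
node-view behaviour is assumed from the diff, not verified here). Below is a minimal,
self-contained sketch of the pandas pattern only; the keys and weights are toy values, not
taken from HyperNetX.

    import pandas as pd

    # Toy (dominant-count, edge-size) keys and edge weights, for illustration only.
    _keys = [(2, 3), (2, 3), (1, 2)]
    _vals = [1.0, 0.5, 2.0]
    _df = pd.DataFrame(zip(_keys, _vals), columns=["key", "val"])
    _df = _df.groupby(by="key").sum()

    for k, v in _df.iterrows():
        # v is a one-entry Series indexed by the label "val"; v[0] depends on the
        # deprecated integer-as-position fallback, while v.iloc[0] is an explicit
        # positional lookup that yields the same value.
        assert v.iloc[0] == v["val"]
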
From 2508c0a4c2f6fe386c9baa3f7aa7282a5ff94992 Mon Sep 17 00:00:00 2001
From: Francois
Date: Thu, 25 Jul 2024 16:58:07 +0000
Subject: [PATCH 3/3] two section bug fix

---
 hypernetx/algorithms/hypergraph_modularity.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/hypernetx/algorithms/hypergraph_modularity.py b/hypernetx/algorithms/hypergraph_modularity.py
index f9b79623..f29efe3b 100644
--- a/hypernetx/algorithms/hypergraph_modularity.py
+++ b/hypernetx/algorithms/hypergraph_modularity.py
@@ -292,6 +292,12 @@ def two_section(HG):
             w = 1 / (len(E) - 1)
             s.extend([(k[0], k[1], w) for k in itertools.combinations(E, 2)])
     G = ig.Graph.TupleList(s, weights=True).simplify(combine_edges="sum")
+
+    ## add isolates if any
+    isolates = list(set([v for v in HG.nodes]) - set(G.vs['name']))
+    if len(isolates)>0:
+        G.add_vertices(isolates)
+
     return G
 
 
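Reviewer note on PATCH 3/3: ig.Graph.TupleList only creates vertices that occur in at
least one pairwise edge, so hypergraph nodes that never appear in an edge of size two or
more were silently missing from the 2-section graph. The added block restores them as
isolated vertices by name. A minimal sketch of the behaviour with python-igraph, using
toy node and edge names that are for illustration only:

    import igraph as ig

    # Weighted 2-section edges; node "d" belongs to no pairwise edge.
    edges = [("a", "b", 1.0), ("b", "c", 0.5)]
    nodes = ["a", "b", "c", "d"]

    G = ig.Graph.TupleList(edges, weights=True).simplify(combine_edges="sum")
    print(sorted(G.vs["name"]))   # ['a', 'b', 'c']; "d" was dropped

    # Same repair as the patch: add the missing nodes back as isolated vertices.
    isolates = list(set(nodes) - set(G.vs["name"]))
    if len(isolates) > 0:
        G.add_vertices(isolates)
    print(sorted(G.vs["name"]))   # ['a', 'b', 'c', 'd']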