@@ -119,6 +119,61 @@ def get_complex_connectivity(complex, max_rank, signed=False):
119
119
return connectivity
120
120
121
121
122
def get_combinatorial_complex_connectivity(complex, max_rank=None):
    r"""Gets the connectivity matrices for the combinatorial complex.

    Parameters
    ----------
    complex : topnetx.CombinatorialComplex
        Combinatorial complex.
    max_rank : int, optional
        Maximum rank of the complex. Defaults to ``complex.dim``.

    Returns
    -------
    dict
        Dictionary containing the connectivity matrices
        (``incidence_{r}``, ``adjacency_{r}`` for each rank ``r`` up to
        ``max_rank``) plus the padded ``shape`` list.
    """
    if max_rank is None:
        max_rank = complex.dim
    # Pad the complex's shape with zeros so every rank up to max_rank has
    # an entry, even when the complex has no cells at the higher ranks.
    practical_shape = list(
        np.pad(list(complex.shape), (0, max_rank + 1 - len(complex.shape)))
    )

    connectivity = {}

    for rank_idx in range(max_rank + 1):
        if rank_idx > 0:
            try:
                connectivity[f"incidence_{rank_idx}"] = from_sparse(
                    complex.incidence_matrix(rank=rank_idx - 1, to_rank=rank_idx)
                )
            except ValueError:
                # The incidence matrix maps (rank-1)-cells to rank-cells, so
                # the zero fallback must be (n_{rank-1} x n_rank) — the
                # previous square fallback used rank_idx for both dimensions.
                connectivity[f"incidence_{rank_idx}"] = (
                    generate_zero_sparse_connectivity(
                        m=practical_shape[rank_idx - 1],
                        n=practical_shape[rank_idx],
                    )
                )

        try:
            connectivity[f"adjacency_{rank_idx}"] = from_sparse(
                complex.adjacency_matrix(rank=rank_idx, via_rank=rank_idx + 1)
            )
        except ValueError:
            # Adjacency within a single rank is square: (n_rank x n_rank).
            connectivity[f"adjacency_{rank_idx}"] = (
                generate_zero_sparse_connectivity(
                    m=practical_shape[rank_idx], n=practical_shape[rank_idx]
                )
            )

    connectivity["shape"] = practical_shape

    return connectivity
122
177
def generate_zero_sparse_connectivity (m , n ):
123
178
r"""Generates a zero sparse connectivity matrix.
124
179
@@ -285,17 +340,13 @@ def load_hypergraph_pickle_dataset(cfg):
285
340
286
341
print (f"number of hyperedges: { len (hypergraph )} " )
287
342
288
- edge_idx = 0 # num_nodes
289
343
node_list = []
290
344
edge_list = []
291
- for he in hypergraph :
292
- cur_he = hypergraph [he ]
293
- cur_size = len (cur_he )
294
-
295
- node_list += list (cur_he )
296
- edge_list += [edge_idx ] * cur_size
297
345
298
- edge_idx += 1
346
+ for edge_idx , cur_he in enumerate (hypergraph .values ()):
347
+ cur_size = len (cur_he )
348
+ node_list .extend (cur_he )
349
+ edge_list .extend ([edge_idx ] * cur_size )
299
350
300
351
# check that every node is in some hyperedge
301
352
if len (np .unique (node_list )) != num_nodes :
@@ -641,6 +692,55 @@ def load_manual_mol():
641
692
)
642
693
643
694
695
def load_manual_hypergraph():
    """Create a manual hypergraph for testing purposes.

    Builds a fixed 8-node hypergraph with 11 hyperedges, hand-picked node
    features, and binary node labels.

    Returns
    -------
    torch_geometric.data.Data
        Data object carrying node features ``x`` of shape (8, 1), labels
        ``y``, the (node, hyperedge) COO ``edge_index``, and the sparse
        incidence matrix ``incidence_hyperedges`` of shape
        (num_nodes, num_hyperedges).
    """
    # Define the vertices (just 8 vertices).
    vertices = list(range(8))
    y = [0, 1, 1, 1, 0, 0, 0, 0]
    # Define the hyperedges.
    hyperedges = [
        [0, 1, 2, 3],
        [4, 5, 6, 7],
        [0, 1, 2],
        [0, 1, 3],
        [0, 2, 3],
        [1, 2, 3],
        [3, 4],
        [4, 5],
        [4, 7],
        [5, 6],
        [6, 7],
    ]

    # Well-separated feature values make per-node debugging easy.
    x = torch.tensor([1, 5, 10, 50, 100, 500, 1000, 5000]).unsqueeze(1).float()
    labels = torch.tensor(y, dtype=torch.long)

    # Build (node, hyperedge) membership pairs in COO layout.
    node_list = []
    edge_list = []
    for edge_idx, he in enumerate(hyperedges):
        node_list.extend(he)
        edge_list.extend([edge_idx] * len(he))

    edge_index = np.array([node_list, edge_list], dtype=int)
    edge_index = torch.LongTensor(edge_index)

    incidence_hyperedges = torch.sparse_coo_tensor(
        edge_index,
        values=torch.ones(edge_index.shape[1]),
        size=(len(vertices), len(hyperedges)),
    )

    return Data(
        x=x,
        edge_index=edge_index,
        y=labels,
        incidence_hyperedges=incidence_hyperedges,
    )
644
744
def get_Planetoid_pyg (cfg ):
645
745
r"""Loads Planetoid graph datasets from torch_geometric.
646
746
0 commit comments