
Commit 07cdc24

Change np.long to np.longlong to support newer versions of NumPy
1 parent a555c42 commit 07cdc24
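
For context, a minimal standalone sketch (not part of the commit) of why the alias swap matters: np.long was an alias for Python's built-in int, was deprecated in NumPy 1.20 and is gone in recent releases, while np.longlong remains available as a C long long (int64 on most platforms), so the casts touched in this commit keep working.

```python
import numpy as np

# What the commit switches to: np.longlong is a C long long, typically int64.
edge_ids = np.zeros((4, 1)).astype(np.longlong)
print(edge_ids.dtype)  # int64 on most platforms

# The old spelling fails with AttributeError on recent NumPy,
# since the np.long alias has been removed:
# edge_ids = np.zeros((4, 1)).astype(np.long)
```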

7 files changed, +16 -16 lines changed

README.md (+1 -1)

@@ -4,7 +4,7 @@
 ## Overview
 Dynamic Graph Library (DyGLib) is an open-source toolkit with standard training pipelines, extensible coding interfaces, and comprehensive evaluating strategies,
 which aims to promote standard, scalable, and reproducible dynamic graph learning research. Diverse benchmark datasets and thorough baselines are involved in DyGLib.
-![](figures/CTDyGLib_procedure.jpg)
+![](figures/DyGLib_procedure.jpg)


 ## Benchmark Datasets and Preprocessing
figures/CTDyGLib_procedure.jpg → figures/DyGLib_procedure.jpg

File renamed without changes.

models/CAWN.py (+1 -1)

@@ -142,7 +142,7 @@ def convert_format_from_tree_to_array(self, node_ids: np.ndarray, node_interact_
 # add the target node to the list to generate random walks in array-like format
 nodes_neighbor_ids = [node_ids[:, np.newaxis]] + nodes_neighbor_ids
 # follow the CAWN official implementation, the edge ids of the target node is denoted by zeros
-nodes_edge_ids = [np.zeros((len(node_ids), 1)).astype(np.long)] + nodes_edge_ids
+nodes_edge_ids = [np.zeros((len(node_ids), 1)).astype(np.longlong)] + nodes_edge_ids
 nodes_neighbor_times = [node_interact_times[:, np.newaxis]] + nodes_neighbor_times

 array_format_data_list = []

models/DyGFormer.py (+2 -2)

@@ -227,8 +227,8 @@ def pad_sequences(self, node_ids: np.ndarray, node_interact_times: np.ndarray, n

 # pad the sequences
 # three ndarrays with shape (batch_size, max_seq_length)
-padded_nodes_neighbor_ids = np.zeros((len(node_ids), max_seq_length)).astype(np.long)
-padded_nodes_edge_ids = np.zeros((len(node_ids), max_seq_length)).astype(np.long)
+padded_nodes_neighbor_ids = np.zeros((len(node_ids), max_seq_length)).astype(np.longlong)
+padded_nodes_edge_ids = np.zeros((len(node_ids), max_seq_length)).astype(np.longlong)
 padded_nodes_neighbor_times = np.zeros((len(node_ids), max_seq_length)).astype(np.float32)

 for idx in range(len(node_ids)):

models/TCL.py (+2 -2)

@@ -84,14 +84,14 @@ def compute_src_dst_node_temporal_embeddings(self, src_node_ids: np.ndarray, dst
 # src_neighbor_node_ids, ndarray, shape (batch_size, num_neighbors + 1)
 src_neighbor_node_ids = np.concatenate((src_node_ids[:, np.newaxis], src_neighbor_node_ids), axis=1)
 # src_neighbor_edge_ids, ndarray, shape (batch_size, num_neighbors + 1)
-src_neighbor_edge_ids = np.concatenate((np.zeros((len(src_node_ids), 1)).astype(np.long), src_neighbor_edge_ids), axis=1)
+src_neighbor_edge_ids = np.concatenate((np.zeros((len(src_node_ids), 1)).astype(np.longlong), src_neighbor_edge_ids), axis=1)
 # src_neighbor_times, ndarray, shape (batch_size, num_neighbors + 1)
 src_neighbor_times = np.concatenate((node_interact_times[:, np.newaxis], src_neighbor_times), axis=1)

 # dst_neighbor_node_ids, ndarray, shape (batch_size, num_neighbors + 1)
 dst_neighbor_node_ids = np.concatenate((dst_node_ids[:, np.newaxis], dst_neighbor_node_ids), axis=1)
 # dst_neighbor_edge_ids, ndarray, shape (batch_size, num_neighbors + 1)
-dst_neighbor_edge_ids = np.concatenate((np.zeros((len(dst_node_ids), 1)).astype(np.long), dst_neighbor_edge_ids), axis=1)
+dst_neighbor_edge_ids = np.concatenate((np.zeros((len(dst_node_ids), 1)).astype(np.longlong), dst_neighbor_edge_ids), axis=1)
 # dst_neighbor_times, ndarray, shape (batch_size, num_neighbors + 1)
 dst_neighbor_times = np.concatenate((node_interact_times[:, np.newaxis], dst_neighbor_times), axis=1)

utils/DataLoader.py (+6 -6)

@@ -94,10 +94,10 @@ def get_link_prediction_data(dataset_name: str, val_ratio: float, test_ratio: fl
 # get the timestamp of validate and test set
 val_time, test_time = list(np.quantile(graph_df.ts, [(1 - val_ratio - test_ratio), (1 - test_ratio)]))

-src_node_ids = graph_df.u.values.astype(np.long)
-dst_node_ids = graph_df.i.values.astype(np.long)
+src_node_ids = graph_df.u.values.astype(np.longlong)
+dst_node_ids = graph_df.i.values.astype(np.longlong)
 node_interact_times = graph_df.ts.values.astype(np.float64)
-edge_ids = graph_df.idx.values.astype(np.long)
+edge_ids = graph_df.idx.values.astype(np.longlong)
 labels = graph_df.label.values

 full_data = Data(src_node_ids=src_node_ids, dst_node_ids=dst_node_ids, node_interact_times=node_interact_times, edge_ids=edge_ids, labels=labels)

@@ -204,10 +204,10 @@ def get_node_classification_data(dataset_name: str, val_ratio: float, test_ratio
 # get the timestamp of validate and test set
 val_time, test_time = list(np.quantile(graph_df.ts, [(1 - val_ratio - test_ratio), (1 - test_ratio)]))

-src_node_ids = graph_df.u.values.astype(np.long)
-dst_node_ids = graph_df.i.values.astype(np.long)
+src_node_ids = graph_df.u.values.astype(np.longlong)
+dst_node_ids = graph_df.i.values.astype(np.longlong)
 node_interact_times = graph_df.ts.values.astype(np.float64)
-edge_ids = graph_df.idx.values.astype(np.long)
+edge_ids = graph_df.idx.values.astype(np.longlong)
 labels = graph_df.label.values

 # The setting of seed follows previous works
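
As a side illustration (toy numbers, not from any DyGLib dataset), the np.quantile call in these hunks computes the chronological cut points of the split, and the casts give integer ids that later remain valid array indices:

```python
import numpy as np

# Hypothetical interaction timestamps standing in for graph_df.ts
ts = np.linspace(1.0, 10.0, num=10)
val_ratio, test_ratio = 0.15, 0.15

# Same expression as in the diff: the 70% and 85% quantiles of the timestamps
# become the validation / test cut points of the chronological split.
val_time, test_time = list(np.quantile(ts, [(1 - val_ratio - test_ratio), (1 - test_ratio)]))
print(val_time, test_time)  # 7.3 8.65 for this toy array

# Ids are cast to an integer dtype (now np.longlong) so they stay usable as indices.
src_node_ids = np.array([1.0, 2.0, 3.0]).astype(np.longlong)
print(src_node_ids.dtype)   # int64 on most platforms
```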

utils/utils.py (+4 -4)

@@ -157,10 +157,10 @@ def get_historical_neighbors(self, node_ids: np.ndarray, node_interact_times: np
 # All interactions described in the following three matrices are sorted in each row by time
 # each entry in position (i,j) represents the id of the j-th dst node of src node node_ids[i] with an interaction before node_interact_times[i]
 # ndarray, shape (batch_size, num_neighbors)
-nodes_neighbor_ids = np.zeros((len(node_ids), num_neighbors)).astype(np.long)
+nodes_neighbor_ids = np.zeros((len(node_ids), num_neighbors)).astype(np.longlong)
 # each entry in position (i,j) represents the id of the edge with src node node_ids[i] and dst node nodes_neighbor_ids[i][j] with an interaction before node_interact_times[i]
 # ndarray, shape (batch_size, num_neighbors)
-nodes_edge_ids = np.zeros((len(node_ids), num_neighbors)).astype(np.long)
+nodes_edge_ids = np.zeros((len(node_ids), num_neighbors)).astype(np.longlong)
 # each entry in position (i,j) represents the interaction time between src node node_ids[i] and dst node nodes_neighbor_ids[i][j], before node_interact_times[i]
 # ndarray, shape (batch_size, num_neighbors)
 nodes_neighbor_times = np.zeros((len(node_ids), num_neighbors)).astype(np.float32)

@@ -444,7 +444,7 @@ def historical_sample(self, size: int, batch_src_node_ids: np.ndarray, batch_dst

 # Note that if one of the input of np.concatenate is empty, the output will be composed of floats.
 # Hence, convert the type to long to guarantee valid index
-return negative_src_node_ids.astype(np.long), negative_dst_node_ids.astype(np.long)
+return negative_src_node_ids.astype(np.longlong), negative_dst_node_ids.astype(np.longlong)

 def inductive_sample(self, size: int, batch_src_node_ids: np.ndarray, batch_dst_node_ids: np.ndarray,
                      current_batch_start_time: float, current_batch_end_time: float):

@@ -484,7 +484,7 @@ def inductive_sample(self, size: int, batch_src_node_ids: np.ndarray, batch_dst_

 # Note that if one of the input of np.concatenate is empty, the output will be composed of floats.
 # Hence, convert the type to long to guarantee valid index
-return negative_src_node_ids.astype(np.long), negative_dst_node_ids.astype(np.long)
+return negative_src_node_ids.astype(np.longlong), negative_dst_node_ids.astype(np.longlong)

 def reset_random_state(self):
     """

0 commit comments