forked from graphdeeplearning/benchmarking-gnns
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request graphdeeplearning#8 from JakeStevens/citation_graphs
Citation graphs
Showing 22 changed files with 1,356 additions and 60 deletions.
@@ -0,0 +1,37 @@
{
    "gpu": {
        "use": true,
        "id": 0
    },

    "model": "GAT",

    "out_dir": "out/CitationGraphs_node_classification/",

    "params": {
        "seed": 41,
        "epochs": 300,
        "batch_size": 128,
        "init_lr": 0.005,
        "lr_reduce_factor": 0.5,
        "lr_schedule_patience": 5,
        "min_lr": 1e-5,
        "weight_decay": 5e-4,
        "print_epoch_interval": 5,
        "max_time": 48
    },

    "net_params": {
        "builtin": true,
        "L": 1,
        "n_heads": 8,
        "hidden_dim": 8,
        "out_dim": 8,
        "residual": false,
        "in_feat_dropout": 0.6,
        "dropout": 0.6,
        "graph_norm": false,
        "batch_norm": false,
        "self_loop": true
    }
}
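For orientation only (not part of this commit), a config like the GAT file above would typically be read with the standard json module. This is a minimal sketch: the file path is a placeholder, since the actual config file names are not shown in this extract, and the driver-side variable names are assumptions rather than code from the repository.

import json
import torch

# Placeholder path; the real config file name is not visible in this diff.
cfg_path = "path/to/CitationGraphs_GAT_config.json"

with open(cfg_path) as f:
    config = json.load(f)

use_gpu = config["gpu"]["use"] and torch.cuda.is_available()
device = torch.device("cuda:%d" % config["gpu"]["id"]) if use_gpu else torch.device("cpu")

params = config["params"]          # training schedule: seed, epochs, init_lr, ...
net_params = config["net_params"]  # architecture: L, n_heads, hidden_dim, dropout, ...
print(config["model"], "on", device, "with initial lr", params["init_lr"])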
@@ -0,0 +1,36 @@
{
    "gpu": {
        "use": true,
        "id": 0
    },

    "model": "GCN",

    "out_dir": "out/CitationGraphs_node_classification/",

    "params": {
        "seed": 41,
        "epochs": 300,
        "batch_size": 128,
        "init_lr": 1e-2,
        "lr_reduce_factor": 0.5,
        "lr_schedule_patience": 5,
        "min_lr": 1e-5,
        "weight_decay": 5e-4,
        "print_epoch_interval": 5,
        "max_time": 48
    },

    "net_params": {
        "builtin": true,
        "L": 1,
        "hidden_dim": 16,
        "out_dim": 16,
        "residual": false,
        "in_feat_dropout": 0.5,
        "dropout": 0.5,
        "graph_norm": false,
        "batch_norm": false,
        "self_loop": true
    }
}
@@ -0,0 +1,36 @@
{
    "gpu": {
        "use": true,
        "id": 0
    },

    "model": "GraphSage",

    "out_dir": "out/CitationGraphs_node_classification/",

    "params": {
        "seed": 41,
        "epochs": 300,
        "batch_size": 20,
        "init_lr": 1e-2,
        "lr_reduce_factor": 0.5,
        "lr_schedule_patience": 25,
        "min_lr": 1e-6,
        "weight_decay": 5e-4,
        "print_epoch_interval": 5,
        "max_time": 48
    },

    "net_params": {
        "builtin": true,
        "L": 1,
        "hidden_dim": 16,
        "out_dim": 16,
        "residual": false,
        "in_feat_dropout": 0.5,
        "dropout": 0.5,
        "graph_norm": false,
        "batch_norm": false,
        "sage_aggregator": "mean"
    }
}
@@ -0,0 +1,33 @@
{
    "gpu": {
        "use": true,
        "id": 0
    },

    "model": "MLP",

    "out_dir": "out/CitationGraphs_node_classification/",

    "params": {
        "seed": 41,
        "epochs": 300,
        "batch_size": 20,
        "init_lr": 0.005,
        "lr_reduce_factor": 0.5,
        "lr_schedule_patience": 25,
        "min_lr": 1e-5,
        "weight_decay": 5e-4,
        "print_epoch_interval": 5,
        "max_time": 48
    },

    "net_params": {
        "L": 4,
        "hidden_dim": 16,
        "out_dim": 16,
        "readout": "mean",
        "gated": false,
        "in_feat_dropout": 0.6,
        "dropout": 0.6
    }
}
@@ -0,0 +1,33 @@
{
    "gpu": {
        "use": true,
        "id": 0
    },

    "model": "MLP",

    "out_dir": "out/CitationGraphs_node_classification/",

    "params": {
        "seed": 41,
        "epochs": 300,
        "batch_size": 20,
        "init_lr": 0.005,
        "lr_reduce_factor": 0.5,
        "lr_schedule_patience": 25,
        "min_lr": 1e-5,
        "weight_decay": 5e-4,
        "print_epoch_interval": 5,
        "max_time": 48
    },

    "net_params": {
        "L": 4,
        "hidden_dim": 16,
        "out_dim": 16,
        "readout": "mean",
        "gated": true,
        "in_feat_dropout": 0.6,
        "dropout": 0.6
    }
}
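The five configs above differ mainly in the "model" field and a few model-specific net_params entries (n_heads for GAT, sage_aggregator for GraphSage, gated and readout for the two MLP variants). As a sketch only, a driver might dispatch on that field roughly as below; the builder functions are placeholders standing in for the real model classes, which live elsewhere in the repository and are not part of this diff.

# Placeholder builders; the actual model classes are defined outside this commit.
def build_gat(p):       return {"kind": "GAT", "heads": p["n_heads"], "hidden": p["hidden_dim"]}
def build_gcn(p):       return {"kind": "GCN", "hidden": p["hidden_dim"]}
def build_graphsage(p): return {"kind": "GraphSage", "aggregator": p["sage_aggregator"]}
def build_mlp(p):       return {"kind": "MLP", "gated": p["gated"], "readout": p["readout"]}

BUILDERS = {"GAT": build_gat, "GCN": build_gcn, "GraphSage": build_graphsage, "MLP": build_mlp}

def model_from_config(config):
    # config is a dict parsed from one of the JSON files in this commit.
    return BUILDERS[config["model"]](config["net_params"])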
@@ -0,0 +1,91 @@
import torch
import pickle
import torch.utils.data
import time
import os
import numpy as np

import csv

import dgl
from dgl.data import CoraDataset
from dgl.data import CitationGraphDataset
import networkx as nx

import random
random.seed(42)


def self_loop(g):
    """
    Utility function, to be used only when the user sets the self_loop flag.
    Overwrites dgl.transform.add_self_loop() so that ndata['feat'] and edata['feat'] are preserved.
    Called from a method of the CitationGraphsDataset class.
    """
    new_g = dgl.DGLGraph()
    new_g.add_nodes(g.number_of_nodes())
    new_g.ndata['feat'] = g.ndata['feat']

    src, dst = g.all_edges(order="eid")
    src = dgl.backend.zerocopy_to_numpy(src)
    dst = dgl.backend.zerocopy_to_numpy(dst)
    non_self_edges_idx = src != dst
    nodes = np.arange(g.number_of_nodes())
    new_g.add_edges(src[non_self_edges_idx], dst[non_self_edges_idx])
    new_g.add_edges(nodes, nodes)

    # This new edata is not used, since this function is called only for GCN and GAT;
    # it is kept to satisfy the generic requirement of having both ndata and edata.
    new_g.edata['feat'] = torch.zeros(new_g.number_of_edges())
    return new_g


class CitationGraphsDataset(torch.utils.data.Dataset):
    def __init__(self, name):
        t0 = time.time()
        self.name = name.lower()

        if self.name == 'cora':
            dataset = CoraDataset()
        else:
            dataset = CitationGraphDataset(self.name)
        dataset.graph.remove_edges_from(nx.selfloop_edges(dataset.graph))
        graph = dgl.DGLGraph(dataset.graph)
        E = graph.number_of_edges()
        N = graph.number_of_nodes()
        D = dataset.features.shape[1]
        graph.ndata['feat'] = torch.Tensor(dataset.features)
        graph.edata['feat'] = torch.zeros((E, D))
        graph.batch_num_nodes = [N]

        self.norm_n = torch.FloatTensor(N, 1).fill_(1./float(N)).sqrt()
        self.norm_e = torch.FloatTensor(E, 1).fill_(1./float(E)).sqrt()
        self.graph = graph
        self.train_mask = torch.BoolTensor(dataset.train_mask)
        self.val_mask = torch.BoolTensor(dataset.val_mask)
        self.test_mask = torch.BoolTensor(dataset.test_mask)
        self.labels = torch.LongTensor(dataset.labels)
        self.num_classes = dataset.num_labels
        self.num_dims = D

        print("[!] Dataset: ", self.name)
        print("Time taken: {:.4f}s".format(time.time() - t0))

    def _add_self_loops(self):
        # Add self loops; called only if the self_loop flag is True.
        self.graph = self_loop(self.graph)
        norm = torch.pow(self.graph.in_degrees().float().clamp(min=1), -0.5)
        shp = norm.shape + (1,) * (self.graph.ndata['feat'].dim() - 1)
        self.norm_n = torch.reshape(norm, shp)
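As a usage note rather than part of the diff, here is a minimal sketch of exercising the class above, assuming a DGL version that still exposes CoraDataset/CitationGraphDataset and an environment where the citation data can be downloaded on first use.

dataset = CitationGraphsDataset('cora')   # 'citeseer' and 'pubmed' go through CitationGraphDataset
print(dataset.num_classes, "classes,", dataset.num_dims, "input features")
print(int(dataset.train_mask.sum()), "train /", int(dataset.test_mask.sum()), "test nodes")

# Configs that set "self_loop": true (the GCN and GAT ones above) would have the
# training driver call this before building the model:
dataset._add_self_loops()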