[Benchmark] Add inference test for current benchmarking and record performance #4892

Merged · 29 commits · Jul 13, 2022
Changes shown are from 18 of the 29 commits.

Commits (29)
8b7b50a  [Benchmark] Add inference and profile in citation (yanbing-j, Jun 27, 2022)
064d529  Print end-to-end time of inference (yanbing-j, Jun 29, 2022)
fcd08fb  Print end-to-end time of one epoch (yanbing-j, Jun 29, 2022)
0a0d349  Add inference.sh (yanbing-j, Jun 30, 2022)
5eb0491  Add inference and profile for to_hetero_mag (yanbing-j, Jul 1, 2022)
ac33b48  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jul 1, 2022)
d5dfd35  Add inference for pna (yanbing-j, Jul 4, 2022)
5cb18c8  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jul 4, 2022)
c8d3b8c  Add inference for benchmark/points/edge_cnn (yanbing-j, Jul 4, 2022)
e563390  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jul 4, 2022)
b129f9d  Fix error (yanbing-j, Jul 4, 2022)
c3c3dae  Merge branch 'master' into yanbing/benchmark_inference (rusty1s, Jul 9, 2022)
1b69353  Merge branch 'yanbing/benchmark_inference' of https://github.com/yanb… (yanbing-j, Jul 11, 2022)
b80cd41  Update scripts (yanbing-j, Jul 11, 2022)
9bd310a  Add profile test to increase code coverage (yanbing-j, Jul 11, 2022)
5e90e81  Update script of points benchmark (yanbing-j, Jul 11, 2022)
f888faf  Update script for missing rename (yanbing-j, Jul 12, 2022)
55c5c42  Update scripts according to the comments (yanbing-j, Jul 12, 2022)
00bca65  Add CPU test for profile (yanbing-j, Jul 12, 2022)
b9a7593  Merge branch 'master' into yanbing/benchmark_inference (yanbing-j, Jul 12, 2022)
72f7eab  Merge branch 'master' into yanbing/benchmark_inference (yanbing-j, Jul 13, 2022)
cef334d  update (rusty1s, Jul 13, 2022)
e07d068  update (rusty1s, Jul 13, 2022)
b1a4620  update (rusty1s, Jul 13, 2022)
e6ffafe  update (rusty1s, Jul 13, 2022)
fea0f43  reset (rusty1s, Jul 13, 2022)
fd39e26  update (rusty1s, Jul 13, 2022)
7fd675b  changelog (rusty1s, Jul 13, 2022)
014e277  Merge branch 'master' into yanbing/benchmark_inference (rusty1s, Jul 13, 2022)
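
The commit messages above ("Print end-to-end time of inference", "Add CPU test for profile") describe the pattern each benchmark driver adopts: run the model forward-only, time it end to end, and optionally capture a profiler trace. The PR's actual changes to the shared train/eval helpers are not part of the excerpt below, so the following is only a minimal sketch of that pattern under stated assumptions; the name run_inference and the printed output format are illustrative, not the PR's code.

import time

import torch
from torch.profiler import ProfilerActivity, profile


@torch.no_grad()
def run_inference(model, data, epochs=200, do_profile=False):
    # Warm-up pass so one-time setup cost does not distort the timing.
    model.eval()
    model(data)

    # End-to-end wall-clock time over all inference epochs.
    t_start = time.time()
    for _ in range(epochs):
        model(data)
    print(f'End-to-end inference time: {time.time() - t_start:.4f}s')

    if do_profile:
        # Trace a single forward pass and dump a Chrome trace; the drivers
        # below then rename the file so consecutive runs do not overwrite it.
        with profile(activities=[ProfilerActivity.CPU]) as p:
            model(data)
        p.export_chrome_trace('profile.json')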
13 changes: 10 additions & 3 deletions benchmark/citation/appnp.py
@@ -5,7 +5,8 @@
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 from torch.nn import Linear
 
-from torch_geometric.nn import APPNP
+from torch_geometric.nn import APPNP as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -20,6 +21,8 @@
 parser.add_argument('--normalize_features', type=bool, default=True)
 parser.add_argument('--K', type=int, default=10)
 parser.add_argument('--alpha', type=float, default=0.1)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
@@ -28,7 +31,7 @@ def __init__(self, dataset):
         super().__init__()
         self.lin1 = Linear(dataset.num_features, args.hidden)
         self.lin2 = Linear(args.hidden, dataset.num_classes)
-        self.prop1 = APPNP(args.K, args.alpha)
+        self.prop1 = Conv(args.K, args.alpha)
 
     def reset_parameters(self):
         self.lin1.reset_parameters()
@@ -47,4 +50,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
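
Every citation driver in this diff threads the new args.inference and args.profile flags into the shared run() helper (benchmark/citation/train_eval.py), whose own diff is not shown in this excerpt. The call sites fix the argument order; how the helper dispatches on the flags is an assumption. A minimal sketch, reusing the run_inference sketch above, with train_eval_loop as a hypothetical stand-in for the pre-existing training path:

def run(dataset, model, runs, epochs, lr, weight_decay, early_stopping,
        inference, profiling, permute_masks=None):
    # Sketch under stated assumptions, not the PR's actual implementation:
    # either benchmark forward-only inference, or fall through to the
    # original training/evaluation loop.
    data = dataset[0]
    if inference:
        run_inference(model, data, epochs=epochs, do_profile=profiling)
    else:
        train_eval_loop(model, data, runs, epochs, lr, weight_decay,
                        early_stopping, permute_masks)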
23 changes: 15 additions & 8 deletions benchmark/citation/arma.py
@@ -4,7 +4,8 @@
 import torch.nn.functional as F
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 
-from torch_geometric.nn import ARMAConv
+from torch_geometric.nn import ARMAConv as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -21,18 +22,20 @@
 parser.add_argument('--num_layers', type=int, default=1)
 parser.add_argument('--shared_weights', type=bool, default=False)
 parser.add_argument('--skip_dropout', type=float, default=0.75)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
 class Net(torch.nn.Module):
     def __init__(self, dataset):
         super().__init__()
-        self.conv1 = ARMAConv(dataset.num_features, args.hidden,
-                              args.num_stacks, args.num_layers,
-                              args.shared_weights, dropout=args.skip_dropout)
-        self.conv2 = ARMAConv(args.hidden, dataset.num_classes,
-                              args.num_stacks, args.num_layers,
-                              args.shared_weights, dropout=args.skip_dropout)
+        self.conv1 = Conv(dataset.num_features, args.hidden, args.num_stacks,
+                          args.num_layers, args.shared_weights,
+                          dropout=args.skip_dropout)
+        self.conv2 = Conv(args.hidden, dataset.num_classes, args.num_stacks,
+                          args.num_layers, args.shared_weights,
+                          dropout=args.skip_dropout)
 
     def reset_parameters(self):
         self.conv1.reset_parameters()
@@ -49,4 +52,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
15 changes: 11 additions & 4 deletions benchmark/citation/cheb.py
@@ -4,7 +4,8 @@
 import torch.nn.functional as F
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 
-from torch_geometric.nn import ChebConv
+from torch_geometric.nn import ChebConv as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -18,14 +19,16 @@
 parser.add_argument('--dropout', type=float, default=0.5)
 parser.add_argument('--normalize_features', type=bool, default=True)
 parser.add_argument('--num_hops', type=int, default=3)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
 class Net(torch.nn.Module):
     def __init__(self, dataset):
         super().__init__()
-        self.conv1 = ChebConv(dataset.num_features, args.hidden, args.num_hops)
-        self.conv2 = ChebConv(args.hidden, dataset.num_classes, args.num_hops)
+        self.conv1 = Conv(dataset.num_features, args.hidden, args.num_hops)
+        self.conv2 = Conv(args.hidden, dataset.num_classes, args.num_hops)
 
     def reset_parameters(self):
         self.conv1.reset_parameters()
@@ -42,4 +45,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
21 changes: 14 additions & 7 deletions benchmark/citation/gat.py
@@ -4,7 +4,8 @@
 import torch.nn.functional as F
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 
-from torch_geometric.nn import GATConv
+from torch_geometric.nn import GATConv as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -19,17 +20,19 @@
 parser.add_argument('--normalize_features', type=bool, default=True)
 parser.add_argument('--heads', type=int, default=8)
 parser.add_argument('--output_heads', type=int, default=1)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
 class Net(torch.nn.Module):
     def __init__(self, dataset):
         super().__init__()
-        self.conv1 = GATConv(dataset.num_features, args.hidden,
-                             heads=args.heads, dropout=args.dropout)
-        self.conv2 = GATConv(args.hidden * args.heads, dataset.num_classes,
-                             heads=args.output_heads, concat=False,
-                             dropout=args.dropout)
+        self.conv1 = Conv(dataset.num_features, args.hidden, heads=args.heads,
+                          dropout=args.dropout)
+        self.conv2 = Conv(args.hidden * args.heads, dataset.num_classes,
+                          heads=args.output_heads, concat=False,
+                          dropout=args.dropout)
 
     def reset_parameters(self):
         self.conv1.reset_parameters()
@@ -47,4 +50,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
15 changes: 11 additions & 4 deletions benchmark/citation/gcn.py
@@ -4,7 +4,8 @@
 import torch.nn.functional as F
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 
-from torch_geometric.nn import GCNConv
+from torch_geometric.nn import GCNConv as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -17,14 +18,16 @@
 parser.add_argument('--hidden', type=int, default=16)
 parser.add_argument('--dropout', type=float, default=0.5)
 parser.add_argument('--normalize_features', type=bool, default=True)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
 class Net(torch.nn.Module):
     def __init__(self, dataset):
         super().__init__()
-        self.conv1 = GCNConv(dataset.num_features, args.hidden)
-        self.conv2 = GCNConv(args.hidden, dataset.num_classes)
+        self.conv1 = Conv(dataset.num_features, args.hidden)
+        self.conv2 = Conv(args.hidden, dataset.num_classes)
 
     def reset_parameters(self):
         self.conv1.reset_parameters()
@@ -41,4 +44,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
118 changes: 118 additions & 0 deletions benchmark/citation/inference.sh
@@ -0,0 +1,118 @@
#!/bin/sh

echo "Cora"
echo "===="

echo "GCN"
python gcn.py --dataset=Cora --inference
python gcn.py --dataset=Cora --random_splits=True --inference
python gcn.py --dataset=Cora --inference --profile
python gcn.py --dataset=Cora --random_splits=True --inference --profile

echo "GAT"
python gat.py --dataset=Cora --inference
python gat.py --dataset=Cora --random_splits=True --inference
python gat.py --dataset=Cora --inference --profile
python gat.py --dataset=Cora --random_splits=True --inference --profile

echo "Cheby"
python cheb.py --dataset=Cora --num_hops=3 --inference
python cheb.py --dataset=Cora --num_hops=3 --random_splits=True --inference
python cheb.py --dataset=Cora --num_hops=3 --inference --profile
python cheb.py --dataset=Cora --num_hops=3 --random_splits=True --inference --profile

echo "SGC"
python sgc.py --dataset=Cora --K=3 --weight_decay=0.0005 --inference
python sgc.py --dataset=Cora --K=3 --weight_decay=0.0005 --random_splits=True --inference
python sgc.py --dataset=Cora --K=3 --weight_decay=0.0005 --inference --profile
python sgc.py --dataset=Cora --K=3 --weight_decay=0.0005 --random_splits=True --inference --profile

echo "ARMA"
python arma.py --dataset=Cora --num_stacks=2 --num_layers=1 --shared_weights=True --inference
python arma.py --dataset=Cora --num_stacks=3 --num_layers=1 --shared_weights=True --random_splits=True --inference
python arma.py --dataset=Cora --num_stacks=2 --num_layers=1 --shared_weights=True --inference --profile
python arma.py --dataset=Cora --num_stacks=3 --num_layers=1 --shared_weights=True --random_splits=True --inference --profile

echo "APPNP"
python appnp.py --dataset=Cora --alpha=0.1 --inference
python appnp.py --dataset=Cora --alpha=0.1 --random_splits=True --inference
python appnp.py --dataset=Cora --alpha=0.1 --inference --profile
python appnp.py --dataset=Cora --alpha=0.1 --random_splits=True --inference --profile

echo "CiteSeer"
echo "========"

echo "GCN"
python gcn.py --dataset=CiteSeer --inference
python gcn.py --dataset=CiteSeer --random_splits=True --inference
python gcn.py --dataset=CiteSeer --inference --profile
python gcn.py --dataset=CiteSeer --random_splits=True --inference --profile

echo "GAT"
python gat.py --dataset=CiteSeer --inference
python gat.py --dataset=CiteSeer --random_splits=True --inference
python gat.py --dataset=CiteSeer --inference --profile
python gat.py --dataset=CiteSeer --random_splits=True --inference --profile

echo "Cheby"
python cheb.py --dataset=CiteSeer --num_hops=2 --inference
python cheb.py --dataset=CiteSeer --num_hops=3 --random_splits=True --inference
python cheb.py --dataset=CiteSeer --num_hops=2 --inference --profile
python cheb.py --dataset=CiteSeer --num_hops=3 --random_splits=True --inference --profile

echo "SGC"
python sgc.py --dataset=CiteSeer --K=2 --weight_decay=0.005 --inference
python sgc.py --dataset=CiteSeer --K=2 --weight_decay=0.005 --random_splits=True --inference
python sgc.py --dataset=CiteSeer --K=2 --weight_decay=0.005 --inference --profile
python sgc.py --dataset=CiteSeer --K=2 --weight_decay=0.005 --random_splits=True --inference --profile

echo "ARMA"
python arma.py --dataset=CiteSeer --num_stacks=3 --num_layers=1 --shared_weights=True --inference
python arma.py --dataset=CiteSeer --num_stacks=3 --num_layers=1 --shared_weights=True --random_splits=True --inference
python arma.py --dataset=CiteSeer --num_stacks=3 --num_layers=1 --shared_weights=True --inference --profile
python arma.py --dataset=CiteSeer --num_stacks=3 --num_layers=1 --shared_weights=True --random_splits=True --inference --profile

echo "APPNP"
python appnp.py --dataset=CiteSeer --alpha=0.1 --inference
python appnp.py --dataset=CiteSeer --alpha=0.1 --random_splits=True --inference
python appnp.py --dataset=CiteSeer --alpha=0.1 --inference --profile
python appnp.py --dataset=CiteSeer --alpha=0.1 --random_splits=True --inference --profile

echo "PubMed"
echo "======"

echo "GCN"
python gcn.py --dataset=PubMed --inference
python gcn.py --dataset=PubMed --random_splits=True --inference
python gcn.py --dataset=PubMed --inference --profile
python gcn.py --dataset=PubMed --random_splits=True --inference --profile

echo "GAT"
python gat.py --dataset=PubMed --lr=0.01 --weight_decay=0.001 --output_heads=8 --inference
python gat.py --dataset=PubMed --lr=0.01 --weight_decay=0.001 --output_heads=8 --random_splits=True --inference
python gat.py --dataset=PubMed --lr=0.01 --weight_decay=0.001 --output_heads=8 --inference --profile
python gat.py --dataset=PubMed --lr=0.01 --weight_decay=0.001 --output_heads=8 --random_splits=True --inference --profile

echo "Cheby"
python cheb.py --dataset=PubMed --num_hops=2 --inference
python cheb.py --dataset=PubMed --num_hops=2 --random_splits=True --inference
python cheb.py --dataset=PubMed --num_hops=2 --inference --profile
python cheb.py --dataset=PubMed --num_hops=2 --random_splits=True --inference --profile

echo "SGC"
python sgc.py --dataset=PubMed --K=2 --weight_decay=0.0005 --inference
python sgc.py --dataset=PubMed --K=2 --weight_decay=0.0005 --random_splits=True --inference
python sgc.py --dataset=PubMed --K=2 --weight_decay=0.0005 --inference --profile
python sgc.py --dataset=PubMed --K=2 --weight_decay=0.0005 --random_splits=True --inference --profile

echo "ARMA"
python arma.py --dataset=PubMed --num_stacks=2 --num_layers=1 --skip_dropout=0 --inference
python arma.py --dataset=PubMed --num_stacks=2 --num_layers=1 --skip_dropout=0.5 --random_splits=True --inference
python arma.py --dataset=PubMed --num_stacks=2 --num_layers=1 --skip_dropout=0 --inference --profile
python arma.py --dataset=PubMed --num_stacks=2 --num_layers=1 --skip_dropout=0.5 --random_splits=True --inference --profile

echo "APPNP"
python appnp.py --dataset=PubMed --alpha=0.1 --inference
python appnp.py --dataset=PubMed --alpha=0.1 --random_splits=True --inference
python appnp.py --dataset=PubMed --alpha=0.1 --inference --profile
python appnp.py --dataset=PubMed --alpha=0.1 --random_splits=True --inference --profile
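
The PR title also mentions recording performance. One lightweight way to do that is to save the script's output (e.g. sh inference.sh > inference.log) and scrape the timing lines afterwards. The helper below is purely illustrative and not part of the PR; it assumes each run prints a line like 'End-to-end inference time: 0.1234s', matching the run_inference sketch earlier rather than the benchmarks' exact output format.

import csv
import re
import sys

# Hypothetical post-processor: collect timing lines from a saved benchmark
# log into a CSV so runs can be compared across models and datasets.
PATTERN = re.compile(r'End-to-end inference time: ([0-9.]+)s')


def collect(log_path, csv_path):
    rows = []
    with open(log_path) as f:
        for line in f:
            match = PATTERN.search(line)
            if match:
                rows.append([len(rows), float(match.group(1))])
    with open(csv_path, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['run_index', 'seconds'])
        writer.writerows(rows)


if __name__ == '__main__':
    collect(sys.argv[1], sys.argv[2])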
15 changes: 11 additions & 4 deletions benchmark/citation/sgc.py
@@ -4,7 +4,8 @@
 import torch.nn.functional as F
 from citation import get_planetoid_dataset, random_planetoid_splits, run
 
-from torch_geometric.nn import SGConv
+from torch_geometric.nn import SGConv as Conv
+from torch_geometric.profile import rename_profile_file
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, required=True)
@@ -16,14 +17,16 @@
 parser.add_argument('--early_stopping', type=int, default=10)
 parser.add_argument('--normalize_features', type=bool, default=False)
 parser.add_argument('--K', type=int, default=2)
+parser.add_argument('--inference', action='store_true')
+parser.add_argument('--profile', action='store_true')
 args = parser.parse_args()
 
 
 class Net(torch.nn.Module):
     def __init__(self, dataset):
         super().__init__()
-        self.conv1 = SGConv(dataset.num_features, dataset.num_classes,
-                            K=args.K, cached=True)
+        self.conv1 = Conv(dataset.num_features, dataset.num_classes, K=args.K,
+                          cached=True)
 
     def reset_parameters(self):
         self.conv1.reset_parameters()
@@ -37,4 +40,8 @@ def forward(self, data):
 dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
 permute_masks = random_planetoid_splits if args.random_splits else None
 run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
-    args.early_stopping, permute_masks)
+    args.early_stopping, args.inference, args.profile, permute_masks)
+
+if args.profile:
+    rename_profile_file('citation', Conv.__name__, args.dataset,
+                        str(args.random_splits))
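
Each driver above ends with the same if args.profile: block. rename_profile_file is a real helper in torch_geometric.profile; the sketch below only illustrates the behavior the call sites rely on — making the trace name unique per benchmark, model, dataset, and split — and is not the library's implementation.

import os


def rename_profile_file(*suffixes):
    # Illustrative stand-in: e.g.
    # rename_profile_file('citation', 'APPNP', 'Cora', 'True') turns
    # 'profile.json' into 'profile-citation-APPNP-Cora-True.json', so a
    # later run cannot overwrite an earlier trace.
    os.rename('profile.json', 'profile-' + '-'.join(suffixes) + '.json')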