path: root/experiments/run_dfagnn_depth.py
#!/usr/bin/env python3
"""H7: DFA-GNN depth sweep for Figure 4(a)-style plot.

Runs DFA-GNN at L ∈ {4, 8, 10, 12, 16, 20} × {Cora, CiteSeer, PubMed, DBLP} × 20 seeds.
L=6 data already exists from prior experiments; L=2/3 skipped (CiteSeer L=2 GRAFT soft spot).

Combined with existing BP and GRAFT depth data, produces 3-method depth curves for Figure 4(a).
"""

import json
import os

import numpy as np
import torch

from run_dblp_depth import load_dblp
from src.data import load_dataset
from src.trainers import DFAGNNTrainer

device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
SEEDS = list(range(20))
EPOCHS = 200
DEPTHS = [4, 8, 10, 12, 16, 20]  # L=6 already done; L=2/3 intentionally skipped
OUT_DIR = 'results/dfagnn_depth_20seeds'

# DFA-GNN-specific hyperparameters forwarded to DFAGNNTrainer.
dfagnn_extra = dict(diffusion_alpha=0.5, diffusion_iters=10, max_topo_power=3)


def train_one(data, L, seed):
    """Train one DFA-GNN at depth L; return test accuracy at the best-val epoch."""
    torch.manual_seed(seed)
    np.random.seed(seed)
    torch.cuda.manual_seed_all(seed)
    t = DFAGNNTrainer(data=data, hidden_dim=64, lr=0.01, weight_decay=5e-4,
                      num_layers=L, residual_alpha=0.0, backbone='gcn', **dfagnn_extra)
    best_val, best_test = 0.0, 0.0
    for ep in range(EPOCHS):
        t.train_step()
        if ep % 5 == 0:  # evaluate every 5 epochs to keep the sweep cheap
            val = t.evaluate('val_mask')
            test = t.evaluate('test_mask')
            if val > best_val:
                best_val, best_test = val, test
    del t
    torch.cuda.empty_cache()
    return best_test
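
# Minimal standalone usage sketch (illustrative only; assumes the Cora split
# in src.data loads onto `device`):
#
#   data = load_dataset('Cora', device=device)
#   acc = train_one(data, L=8, seed=0)
#   print(f"single-run test accuracy: {acc * 100:.1f}%")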


def main():
    os.makedirs(OUT_DIR, exist_ok=True)
    per_seed_file = os.path.join(OUT_DIR, 'per_seed_data.json')
    # Resume from any previous partial run.
    if os.path.exists(per_seed_file):
        with open(per_seed_file) as f:
            per_seed_data = json.load(f)
    else:
        per_seed_data = {}

    datasets_cfg = {
        'Cora': lambda: load_dataset('Cora', device=device),
        'CiteSeer': lambda: load_dataset('CiteSeer', device=device),
        'PubMed': lambda: load_dataset('PubMed', device=device),
        'DBLP': lambda: load_dblp(),
    }

    for ds_name, loader in datasets_cfg.items():
        data = loader()
        for L in DEPTHS:
            key = f"{ds_name}_L{L}_DFA-GNN"
            if key not in per_seed_data:
                per_seed_data[key] = {}

            print(f"\n=== {key} (20 seeds) ===", flush=True)
            for seed in SEEDS:
                sk = str(seed)
                if sk in per_seed_data[key]:
                    print(f"  seed {seed}: cached ({per_seed_data[key][sk]*100:.1f}%)", flush=True)
                    continue
                try:
                    acc = train_one(data, L, seed)
                    per_seed_data[key][sk] = acc
                    print(f"  seed {seed}: {acc*100:.1f}%", flush=True)
                except Exception as e:
                    print(f"  seed {seed}: FAILED - {e}", flush=True)
                    per_seed_data[key][sk] = 0.0  # record failure as 0.0 so the sweep continues

                # Checkpoint after every seed so an interrupted sweep can resume.
                with open(per_seed_file, 'w') as f:
                    json.dump(per_seed_data, f, indent=2)
        del data
        torch.cuda.empty_cache()  # free GPU memory before loading the next dataset

    # Summary
    print(f"\n{'=' * 70}\nDFA-GNN depth sweep summary (20 seeds)\n{'=' * 70}")
    results = {}
    for ds in datasets_cfg:
        print(f"\n{ds}:")
        for L in DEPTHS:
            key = f"{ds}_L{L}_DFA-GNN"
            vals = np.array([per_seed_data[key][str(s)] for s in SEEDS]) * 100
            results[key] = {'mean': float(vals.mean()), 'std': float(vals.std()),
                            'per_seed': vals.tolist()}
            print(f"  L={L:2d}  DFA-GNN {vals.mean():5.1f} ± {vals.std():4.1f}")

    with open(os.path.join(OUT_DIR, 'results.json'), 'w') as f:
        json.dump(results, f, indent=2)
    print(f"\nSaved to {OUT_DIR}/results.json")


if __name__ == '__main__':
    main()