-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.yaml
More file actions
38 lines (32 loc) · 2.16 KB
/
config.yaml
File metadata and controls
38 lines (32 loc) · 2.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
---
# Hypergraph Self-Supervised Learning Configuration

# === Experiment Settings ===
seed: 3407                # Random seed for reproducibility
runs: 5                   # Number of experimental runs for statistical significance

# === Data Parameters ===
train_rate: 0.1           # Proportion of nodes for training set
valid_rate: 0.3           # Proportion of nodes for validation set
data_name: "CC-Cora"      # Dataset name (Cora, Citeseer, CA-Cora, CC-Cora, CC-Citeseer, DBLP-Paper, DBLP-Conf, DBLP-Term, IMDB-Actor, IMDB-Director)

# === Model Architecture ===
num_hidden: 512           # Hidden dimension size for HGNN layers
encoder_type: "hgnnp"     # Encoder architecture: hgnn, gat, gcn, hgnnp, mlp, linear
decoder_type: "hgnnp"     # Decoder architecture (same as encoder for simplicity)
dropout: 0.0              # Dropout rate for regularization

# === Pre-training Parameters ===
lr: 0.0001                # Learning rate for pre-training phase
max_epoch: 250            # Maximum number of pre-training epochs
weight_decay: 0           # L2 regularization strength
optim_type: "adam"        # Optimizer type: adam, adamw, adadelta, radam, sgd
loss_fn: "sce"            # Loss function: sce or mse
mask_rate: 0.7            # Masking rate for attribute reconstruction task
replace_rate: 0.05        # Feature replacement rate in masking strategy
cl: 1                     # Weight for contrastive learning loss component
attr: 3                   # Weight for attribute reconstruction loss component
# NOTE: key is intentionally spelled "lamda" — the consumer reads this exact key; do not rename to "lambda"
lamda: 0.2                # Lambda parameter for similarity scaling in contrastive loss
aug_ratio: 0.6            # Augmentation ratio for structural perturbation

# === Fine-tuning Parameters ===
lr_f: 0.01                # Learning rate for linear probing evaluation
max_epoch_f: 300          # Maximum epochs for linear classifier training
weight_decay_f: 0.0001    # Weight decay for linear probing

# === Output Directories ===
result_dir: "results"     # Directory for saving experimental results and metrics
log_dir: "logs"           # Directory for training logs and TensorBoard files