增大benchmark时的固定dropout和weight decay

This commit is contained in:
RYDE-WORK 2026-02-28 16:17:40 +08:00
parent 70b7a4c62a
commit bbc7c88f99

View File

@@ -289,7 +289,7 @@ def main(
     n_attn_layers: int = 4,
     fusion_strategy: str = "attention",
     head_hidden_dim: int = 128,
-    dropout: float = 0.1,
+    dropout: float = 0.3,
     # MPNN 参数
     use_mpnn: bool = False,
     mpnn_checkpoint: Optional[str] = None,
@@ -298,7 +298,7 @@ def main(
     # 训练参数
     batch_size: int = 64,
     lr: float = 1e-4,
-    weight_decay: float = 1e-5,
+    weight_decay: float = 1e-3,
     epochs: int = 50,
     patience: int = 10,
     # 设备