-
Notifications
You must be signed in to change notification settings - Fork 8
/
Copy pathconfig.yaml
51 lines (51 loc) · 1.08 KB
/
config.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
# PyTorch Lightning CLI config: global seed, trainer flags, model
# hyperparameters, and data-module options (Lightning 1.x-era keys,
# e.g. `checkpoint_callback` / `gpus`).
seed_everything: 7
trainer:
  checkpoint_callback: true
  callbacks:
  - class_path: pytorch_lightning.callbacks.LearningRateMonitor
    init_args:
      logging_interval: epoch
  - class_path: pytorch_lightning.callbacks.ModelCheckpoint
    init_args:
      save_top_k: 1
      monitor: val_ExpRate
      mode: max
      filename: '{epoch}-{step}-{val_ExpRate:.4f}'
  default_root_dir: 'lightning_logs/version_0'
  gpus: 1
  # gpus: 0, 1, 2, 3
  # accelerator: ddp
  check_val_every_n_epoch: 2
  max_epochs: 300
  deterministic: true
  num_sanity_val_steps: 1
  # resume_from_checkpoint:
model:
  d_model: 256
  # encoder
  growth_rate: 24
  num_layers: 16
  # decoder
  nhead: 8
  num_decoder_layers: 3
  dim_feedforward: 1024
  dropout: 0.3
  dc: 32
  cross_coverage: true
  self_coverage: true
  # beam search
  beam_size: 10
  max_len: 200
  alpha: 1.0
  early_stopping: false
  temperature: 1.0
  # training
  learning_rate: 0.08
  patience: 20
data:
  zipfile_path: data_crohme.zip
  test_year: '2014'
  train_batch_size: 8
  eval_batch_size: 4
  num_workers: 5
  # lowercase boolean (was `True`) — canonical YAML, consistent with the
  # other booleans in this file; parses to the same value.
  scale_aug: true