---
# config.yml — 122 lines (117 loc), 2.38 KB
# NOTE(review): lines above the first section key were web-scrape residue
# (repeated filename and a bare gutter of line numbers 1–122); collapsed
# into this header comment. The YAML content itself starts below.
# Example 1: Burgers-type 1D problem — presumably transformer hyperparameters
# (attention_type: fourier, ifft decoder); verify against the consuming trainer.
ex1_burgers:
  node_feats: 1
  edge_feats: null
  pos_dim: 1
  n_targets: 1
  n_hidden: 96
  num_feat_layers: 0
  num_encoder_layers: 4
  n_head: 1
  pred_len: 0
  n_freq_targets: 0
  dim_feedforward: 192
  feat_extract_type: null
  attention_type: fourier
  xavier_init: 0.001
  diagonal_weight: 0.01
  # Booleans normalized to canonical lowercase true/false (YAML 1.2;
  # yamllint `truthy`) — `False`/`True` are not portable across loaders.
  symmetric_init: false
  layer_norm: false
  attn_norm: true
  batch_norm: false
  spacial_residual: false
  return_attn_weight: false
  return_latent: false
  residual_type: plus
  seq_len: null
  bulk_regression: false
  decoder_type: ifft
  freq_dim: 48
  num_regressor_layers: 2
  fourier_modes: 16
  spacial_dim: 1
  spacial_fc: false
  dropout: 0.0
  encoder_dropout: 0.0
  ffn_dropout: 0.0
  decoder_dropout: 0.0
  debug: false
# Example 2: 2D Darcy flow — galerkin attention, ifft2 decoder with
# interp up/down-sampling; verify parameter semantics against the trainer.
ex2_darcy:
  node_feats: 1
  pos_dim: 2
  n_targets: 1
  n_hidden: 128
  num_feat_layers: 0
  num_encoder_layers: 6
  n_head: 4
  dim_feedforward: 256
  feat_extract_type: null
  attention_type: galerkin
  xavier_init: 0.01
  diagonal_weight: 0.01
  # Booleans normalized to canonical lowercase true/false (YAML 1.2;
  # yamllint `truthy`) — `False`/`True` are not portable across loaders.
  symmetric_init: false
  layer_norm: false
  attn_norm: true
  norm_eps: 0.0000001
  batch_norm: false
  return_attn_weight: false
  return_latent: false
  decoder_type: ifft2
  spacial_dim: 2
  spacial_fc: true
  upsample_mode: interp
  downsample_mode: interp
  freq_dim: 32
  boundary_condition: dirichlet
  num_regressor_layers: 2
  fourier_modes: 12
  regressor_activation: silu
  downscaler_activation: relu
  upscaler_activation: silu
  last_activation: true
  dropout: 0.0
  downscaler_dropout: 0.05
  upscaler_dropout: 0.0
  ffn_dropout: 0.05
  encoder_dropout: 0.05
  # Was bare `0` (int) while every sibling dropout is a float — written as
  # 0.0 so all dropout values load with a consistent type.
  decoder_dropout: 0.0
  debug: false
# Example 3: inverse Darcy problem (inverse: true) with input subsampling
# and additive noise; pointwise decoder. Verify semantics against the trainer.
ex3_darcy_inv:
  subsample_nodes: 3
  subsample_attn: 12
  gamma: 0.0
  noise: 0.01
  inverse: true
  node_feats: 1
  pos_dim: 2
  n_targets: 1
  n_hidden: 192
  num_feat_layers: 0
  num_encoder_layers: 6
  n_head: 4
  dim_feedforward: 384
  feat_extract_type: null
  attention_type: galerkin
  xavier_init: 0.01
  diagonal_weight: 0.01
  # Booleans normalized to canonical lowercase true/false (YAML 1.2;
  # yamllint `truthy`) — `False`/`True` are not portable across loaders.
  symmetric_init: false
  layer_norm: false
  attn_norm: true
  norm_eps: 0.0000001
  batch_norm: false
  return_attn_weight: false
  return_latent: false
  decoder_type: pointwise
  regressor_activation: silu
  spacial_dim: 2
  spacial_fc: true
  upsample_mode: interp
  downsample_mode: interp
  boundary_condition: free
  num_regressor_layers: 1
  dropout: 0.05
  downscaler_dropout: 0.05
  upscaler_dropout: 0.05
  ffn_dropout: 0.05
  encoder_dropout: 0.05
  decoder_dropout: 0.05
  debug: false