This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Commit

remove dropout from test fixtures (#2889)
* remove dropout from test fixtures

* last bit of dropout in naqanet
DeNeutoy committed Jul 16, 2019
1 parent 1cd2193 commit ec30c90
Showing 26 changed files with 49 additions and 49 deletions.
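Note (not part of the commit): every edit below is the same mechanical change, a nonzero dropout probability in a test-fixture config replaced with 0.0. As a minimal sketch of why that matters for tests, assuming standard PyTorch semantics, a dropout layer with p=0.0 is an identity transform even in training mode, so a fixture model's forward pass no longer depends on randomly sampled masks:

import torch
import torch.nn as nn

# Hypothetical illustration, not code from this repository.
torch.manual_seed(0)
x = torch.randn(2, 4)

old_style = nn.Dropout(p=0.2)  # the kind of value the fixtures used before
new_style = nn.Dropout(p=0.0)  # the value the fixtures use after this commit

old_style.train()
new_style.train()

# With p=0.2 the mask is resampled on every call, so repeated calls can differ.
print(torch.equal(old_style(x), old_style(x)))  # usually False
# With p=0.0 nothing is dropped and nothing is rescaled: output equals input.
print(torch.equal(new_style(x), x))             # True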
@@ -34,7 +34,7 @@ local bert_model = "allennlp/tests/fixtures/bert/vocab.txt";
 "model": {
 "type": "bert_for_classification",
 "bert_model": bert_model,
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "bucket",
@@ -24,7 +24,7 @@
 }
 }
 },
-"embedding_dropout": 0.2,
+"embedding_dropout": 0.0,
 "pre_encode_feedforward": {
 "input_dim": 42,
 "num_layers": 1,
@@ -44,7 +44,7 @@
 "hidden_size": 3,
 "num_layers": 1
 },
-"integrator_dropout": 0.2,
+"integrator_dropout": 0.0,
 "elmo": {
 "options_file": "allennlp/tests/fixtures/elmo/options.json",
 "weight_file": "allennlp/tests/fixtures/elmo/lm_weights.hdf5",
@@ -58,7 +58,7 @@
 "num_layers": 2,
 "output_dims": [6, 5],
 "pool_sizes": [4, 3],
-"dropout": [0.2, 0.0]
+"dropout": [0.0, 0.0]
 }
 },
 "iterator": {
@@ -16,7 +16,7 @@
 }
 }
 },
-"embedding_dropout": 0.2,
+"embedding_dropout": 0.0,
 "pre_encode_feedforward": {
 "input_dim": 10,
 "num_layers": 1,
@@ -36,13 +36,13 @@
 "hidden_size": 3,
 "num_layers": 1
 },
-"integrator_dropout": 0.2,
+"integrator_dropout": 0.0,
 "output_layer": {
 "input_dim": 12,
 "num_layers": 2,
 "output_dims": [6, 5],
 "pool_sizes": [4, 3],
-"dropout": [0.2, 0.0]
+"dropout": [0.0, 0.0]
 }
 },
 "iterator": {
@@ -16,13 +16,13 @@
 }
 }
 },
-"embedding_dropout": 0.2,
+"embedding_dropout": 0.0,
 "pre_encode_feedforward": {
 "input_dim": 10,
 "num_layers": 1,
 "hidden_dims": [3],
 "activations": ["relu"],
-"dropout": [0.2]
+"dropout": [0.0]
 },
 "encoder": {
 "type": "lstm",
@@ -36,13 +36,13 @@
 "hidden_size": 3,
 "num_layers": 1
 },
-"integrator_dropout": 0.2,
+"integrator_dropout": 0.0,
 "output_layer": {
 "input_dim": 12,
 "num_layers": 2,
 "hidden_dims": [6, 5],
 "activations": ["relu", "linear"],
-"dropout": [0.2, 0.0]
+"dropout": [0.0, 0.0]
 }
 },
 "iterator": {
@@ -24,7 +24,7 @@
 }
 }
 },
-"embedding_dropout": 0.2,
+"embedding_dropout": 0.0,
 "pre_encode_feedforward": {
 "input_dim": 10,
 "num_layers": 1,
@@ -44,7 +44,7 @@
 "hidden_size": 3,
 "num_layers": 1
 },
-"integrator_dropout": 0.2,
+"integrator_dropout": 0.0,
 "elmo": {
 "options_file": "allennlp/tests/fixtures/elmo/options.json",
 "weight_file": "allennlp/tests/fixtures/elmo/lm_weights.hdf5",
@@ -57,7 +57,7 @@
 "num_layers": 2,
 "output_dims": [6, 5],
 "pool_sizes": [4, 3],
-"dropout": [0.2, 0.0]
+"dropout": [0.0, 0.0]
 }
 },
 "iterator": {
6 changes: 3 additions & 3 deletions allennlp/tests/fixtures/bimpm/experiment.json
@@ -23,7 +23,7 @@
 "validation_data_path": "allennlp/tests/fixtures/data/quora_paraphrase.tsv",
 "model": {
 "type": "bimpm",
-"dropout": 0.1,
+"dropout": 0.0,
 "text_field_embedder": {
 "token_embedders": {
 "tokens": {
@@ -95,14 +95,14 @@
 "input_size": 264,
 "hidden_size": 100,
 "num_layers": 2,
-"dropout": 0.1
+"dropout": 0.0
 },
 "classifier_feedforward": {
 "input_dim": 400,
 "num_layers": 2,
 "hidden_dims": [200, 2],
 "activations": ["relu", "linear"],
-"dropout": [0.1, 0.0]
+"dropout": [0.0, 0.0]
 },
 "initializer": [
 [".*linear_layers.*weight", {"type": "xavier_normal"}],
2 changes: 1 addition & 1 deletion allennlp/tests/fixtures/coref/experiment.json
@@ -39,7 +39,7 @@
 "num_filters": 10,
 "ngram_filter_sizes": [5]
 },
-"dropout": 0.2
+"dropout": 0.0
 }
 }
 },
4 changes: 2 additions & 2 deletions allennlp/tests/fixtures/crf_tagger/experiment.json
@@ -36,7 +36,7 @@
 "input_size": 25,
 "hidden_size": 80,
 "num_layers": 2,
-"dropout": 0.25,
+"dropout": 0.0,
 "bidirectional": true
 }
 }
@@ -47,7 +47,7 @@
 "input_size": 210,
 "hidden_size": 300,
 "num_layers": 2,
-"dropout": 0.5,
+"dropout": 0.0,
 "bidirectional": true
 },
 "regularizer": [
4 changes: 2 additions & 2 deletions allennlp/tests/fixtures/crf_tagger/experiment_ccgbank.json
@@ -33,7 +33,7 @@
 "input_size": 25,
 "hidden_size": 80,
 "num_layers": 2,
-"dropout": 0.25,
+"dropout": 0.0,
 "bidirectional": true
 }
 }
@@ -44,7 +44,7 @@
 "input_size": 210,
 "hidden_size": 300,
 "num_layers": 2,
-"dropout": 0.5,
+"dropout": 0.0,
 "bidirectional": true
 },
 "regularizer": [
4 changes: 2 additions & 2 deletions allennlp/tests/fixtures/crf_tagger/experiment_conll2000.json
@@ -36,7 +36,7 @@
 "input_size": 25,
 "hidden_size": 80,
 "num_layers": 2,
-"dropout": 0.25,
+"dropout": 0.0,
 "bidirectional": true
 }
 }
@@ -47,7 +47,7 @@
 "input_size": 210,
 "hidden_size": 300,
 "num_layers": 2,
-"dropout": 0.5,
+"dropout": 0.0,
 "bidirectional": true
 },
 "regularizer": [
2 changes: 1 addition & 1 deletion allennlp/tests/fixtures/dialog_qa/experiment.json
@@ -34,7 +34,7 @@
 "num_filters": 10,
 "ngram_filter_sizes": [5]
 },
-"dropout": 0.2
+"dropout": 0.0
 }
 }
 },
2 changes: 1 addition & 1 deletion allennlp/tests/fixtures/esim/experiment.json
@@ -50,7 +50,7 @@
 "num_layers": 1,
 "hidden_dims": 5,
 "activations": "relu",
-"dropout": 0.5
+"dropout": 0.0
 },
 "output_logit": {
 "input_dim": 5,
2 changes: 1 addition & 1 deletion allennlp/tests/fixtures/event2mind/experiment.json
@@ -31,7 +31,7 @@
 }
 }
 },
-"embedding_dropout": 0.2,
+"embedding_dropout": 0.0,
 "encoder": {
 "type": "gru",
 "input_size": 10,
10 changes: 5 additions & 5 deletions allennlp/tests/fixtures/naqanet/experiment.json
@@ -71,8 +71,8 @@
 "num_convs_per_block": 4,
 "conv_kernel_size": 7,
 "num_attention_heads": 6,
-"dropout_prob": 0.1,
-"layer_dropout_undecayed_prob": 0.1,
+"dropout_prob": 0.0,
+"layer_dropout_undecayed_prob": 0.0,
 "attention_dropout_prob": 0
 },
 "matrix_attention_layer": {
@@ -91,11 +91,11 @@
 "num_convs_per_block": 2,
 "conv_kernel_size": 5,
 "num_attention_heads": 6,
-"dropout_prob": 0.1,
-"layer_dropout_undecayed_prob": 0.1,
+"dropout_prob": 0.0,
+"layer_dropout_undecayed_prob": 0.0,
 "attention_dropout_prob": 0
 },
-"dropout_prob": 0.1,
+"dropout_prob": 0.0,
 "regularizer": [
 [
 ".*",
10 changes: 5 additions & 5 deletions allennlp/tests/fixtures/qanet/experiment.json
@@ -68,8 +68,8 @@
 "num_convs_per_block": 2,
 "conv_kernel_size": 2,
 "num_attention_heads": 4,
-"dropout_prob": 0.1,
-"layer_dropout_undecayed_prob": 0.1,
+"dropout_prob": 0.0,
+"layer_dropout_undecayed_prob": 0.0,
 "attention_dropout_prob": 0
 },
 "matrix_attention_layer": {
@@ -88,11 +88,11 @@
 "num_convs_per_block": 2,
 "conv_kernel_size": 5,
 "num_attention_heads": 4,
-"dropout_prob": 0.1,
-"layer_dropout_undecayed_prob": 0.1,
+"dropout_prob": 0.0,
+"layer_dropout_undecayed_prob": 0.0,
 "attention_dropout_prob": 0
 },
-"dropout_prob": 0.1,
+"dropout_prob": 0.0,
 "regularizer": [
 [
 ".*",
@@ -42,7 +42,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -44,7 +44,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -35,7 +35,7 @@
 "options_file": "allennlp/tests/fixtures/elmo/options.json",
 "weight_file": "allennlp/tests/fixtures/elmo/lm_weights.hdf5",
 "do_layer_norm": false,
-"dropout": 0.5
+"dropout": 0.0
 }
 }
 },
@@ -52,7 +52,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -44,7 +44,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -42,7 +42,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -44,7 +44,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -42,7 +42,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -46,7 +46,7 @@
 },
 "max_decoding_steps": 30,
 "attention": {"type": "dot_product"},
-"dropout": 0.2
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -30,7 +30,7 @@
 },
 "max_decoding_steps": 10,
 "input_attention": {"type": "dot_product"},
-"dropout": 0.5
+"dropout": 0.0
 },
 "iterator": {
 "type": "basic",
@@ -24,7 +24,7 @@
 "options_file": "allennlp/tests/fixtures/elmo/options.json",
 "weight_file": "allennlp/tests/fixtures/elmo/lm_weights.hdf5",
 "do_layer_norm": false,
-"dropout": 0.5
+"dropout": 0.0
 }
 }
 },
@@ -24,7 +24,7 @@
 "num_layers": 3,
 "hidden_dims": [5, 2, 1],
 "activations": ["relu", "sigmoid", "sigmoid"],
-"dropout": [0.2, 0.0, 0.0]
+"dropout": [0.0, 0.0, 0.0]
 },
 "encoder": {
 "type": "lstm",
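All 26 changed files are config-only edits. As a hypothetical sanity check (again, not part of the commit), the plain-JSON fixtures could be scanned for any dropout-style setting that is still nonzero; the key names below are the ones that appear in this diff:

import glob
import json

# Hypothetical check, not code from this repository. It only handles plain-JSON
# fixtures; Jsonnet fixtures (e.g. ones using "local ..." bindings) would need a
# Jsonnet parser instead of json.load.
DROPOUT_KEYS = {
    "dropout", "embedding_dropout", "integrator_dropout",
    "dropout_prob", "layer_dropout_undecayed_prob", "attention_dropout_prob",
}

def nonzero_dropouts(node, path=""):
    # Recursively yield (json_path, value) for any dropout setting that is not 0.
    if isinstance(node, dict):
        for key, value in node.items():
            child = f"{path}.{key}" if path else key
            if key in DROPOUT_KEYS:
                values = value if isinstance(value, list) else [value]
                if any(values):
                    yield child, value
            else:
                yield from nonzero_dropouts(value, child)
    elif isinstance(node, list):
        for index, item in enumerate(node):
            yield from nonzero_dropouts(item, f"{path}[{index}]")

for filename in sorted(glob.glob("allennlp/tests/fixtures/**/*.json", recursive=True)):
    with open(filename) as handle:
        try:
            config = json.load(handle)
        except json.JSONDecodeError:
            continue  # skip fixtures that are not plain JSON
    for json_path, value in nonzero_dropouts(config):
        print(f"{filename}: {json_path} = {value}")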
