Commit 3bb233a
Parent(s): e4c6e65

Change attention_probs_dropout_prob to 0.1 so that triton FlashAttention dependencies are avoided

Files changed: config.json (+1 -1)
config.json CHANGED

@@ -4,7 +4,7 @@
   "architectures": [
     "BertForMaskedLM"
   ],
-  "attention_probs_dropout_prob": 0.0,
+  "attention_probs_dropout_prob": 0.1,
   "auto_map": {
     "AutoConfig": "configuration_bert.BertConfig",
     "AutoModelForMaskedLM": "bert_layers.BertForMaskedLM"
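For reference, a minimal sketch of applying the same override at load time instead of editing config.json, using the standard transformers `from_pretrained` API. The repo id below is a placeholder, not the actual checkpoint name; the assumption is that a non-zero attention_probs_dropout_prob steers the repo's custom bert_layers attention code away from the Triton FlashAttention path, so the triton package is not needed.

```python
from transformers import AutoConfig, AutoModelForMaskedLM

# Placeholder repo id; substitute the checkpoint this config.json belongs to.
repo_id = "your-org/your-mosaic-bert-checkpoint"

# Override the dropout at load time rather than committing a config change.
# A non-zero attention_probs_dropout_prob keeps the custom attention code
# off the Triton FlashAttention path (assumption based on the commit message).
config = AutoConfig.from_pretrained(
    repo_id,
    attention_probs_dropout_prob=0.1,
    trust_remote_code=True,  # auto_map points at code shipped with the repo
)
model = AutoModelForMaskedLM.from_pretrained(
    repo_id,
    config=config,
    trust_remote_code=True,
)
```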