Fix architecture
config.json (+2 -2)
@@ -2,7 +2,7 @@
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
   "architectures": [
-    "
+    "BloomForCausalLM"
   ],
   "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,
@@ -27,4 +27,4 @@
   "transformers_version": "4.20.0",
   "use_cache": true,
   "vocab_size": 250880
-}
+}
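For context, the "architectures" field is what the transformers auto-loaders use to resolve the concrete model class when a checkpoint is loaded without an explicit class, so an empty or truncated entry breaks loading. A minimal sketch of that resolution, assuming this config belongs to the bigscience/bloom checkpoint (the repo id is an assumption; the diff does not name the repository, but vocab_size 250880 and BloomForCausalLM match BLOOM):

import transformers
from transformers import AutoConfig

# Repo id is an assumption; only config.json is fetched here.
config = AutoConfig.from_pretrained("bigscience/bloom")
print(config.architectures)  # ["BloomForCausalLM"] after this fix

# Auto-loaders such as pipeline() resolve the class by name from the
# transformers namespace; an empty string here would make that lookup fail.
model_class = getattr(transformers, config.architectures[0])
assert model_class is transformers.BloomForCausalLM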