remove pooled_projection_dim
#76
by dxqb - opened
transformer/config.json  +1 -2

@@ -13,6 +13,5 @@
   "num_attention_heads": 24,
   "num_layers": 60,
   "out_channels": 16,
-  "patch_size": 2,
-  "pooled_projection_dim": 768
+  "patch_size": 2
 }
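For anyone carrying a local copy of the checkpoint, a minimal sketch of applying the same edit with only the Python standard library (the path transformer/config.json is assumed to be relative to the model directory; adjust as needed):

import json
from pathlib import Path

# Assumed location of the config inside a local copy of the checkpoint.
config_path = Path("transformer/config.json")

config = json.loads(config_path.read_text())

# Drop the key this PR removes; pop() with a default keeps this idempotent
# if the key is already gone.
config.pop("pooled_projection_dim", None)

config_path.write_text(json.dumps(config, indent=2) + "\n")

Removing the key from the dict also drops the trailing comma on "patch_size" when the JSON is re-serialized, matching the two-line removal / one-line addition in the diff above.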