config.json 570 B

{
  "architectures": [
    "BertForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "transformers_version": "4.6.0.dev0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
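
A minimal sketch of consuming this config with the Hugging Face transformers library; the local directory path is an assumption for illustration, while BertConfig and BertForMaskedLM are the library classes matching the "architectures" and "model_type" fields above:

# Assumes config.json sits in ./bert-base-uncased/ (hypothetical local path).
from transformers import BertConfig, BertForMaskedLM

# Parse config.json into a BertConfig object.
config = BertConfig.from_pretrained("./bert-base-uncased")

# Build a randomly initialized BertForMaskedLM with this architecture
# (12 layers, 12 attention heads, hidden size 768, vocab size 30522).
model = BertForMaskedLM(config)

Loading via the config alone gives untrained weights; to reuse pretrained parameters one would instead call BertForMaskedLM.from_pretrained on a checkpoint directory that also contains the model weights.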