config.json

| Change    | Lines | Words | Total words | Words (%) | File lines |
|-----------|-------|-------|-------------|-----------|------------|
| Removals  | 1     | 1     | 54          | 1.85      | 29         |
| Additions | 2     | 3     | 56          | 5.36      | 30         |
  {
    "architectures": [
      "MixtralForCausalLM"
    ],
    "attention_dropout": 0.0,
    "bos_token_id": 1,
    "eos_token_id": 2,
    "hidden_act": "silu",
    "hidden_size": 6144,
    "initializer_range": 0.02,
    "intermediate_size": 16384,
    "max_position_embeddings": 65536,
    "model_type": "mixtral",
    "num_attention_heads": 48,
    "num_experts_per_tok": 2,
    "num_hidden_layers": 56,
    "num_key_value_heads": 8,
    "num_local_experts": 8,
    "output_router_logits": false,
    "rms_norm_eps": 1e-05,
    "rope_theta": 1000000,
    "router_aux_loss_coef": 0.001,
+   "router_jitter_noise": 0.0,
    "sliding_window": null,
    "tie_word_embeddings": false,
    "torch_dtype": "bfloat16",
-   "transformers_version": "4.38.0",
+   "transformers_version": "4.40.0.dev0",
    "use_cache": true,
    "vocab_size": 32000
  }
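The two additions go together: `router_jitter_noise` is a field read by `MixtralConfig` in recent transformers releases, which matches the bump of `transformers_version` to 4.40.0.dev0. A minimal sketch of loading the updated file, assuming transformers >= 4.40 is installed and the file above is saved locally (the path is illustrative, not part of the diff):

```python
# Minimal sketch: read the updated config.json with transformers.
# Assumptions: transformers >= 4.40 installed; file saved as ./config.json.
from transformers import MixtralConfig

config = MixtralConfig.from_json_file("./config.json")

print(config.num_local_experts)    # 8 experts per MoE layer
print(config.num_experts_per_tok)  # 2 experts routed per token
print(config.router_jitter_noise)  # 0.0, the key added in this diff
print(config.sliding_window)       # None, i.e. no sliding-window attention
```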