config.json

{
  "_name_or_path": "meta-llama/Llama-3.2-3B-Instruct",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": 128009,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 24,
  "num_hidden_layers": 28,
  "num_key_value_heads": 8,
  "pad_token_id": 128004,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "vocab_size": 128256
}
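
A minimal sketch of how this file is typically consumed: load it with the transformers library and inspect a few derived quantities (attention geometry, grouped-query attention, RoPE scaling). This assumes transformers is installed and that config.json sits in the current working directory; it is illustrative, not an official loading recipe for this repository.

    # Load the config above and sanity-check its attention geometry.
    from transformers import LlamaConfig

    config = LlamaConfig.from_json_file("config.json")

    # 24 query heads of width 128 make up the 3072-dim hidden state.
    assert config.num_attention_heads * config.head_dim == config.hidden_size

    # Grouped-query attention: 24 query heads share 8 KV heads,
    # i.e. 3 query heads per KV head.
    print("query heads per KV head:",
          config.num_attention_heads // config.num_key_value_heads)

    # The "llama3" RoPE scaling rescales the 8192-token pretraining window
    # so the model can be run up to max_position_embeddings tokens.
    print("original window:", config.rope_scaling["original_max_position_embeddings"])
    print("extended context:", config.max_position_embeddings)

The same directory can be passed to AutoModelForCausalLM.from_pretrained once the weight shards are present; the config alone only describes the architecture, it does not contain parameters.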