type: pharia
attention_bias: true
attention_dropout: 0.0
eos_token_id: 0
bos_token_id: 127179
pad_token_id: 1
hidden_act: gelu
hidden_size: 64
initializer_range: 0.02
intermediate_size: 128
max_position_embeddings: 2048
mlp_bias: true
num_attention_heads: 4
num_hidden_layers: 4
num_key_value_heads: 4
rope_scaling: null
rope_theta: 1000000
tie_word_embeddings: false
use_cache: true
context_length: 2048
vocab_size: 178
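
# For reference, a minimal Python sketch of how the fields above could be
# loaded and sanity-checked. The file name "pharia_config.yaml", the use of
# PyYAML, and the consistency checks are assumptions for illustration, not
# part of the original config. Kept as comments so this file remains valid YAML.
#
#   import yaml
#
#   with open("pharia_config.yaml") as f:  # hypothetical file name
#       cfg = yaml.safe_load(f)
#
#   # Derived per-head dimension: hidden_size split across attention heads.
#   head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 64 // 4 = 16
#
#   # Assumed shape-consistency checks for the attention parameters.
#   assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
#   assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0
#   assert cfg["context_length"] <= cfg["max_position_embeddings"]
#
#   print(f"layers={cfg['num_hidden_layers']}, head_dim={head_dim}")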