@@ -6371,6 +6371,8 @@ void llama_model::print_info() const {
         LLAMA_LOG_INFO("%s: n_ff             = %s\n",     __func__, print_f([&](uint32_t il) { return hparams.n_ff(il); }, hparams.n_layer).c_str());
         LLAMA_LOG_INFO("%s: n_expert         = %u\n",     __func__, hparams.n_expert);
         LLAMA_LOG_INFO("%s: n_expert_used    = %u\n",     __func__, hparams.n_expert_used);
+        LLAMA_LOG_INFO("%s: n_expert_groups  = %d\n",     __func__, hparams.n_expert_groups);
+        LLAMA_LOG_INFO("%s: n_group_used     = %d\n",     __func__, hparams.n_group_used);
         LLAMA_LOG_INFO("%s: causal attn      = %d\n",     __func__, hparams.causal_attn);
         LLAMA_LOG_INFO("%s: pooling type     = %d\n",     __func__, hparams.pooling_type);
         LLAMA_LOG_INFO("%s: rope type        = %d\n",     __func__, hparams.rope_type);
@@ -6471,8 +6473,6 @@ void llama_model::print_info() const {
         LLAMA_LOG_INFO("%s: n_ff_exp             = %d\n",     __func__, hparams.n_ff_exp);
         LLAMA_LOG_INFO("%s: n_ff_shexp           = %d\n",     __func__, hparams.n_ff_shexp);
         LLAMA_LOG_INFO("%s: n_expert_shared      = %d\n",     __func__, hparams.n_expert_shared);
-        LLAMA_LOG_INFO("%s: n_expert_groups      = %d\n",     __func__, hparams.n_expert_groups);
-        LLAMA_LOG_INFO("%s: n_group_used         = %d\n",     __func__, hparams.n_group_used);
         LLAMA_LOG_INFO("%s: expert_weights_scale = %.1f\n",   __func__, hparams.expert_weights_scale);
         LLAMA_LOG_INFO("%s: expert_weights_norm  = %d\n",     __func__, hparams.expert_weights_norm);
         LLAMA_LOG_INFO("%s: expert_gating_func   = %s\n",     __func__, llama_expert_gating_func_name((llama_expert_gating_func_type) hparams.expert_gating_func));
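Net effect of the two hunks: the `n_expert_groups` / `n_group_used` log lines move out of the architecture-specific block and into the general hparams section of `print_info()`, so they are printed for every model. A minimal standalone sketch of what the two added lines emit, assuming `LLAMA_LOG_INFO` forwards printf-style arguments (the macro stub and the hparams values below are hypothetical, chosen only to illustrate the output format; they are not the actual llama.cpp definitions):

    // Sketch: stub LLAMA_LOG_INFO with printf to show the log format.
    #include <cstdio>
    #include <cstdint>

    #define LLAMA_LOG_INFO(...) std::printf(__VA_ARGS__)

    int main() {
        uint32_t n_expert_groups = 8; // hypothetical: experts split into 8 groups
        uint32_t n_group_used    = 4; // hypothetical: router draws experts from 4 groups

        LLAMA_LOG_INFO("%s: n_expert_groups  = %d\n", __func__, n_expert_groups);
        LLAMA_LOG_INFO("%s: n_group_used     = %d\n", __func__, n_group_used);
        // prints:
        // main: n_expert_groups  = 8
        // main: n_group_used     = 4
    }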