Skip to content

Commit 454d7b7

Browse files
committed
fix review comment: for LLM_ARCH_MODERN_BERT, stop hard-coding rope_freq_base_train, rope_freq_base_train_swa, and n_swa; read n_swa from GGUF metadata (LLM_KV_ATTENTION_SLIDING_WINDOW) instead
1 parent 333eeed commit 454d7b7

File tree

1 file changed

+1
-4
lines changed

1 file changed

+1
-4
lines changed

src/llama-model.cpp

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -714,13 +714,10 @@ void llama_model::load_hparams(llama_model_loader & ml) {
714714
} break;
715715
case LLM_ARCH_MODERN_BERT:
716716
{
717-
hparams.rope_freq_base_train = 160000.0f;
718-
hparams.rope_freq_base_train_swa = 10000.0f;
719-
hparams.n_swa = 128;
720-
721717
hparams.swa_type = LLAMA_SWA_TYPE_SYMMETRIC;
722718
hparams.set_swa_pattern(3, 0);
723719

720+
ml.get_key(LLM_KV_ATTENTION_SLIDING_WINDOW, hparams.n_swa);
724721
ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps);
725722
ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn);
726723
ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type, false);

0 commit comments

Comments
 (0)