From 07a59bd212963bdb3dbb3430dab121410c9d7f2f Mon Sep 17 00:00:00 2001
From: Faria Huq
Date: Sat, 25 May 2024 20:14:48 -0400
Subject: [PATCH] Fix LoRA config error for Llama 3

The current yml code throws an error:
ValueError: Please set lora_modules_to_save to [`embed_tokens`, `lm_head`] when using an adapter and changing the special tokens.
I added the required changes to resolve it.
---
 examples/llama-3/lora-8b.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/examples/llama-3/lora-8b.yml b/examples/llama-3/lora-8b.yml
index 6b0ebaed86..cd21effb9a 100644
--- a/examples/llama-3/lora-8b.yml
+++ b/examples/llama-3/lora-8b.yml
@@ -24,6 +24,9 @@ lora_alpha: 16
 lora_dropout: 0.05
 lora_target_linear: true
 lora_fan_in_fan_out:
+lora_modules_to_save:
+  - embed_tokens
+  - lm_head
 wandb_project:
 wandb_entity: