diff --git a/modules/llm/Mixtral-8x7b/Mixtral-8x7b_fine_tune.py b/modules/llm/Mixtral-8x7b/Mixtral-8x7b_fine_tune.py
index 464ca64ac685e5a4830809cc004457dbfeae518f..99533c54e5b0535004bd292e8179c471d9d0391e 100644
--- a/modules/llm/Mixtral-8x7b/Mixtral-8x7b_fine_tune.py
+++ b/modules/llm/Mixtral-8x7b/Mixtral-8x7b_fine_tune.py
@@ -93,7 +93,7 @@ def generate(base_model, new_model):
     model = PeftModel.from_pretrained(base_model_reload, new_model)
     model = model.merge_and_unload()
 
-    tokenizer = transformers.AutoTokenizer.from_pretrained(new_model)
+    tokenizer = transformers.AutoTokenizer.from_pretrained(base_model)
     generate_text = transformers.pipeline(
         model=model,
         tokenizer=tokenizer,
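
The change loads the tokenizer from the base checkpoint rather than from the fine-tuned output directory, since a LoRA/PEFT adapter directory typically does not include tokenizer files. A minimal sketch of the merge-then-generate flow this hunk touches is below; the checkpoint name and adapter path are hypothetical placeholders, not values from the repository.

```python
import transformers
from peft import PeftModel

base_model = "mistralai/Mixtral-8x7B-v0.1"  # hypothetical base checkpoint
new_model = "./mixtral-8x7b-lora"           # hypothetical adapter output dir

# Reload the base weights, then attach the LoRA adapter and fold it in.
base_model_reload = transformers.AutoModelForCausalLM.from_pretrained(
    base_model,
    torch_dtype="auto",
    device_map="auto",
)
model = PeftModel.from_pretrained(base_model_reload, new_model)
model = model.merge_and_unload()

# Load the tokenizer from the base checkpoint (the point of the diff above):
# the adapter directory may not contain tokenizer files at all.
tokenizer = transformers.AutoTokenizer.from_pretrained(base_model)

generate_text = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)
print(generate_text("Hello, Mixtral!", max_new_tokens=32)[0]["generated_text"])
```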