diff --git a/README.md b/README.md
index 083b0bb4..e62a9d47 100644
--- a/README.md
+++ b/README.md
@@ -169,7 +169,6 @@ Compared to ChatGLM's [P-Tuning](https://github.com/THUDM/ChatGLM2-6B/tree/main/
 | [ChatGLM3](https://huggingface.co/THUDM) | 6B | chatglm3 |
 | [Command R](https://huggingface.co/CohereForAI) | 35B/104B | cohere |
 | [DeepSeek (Code/MoE)](https://huggingface.co/deepseek-ai) | 7B/16B/67B/236B | deepseek |
-| [Exaone3.0](https://huggingface.co/LGAI-EXAONE) | 7.8B | exaone |
 | [Falcon](https://huggingface.co/tiiuae) | 7B/11B/40B/180B | falcon |
 | [Gemma/Gemma 2/CodeGemma](https://huggingface.co/google) | 2B/7B/9B/27B | gemma |
 | [GLM-4](https://huggingface.co/THUDM) | 9B | glm4 |
diff --git a/README_zh.md b/README_zh.md
index c3d1e6f8..b5da9785 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -170,7 +170,6 @@ https://github.com/user-attachments/assets/e6ce34b0-52d5-4f3e-a830-592106c4c272
 | [ChatGLM3](https://huggingface.co/THUDM) | 6B | chatglm3 |
 | [Command R](https://huggingface.co/CohereForAI) | 35B/104B | cohere |
 | [DeepSeek (Code/MoE)](https://huggingface.co/deepseek-ai) | 7B/16B/67B/236B | deepseek |
-| [Exaone3.0](https://huggingface.co/LGAI-EXAONE) | 7.8B | exaone |
 | [Falcon](https://huggingface.co/tiiuae) | 7B/11B/40B/180B | falcon |
 | [Gemma/Gemma 2/CodeGemma](https://huggingface.co/google) | 2B/7B/9B/27B | gemma |
 | [GLM-4](https://huggingface.co/THUDM) | 9B | glm4 |
diff --git a/src/llamafactory/data/template.py b/src/llamafactory/data/template.py
index 08988c1c..e7539019 100644
--- a/src/llamafactory/data/template.py
+++ b/src/llamafactory/data/template.py
@@ -648,10 +648,8 @@ _register_template(
     name="exaone",
     format_user=StringFormatter(slots=["[|user|]{{content}}\n[|assistant|]"]),
     format_system=StringFormatter(slots=["[|system|]{{content}}[|endofturn|]\n"]),
-    format_assistant=StringFormatter(slots=["{{content}}[|endofturn|]\n"]),
     format_separator=EmptyFormatter(slots=["\n"]),
     format_prefix=EmptyFormatter(slots=[""]),
-    stop_words=["[|endofturn|]"],
     efficient_eos=True,
 )