bluelike committed on
Commit
19a0f75
1 Parent(s): e64b09e

Update modeling_qwen.py

Browse files
Files changed (1) hide show
  1. modeling_qwen.py +2 -2
modeling_qwen.py CHANGED
@@ -56,9 +56,9 @@ QWen_PRETRAINED_MODEL_ARCHIVE_LIST = ["qwen-7b"]
56
 
57
  _ERROR_BAD_CHAT_FORMAT = """\
58
  We detect you are probably using the pretrained model (rather than chat model) for chatting, since the chat_format in generation_config is not "chatml".
59
- If you are directly using the model downloaded from Huggingface, please make sure you are using our "Qwen/Qwen-7B-Chat" Huggingface model (rather than "Qwen/Qwen-7B") when you call model.chat().
60
  我们检测到您可能在使用预训练模型(而非chat模型)进行多轮chat,因为您当前在generation_config指定的chat_format,并未设置为我们在对话中所支持的"chatml"格式。
61
- 如果您在直接使用我们从Huggingface提供的模型,请确保您在调用model.chat()时,使用的是"Qwen/Qwen-7B-Chat"模型(而非"Qwen/Qwen-7B"预训练模型)。
62
  """
63
 
64
  _SENTINEL = object()
 
56
 
57
  _ERROR_BAD_CHAT_FORMAT = """\
58
  We detect you are probably using the pretrained model (rather than chat model) for chatting, since the chat_format in generation_config is not "chatml".
59
+ If you are directly using the model downloaded from Huggingface, please make sure you are using our "Qwen/Qwen-VL-Chat" Huggingface model (rather than "Qwen/Qwen-VL") when you call model.chat().
60
  我们检测到您可能在使用预训练模型(而非chat模型)进行多轮chat,因为您当前在generation_config指定的chat_format,并未设置为我们在对话中所支持的"chatml"格式。
61
+ 如果您在直接使用我们从Huggingface提供的模型,请确保您在调用model.chat()时,使用的是"Qwen/Qwen-VL-Chat"模型(而非"Qwen/Qwen-VL"预训练模型)。
62
  """
63
 
64
  _SENTINEL = object()