czczup committed
Commit
b6dd0e1
1 Parent(s): a514d8f

Upload folder using huggingface_hub

configuration_internvl_chat.py CHANGED
@@ -26,7 +26,6 @@ class InternVLChatConfig(PretrainedConfig):
  llm_config=None,
  use_backbone_lora=0,
  use_llm_lora=0,
- pad2square=False,
  select_layer=-1,
  force_image_size=None,
  downsample_ratio=0.5,
@@ -56,7 +55,6 @@ class InternVLChatConfig(PretrainedConfig):
  raise ValueError('Unsupported architecture: {}'.format(llm_config['architectures'][0]))
  self.use_backbone_lora = use_backbone_lora
  self.use_llm_lora = use_llm_lora
- self.pad2square = pad2square
  self.select_layer = select_layer
  self.force_image_size = force_image_size
  self.downsample_ratio = downsample_ratio
@@ -85,7 +83,6 @@ class InternVLChatConfig(PretrainedConfig):
  output['model_type'] = self.__class__.model_type
  output['use_backbone_lora'] = self.use_backbone_lora
  output['use_llm_lora'] = self.use_llm_lora
- output['pad2square'] = self.pad2square
  output['select_layer'] = self.select_layer
  output['force_image_size'] = self.force_image_size
  output['downsample_ratio'] = self.downsample_ratio
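
For context on the interface change: the three hunks above drop pad2square from the constructor signature, from the instance attributes, and from the dict returned by to_dict(). A minimal usage sketch of loading the updated config through transformers follows; the repo id is a placeholder, not part of this commit, and only fields visible in the hunks above are shown.

from transformers import AutoConfig

# Placeholder repo id; substitute the model repository this commit was pushed to.
config = AutoConfig.from_pretrained('<org>/<model-repo>', trust_remote_code=True)

# Fields kept by this commit remain available as attributes.
print(config.select_layer, config.force_image_size, config.downsample_ratio)

# pad2square is no longer a declared field; it would only appear if the
# checkpoint's config.json itself still carried a stale entry.
print('pad2square' in config.to_dict())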
conversation.py CHANGED
@@ -2,7 +2,7 @@
  Conversation prompt templates.

  We kindly request that you import fastchat instead of copying this file if you wish to use it.
- If you have any changes in mind, please contribute back so the community can benefit collectively and continue to maintain these valuable templates.
+ If you have changes in mind, please contribute back so the community can benefit collectively and continue to maintain these valuable templates.
  """

  import dataclasses
@@ -335,6 +335,8 @@ register_conv_template(
  Conversation(
  name='Hermes-2',
  system_template='<|im_start|>system\n{system_message}',
+ # note: The new system prompt was not used here to avoid changes in benchmark performance.
+ # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态基础模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
  system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
  roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
  sep_style=SeparatorStyle.MPT,
@@ -354,6 +356,8 @@ register_conv_template(
  Conversation(
  name='internlm2-chat',
  system_template='<|im_start|>system\n{system_message}',
+ # note: The new system prompt was not used here to avoid changes in benchmark performance.
+ # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态基础模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
  system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
  roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
  sep_style=SeparatorStyle.MPT,
@@ -371,6 +375,8 @@ register_conv_template(
  Conversation(
  name='phi3-chat',
  system_template='<|system|>\n{system_message}',
+ # note: The new system prompt was not used here to avoid changes in benchmark performance.
+ # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态基础模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
  system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
  roles=('<|user|>\n', '<|assistant|>\n'),
  sep_style=SeparatorStyle.MPT,
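
For readers unfamiliar with these templates: the commit only adds commented-out alternative system prompts, so the prompts produced by the registered templates are unchanged. A minimal usage sketch, assuming this copy of conversation.py keeps FastChat's get_conv_template() helper alongside the register_conv_template() calls shown above:

from conversation import get_conv_template

conv = get_conv_template('internlm2-chat')                  # one of the templates touched above
conv.append_message(conv.roles[0], 'Describe the image.')   # user turn
conv.append_message(conv.roles[1], None)                    # leave the assistant turn open
prompt = conv.get_prompt()                                  # still uses the original system_message
print(prompt)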