thincal committed on
Commit 6ee4f5a
1 Parent(s): 658202d

feat: stream block ended with newline


The SSE wrapper needs to know the stream separator, so each streamed response block now ends with a newline.

Files changed (1)
  1. modeling_chatglm.py +1 -1
modeling_chatglm.py CHANGED
@@ -1313,7 +1313,7 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
             response = tokenizer.decode(outputs)
             response = self.process_response(response)
             new_history = history + [(query, response)]
-            yield response, new_history
+            yield response + "\n", new_history
 
     @torch.no_grad()
     def stream_generate(
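
Below is a minimal sketch, not part of this commit, of how a downstream SSE wrapper could rely on the trailing "\n" added by this change as the separator that marks the end of a streamed block. It assumes the changed yield sits in stream_chat (as the surrounding code suggests), that the model is loaded via transformers with trust_remote_code, and a hypothetical FastAPI /chat endpoint; the repo id is a placeholder.

# Sketch only: hypothetical SSE wrapper around stream_chat.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from transformers import AutoModel, AutoTokenizer

MODEL_REPO = "THUDM/chatglm-6b"  # placeholder; point at the repo carrying this change

tokenizer = AutoTokenizer.from_pretrained(MODEL_REPO, trust_remote_code=True)
model = AutoModel.from_pretrained(MODEL_REPO, trust_remote_code=True).half().cuda().eval()

app = FastAPI()

def sse_stream(query: str):
    # stream_chat yields (response, history) pairs; with this change each
    # response ends with "\n", so the wrapper knows where a block ends.
    for response, _history in model.stream_chat(tokenizer, query, history=[]):
        block = response.rstrip("\n")  # drop the separator itself
        yield f"data: {block}\n\n"     # re-frame the block as one SSE event
    yield "data: [DONE]\n\n"

@app.get("/chat")
def chat(query: str):
    return StreamingResponse(sse_stream(query), media_type="text/event-stream")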