feat: stream block ended with newline

#56
by thincal
Files changed (1)
  1. modeling_chatglm.py +1 -1
modeling_chatglm.py CHANGED
@@ -1313,7 +1313,7 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
             response = tokenizer.decode(outputs)
             response = self.process_response(response)
             new_history = history + [(query, response)]
-            yield response, new_history
+            yield response + "\n", new_history
 
     @torch.no_grad()
     def stream_generate(
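
For context, the patch only changes how each streamed snapshot terminates: stream_chat still yields (response, new_history) tuples where response is the full text generated so far, but that text now ends with "\n". A minimal usage sketch of the effect, assuming the THUDM/chatglm-6b checkpoint and the usual transformers loading pattern (both are assumptions for illustration, not part of this change):

from transformers import AutoModel, AutoTokenizer

# Assumed setup; any ChatGLM checkpoint that ships stream_chat behaves the same way.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
model = model.eval()

history = []
for response, history in model.stream_chat(tokenizer, "Hello", history=history):
    # Each yielded `response` is the cumulative text generated so far;
    # with this patch it is newline-terminated.
    pass

print(response, end="")  # already ends with "\n", so no extra newline is needed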