Commit: fixed bug
File changed: app_modules/llm_inference.py
```diff
@@ -80,7 +80,11 @@ class LLMInference(metaclass=abc.ABCMeta):
         if self.llm_loader.streamer is not None and isinstance(
             self.llm_loader.streamer, TextIteratorStreamer
         ):
-            count =
+            count = (
+                2
+                if "chat_history" in inputs and len(inputs.get("chat_history")) > 0
+                else 1
+            )

         while count > 0:
             try:
```

(Note: the tail of the removed `count =` line was truncated in the scraped page and could not be recovered; the hunk header's line counts — 7 old lines, 11 new — confirm exactly one line was removed and five were added.)