Show the URLs in newbing's replies
request_llm/bridge_newbing.py  CHANGED  (+94 -40)
@@ -16,7 +16,6 @@ import ssl
 import sys
 import uuid
 from enum import Enum
-from pathlib import Path
 from typing import Generator
 from typing import Literal
 from typing import Optional
@@ -354,7 +353,7 @@ class Chatbot:
     async def ask(
         self,
         prompt: str,
-        wss_link: str
+        wss_link: str,
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         options: dict = None,
     ) -> dict:
@@ -375,7 +374,7 @@ class Chatbot:
     async def ask_stream(
         self,
         prompt: str,
-        wss_link: str
+        wss_link: str,
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         raw: bool = False,
         options: dict = None,
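Both `ask` and `ask_stream` now declare `wss_link` with a trailing comma ahead of the optional style/option parameters; the worker below fills it from `model_info['newbing']['endpoint']`. A call-shape sketch only (the style values and ChatHub URL come from the diff's own comments; `bot` is assumed to be a `Chatbot` instance from this file):

```python
# Illustrative only -- mirrors how async_run() invokes ask_stream() later in this diff.
async def demo(bot):
    async for final, response in bot.ask_stream(
        prompt="Hello",
        conversation_style="balanced",                     # one of ["creative", "balanced", "precise"]
        wss_link="wss://sydney.bing.com/sydney/ChatHub",   # endpoint from model_info['newbing']
    ):
        if final:
            break
```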
@@ -403,7 +402,7 @@ class Chatbot:
         Reset the conversation
         """
         await self.close()
-        self.chat_hub = _ChatHub(_Conversation(self.cookies))
+        self.chat_hub = _ChatHub(_Conversation(self.cookies, self.proxy))



@@ -411,13 +410,14 @@ load_message = ""

 """
 ========================================================================
-第二部分:子进程Worker
+第二部分:子进程Worker(调用主体)
 ========================================================================
 """
 import time
 import importlib
 from toolbox import update_ui, get_conf, trimmed_format_exc
 from multiprocessing import Process, Pipe
+
 class GetNewBingHandle(Process):
     def __init__(self):
         super().__init__(daemon=True)
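The retitled section "第二部分:子进程Worker(调用主体)" (Part 2: the subprocess worker, i.e. the code that actually drives NewBing) is built on `multiprocessing.Process` plus a `Pipe`: the Gradio side keeps one end, the daemon subprocess keeps the other, and reply fragments stream back until a `'[Finish]'` sentinel. A minimal, self-contained sketch of that protocol (class and message names here are illustrative, not from the repo):

```python
from multiprocessing import Process, Pipe

class WorkerSketch(Process):
    """Toy version of the parent/child Pipe protocol used by GetNewBingHandle."""
    def __init__(self):
        super().__init__(daemon=True)
        self.parent, self.child = Pipe()    # parent end used in the main process, child end in the worker
        self.start()

    def run(self):                          # runs in the subprocess
        while True:
            kwargs = self.child.recv()      # block until a request arrives
            for fragment in ("partial ", "partial answer"):
                self.child.send(fragment)   # stream pieces back as they are produced
            self.child.send('[Finish]')     # sentinel: this request is done

    def stream_chat(self, **kwargs):        # runs in the main process
        self.parent.send(kwargs)
        while True:
            res = self.parent.recv()
            if res == '[Finish]':
                break
            yield res

if __name__ == '__main__':
    handle = WorkerSketch()
    for piece in handle.stream_chat(query="hi"):
        print(piece)
```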
@@ -431,7 +431,8 @@ class GetNewBingHandle(Process):

     def check_dependency(self):
         try:
-
+            import rich
+            self.info = "依赖检测通过,等待NewBing响应。注意目前不能多人同时调用NewBing接口,否则将导致每个人的NewBing问询历史互相渗透。调用NewBing时,会自动使用已配置的代理。"
             self.success = True
         except:
             self.info = "缺少的依赖,如果要使用Newbing,除了基础的pip依赖以外,您还需要运行`pip install -r request_llm/requirements_newbing.txt`安装Newbing的依赖。"
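`check_dependency` now probes for the extra requirements simply by importing `rich`; on success the new `self.info` string says, roughly, that the dependency check passed, that concurrent users currently share one NewBing session (so their query histories would bleed into each other), and that the configured proxy is used automatically. The probe-by-import idiom as a standalone sketch (function name and messages are illustrative):

```python
def check_newbing_dependency():
    """Return (ok, info) by attempting a representative import, as check_dependency does."""
    try:
        import rich  # noqa: F401  # a package expected from request_llm/requirements_newbing.txt
        return True, "Dependency check passed; waiting for NewBing."
    except ImportError:
        return False, "Missing dependency: run `pip install -r request_llm/requirements_newbing.txt`."
```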
@@ -440,34 +441,77 @@ class GetNewBingHandle(Process):
     def ready(self):
         return self.newbing_model is not None

-    async def async_run(self
+    async def async_run(self):
         # 读取配置
         NEWBING_STYLE, = get_conf('NEWBING_STYLE')
         from request_llm.bridge_all import model_info
         endpoint = model_info['newbing']['endpoint']
-
-
-
-
-
-
-
-
-        if
-        self.
-
-
+        while True:
+            # 等待
+            kwargs = self.child.recv()
+            question=kwargs['query']
+            history=kwargs['history']
+            system_prompt=kwargs['system_prompt']
+
+            # 是否重置
+            if len(self.local_history) > 0 and len(history)==0:
+                await self.newbing_model.reset()
+                self.local_history = []
+
+            # 开始问问题
+            prompt = ""
+            if system_prompt not in self.local_history:
+                self.local_history.append(system_prompt)
+                prompt += system_prompt + '\n'
+
+            # 追加历史
+            for ab in history:
+                a, b = ab
+                if a not in self.local_history:
+                    self.local_history.append(a)
+                    prompt += a + '\n'
+                if b not in self.local_history:
+                    self.local_history.append(b)
+                    prompt += b + '\n'
+
+            # 问题
+            prompt += question
+            self.local_history.append(question)
+
+            # 提交
+            async for final, response in self.newbing_model.ask_stream(
+                prompt=question,
+                conversation_style=NEWBING_STYLE,     # ["creative", "balanced", "precise"]
+                wss_link=endpoint,                    # "wss://sydney.bing.com/sydney/ChatHub"
+            ):
+                if not final:
+                    print(response)
+                    self.child.send(str(response))
+                else:
+                    print('-------- receive final ---------')
+                    self.child.send('[Finish]')
+
+
     def run(self):
+        """
+        这个函数运行在子进程
+        """
         # 第一次运行,加载参数
         retry = 0
         self.local_history = []
         while True:
             try:
                 if self.newbing_model is None:
+                    # 代理设置
                     proxies, = get_conf('proxies')
+                    if proxies is None:
+                        self.proxies_https = None
+                    else:
+                        self.proxies_https = proxies['https']
+
                     NEWBING_COOKIES, = get_conf('NEWBING_COOKIES')
                     cookies = json.loads(NEWBING_COOKIES)
-                    self.newbing_model = Chatbot(proxy=
+                    self.newbing_model = Chatbot(proxy=self.proxies_https, cookies=cookies)
                     break
                 else:
                     break
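The new `async_run` is the worker's main loop: it blocks on `self.child.recv()`, resets the NewBing conversation when the caller has cleared its history, rebuilds a de-duplicated prompt out of `system_prompt`, the `[user, assistant]` pairs in `history`, and the new question (tracking what was already sent in `self.local_history`), then streams `ask_stream` output back over the pipe, ending with `'[Finish]'`. Note that the assembled `prompt` string is not actually forwarded: `ask_stream` is called with `prompt=question`, so only the latest question reaches NewBing. Meanwhile `run()` resolves the https proxy from `get_conf('proxies')` and passes it, together with the parsed `NEWBING_COOKIES`, into `Chatbot(proxy=..., cookies=...)`. A sketch of the assembly step in isolation (hypothetical helper, same logic as the loop body):

```python
def build_prompt(question, history, system_prompt, local_history):
    """Mirror of async_run's prompt assembly: only lines not sent before are repeated."""
    prompt = ""
    if system_prompt not in local_history:
        local_history.append(system_prompt)
        prompt += system_prompt + '\n'
    for a, b in history:                      # history holds [user, assistant] pairs
        for line in (a, b):
            if line not in local_history:
                local_history.append(line)
                prompt += line + '\n'
    prompt += question
    local_history.append(question)
    return prompt

# Example: on a follow-up turn, only material not already sent is repeated.
seen = []
print(build_prompt("And in French?", [], "You are a translator.", seen))
print(build_prompt("Thanks.", [["And in French?", "Bonjour."]], "You are a translator.", seen))
```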
@@ -479,23 +523,24 @@ class GetNewBingHandle(Process):
                    raise RuntimeError("不能加载Newbing组件。")

         # 进入任务等待状态
-
-
-
-
-
-            tb_str = '```\n' + trimmed_format_exc() + '```'
-            self.child.send('[Local Message] Newbing失败.')
+        try:
+            asyncio.run(self.async_run())
+        except Exception:
+            tb_str = '```\n' + trimmed_format_exc() + '```'
+            self.child.send(f'[Local Message] Newbing失败 {tb_str}.')
         self.child.send('[Finish]')

     def stream_chat(self, **kwargs):
-
+        """
+        这个函数运行在主进程
+        """
+        self.parent.send(kwargs)    # 发送请求到子进程
         while True:
-            res = self.parent.recv()
+            res = self.parent.recv()        # 等待newbing回复的片段
             if res != '[Finish]':
-                yield res
+                yield res        # newbing回复的片段
             else:
-                break
+                break        # 结束
         return


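"进入任务等待状态" (enter the task-wait state): the synchronous `run()` hands control to the async worker with `asyncio.run(self.async_run())`; if anything raises, the trimmed traceback is sent to the UI as a `[Local Message] Newbing失败 ...` fenced block, and `'[Finish]'` is always sent afterwards so `stream_chat` in the main process can stop waiting. A minimal sketch of that sync-to-async bridge with the same guarantee (names illustrative):

```python
import asyncio
import traceback

def drive_async_worker(child_conn, worker_coro_factory):
    """Run an async worker from a synchronous Process.run(); always emit the sentinel."""
    try:
        asyncio.run(worker_coro_factory())   # blocks until the worker loop exits or raises
    except Exception:
        tb = '```\n' + traceback.format_exc() + '```'
        child_conn.send(f'[Local Message] worker failed {tb}.')
    child_conn.send('[Finish]')              # unblocks the generator on the parent side
```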
@@ -523,13 +568,12 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",

     # 没有 sys_prompt 接口,因此把prompt加入 history
     history_feedin = []
-    history_feedin.append(["What can I do?", sys_prompt])
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )

     watch_dog_patience = 5 # 看门狗 (watchdog) 的耐心, 设置5秒即可
     response = ""
-    for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
+    for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=sys_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         observe_window[0] = response
         if len(observe_window) >= 2:
             if (time.time()-observe_window[1]) > watch_dog_patience:
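`predict_no_ui_long_connection` no longer injects a fake "What can I do?" turn; the system prompt now rides along as `system_prompt=sys_prompt` in the `stream_chat` call. The surrounding watchdog works by having the caller keep `observe_window[1]` fresh: if it is not refreshed within `watch_dog_patience` seconds, the streaming loop aborts. A sketch of that check (function name and message are illustrative; the original raise is outside this hunk):

```python
import time

def watchdog_ok(observe_window, patience=5):
    """True while the caller keeps refreshing observe_window[1] with time.time()."""
    if len(observe_window) < 2:
        return True                           # no watchdog configured
    return (time.time() - observe_window[1]) <= patience

# Typical use inside the streaming loop:
# if not watchdog_ok(observe_window, watch_dog_patience): raise RuntimeError("watchdog timeout")
```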
@@ -543,7 +587,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     单线程方法
     函数的说明请见 request_llm/bridge_all.py
     """
-    chatbot.append((inputs, ""))
+    chatbot.append((inputs, "[Local Message]: 等待Bing响应 ..."))

     global newbing_handle
     if newbing_handle is None or (not newbing_handle.success):
@@ -562,13 +606,23 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]

     history_feedin = []
-    history_feedin.append(["What can I do?", system_prompt] )
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )

-
-
-
+    yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
+    for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
+        chatbot[-1] = (inputs, preprocess_newbing_out(response))
+        yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
+
+    history.extend([inputs, preprocess_newbing_out(response)])
+    yield from update_ui(chatbot=chatbot, history=history, msg="完成全部响应,请提交新问题。")
+
+def preprocess_newbing_out(s):
+    pattern = r'\^(\d+)\^' # 匹配^数字^
+    sub = lambda m: '\['+m.group(1)+'\]' # 将匹配到的数字作为替换值
+    result = re.sub(pattern, sub, s) # 替换操作
+
+    if '[1]' in result:
+        result += '\n\n```\n' + "\n".join([r for r in result.split('\n') if r.startswith('[')]) + '\n```\n'

-
-    yield from update_ui(chatbot=chatbot, history=history)
+    return result
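This last hunk is the change behind the commit title: NewBing marks its citations as `^1^`, `^2^`, …, and its replies begin with markdown reference lines such as `[1]: https://… "title"`. `preprocess_newbing_out` rewrites each `^n^` to an escaped `[n]` and, when citations are present, repeats the reference lines (every line starting with `[`) inside a fenced block, so the cited URLs are visible in the chat. A self-contained rerun of that transformation on a made-up reply (the URL is illustrative):

```python
import re

def preprocess_newbing_out(s):
    pattern = r'\^(\d+)\^'                        # match ^n^ citation marks
    sub = lambda m: '\\[' + m.group(1) + '\\]'    # rewrite them as escaped [n]
    result = re.sub(pattern, sub, s)
    if '[1]' in result:
        # repeat the reference lines (the ones starting with '[') in a fenced block
        result += '\n\n```\n' + "\n".join([r for r in result.split('\n') if r.startswith('[')]) + '\n```\n'
    return result

demo = '[1]: https://example.com "Example source"\nAccording to the page, it works^1^.'
print(preprocess_newbing_out(demo))
# The ^1^ becomes \[1\], and the '[1]: https://example.com ...' reference line is echoed in the appended block.
```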