Hansimov committed
Commit 0fdaf32
Parent: 7bfabdf

:zap: [Enhance] OpenAI API to bing chat: Fix suggestions output and remove unnecessary data line prefix

apis/chat_api.py CHANGED
@@ -11,17 +11,6 @@ from networks import OpenaiStreamOutputer
 from sse_starlette.sse import EventSourceResponse
 
 
-def mock_stream_chat(prompt):
-    outputer = OpenaiStreamOutputer()
-    for i in range(10):
-        output = outputer.output(content=f"MSG {i} ", content_type="Completions")
-        print(output)
-        yield output
-    output = outputer.output(content="", content_type="Finished")
-    print(output)
-    yield output
-
-
 class ChatAPIApp:
     def __init__(self):
         self.app = FastAPI(
@@ -144,13 +133,13 @@ class ChatAPIApp:
         )
 
     def chat_completions(self, item: ChatCompletionsPostItem):
-        # connector = ConversationConnector(
-        #     conversation_style=item.model,
-        #     sec_access_token=item.sec_access_token,
-        #     client_id=item.client_id,
-        #     conversation_id=item.conversation_id,
-        #     invocation_id=item.invocation_id,
-        # )
+        connector = ConversationConnector(
+            conversation_style=item.model,
+            sec_access_token=item.sec_access_token,
+            client_id=item.client_id,
+            conversation_id=item.conversation_id,
+            invocation_id=item.invocation_id,
+        )
 
         if item.invocation_id == 0:
             # TODO: History Messages Merger
@@ -159,8 +148,7 @@
         prompt = item.messages[-1]["content"]
 
         return EventSourceResponse(
-            # connector.stream_chat(prompt=prompt, yield_output=True),
-            mock_stream_chat(prompt),
+            connector.stream_chat(prompt=prompt, yield_output=True),
            media_type="text/event-stream",
        )
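Note: sse-starlette's EventSourceResponse consumes a generator (plain or async) and frames each yielded string as its own SSE event, so connector.stream_chat() only needs to yield bare JSON strings. A minimal sketch of that contract; the route path and payload below are illustrative and not taken from this repo:

from fastapi import FastAPI
from sse_starlette.sse import EventSourceResponse

app = FastAPI()

@app.post("/demo/chat/completions")
def demo_chat_completions():
    def event_generator():
        # Each yielded string is sent to the client as one "data: <string>" event.
        yield '{"delta": "Hello "}'
        yield '{"delta": "world"}'
        yield "[DONE]"

    return EventSourceResponse(
        event_generator(),
        media_type="text/event-stream",
    )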
conversations/conversation_connector.py CHANGED
@@ -98,7 +98,12 @@ class ConversationConnector:
             # Stream: Meaningful Messages
             if data.get("type") == 1:
                 if yield_output:
-                    yield message_parser.parse(data, return_output=True)
+                    output = message_parser.parse(data, return_output=True)
+                    if isinstance(output, list):
+                        for item in output:
+                            yield item
+                    else:
+                        yield output
                 else:
                     message_parser.parse(data)
             # Stream: List of all messages in the whole conversation
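
MessageParser.parse() can now return either a single serialized chunk or a list of chunks (completions plus suggested responses), and the connector flattens both cases into the stream. The added branch is equivalent to this small helper, shown purely for illustration and not part of the repo:

def iter_chunks(output):
    """Yield each chunk whether parse() returned one string or a list of strings."""
    if isinstance(output, list):
        yield from output
    else:
        yield output

# Equivalent to the added branch inside the streaming loop:
#     for chunk in iter_chunks(message_parser.parse(data, return_output=True)):
#         yield chunk
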
examples/chat_with_openai.py CHANGED
@@ -15,7 +15,7 @@ response = client.chat.completions.create(
     messages=[
         {
             "role": "user",
-            "content": "how many questions have I asked you?",
+            "content": "search california's weather for me",
         }
     ],
     stream=True,
@@ -29,4 +29,5 @@ for chunk in response:
     elif chunk.choices[0].finish_reason == "stop":
         print()
     else:
-        print(chunk)
+        # print(chunk)
+        pass
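
The new prompt is a search-style request, presumably to exercise the suggested-responses and search paths this commit touches, and the else branch now silently skips chunks that carry neither content nor a stop signal. For reference, a self-contained consumption loop in the same style; the base_url, api_key, and model values are placeholders, so match them to how this API app is actually served:

from openai import OpenAI

# Placeholder endpoint and model name: adjust to your deployment.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed")

response = client.chat.completions.create(
    model="precise",
    messages=[{"role": "user", "content": "search california's weather for me"}],
    stream=True,
)

for chunk in response:
    delta = chunk.choices[0].delta
    if delta.content:
        print(delta.content, end="", flush=True)
    elif chunk.choices[0].finish_reason == "stop":
        print()
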
networks/message_outputer.py CHANGED
@@ -7,21 +7,13 @@ class OpenaiStreamOutputer:
     * https://platform.openai.com/docs/api-reference/chat/create
     """
 
-    def data_to_string(self, data={}, content_type="", media_type=None):
+    def data_to_string(self, data={}, content_type=""):
         # return (json.dumps(data) + "\n").encode("utf-8")
-        data_str = f"{json.dumps(data)}\n"
-
-        if media_type == "text/event-stream":
-            data_str = f"data: {data_str}"
+        data_str = f"{json.dumps(data)}"
 
         return data_str
 
-    def output(
-        self,
-        content=None,
-        content_type=None,
-        media_type=None,
-    ) -> bytes:
+    def output(self, content=None, content_type=None) -> str:
         data = {
             "created": 1677825464,
             "id": "chatcmpl-bing",
@@ -69,4 +61,4 @@ class OpenaiStreamOutputer:
                 "finish_reason": None,
            }
        ]
-        return self.data_to_string(data, content_type, media_type)
+        return self.data_to_string(data, content_type)
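
output() now returns a plain JSON string with no data: prefix and no trailing newline, because EventSourceResponse adds the SSE framing itself; the old code produced the doubled data line the commit message refers to. Roughly, with illustrative field values:

import json

chunk = {
    "created": 1677825464,
    "id": "chatcmpl-bing",
    "choices": [{"index": 0, "delta": {"content": "Hello"}, "finish_reason": None}],
}

payload = json.dumps(chunk)  # what output() now returns: a bare JSON string
# sse-starlette then frames it on the wire as:
#   data: {"created": 1677825464, "id": "chatcmpl-bing", ...}
# whereas the old 'data: ' + json + '\n' value was framed a second time,
# producing a doubled data: prefix.
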
networks/message_parser.py CHANGED
@@ -1,5 +1,3 @@
-import json
-
 from utils.logger import logger
 from networks import OpenaiStreamOutputer
 
@@ -33,7 +31,7 @@ class MessageParser:
                for suggestion_text in suggestion_texts:
                    logger.file(f"- {suggestion_text}")
            if return_output:
-                completions_output = self.outputer.output(
+                completions_output = self.outputer.output(
                    delta_content, content_type="Completions"
                )
                if message.get("suggestedResponses"):
@@ -41,11 +39,13 @@
                    suggestion_texts_str += "\n".join(
                        f"- {item}" for item in suggestion_texts
                    )
-                    output_bytes += self.outputer.output(
+                    suggestions_output = self.outputer.output(
                        suggestion_texts_str,
                        content_type="SuggestedResponses",
                    )
-                return output_bytes
+                    return [completions_output, suggestions_output]
+                else:
+                    return completions_output
 
        # Message: Search Query
        elif message_type in ["InternalSearchQuery"]:
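
Returning a list instead of string-concatenating the two serialized chunks matters because '{...}{...}' is not one valid JSON document and would arrive as a single malformed event: that was the broken suggestions output. ConversationConnector.stream_chat now yields each element separately (see the change above). A quick illustration of handling both return shapes, with made-up payloads:

import json

def handle_parsed(output):
    """Treat parse()'s return value as one chunk or a list of chunks."""
    chunks = output if isinstance(output, list) else [output]
    for chunk in chunks:
        # Each element is an independent JSON document, so it round-trips cleanly.
        print(json.loads(chunk))

handle_parsed('{"delta": "Hello"}')
handle_parsed(['{"delta": "Hello"}', '{"delta": "- Suggestion 1\\n- Suggestion 2"}'])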