Update app.py
Browse files
app.py
CHANGED
@@ -73,6 +73,7 @@ def root():
|
|
73 |
"body": {
|
74 |
"messages": "Array of message objects",
|
75 |
"stream": "Boolean (true for streaming response)",
|
|
|
76 |
}
|
77 |
}
|
78 |
}
|
@@ -86,6 +87,7 @@ def messages():
|
|
86 |
|
87 |
try:
|
88 |
json_body = request.json
|
|
|
89 |
stream = json_body.get('stream', True) # 默认为True
|
90 |
previous_messages = "\n\n".join([msg['content'] for msg in json_body['messages']])
|
91 |
msg_id = str(uuid.uuid4())
|
@@ -94,7 +96,7 @@ def messages():
|
|
94 |
|
95 |
if not stream:
|
96 |
# 处理 stream 为 false 的情况
|
97 |
- return handle_non_stream(previous_messages, msg_id)
|
98 |
|
99 |
# 记录日志:此时请求上下文仍然有效
|
100 |
log_request(request.remote_addr, request.path, 200)
|
@@ -107,7 +109,7 @@ def messages():
|
|
107 |
"type": "message",
|
108 |
"role": "assistant",
|
109 |
"content": [],
|
110 |
- "model":
|
111 |
"stop_reason": None,
|
112 |
"stop_sequence": None,
|
113 |
"usage": {"input_tokens": 8, "output_tokens": 1},
|
@@ -124,8 +126,6 @@ def messages():
|
|
124 |
"attachments": [],
|
125 |
"language": "en-GB",
|
126 |
"timezone": "Europe/London",
|
127 |
- "search_focus": "writing",
|
128 |
- "frontend_uuid": str(uuid.uuid4()),
|
129 |
"mode": "concise",
|
130 |
"is_related_query": False,
|
131 |
"is_default_related_query": False,
|
@@ -205,7 +205,7 @@ def messages():
|
|
205 |
log_request(request.remote_addr, request.path, 400)
|
206 |
return jsonify({"error": str(e)}), 400
|
207 |
|
208 |
- def handle_non_stream(previous_messages, msg_id):
|
209 |
"""
|
210 |
处理 stream 为 false 的情况,返回完整的响应。
|
211 |
"""
|
@@ -221,8 +221,6 @@ def handle_non_stream(previous_messages, msg_id):
|
|
221 |
"attachments": [],
|
222 |
"language": "en-GB",
|
223 |
"timezone": "Europe/London",
|
224 |
- "search_focus": "writing",
|
225 |
- "frontend_uuid": str(uuid.uuid4()),
|
226 |
"mode": "concise",
|
227 |
"is_related_query": False,
|
228 |
"is_default_related_query": False,
|
@@ -268,7 +266,7 @@ def handle_non_stream(previous_messages, msg_id):
|
|
268 |
full_response = {
|
269 |
"id": msg_id,
|
270 |
"content": [{"text": ''.join(response_text)}], # 合并所有文本块
|
271 |
- "model":
|
272 |
"stop_reason": "end_turn",
|
273 |
"stop_sequence": None,
|
274 |
"usage": {
|
|
|
73 |
"body": {
|
74 |
"messages": "Array of message objects",
|
75 |
"stream": "Boolean (true for streaming response)",
|
76 |
+ "model": "Model to be used (optional, defaults to claude-3-opus-20240229)"
|
77 |
}
|
78 |
}
|
79 |
}
|
|
|
87 |
|
88 |
try:
|
89 |
json_body = request.json
|
90 |
+ model = json_body.get('model', 'claude-3-opus-20240229') # 动态获取模型,默认 claude-3-opus-20240229
|
91 |
stream = json_body.get('stream', True) # 默认为True
|
92 |
previous_messages = "\n\n".join([msg['content'] for msg in json_body['messages']])
|
93 |
msg_id = str(uuid.uuid4())
|
|
|
96 |
|
97 |
if not stream:
|
98 |
# 处理 stream 为 false 的情况
|
99 |
+ return handle_non_stream(previous_messages, msg_id, model)
|
100 |
|
101 |
# 记录日志:此时请求上下文仍然有效
|
102 |
log_request(request.remote_addr, request.path, 200)
|
|
|
109 |
"type": "message",
|
110 |
"role": "assistant",
|
111 |
"content": [],
|
112 |
+ "model": model, # 动态模型
|
113 |
"stop_reason": None,
|
114 |
"stop_sequence": None,
|
115 |
"usage": {"input_tokens": 8, "output_tokens": 1},
|
|
|
126 |
"attachments": [],
|
127 |
"language": "en-GB",
|
128 |
"timezone": "Europe/London",
|
|
|
|
|
129 |
"mode": "concise",
|
130 |
"is_related_query": False,
|
131 |
"is_default_related_query": False,
|
|
|
205 |
log_request(request.remote_addr, request.path, 400)
|
206 |
return jsonify({"error": str(e)}), 400
|
207 |
|
208 |
+ def handle_non_stream(previous_messages, msg_id, model):
|
209 |
"""
|
210 |
处理 stream 为 false 的情况,返回完整的响应。
|
211 |
"""
|
|
|
221 |
"attachments": [],
|
222 |
"language": "en-GB",
|
223 |
"timezone": "Europe/London",
|
|
|
|
|
224 |
"mode": "concise",
|
225 |
"is_related_query": False,
|
226 |
"is_default_related_query": False,
|
|
|
266 |
full_response = {
|
267 |
"id": msg_id,
|
268 |
"content": [{"text": ''.join(response_text)}], # 合并所有文本块
|
269 |
+ "model": model, # 动态模型
|
270 |
"stop_reason": "end_turn",
|
271 |
"stop_sequence": None,
|
272 |
"usage": {
|