smgc committed on
Commit
21a614b
β€’
1 Parent(s): edd867f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -15
app.py CHANGED
@@ -166,8 +166,8 @@ def messages():
166
 
167
  try:
168
  json_body = request.json
169
- model = json_body.get('model', 'claude-3-5-sonnet-20240620')
170
- stream = json_body.get('stream', False)
171
  messages = json_body.get('messages', [])
172
 
173
  # εˆ†ε—ε€„η†
@@ -183,10 +183,18 @@ def messages():
183
  total_output_tokens = 0
184
  full_response = []
185
 
 
 
 
 
 
 
 
 
186
  def generate():
187
  nonlocal total_output_tokens
188
 
189
- yield from send_event("message_start", {
190
  "type": "message_start",
191
  "message": {
192
  "id": msg_id,
@@ -200,6 +208,8 @@ def messages():
200
  },
201
  })
202
 
 
 
203
  for chunk_index, chunk in enumerate(chunks):
204
  chunk_input = "\n\n".join([normalize_content(msg['content']) for msg in chunk])
205
  chunk_input_tokens = calculate_tokens(chunk_input)
@@ -211,7 +221,7 @@ def messages():
211
  # εŠ¨ζ€θ°ƒζ•΄θΆ…ζ—Άζ—Άι—΄
212
  timeout_seconds = max(30, min(300, chunk_input_tokens // 1000 * 30))
213
 
214
- yield from send_event("chunk_start", {
215
  "type": "chunk_start",
216
  "chunk_index": chunk_index,
217
  "total_chunks": total_chunks,
@@ -228,6 +238,14 @@ def messages():
228
  if data.get('final', False):
229
  response_event.set()
230
 
 
 
 
 
 
 
 
 
231
  sio.on('query_progress', on_query_progress)
232
 
233
  def timeout_handler():
@@ -243,7 +261,6 @@ def messages():
243
 
244
  try:
245
  sio.connect('wss://www.perplexity.ai/', **connect_opts, headers=sio_opts['extraHeaders'])
246
- sio.emit('perplexity_ask', (chunk_input, get_emit_data()))
247
 
248
  while not response_event.is_set() and not timeout_event.is_set():
249
  sio.sleep(0.1)
@@ -252,7 +269,7 @@ def messages():
252
  full_response.append(new_chunk)
253
  chunk_tokens = calculate_tokens(new_chunk)
254
  total_output_tokens += chunk_tokens
255
- yield from send_event("content_block_delta", {
256
  "type": "content_block_delta",
257
  "index": 0,
258
  "delta": {"type": "text_delta", "text": new_chunk},
@@ -260,7 +277,7 @@ def messages():
260
 
261
  except Exception as e:
262
  logger.error(f"Error during chunk {chunk_index + 1}/{total_chunks} processing: {str(e)}", exc_info=True)
263
- yield from send_event("content_block_delta", {
264
  "type": "content_block_delta",
265
  "index": 0,
266
  "delta": {"type": "text_delta", "text": f"Error processing chunk {chunk_index + 1}/{total_chunks}: {str(e)}"},
@@ -270,26 +287,30 @@ def messages():
270
  if sio.connected:
271
  sio.disconnect()
272
 
273
- yield from send_event("chunk_end", {
274
  "type": "chunk_end",
275
  "chunk_index": chunk_index,
276
  "total_chunks": total_chunks,
277
  })
278
 
279
- yield from send_event("content_block_stop", {"type": "content_block_stop", "index": 0})
280
- yield from send_event("message_delta", {
281
  "type": "message_delta",
282
  "delta": {"stop_reason": "end_turn", "stop_sequence": None},
283
  "usage": {"input_tokens": total_input_tokens, "output_tokens": total_output_tokens},
284
  })
285
- yield from send_event("message_stop", {"type": "message_stop"})
286
 
287
  if stream:
288
  return Response(generate(), content_type='text/event-stream')
289
  else:
290
  # ιžζ΅εΌε€„η†
291
- for _ in generate():
292
- pass # ε€„η†ζ‰€ζœ‰η”Ÿζˆηš„δΊ‹δ»ΆοΌŒδ½†δΈε‘ι€
 
 
 
 
293
  return jsonify({
294
  "content": [{"text": ''.join(full_response), "type": "text"}],
295
  "id": msg_id,
@@ -306,8 +327,8 @@ def messages():
306
 
307
  except Exception as e:
308
  logger.error(f"Request error: {str(e)}", exc_info=True)
309
- log_request(request.remote_addr, request.path, 400)
310
- return jsonify({"error": str(e)}), 400
311
 
312
  def get_emit_data():
313
  return {
@@ -325,6 +346,7 @@ def get_emit_data():
325
  "query_source": "home"
326
  }
327
 
 
328
  def handle_non_stream(previous_messages, msg_id, model, input_tokens):
329
  try:
330
  response_event = Event()
 
166
 
167
  try:
168
  json_body = request.json
169
+ model = json_body.get('model', 'claude-3-opus-20240229')
170
+ stream = json_body.get('stream', True)
171
  messages = json_body.get('messages', [])
172
 
173
  # εˆ†ε—ε€„η†
 
183
  total_output_tokens = 0
184
  full_response = []
185
 
186
+ def send_event(event_type, data):
187
+ event = create_event(event_type, data)
188
+ logger.info(f"Sending {event_type} event", extra={
189
+ 'event_type': event_type,
190
+ 'data': {'content': event}
191
+ })
192
+ return event
193
+
194
  def generate():
195
  nonlocal total_output_tokens
196
 
197
+ yield send_event("message_start", {
198
  "type": "message_start",
199
  "message": {
200
  "id": msg_id,
 
208
  },
209
  })
210
 
211
+ yield send_event("content_block_start", {"type": "content_block_start", "index": 0, "content_block": {"type": "text", "text": ""}})
212
+
213
  for chunk_index, chunk in enumerate(chunks):
214
  chunk_input = "\n\n".join([normalize_content(msg['content']) for msg in chunk])
215
  chunk_input_tokens = calculate_tokens(chunk_input)
 
221
  # εŠ¨ζ€θ°ƒζ•΄θΆ…ζ—Άζ—Άι—΄
222
  timeout_seconds = max(30, min(300, chunk_input_tokens // 1000 * 30))
223
 
224
+ yield send_event("chunk_start", {
225
  "type": "chunk_start",
226
  "chunk_index": chunk_index,
227
  "total_chunks": total_chunks,
 
238
  if data.get('final', False):
239
  response_event.set()
240
 
241
+ def on_connect():
242
+ logger.info(f"Connected to Perplexity AI for chunk {chunk_index + 1}/{total_chunks}", extra={
243
+ 'event_type': 'chunk_connection_established',
244
+ 'data': {'chunk_index': chunk_index, 'total_chunks': total_chunks}
245
+ })
246
+ sio.emit('perplexity_ask', (chunk_input, get_emit_data()))
247
+
248
+ sio.on('connect', on_connect)
249
  sio.on('query_progress', on_query_progress)
250
 
251
  def timeout_handler():
 
261
 
262
  try:
263
  sio.connect('wss://www.perplexity.ai/', **connect_opts, headers=sio_opts['extraHeaders'])
 
264
 
265
  while not response_event.is_set() and not timeout_event.is_set():
266
  sio.sleep(0.1)
 
269
  full_response.append(new_chunk)
270
  chunk_tokens = calculate_tokens(new_chunk)
271
  total_output_tokens += chunk_tokens
272
+ yield send_event("content_block_delta", {
273
  "type": "content_block_delta",
274
  "index": 0,
275
  "delta": {"type": "text_delta", "text": new_chunk},
 
277
 
278
  except Exception as e:
279
  logger.error(f"Error during chunk {chunk_index + 1}/{total_chunks} processing: {str(e)}", exc_info=True)
280
+ yield send_event("content_block_delta", {
281
  "type": "content_block_delta",
282
  "index": 0,
283
  "delta": {"type": "text_delta", "text": f"Error processing chunk {chunk_index + 1}/{total_chunks}: {str(e)}"},
 
287
  if sio.connected:
288
  sio.disconnect()
289
 
290
+ yield send_event("chunk_end", {
291
  "type": "chunk_end",
292
  "chunk_index": chunk_index,
293
  "total_chunks": total_chunks,
294
  })
295
 
296
+ yield send_event("content_block_stop", {"type": "content_block_stop", "index": 0})
297
+ yield send_event("message_delta", {
298
  "type": "message_delta",
299
  "delta": {"stop_reason": "end_turn", "stop_sequence": None},
300
  "usage": {"input_tokens": total_input_tokens, "output_tokens": total_output_tokens},
301
  })
302
+ yield send_event("message_stop", {"type": "message_stop"})
303
 
304
  if stream:
305
  return Response(generate(), content_type='text/event-stream')
306
  else:
307
  # ιžζ΅εΌε€„η†
308
+ full_response = []
309
+ for event in generate():
310
+ if 'content_block_delta' in event:
311
+ data = json.loads(event.split('data: ')[1])
312
+ full_response.append(data['delta']['text'])
313
+
314
  return jsonify({
315
  "content": [{"text": ''.join(full_response), "type": "text"}],
316
  "id": msg_id,
 
327
 
328
  except Exception as e:
329
  logger.error(f"Request error: {str(e)}", exc_info=True)
330
+ log_request(request.remote_addr, request.path, 500)
331
+ return jsonify({"error": str(e)}), 500
332
 
333
  def get_emit_data():
334
  return {
 
346
  "query_source": "home"
347
  }
348
 
349
+
350
  def handle_non_stream(previous_messages, msg_id, model, input_tokens):
351
  try:
352
  response_event = Event()