import os
import json
import uuid
from datetime import datetime
from flask import Flask, request, Response, jsonify
import socketio
import requests
import logging
from threading import Event, Timer
import re
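
# Flask proxy that exposes an Anthropic-style Messages endpoint (/ai/v1/messages),
# forwards prompts to Perplexity AI over Socket.IO, and streams replies back to the
# caller as server-sent events (or as a single JSON response when stream is false).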

app = Flask(__name__)

# Custom formatter that renders log records as structured JSON
class CustomFormatter(logging.Formatter):
    def format(self, record):
        log_data = {
            "timestamp": self.formatTime(record, self.datefmt),
            "level": record.levelname,
            "message": self.remove_ansi_escape(record.getMessage()),
        }
        if hasattr(record, 'event_type'):
            log_data['event_type'] = record.event_type
        if hasattr(record, 'data'):
            log_data['data'] = record.data
        return json.dumps(log_data, ensure_ascii=False, indent=2)

    def remove_ansi_escape(self, text):
        ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
        return ansi_escape.sub('', text)

def setup_logging():
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    handler.setFormatter(CustomFormatter())
    logger.addHandler(handler)

logger = logging.getLogger(__name__)

# Read the API key from the environment
API_KEY = os.environ.get('PPLX_KEY')

# Optional outbound proxy
proxy_url = os.environ.get('PROXY_URL')

# Route requests through the proxy when one is configured
if proxy_url:
    proxies = {
        'http': proxy_url,
        'https': proxy_url
    }
    transport = requests.Session()
    transport.proxies.update(proxies)
else:
    transport = None
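
# A single module-level Socket.IO client is shared by all requests; each request
# registers its own 'connect' and 'query_progress' handlers before connecting.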

sio = socketio.Client(http_session=transport, logger=False, engineio_logger=False)

# Socket.IO connection options
connect_opts = {
    'transports': ['websocket', 'polling'],
}

# Additional options: headers sent with the Socket.IO handshake
sio_opts = {
    'extraHeaders': {
        'Cookie': os.environ.get('PPLX_COOKIE'),
        'User-Agent': os.environ.get('USER_AGENT'),
        'Accept': '*/*',
        'priority': 'u=1, i',
        'Referer': 'https://www.perplexity.ai/',
    }
}

def log_request(ip, route, status):
    timestamp = datetime.now().isoformat()
    logger.info(f"Request logged", extra={
        'event_type': 'request_log',
        'data': {
            'ip': ip,
            'route': route,
            'status': status,
            'timestamp': timestamp
        }
    })

def validate_api_key():
    api_key = request.headers.get('x-api-key')
    if api_key != API_KEY:
        log_request(request.remote_addr, request.path, 401)
        return jsonify({"error": "Invalid API key"}), 401
    return None

def normalize_content(content):
    if isinstance(content, str):
        return content
    elif isinstance(content, dict):
        return json.dumps(content, ensure_ascii=False)
    elif isinstance(content, list):
        return " ".join([normalize_content(item) for item in content])
    else:
        return ""

def calculate_tokens(text):
    if re.search(r'[^\x00-\x7F]', text):
        return len(text)
    else:
        tokens = text.split()
        return len(tokens)

def create_event(event, data):
    if isinstance(data, dict):
        data = json.dumps(data, ensure_ascii=False)
    return f"event: {event}\ndata: {data}\n\n"

@app.route('/')
def root():
    log_request(request.remote_addr, request.path, 200)
    return jsonify({
        "message": "Welcome to the Perplexity AI Proxy API",
        "endpoints": {
            "/ai/v1/messages": {
                "method": "POST",
                "description": "Send a message to the AI",
                "headers": {
                    "x-api-key": "Your API key (required)",
                    "Content-Type": "application/json"
                },
                "body": {
                    "messages": "Array of message objects",
                    "stream": "Boolean (true for streaming response)",
                    "model": "Model to be used (optional, defaults to claude-3-opus-20240229)"
                }
            }
        }
    })

@app.route('/ai/v1/messages', methods=['POST'])
def messages():
    auth_error = validate_api_key()
    if auth_error:
        return auth_error

    try:
        json_body = request.json
        model = json_body.get('model', 'claude-3-opus-20240229')
        stream = json_body.get('stream', True)

        previous_messages = "\n\n".join([normalize_content(msg['content']) for msg in json_body['messages']])
        input_tokens = calculate_tokens(previous_messages)

        msg_id = str(uuid.uuid4())
        response_event = Event()
        timeout_event = Event()
        response_text = []
        total_output_tokens = 0

        if not stream:
            return handle_non_stream(previous_messages, msg_id, model, input_tokens)

        log_request(request.remote_addr, request.path, 200)

        def generate():
            nonlocal total_output_tokens

            def send_event(event_type, data):
                event = create_event(event_type, data)
                logger.info(f"Sending {event_type} event", extra={
                    'event_type': event_type,
                    'data': {'content': event}
                })
                yield event

            # Send the initial Anthropic-style stream events: message_start, content_block_start, ping
            yield from send_event("message_start", {
                "type": "message_start",
                "message": {
                    "id": msg_id,
                    "type": "message",
                    "role": "assistant",
                    "model": model,
                    "content": [],
                    "stop_reason": None,
                    "stop_sequence": None,
                    "usage": {"input_tokens": input_tokens, "output_tokens": total_output_tokens},
                },
            })
            yield from send_event("content_block_start", {"type": "content_block_start", "index": 0, "content_block": {"type": "text", "text": ""}})
            yield from send_event("ping", {"type": "ping"})

            def on_query_progress(data):
                nonlocal total_output_tokens, response_text
                if 'text' in data:
                    text = json.loads(data['text'])
                    chunk = text['chunks'][-1] if text['chunks'] else None
                    if chunk:
                        response_text.append(chunk)
                        chunk_tokens = calculate_tokens(chunk)
                        total_output_tokens += chunk_tokens
                        logger.info("Received chunk", extra={
                            'event_type': 'chunk_received',
                            'data': {
                                'chunk': chunk,
                                'tokens': chunk_tokens,
                                'total_tokens': total_output_tokens
                            }
                        })

                if data.get('final', False):
                    logger.info("Final response received", extra={
                        'event_type': 'response_complete',
                        'data': {
                            'total_tokens': total_output_tokens
                        }
                    })
                    response_event.set()

            def on_connect():
                logger.info("Connected to Perplexity AI", extra={'event_type': 'connection_established'})
                emit_data = {
                    "version": "2.9",
                    "source": "default",
                    "attachments": [],
                    "language": "en-GB",
                    "timezone": "Europe/London",
                    "mode": "concise",
                    "is_related_query": False,
                    "is_default_related_query": False,
                    "visitor_id": str(uuid.uuid4()),
                    "frontend_context_uuid": str(uuid.uuid4()),
                    "prompt_source": "user",
                    "query_source": "home"
                }
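                # Fire off the query; partial answers arrive via 'query_progress' callbacks.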
                sio.emit('perplexity_ask', (previous_messages, emit_data))
                logger.info("Sent query to Perplexity AI", extra={
                    'event_type': 'query_sent',
                    'data': {
                        'message': previous_messages[:100] + '...' if len(previous_messages) > 100 else previous_messages
                    }
                })

            sio.on('connect', on_connect)
            sio.on('query_progress', on_query_progress)

            def timeout_handler():
                logger.warning("Request timed out", extra={'event_type': 'request_timeout'})
                timeout_event.set()
                response_event.set()

            timer = Timer(30, timeout_handler)  # 30 seconds timeout
            timer.start()

            try:
                sio.connect('wss://www.perplexity.ai/', **connect_opts, headers=sio_opts['extraHeaders'])
                
                while not response_event.is_set() and not timeout_event.is_set():
                    sio.sleep(0.1)
                    while response_text:
                        chunk = response_text.pop(0)
                        yield from send_event("content_block_delta", {
                            "type": "content_block_delta",
                            "index": 0,
                            "delta": {"type": "text_delta", "text": chunk},
                        })

                # Flush any chunks that arrived in the same callback that set response_event
                while response_text:
                    chunk = response_text.pop(0)
                    yield from send_event("content_block_delta", {
                        "type": "content_block_delta",
                        "index": 0,
                        "delta": {"type": "text_delta", "text": chunk},
                    })

                if timeout_event.is_set():
                    yield from send_event("content_block_delta", {
                        "type": "content_block_delta",
                        "index": 0,
                        "delta": {"type": "text_delta", "text": "Request timed out"},
                    })
                
            except Exception as e:
                logger.error(f"Error during socket connection: {str(e)}", exc_info=True)
                yield from send_event("content_block_delta", {
                    "type": "content_block_delta",
                    "index": 0,
                    "delta": {"type": "text_delta", "text": f"Error during socket connection: {str(e)}"},
                })
            finally:
                timer.cancel()
                if sio.connected:
                    sio.disconnect()

            # Close the stream: content_block_stop, then message_delta with usage, then message_stop
            yield from send_event("content_block_stop", {"type": "content_block_stop", "index": 0})
            yield from send_event("message_delta", {
                "type": "message_delta",
                "delta": {"stop_reason": "end_turn", "stop_sequence": None},
                "usage": {"output_tokens": total_output_tokens},
            })
            yield from send_event("message_stop", {"type": "message_stop"})

        return Response(generate(), content_type='text/event-stream')

    except Exception as e:
        logger.error(f"Request error: {str(e)}", exc_info=True)
        log_request(request.remote_addr, request.path, 400)
        return jsonify({"error": str(e)}), 400

def handle_non_stream(previous_messages, msg_id, model, input_tokens):
    try:
        response_event = Event()
        response_text = []
        total_output_tokens = 0

        def on_query_progress(data):
            nonlocal total_output_tokens, response_text
            if 'text' in data:
                text = json.loads(data['text'])
                chunk = text['chunks'][-1] if text['chunks'] else None
                if chunk:
                    response_text.append(chunk)
                    chunk_tokens = calculate_tokens(chunk)
                    total_output_tokens += chunk_tokens

            if data.get('final', False):
                response_event.set()

        def on_connect():
            logger.info("Connected to Perplexity AI (non-stream)", extra={'event_type': 'connection_established_non_stream'})
            emit_data = {
                "version": "2.9",
                "source": "default",
                "attachments": [],
                "language": "en-GB",
                "timezone": "Europe/London",
                "mode": "concise",
                "is_related_query": False,
                "is_default_related_query": False,
                "visitor_id": str(uuid.uuid4()),
                "frontend_context_uuid": str(uuid.uuid4()),
                "prompt_source": "user",
                "query_source": "home"
            }
            sio.emit('perplexity_ask', (previous_messages, emit_data))

        sio.on('connect', on_connect)
        sio.on('query_progress', on_query_progress)

        sio.connect('wss://www.perplexity.ai/', **connect_opts, headers=sio_opts['extraHeaders'])
        
        # Wait for response with timeout
        response_event.wait(timeout=30)
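        # If the wait times out, continue with whatever partial text (if any) has arrived.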

        if not response_text:
            logger.warning("No response received (non-stream)", extra={'event_type': 'no_response_non_stream'})
            return jsonify({"error": "No response received"}), 504

        full_response = {
            "content": [{"text": ''.join(response_text), "type": "text"}],
            "id": msg_id,
            "model": model,
            "role": "assistant",
            "stop_reason": "end_turn",
            "stop_sequence": None,
            "type": "message",
            "usage": {
                "input_tokens": input_tokens,
                "output_tokens": total_output_tokens,
            },
        }
        logger.info("Sending non-stream response", extra={
            'event_type': 'non_stream_response',
            'data': {'content': full_response}
        })
        return Response(json.dumps(full_response, ensure_ascii=False), content_type='application/json')

    except Exception as e:
        logger.error(f"Error during non-stream socket connection: {str(e)}", exc_info=True)
        return jsonify({"error": str(e)}), 500
    finally:
        if sio.connected:
            sio.disconnect()

@app.errorhandler(404)
def not_found(error):
    log_request(request.remote_addr, request.path, 404)
    return "Not Found", 404

@app.errorhandler(500)
def server_error(error):
    logger.error(f"Server error: {str(error)}", exc_info=True)
    log_request(request.remote_addr, request.path, 500)
    return "Something broke!", 500

if __name__ == '__main__':
    setup_logging()
    port = int(os.environ.get('PORT', 8081))
    logger.info(f"Perplexity proxy starting", extra={
        'event_type': 'server_start',
        'data': {'port': port}
    })
    if not API_KEY:
        logger.warning("PPLX_KEY environment variable is not set", extra={'event_type': 'config_warning'})
    app.run(host='0.0.0.0', port=port)
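
# Example request (assuming the server is running locally on the default port 8081):
#   curl -N http://localhost:8081/ai/v1/messages \
#     -H "x-api-key: $PPLX_KEY" \
#     -H "Content-Type: application/json" \
#     -d '{"model": "claude-3-opus-20240229", "stream": true, "messages": [{"role": "user", "content": "Hello"}]}'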