smgc committed on
Commit
b714dae
1 Parent(s): 3fb88cc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -5
app.py CHANGED
@@ -1,4 +1,6 @@
1
  from flask import Flask, request, Response
 
 
2
  import requests
3
  import json
4
  import os
@@ -86,10 +88,10 @@ async def chat():
86
  proxies = {'http': proxy, 'https': proxy} if proxy else None
87
  logger.info(f"Using proxy: {proxy}")
88
 
89
- async def generate():
90
  try:
91
  logger.info("Sending request to LLM API")
92
- async with requests.post(url, headers=headers, json=llm_payload, stream=True, proxies=proxies, allow_redirects=True) as response:
93
  logger.info(f"LLM API response status: {response.status_code}")
94
  logger.info(f"LLM API response headers: {dict(response.headers)}")
95
 
@@ -98,7 +100,7 @@ async def chat():
98
  logger.warning(f"Received 301 redirect. New location: {new_location}")
99
  # 如果需要,可以在这里处理重定向
100
 
101
- async for chunk in response.iter_content(chunk_size=1024):
102
  if chunk:
103
  logger.debug(f"Received chunk of size: {len(chunk)} bytes")
104
  yield chunk
@@ -114,9 +116,9 @@ async def home():
114
  return "Welcome to the Chat Completion API", 200
115
 
116
  # 创建 ASGI 应用
117
- asgi_app = app.asgi_app
118
 
119
  if __name__ == '__main__':
120
  import uvicorn
121
  logger.info("Starting the application")
122
- uvicorn.run("app:asgi_app", host="0.0.0.0", port=8000, log_level="info")
 
1
  from flask import Flask, request, Response
2
+ from werkzeug.middleware.dispatcher import DispatcherMiddleware
3
+ from werkzeug.wrappers import Response as WerkzeugResponse
4
  import requests
5
  import json
6
  import os
 
88
  proxies = {'http': proxy, 'https': proxy} if proxy else None
89
  logger.info(f"Using proxy: {proxy}")
90
 
91
+ def generate():
92
  try:
93
  logger.info("Sending request to LLM API")
94
+ with requests.post(url, headers=headers, json=llm_payload, stream=True, proxies=proxies, allow_redirects=True) as response:
95
  logger.info(f"LLM API response status: {response.status_code}")
96
  logger.info(f"LLM API response headers: {dict(response.headers)}")
97
 
 
100
  logger.warning(f"Received 301 redirect. New location: {new_location}")
101
  # 如果需要,可以在这里处理重定向
102
 
103
+ for chunk in response.iter_content(chunk_size=1024):
104
  if chunk:
105
  logger.debug(f"Received chunk of size: {len(chunk)} bytes")
106
  yield chunk
 
116
  return "Welcome to the Chat Completion API", 200
117
 
118
  # 创建 ASGI 应用
119
+ asgi_app = DispatcherMiddleware(app)
120
 
121
  if __name__ == '__main__':
122
  import uvicorn
123
  logger.info("Starting the application")
124
+ uvicorn.run(asgi_app, host="0.0.0.0", port=8000, log_level="info")