smgc committed on
Commit 9e063bf
1 Parent(s): ab1f865

Update app.py

Files changed (1)
  1. app.py +5 -22
app.py CHANGED
@@ -2,27 +2,17 @@ from fastapi import FastAPI, Request
 from fastapi.responses import StreamingResponse
 import httpx
 import json
-import random
 import os
-
 from helper import create_jwt, generate_random_tuple
 
 app = FastAPI()
 
 @app.post('/ai/v1/chat/completions')
 async def chat(request: Request):
-    """
-    Handle chat completion requests.
-    This function processes incoming POST requests to the '/ai/v1/chat/completions' endpoint.
-    It prepares the payload for the LLM API, generates a JWT for authentication,
-    and streams the response from the LLM API back to the client.
-    Returns:
-        StreamingResponse: A streaming response containing the LLM API's output.
-    Note:
-        - The function uses environment variables for proxy configuration.
-        - It generates random GitHub username and Zed user ID for each request.
-        - The LLM model defaults to "claude-3-5-sonnet-20240620" if not specified.
-    """
+    # Generate JWT token
+    github_username, user_id = generate_random_tuple()
+    jwt_token = create_jwt(github_username, user_id)
+
     # Get the payload from the request
     payload = await request.json()
 
@@ -45,14 +35,11 @@ async def chat(request: Request):
         }
     }
 
-    github_username, zed_user_id = generate_random_tuple()
-    jwt = create_jwt(github_username, zed_user_id)
-
     headers = {
         'Host': 'llm.zed.dev',
         'accept': '*/*',
         'content-type': 'application/json',
-        'authorization': f'Bearer {jwt}',
+        'authorization': f'Bearer {jwt_token}',  # Use the generated JWT token
         'user-agent': 'Zed/0.149.3 (macos; aarch64)'
     }
 
@@ -68,10 +55,6 @@ async def chat(request: Request):
 
     return StreamingResponse(generate(), media_type='application/octet-stream')
 
-@app.get("/")
-async def root():
-    return {"message": "Welcome to the AI Chat Completions API"}
-
 if __name__ == '__main__':
     import uvicorn
     uvicorn.run(app, host="0.0.0.0", port=8000)
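For context, the updated handler now mints the JWT before reading the request body and forwards it in the Authorization header. A minimal sketch of calling the endpoint locally is shown below; the payload shape and the default model name are taken from the removed docstring and are assumptions about the forwarded request, not something this diff specifies.

# Minimal sketch of a client call against this proxy, assuming it is running
# locally on the port from the __main__ block; the exact request schema is not
# part of this diff, so the payload below is illustrative only.
import httpx

payload = {
    "model": "claude-3-5-sonnet-20240620",  # default named in the removed docstring
    "messages": [{"role": "user", "content": "Hello"}],
}

with httpx.stream(
    "POST",
    "http://localhost:8000/ai/v1/chat/completions",
    json=payload,
    timeout=None,
) as response:
    # The endpoint streams raw bytes back (media_type='application/octet-stream')
    for chunk in response.iter_bytes():
        print(chunk.decode(errors="replace"), end="")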