sanbatte committed
Commit 781e8b6
1 Parent(s): c7d3bf4

API with Gradio

README.md CHANGED
@@ -1,12 +1,12 @@
 ---
-title: Real State Api G
-emoji: 🦀
-colorFrom: red
+title: Real State Api
+emoji: 🏢
+colorFrom: yellow
 colorTo: green
-sdk: gradio
-sdk_version: 4.15.0
-app_file: app.py
+sdk: docker
 pinned: false
 ---
 
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# D360 Challenge
+
+
app/agents/__init__.py ADDED
File without changes
app/agents/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (155 Bytes).
 
app/agents/__pycache__/real_state_agent.cpython-310.pyc ADDED
Binary file (596 Bytes).
 
app/agents/real_state_agent.py ADDED
@@ -0,0 +1,16 @@
+from langchain.agents.agent_types import AgentType
+from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent
+from langchain_openai import ChatOpenAI
+from app.utils.data_preparation import load_data
+
+
+df = load_data()
+
+
+real_state_agent = create_pandas_dataframe_agent(
+    llm=ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613"),
+    df=df,
+    # prefix = prefix,
+    verbose=True,
+    agent_type=AgentType.OPENAI_FUNCTIONS,
+)
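For reference, a minimal sketch of exercising this agent on its own, assuming `OPENAI_API_KEY` is exported and `app/data/listings.csv` is in place; the question text is only illustrative:

```python
# Minimal sketch: query the pandas dataframe agent directly.
# Assumes OPENAI_API_KEY is set and app/data/listings.csv exists; the question is illustrative.
import os

from app.agents.real_state_agent import real_state_agent

assert os.getenv("OPENAI_API_KEY"), "export OPENAI_API_KEY before running"

# The agent lets the LLM inspect the listings dataframe and answer questions about it.
answer = real_state_agent.run("How many listings are in the dataset?")
print(answer)
```

With `AgentType.OPENAI_FUNCTIONS` the model issues pandas operations as tool calls, and `verbose=True` prints those intermediate steps to the console.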
app/data/listings.csv ADDED
The diff for this file is too large to render.
 
app/models/__init__.py ADDED
File without changes
app/models/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (155 Bytes).
 
app/models/__pycache__/prediction_models.cpython-310.pyc ADDED
Binary file (546 Bytes).
 
app/models/prediction_models.py ADDED
@@ -0,0 +1,9 @@
+from pydantic import BaseModel
+
+
+class PredictionRequest(BaseModel):
+    question: str
+
+
+class PredictionResponse(BaseModel):
+    response: str
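A small sketch of the request/response contract these models define (Pydantic 2 syntax, matching the pinned `pydantic==2.5.3`); the example values are illustrative:

```python
# Sketch of the request/response contract defined above (Pydantic 2 API).
from app.models.prediction_models import PredictionRequest, PredictionResponse

req = PredictionRequest(question="What is the average price per night?")
print(req.model_dump())   # {'question': 'What is the average price per night?'}

res = PredictionResponse(response="The average price is ...")
print(res.model_dump())   # {'response': 'The average price is ...'}
```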
app/routes/__init__.py ADDED
File without changes
app/routes/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (155 Bytes).
 
app/routes/__pycache__/home.cpython-310.pyc ADDED
Binary file (384 Bytes).
 
app/routes/__pycache__/prediction.cpython-310.pyc ADDED
Binary file (734 Bytes).
 
app/routes/home.py ADDED
@@ -0,0 +1,8 @@
+from fastapi import APIRouter
+
+router = APIRouter()
+
+
+@router.get("/")
+def read_root():
+    return {"message": "D360 Challenge by Santiago Battezzati"}
app/routes/prediction.py ADDED
@@ -0,0 +1,24 @@
+from fastapi import APIRouter
+from app.models.prediction_models import PredictionRequest, PredictionResponse
+from typing import List
+from app.agents.real_state_agent import real_state_agent
+
+
+import os
+import openai
+
+openai.api_key = os.getenv("OPENAI_API_KEY")
+
+
+router = APIRouter()
+
+
+## MODIFY THIS:
+@router.post("/", response_model=List[PredictionResponse])
+def predict(request: PredictionRequest):
+    response = real_state_agent.run(request.question)
+
+    # check the type: this is still a list, but I now said I want a string response:
+    response_data = [{"response": response}]
+
+    return response_data
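A hedged example of calling this route once the app is being served (for example with Uvicorn); the host, port, and question are assumptions, and the `requests` library used here is not pinned in `requirements.txt`:

```python
# Example call to the prediction route (assumes the API is served on localhost:8000
# and OPENAI_API_KEY is configured on the server; the question is illustrative).
import requests

resp = requests.post(
    "http://localhost:8000/predict/",
    json={"question": "Which neighbourhood has the most listings?"},
)
resp.raise_for_status()
print(resp.json())  # a one-element list, e.g. [{"response": "..."}]
```

As the inline comment in the route notes, the endpoint returns a single-item list only to satisfy `response_model=List[PredictionResponse]`; declaring `response_model=PredictionResponse` and returning the object directly would give a plain response instead.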
app/utils/__init__.py ADDED
File without changes
app/utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (154 Bytes).
 
app/utils/__pycache__/data_preparation.cpython-310.pyc ADDED
Binary file (336 Bytes).
 
app/utils/__pycache__/validations.cpython-310.pyc ADDED
Binary file (734 Bytes).
 
app/utils/data_preparation.py ADDED
@@ -0,0 +1,6 @@
+import pandas as pd
+
+
+def load_data():
+    df = pd.read_csv("app/data/listings.csv")
+    return df
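A quick sanity check for the loader, assuming it is run from the repository root so the relative path resolves:

```python
# Quick sanity check for load_data (run from the repo root so app/data/listings.csv resolves).
from app.utils.data_preparation import load_data

df = load_data()
print(df.shape)             # (n_rows, n_columns) of the listings table
print(df.columns.tolist())  # column names the agent can reason over
```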
main.py ADDED
@@ -0,0 +1,25 @@
+from fastapi import FastAPI
+from app.routes import prediction
+from app.routes.prediction import predict
+
+from app.routes import home
+import gradio as gr
+
+
+app = FastAPI()
+
+
+app.include_router(
+    home.router,
+    tags=["home"],
+)
+
+
+app.include_router(
+    prediction.router,
+    prefix="/predict",
+    tags=["prediction"],
+)
+
+iface = gr.Interface(fn=predict, inputs="text", outputs="text")
+iface.launch()
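Two caveats in the committed `main.py`: Gradio passes `fn` a plain string while `predict()` expects a `PredictionRequest`, and `iface.launch()` blocks at import time, which gets in the way of serving the FastAPI routes from the same process; `gradio` is also not pinned in `requirements.txt`. A hedged sketch of one way to wire the two together (the `ask` helper, the `/ui` path, and port 7860 are illustrative choices, not part of the commit):

```python
# Hedged sketch, not the committed code: adapt predict() to Gradio's text interface and
# mount the UI on the FastAPI app so one Uvicorn process serves both.
import gradio as gr
import uvicorn
from fastapi import FastAPI

from app.models.prediction_models import PredictionRequest
from app.routes import home, prediction
from app.routes.prediction import predict

app = FastAPI()
app.include_router(home.router, tags=["home"])
app.include_router(prediction.router, prefix="/predict", tags=["prediction"])


def ask(question: str) -> str:
    # Gradio supplies a plain string; wrap it in the request model the route expects.
    return predict(PredictionRequest(question=question))[0]["response"]


iface = gr.Interface(fn=ask, inputs="text", outputs="text")
app = gr.mount_gradio_app(app, iface, path="/ui")  # UI at /ui; API stays at / and /predict

if __name__ == "__main__":
    # 7860 is the port Hugging Face Spaces expects a Docker app to listen on by default.
    uvicorn.run(app, host="0.0.0.0", port=7860)
```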
requirements.txt ADDED
@@ -0,0 +1,17 @@
+fastapi==0.108.0
+langchain==0.1.1
+langchain-experimental==0.0.49
+langchain-openai==0.0.3
+openai==1.8.0
+pandas==2.1.4
+pydantic==2.5.3
+pydantic_core==2.14.6
+#jupyterlab==4.0.10
+tabulate==0.9.0
+uvicorn==0.25.0
+
+
+###
+## add openai and langchain and langchain-experimental...
+## tabulate...
+