File size: 5,103 Bytes
f59d2d3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 |
"""
This is main.py
"""
import os
from typing import List

from fastapi import FastAPI, File, UploadFile, Form, Request
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel
class AppData:
    """Mutable container for state shared across request handlers.

    A single module-level instance carries data between the upload,
    NER, and speaker-attribution steps of the pipeline.
    """

    def __init__(self) -> None:
        self.file_content: str = ""   # raw text of the uploaded file
        self.name_list: list = []     # names extracted by NER
        self.place: list = []         # places extracted by NER
        self.times: list = []         # times extracted by NER
        self.name_dic: dict = {}      # name -> aliases mapping
        self.end_output: list = []    # final speaker-attributed script
class ItemListRequest(BaseModel):
    """Request body for POST /kcsn.

    Each entry of ``nameList`` is a comma-separated group of aliases
    that the user confirmed as referring to one character.
    """
    nameList: List[str]
# Shared mutable state for the whole app (single-process only; concurrent
# requests overwrite each other's data).
app_data = AppData()

# Application setup
app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")
@app.get("/", response_class=HTMLResponse)
async def page_home(request: Request):
"""INDEX.HTML ํ๋ฉด"""
return templates.TemplateResponse("index.html", {"request": request})
@app.get("/put.html", response_class=HTMLResponse)
async def page_put(request: Request):
"""PUT.HTML ํ๋ฉด"""
return templates.TemplateResponse("put.html", {"request": request})
@app.get("/confirm.html", response_class=HTMLResponse)
async def page_confirm(request: Request):
"""confirm.HTML ํ๋ฉด"""
return templates.TemplateResponse("confirm.html",{
"request": request, "file_content": app_data.file_content})
@app.get("/result.html", response_class=HTMLResponse)
async def page_result(request: Request):
"""result.HTML ํ๋ฉด"""
return templates.TemplateResponse("result.html", {"request": request})
@app.get("/user.html", response_class=HTMLResponse)
async def page_user(request: Request):
"""user.HTML ํ๋ฉด"""
return templates.TemplateResponse("user.html", {"request": request})
@app.get("/final.html", response_class=HTMLResponse)
async def page_final(request: Request):
"""final.HTML ํ๋ฉด"""
return templates.TemplateResponse("final.html", {"request": request,
"output": app_data.end_output,
"place": app_data.place,
"time": app_data.times})
@app.post("/upload", response_class=HTMLResponse)
async def upload_file(file: UploadFile = File(...)):
"""ํ์ผ ์
๋ก๋ ๋ฐ ์ ์ฅ"""
with open("uploads/" + file.filename, "wb") as f:
f.write(file.file.read())
with open("uploads/" + file.filename, "r", encoding="utf-8") as f:
app_data.file_content = f.read()
return RedirectResponse(url="/put.html")
@app.post("/ners", response_class=JSONResponse)
async def ner_file():
"""์ ์ฅ๋ ํ์ผ์ NER ์์
์ ํด์ ํ์๋ ์ฅ์๋ฅผ ๊ตฌ๋ถ"""
from utils.load_model import load_ner
from utils.input_process import make_ner_input
from utils.ner_utils import make_name_list, show_name_list, combine_similar_names
content = app_data.file_content
_, ner_checkpoint = load_ner()
contents = make_ner_input(content)
name_list, times, places = make_name_list(contents, ner_checkpoint)
name_dic = show_name_list(name_list)
similar_name = combine_similar_names(name_dic)
result_list = [', '.join(names) for names, _ in similar_name.items()]
app_data.place = ' '.join(places)
app_data.times = ' '.join(times)
# JSONResponse๋ก ์๋ต
return JSONResponse(content={"itemList": result_list})
@app.post("/kcsn", response_class=JSONResponse)
async def kcsn_file(request_data: ItemListRequest):
"""์ฌ์ฉ์๊ฐ ์ฌ๋ ค์ค ํ์ผ์ ๋ํด์ KCSN ๋ชจ๋ธ ๋์"""
import torch
from utils.fs_utils import get_alias2id, find_speak, making_script
from utils.input_process import make_instance_list, input_data_loader
from utils.train_model import KCSN
from utils.ner_utils import convert_name2codename, convert_codename2name
content = app_data.file_content
name_list = request_data.nameList
name_dic = {}
for idx, name in enumerate(name_list):
name_dic[f'&C{idx:02d}&'] = name.split(', ')
content_re = convert_name2codename(name_dic, content)
# checkpoint = torch.load('./model/final.pth')
# model = checkpoint['model']
# model.to('cpu')
# tokenizer = checkpoint['tokenizer']
from utils.arguments import get_train_args
from transformers import AutoTokenizer
args = get_train_args()
path ='model/model.ckpt'
model = KCSN(args)
model.to('cpu')
checkpoint = torch.load(path)
tokenizer = AutoTokenizer.from_pretrained(args.bert_pretrained_dir)
model.load_state_dict(checkpoint['model'])
check_name = 'data/name.txt'
alias2id = get_alias2id(check_name)
instances, instance_num = make_instance_list(content_re)
inputs = input_data_loader(instances, alias2id)
output = find_speak(model, inputs, tokenizer, alias2id)
outputs = convert_codename2name(name_dic, output)
app_data.end_output = making_script(content, outputs, instance_num)
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="127.0.0.1", port=8000)
|