from fastapi import Depends, FastAPI, HTTPException, status, Request, Form, Cookie, Response, Header
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
import iCulture_semantic_search
import iCulture_wordcloud
import json
import uvicorn

app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")


# Input model shared by all endpoints below.
class Query(BaseModel):
    query: str
    top_k: int
    similarity: float
    start_date: str
    end_date: str
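
# Example request body accepted by every POST endpoint below
# (the values here are illustrative only):
# {
#     "query": "jazz festival",
#     "top_k": 5,
#     "similarity": 0.5,
#     "start_date": "2023-01-01",
#     "end_date": "2023-12-31"
# }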


# Output models: each endpoint returns its result as a JSON-encoded string.
class Semantic_search(BaseModel):
    semantic_search: str


class Tag_list(BaseModel):
    tag_list: str


class Wordcloud(BaseModel):
    wordcloud: str


@app.get("/", response_class=HTMLResponse)
async def root(request: Request, response: Response):
    # Serve the front-end page.
    return templates.TemplateResponse("index.html", {"request": request, "response": response})
- @app.post("/semantic_search", response_model=Semantic_search)
- async def semantic_search(query: Query):
- print('-'*50,'\n')
- print('【Request】')
- print(query,'\n')
- print('-'*50)
- query = query.dict()
- return Semantic_search(
- semantic_search=json.dumps(
- iCulture_semantic_search.search_event(query['query'], query['top_k'], query['similarity'], query['start_date'], query['end_date']))
- )
- @app.post("/tag_list", response_model=Tag_list)
- async def tag_list(query: Query):
- query = query.dict()
- return Tag_list(
- tag_list=json.dumps(
- iCulture_semantic_search.search_event_for_tag_list(
- query['query'], query['top_k'], query['similarity'], query['start_date'], query['end_date']
- ))
- )
- @app.post("/wordcloud", response_model=Wordcloud)
- async def wordcloud(query: Query):
- query = query.dict()
- return Wordcloud(
- wordcloud=json.dumps(iCulture_wordcloud.to_wordcloud(
- query['query'], query['top_k'], query['similarity'], query['start_date'], query['end_date']))
- )
- @app.post("/add_search")
- async def add_search(query: Query):
- query = query.dict()
- keywords = query['query'].split()
- ### return these three
- ret_keywords = []
- ret_labels = []
- ret_names = []
- #print("###############keywords:", keywords)
- '''
- with open("static/data.json", "r") as f:
- return_keywords = json.load(f)
- for keyword in keywords:
- if ret := return_keywords.get(keyword):
- return_keywords = ret
- else:
- ret = {}
- break
- ret_keywords = list(ret.keys())
- print("##########return", ret_keywords)
- '''
- ### write here and ret is list of recommend keywords
- ret_keywords = ["abc", "def", "ghi", "jkl", "nmo", "pqr"]
- ret_labels = ["1", "2", "3", "4", "5", "6"]
- ret_names = ["Tomoya", "Jared", "Doris", "Wizer", "Nina", "Morrison"]
- ###
- return {"add_keywords":ret_keywords, "add_labels":ret_labels, "add_names":ret_names}


# if __name__ == "__main__":
#     print('123')
#     uvicorn.run("setup:app", host="0.0.0.0", port=12345, reload=True)
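
# Minimal client sketch (commented out; not part of the app). It assumes the
# server was started with the uvicorn command above, so it listens on port
# 12345, and that the `requests` package is installed. Payload values are
# placeholders.
# import requests
# payload = {
#     "query": "jazz festival",
#     "top_k": 5,
#     "similarity": 0.5,
#     "start_date": "2023-01-01",
#     "end_date": "2023-12-31",
# }
# resp = requests.post("http://localhost:12345/semantic_search", json=payload)
# events = json.loads(resp.json()["semantic_search"])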