File size: 2,673 Bytes
714be4e
47b5f0c
714be4e
57cab59
 
 
819bacd
 
47b5f0c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
819bacd
57cab59
47b5f0c
57cab59
 
 
 
819bacd
 
 
57cab59
 
819bacd
57cab59
 
 
 
 
 
 
 
 
47b5f0c
819bacd
dcb6c5f
819bacd
57cab59
 
 
 
 
819bacd
 
984d2f5
819bacd
714be4e
819bacd
 
 
984d2f5
819bacd
 
714be4e
 
 
 
 
 
 
 
819bacd
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import logging
from contextlib import asynccontextmanager

import uvicorn
from fastapi import APIRouter, FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from transformers import AutoModel, AutoModelForMaskedLM, AutoTokenizer, pipeline

from app.modules.clearVariables.routes.clearVariables_route import \
    router as clear_variables_routes
from app.modules.documentHandeler.routes.document_handeler_route import \
    router as upload_file_routes
from app.modules.querySearch.routes.querySearch_route import \
    router as query_search_routes


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Load the ML models once at startup and expose them as app state.

    The yielded mapping is merged into ``request.state`` by FastAPI for the
    lifetime of the application; the models are released on shutdown.
    """
    DENSE_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
    SPARSE_MODEL = "prithivida/Splade_PP_en_v1"
    QA_MODEL = "deepset/roberta-base-squad2"

    # Load order matches the original: dense, sparse, then the QA pipeline.
    state = {
        "dense_tokenizer": AutoTokenizer.from_pretrained(DENSE_MODEL),
        "dense_model": AutoModel.from_pretrained(DENSE_MODEL),
        "sparse_tokenizer": AutoTokenizer.from_pretrained(SPARSE_MODEL),
        "sparse_model": AutoModelForMaskedLM.from_pretrained(SPARSE_MODEL),
        "qa_pipeline": pipeline("question-answering", model=QA_MODEL),
    }
    yield state


app = FastAPI(lifespan=lifespan)

# Origins allowed by the CORS middleware. Each entry must be exactly
# "scheme://host[:port]" — browsers send that form in the Origin header and
# Starlette compares it literally. A bare host ("localhost:8000") or a URL
# with a path ("https://.../api") can never match, so those entries were
# dead weight and have been normalized/removed.
origins = [
    "http://localhost:8000",
    "http://localhost:3000",
    "https://your-space-name.hf.space",
    "https://abadesalex-docurag.hf.space",
]


app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,  # allow cookies/auth headers from the origins above
    allow_methods=["*"],
    allow_headers=["*"],
)

# All API endpoints are grouped under the /api prefix.
app_router = APIRouter(prefix="/api")
app_router.include_router(upload_file_routes, prefix="/document", tags=["document"])
app_router.include_router(query_search_routes, prefix="/query", tags=["query"])
app_router.include_router(clear_variables_routes, prefix="/clear", tags=["clear"])


@app_router.get("/")
async def root():
    """Simple liveness endpoint at the API root (``/api/``)."""
    payload = {"message": "Hello World"}
    return payload


# Serve the frontend's hashed build assets from the exported Next.js output.
static_assets = StaticFiles(directory="app/out/_next/static")
app.mount("/_next/static", static_assets, name="static")


@app.get("/")
def read_root():
    """Serve the pre-built frontend entry page at the site root."""
    return FileResponse("app/out/index.html")


# NOTE(review): @app.on_event is deprecated in FastAPI in favour of the
# lifespan handler — which this app already uses — so these two hooks could
# be folded into ``lifespan`` in a follow-up. They still fire as-is.
@app.on_event("startup")
async def startup_event():
    """Log (via the root logger) that the application has started."""
    logging.info("Application is starting up...")


@app.on_event("shutdown")
async def shutdown_event():
    """Log (via the root logger) that the application is shutting down."""
    logging.info("Application is shutting down...")


# Attach the /api router (document, query, clear endpoints) to the app.
app.include_router(app_router)