ManTea commited on
Commit
194e0e0
·
1 Parent(s): efe307c

Add application file

Browse files
Dockerfile ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.9

# Run as an unprivileged user (required by Hugging Face Spaces).
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Bug fix: the file committed to this repository is "requirement.txt"
# (no trailing "s"); the original `COPY ./requirements.txt` would fail
# the image build with "file not found".
COPY --chown=user ./requirement.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md DELETED
@@ -1,10 +0,0 @@
1
- ---
2
- title: TestAPI
3
- emoji: ⚡
4
- colorFrom: yellow
5
- colorTo: blue
6
- sdk: docker
7
- pinned: false
8
- ---
9
-
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
config/firebase_cfg.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ import firebase_admin
2
+ from firebase_admin import credentials, storage
3
+
def init_firebase():
    """Initialise the Firebase Admin SDK exactly once.

    Safe to call repeatedly: when an app is already registered in
    ``firebase_admin._apps`` the function returns immediately.
    """
    if firebase_admin._apps:
        return  # already initialised — nothing to do

    service_account = credentials.Certificate("./cred.json")
    firebase_admin.initialize_app(
        service_account,
        {'storageBucket': 'paper-trans-3e6b8.appspot.com'},
    )
config/llm_model_cfg.py ADDED
File without changes
config/logging_cfg.py ADDED
File without changes
logs/chat.log ADDED
File without changes
logs/definition.log ADDED
File without changes
logs/http.log ADDED
File without changes
logs/trans.log ADDED
File without changes
main.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
from fastapi import FastAPI
from routes.upload_route import upload_router
from config.firebase_cfg import init_firebase

# Bug fix: init_firebase was imported but never called here; the app only
# worked because routes.upload_route calls it at import time.  Make the
# initialisation explicit at startup (the function is idempotent, so the
# duplicate call is harmless).
init_firebase()

# FastAPI application entry point, served by uvicorn (see Dockerfile CMD).
app = FastAPI()

app.include_router(upload_router)
middleware/__init__.py ADDED
File without changes
middleware/cors.py ADDED
File without changes
middleware/http.py ADDED
File without changes
models/api_model.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os

import google.generativeai as genai
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings

# Gemini API key.  Bug fix: the original unconditionally assigned '' to
# GOOGLE_API_KEY, clobbering any key supplied via the environment (e.g. a
# Space secret).  setdefault only fills in the empty default when nothing
# is configured, which preserves the original behaviour otherwise.
# (Duplicate langchain_google_genai imports were also removed.)
os.environ.setdefault("GOOGLE_API_KEY", "")
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

# Chat model used to answer questions.
model = ChatGoogleGenerativeAI(model="gemini-pro",
                               temperature=0.4)

# Embedding model used to vectorise documents and queries.
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
models/rag_chain/trans_chain.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.vectorstores import FAISS
2
+ from langchain.chains import RetrievalQA
3
+ from langchain.prompts import PromptTemplate
4
+ from models import api_model
5
+
def chain():
    """Build a RetrievalQA chain over the local FAISS vector store.

    Returns:
        A ``RetrievalQA`` chain using the "stuff" combine strategy; the
        retriever supplies the top-10 chunks (context capped at 1024 tokens)
        and only the answer text is returned (no source documents).
    """
    # Build the prompt for LangChain (ChatML-style system/user/assistant
    # markers; {context} and {question} are filled in by RetrievalQA).
    prompt = PromptTemplate(
        template = """<|im_start|>system\nYou are a chatbot that takes care of and helps customers find information. If the customer wants to ask for information, use the following information to answer the question (If you don't know the answer, say you don't know, don't try to make up the answer). If not, respond to customers like a regular chatbot.\n
        {context}<|im_end|>\n<|im_start|>user\n{question}<|im_end|>\n<|im_start|>assistant""",
        input_variables = ["context", "question"],
    )

    # Load the FAISS embedding index from disk.
    # NOTE(review): allow_dangerous_deserialization=True deserialises
    # pickled data — acceptable only because the index is produced locally;
    # confirm 'datasource/db_faiss' is never user-supplied.
    db = FAISS.load_local('datasource/db_faiss', api_model.embeddings, allow_dangerous_deserialization=True)

    llm_chain = RetrievalQA.from_chain_type(
        llm = api_model.model,
        chain_type= "stuff",
        retriever = db.as_retriever(search_kwargs = {"k":10}, max_tokens_limit=1024),
        return_source_documents = False,
        chain_type_kwargs= {'prompt': prompt}
    )
    return llm_chain
25
+
def chat(request):
    """Run *request* through the RetrievalQA chain and return the answer text."""
    qa_chain = chain()
    output = qa_chain.invoke(request)
    # The chain returns a dict; 'result' holds the generated answer.
    return str(output['result'])
30
+
31
+
32
+
33
+
34
+
35
+
models/trans_model.py ADDED
File without changes
requirement.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
fastapi
uvicorn[standard]
# Runtime dependencies imported by the application code but previously
# missing from this file:
firebase-admin
google-generativeai
langchain
langchain-community
langchain-google-genai
faiss-cpu
routes/base.py ADDED
File without changes
routes/chat_route.py ADDED
File without changes
routes/def_route.py ADDED
File without changes
routes/trans_route.py ADDED
File without changes
routes/upload_route.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, File, UploadFile, HTTPException
2
+ from firebase_admin import credentials, storage
3
+ import firebase_admin
4
+ from uuid import uuid4
5
+ from fastapi import APIRouter
6
+ from config.firebase_cfg import init_firebase
7
+
# Ensure the Firebase app exists before any route touches storage.bucket().
init_firebase()

# Router holding the upload/delete endpoints; mounted by main.app.
upload_router = APIRouter()
11
+
@upload_router.post("/upload_pdf")
async def upload_pdf(file: UploadFile = File(...)):
    """Store an uploaded PDF in Firebase Storage and return its public URL."""
    # Reject anything the client did not declare as a PDF.
    if file.content_type != "application/pdf":
        raise HTTPException(status_code=400, detail="must be PDF file")

    # Random name avoids collisions and hides the original filename.
    file_name = f"{uuid4()}.pdf"

    blob = storage.bucket().blob(file_name)
    blob.upload_from_file(file.file, content_type="application/pdf")
    blob.make_public()

    return {"file_url": blob.public_url}
27
+
# MIME types accepted for Word documents.  Bug fix: "application/docs" is
# not a real MIME type, so no genuine browser upload ever passed the
# original check; accept the actual .doc/.docx types while keeping the
# old value for backward compatibility with existing clients.
_WORD_MIME_TYPES = {
    "application/docs",
    "application/msword",
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
}

@upload_router.post("/upload_docs")
async def upload_docs(file: UploadFile = File(...)):
    """Store an uploaded Word document in Firebase Storage and return its public URL."""
    if file.content_type not in _WORD_MIME_TYPES:
        raise HTTPException(status_code=400, detail="must be DOCS file")

    # Keep the original ".docs" naming so downstream consumers are unaffected.
    file_name = f"{uuid4()}.docs"

    bucket = storage.bucket()
    blob = bucket.blob(file_name)

    # Store the client-reported content type instead of the bogus
    # hard-coded "application/docs".
    blob.upload_from_file(file.file, content_type=file.content_type)
    blob.make_public()

    file_url = blob.public_url
    return {"file_url": file_url}
43
+
@upload_router.post("/delete_file")
async def delete_file(file_name: str):
    """Delete *file_name* from Firebase Storage.

    Raises:
        HTTPException: 404 when the blob does not exist, 500 on any
        storage-layer failure.
    """
    try:
        bucket = storage.bucket()
        blob = bucket.blob(file_name)

        if not blob.exists():
            raise HTTPException(status_code=404, detail="File not found")

        blob.delete()

        return {"message": f"File {file_name} deleted successfully"}
    except HTTPException:
        # Bug fix: previously the 404 raised above fell into the generic
        # handler below and was surfaced to the client as a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
58
+
59
+ # url = "http://127.0.0.1:8000/delete_file"
60
+ # data = {"file_name": "example.pdf"}
61
+
62
+ # response = requests.post(url, json=data)
schema/schema.py ADDED
File without changes
server.py ADDED
@@ -0,0 +1 @@
 
 
# TODO: server entry point — currently unimplemented placeholder.