change chain_type to rerank
app.py CHANGED

@@ -5,9 +5,8 @@ import streamlit as st
 from PyPDF2 import PdfReader
 from langchain.text_splitter import CharacterTextSplitter
 from langchain.chains.question_answering import load_qa_chain
-from langchain_huggingface import HuggingFaceEmbeddings
+from langchain_huggingface import HuggingFaceEmbeddings, HuggingFacePipeline
 from langchain_community.vectorstores import FAISS
-from langchain_community.llms import HuggingFacePipeline
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline


@@ -60,7 +59,7 @@ def setup_qa_chain():
         print(f"Error loading model: {e}")
         return
     llm = HuggingFacePipeline(pipeline=pipe)
-    qa_chain = load_qa_chain(llm, chain_type="
+    qa_chain = load_qa_chain(llm, chain_type="map_rerank")

 # Main page UI
 def main_page():
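For context, chain_type="map_rerank" makes load_qa_chain answer the question against each retrieved chunk separately, ask the model to score each answer, and return the highest-scoring one, instead of stuffing every chunk into a single prompt. Below is a minimal sketch of how the consolidated langchain_huggingface import and the new chain type fit together; the model name, sample text, and question are placeholder assumptions, not values taken from this Space.

# Minimal sketch, assuming a placeholder model; the Space's real model and prompts are not shown in this diff.
from langchain_huggingface import HuggingFaceEmbeddings, HuggingFacePipeline
from langchain_community.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain.text_splitter import CharacterTextSplitter
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_name = "distilgpt2"  # placeholder model, not the one used by the Space
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=128)
llm = HuggingFacePipeline(pipeline=pipe)

# Stand-in for the text the Space extracts from the uploaded PDF via PdfReader.
raw_text = "LangChain supports several QA chain types, including stuff, map_reduce, refine, and map_rerank."
chunks = CharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_text(raw_text)
db = FAISS.from_texts(chunks, HuggingFaceEmbeddings())

# map_rerank answers against each retrieved chunk, asks the model to score its own answer,
# and returns the best-scoring result.
qa_chain = load_qa_chain(llm, chain_type="map_rerank")
question = "Which chain types does LangChain support?"
docs = db.similarity_search(question, k=2)
result = qa_chain.invoke({"input_documents": docs, "question": question})
print(result["output_text"])

Note that map_rerank depends on the model following the chain's default answer-plus-score prompt; very small generation models can fail output parsing, so a stronger model or a custom prompt may be needed in practice.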