random2222 committed on
Commit 0fed33e · verified · 1 Parent(s): 0a9d72e

Update app.py

Files changed (1)
  1. app.py +10 -16
app.py CHANGED
@@ -5,13 +5,7 @@ from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain_community.document_loaders import PyMuPDFLoader
 from langchain_text_splitters import CharacterTextSplitter
 from langchain.chains import RetrievalQA
-from langchain_huggingface import HuggingFaceEndpoint  # Updated import
-from huggingface_hub import login
-
-# Authentication
-if not os.environ.get('HF_TOKEN'):
-    raise ValueError("❌ Add HF_TOKEN in Space secrets!")
-login(token=os.environ.get('HF_TOKEN'))
+from transformers import pipeline  # Local model execution
 
 def create_qa_system():
     try:
@@ -40,19 +34,19 @@ def create_qa_system():
         # Build vector store
         db = FAISS.from_documents(texts, embeddings)
 
-        # Initialize LLM with corrected parameters
-        llm = HuggingFaceEndpoint(
-            endpoint_url="https://api-inference.huggingface.co/models/google/flan-t5-small",
-            task="text2text-generation",
-            temperature=0.2,  # Direct parameter
-            max_new_tokens=128,  # Correct parameter name
-            huggingfacehub_api_token=os.environ.get('HF_TOKEN')
+        # Local model pipeline
+        qa_pipeline = pipeline(
+            "text2text-generation",
+            model="google/flan-t5-small",  # Runs locally
+            device=-1,  # Use CPU
+            max_length=128,
+            temperature=0.2
         )
 
         return RetrievalQA.from_chain_type(
-            llm=llm,
+            llm=qa_pipeline,
             chain_type="stuff",
-            retriever=db.as_retriever(search_kwargs={"k": 2}))
+            retriever=db.as_retriever(search_kwargs={"k": 2})
     except Exception as e:
         raise gr.Error(f"Initialization failed: {str(e)}")
 
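
For context, a minimal sketch of how the new local pipeline is typically handed to RetrievalQA: LangChain generally expects the raw transformers pipeline to be wrapped in a HuggingFacePipeline LLM before being passed as llm. The wrapper import, the variable names, and the reuse of db from create_qa_system() are assumptions for illustration, not part of this commit.

# Sketch only; assumes the HuggingFacePipeline wrapper from langchain_community
# and that db is the FAISS vector store built earlier in create_qa_system().
from transformers import pipeline
from langchain_community.llms import HuggingFacePipeline
from langchain.chains import RetrievalQA

qa_pipeline = pipeline(
    "text2text-generation",
    model="google/flan-t5-small",  # small seq2seq model, fine on CPU
    device=-1,                     # -1 selects CPU
    max_length=128,
)
llm = HuggingFacePipeline(pipeline=qa_pipeline)  # wrap for LangChain

qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=db.as_retriever(search_kwargs={"k": 2}),
)  # closing parenthesis completes the from_chain_type call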