saritha committed (verified)
Commit 1a19f3c · 1 Parent(s): 8d84dff

Update app.py

Files changed (1): app.py (+3, -2)
app.py CHANGED
@@ -8,7 +8,6 @@ from langchain_google_genai import ChatGoogleGenerativeAI
 import google.generativeai as genai
 
 
-# Function for initialization
 async def initialize(file_path, question):
     genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
     model = genai.GenerativeModel('gemini-pro')
@@ -25,11 +24,13 @@ async def initialize(file_path, question):
         pages = pdf_loader.load_and_split()
         context = "\n".join(str(page.page_content) for page in pages[:30])
         stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
-        stuff_answer = await stuff_chain({"input_documents": pages, "question": question, "context": context}, return_only_outputs=True)
+        # Refactor the below line to make sure it returns an awaitable object
+        stuff_answer = stuff_chain({"input_documents": pages, "question": question, "context": context}, return_only_outputs=True)
         return stuff_answer['output_text']
     else:
         return "Error: Unable to process the document. Please ensure the PDF file is valid."
 
+
 # Define Gradio Interface
 input_file = gr.File(label="Upload PDF File")
 input_question = gr.Textbox(label="Ask about the document")
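
The new inline comment asks for the chain call to return an awaitable, but stuff_chain(...) as written runs synchronously inside the async initialize() coroutine and blocks the event loop while the model responds. Below is a minimal sketch of one way to satisfy that comment, assuming Python 3.9+ and the same chain object returned by load_qa_chain; the helper name answer() and its parameters are hypothetical, and newer LangChain releases also expose async entry points on chains (acall/ainvoke) that could be used instead if available in the installed version.

import asyncio

# Hypothetical helper: asyncio.to_thread() runs the synchronous chain call in a
# worker thread and returns a coroutine, so `await` stays valid and the event
# loop is not blocked while the answer is generated.
async def answer(stuff_chain, pages, question, context):
    stuff_answer = await asyncio.to_thread(
        stuff_chain,  # same synchronous call pattern as in this commit
        {"input_documents": pages, "question": question, "context": context},
        return_only_outputs=True,
    )
    return stuff_answer["output_text"]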