saritha committed on
Commit
638394b
·
verified ·
1 Parent(s): 93c72f6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -12
app.py CHANGED
@@ -1,6 +1,5 @@
1
  import os
2
  from langchain_core.prompts import PromptTemplate
3
- from langchain.chains.question_answering import load_qa_chain
4
  from langchain_community.output_parsers.rail_parser import GuardrailsOutputParser
5
  from langchain_community.document_loaders import PyPDFLoader
6
  import google.generativeai as genai
@@ -42,19 +41,13 @@ def initialize(pdf_file, question):
42
  # Load the GeminiPro model
43
  model = genai.GenerativeModel('gemini-pro')
44
 
45
- # Debugging: Print object type and attributes
46
- print(type(model))
47
- print(dir(model)) # List attributes and methods
48
 
49
- # Check if the model has a 'generate' method (or similar)
50
- if not hasattr(model, 'generate'):
51
- raise Exception("Your LLM object might not have a text generation method!")
52
 
53
- stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
54
- stuff_answer = stuff_chain(input_data={"context": processed_context, "question": question}, return_only_outputs=True)
55
-
56
- # Extract the answer
57
- generated_answer = stuff_answer['output_text']
58
 
59
  return generated_answer
60
  else:
 
1
  import os
2
  from langchain_core.prompts import PromptTemplate
 
3
  from langchain_community.output_parsers.rail_parser import GuardrailsOutputParser
4
  from langchain_community.document_loaders import PyPDFLoader
5
  import google.generativeai as genai
 
41
  # Load the GeminiPro model
42
  model = genai.GenerativeModel('gemini-pro')
43
 
44
+ # ... rest of your code for processing context and question
 
 
45
 
46
+ # Generate answer using GeminiPro's predict method (replace with the appropriate method)
47
+ generated_answer = model.predict(inputs=prompt) # Assuming a 'predict' method
 
48
 
49
+ # Extract the answer (parse the output from 'predict')
50
+ # ... (implementation depends on the model's output format)
 
 
 
51
 
52
  return generated_answer
53
  else: