Nevidu committed
Commit 1bb785f · verified · 1 Parent(s): f09d8d2

Update app.py

Files changed (1)
  app.py +23 -10
app.py CHANGED
@@ -1,6 +1,8 @@
 import gradio as gr
 import numpy as np
 from sklearn.metrics.pairwise import cosine_similarity
+import subprocess
+
 # from sklearn.decomposition import PCA
 from langchain_community.llms import Ollama
 from langchain_chroma import Chroma
@@ -10,26 +12,38 @@ from langchain_community.document_loaders import DirectoryLoader, TextLoader, Py
 from langchain_experimental.text_splitter import SemanticChunker
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.embeddings.ollama import OllamaEmbeddings
+from langchain.embeddings import HuggingFaceEmbeddings
 
 from typing import List, Dict
 from langchain.docstore.document import Document
 
-from transformers import T5Tokenizer, T5ForConditionalGeneration
-
+from transformers import T5Tokenizer, T5ForConditionalGeneration, AutoTokenizer, AutoModelForCausalLM, pipeline
 tokenizer = T5Tokenizer.from_pretrained("Voicelab/vlt5-base-keywords")
 model = T5ForConditionalGeneration.from_pretrained("Voicelab/vlt5-base-keywords")
 
+model_name = "meta-llama/Llama-3.2-1B-Instruct"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
+
+generator = pipeline(
+    "text-generation",
+    model=model,
+    tokenizer=tokenizer,
+    max_new_tokens=500,  # Adjust as needed
+    temperature=0.5  # Adjust as needed
+)
+
 vectorstore = Chroma(
     # docs,
-    embedding_function=OllamaEmbeddings(model = "gemma:2b"),
+    embedding_function=HuggingFaceEmbeddings(model_name = "google/gemma-2b"),
     persist_directory="chroma_db"
 )
 
 print(vectorstore.similarity_search_with_score("Course Leader"))
 
-llm = Ollama(
-    model="llama3.2:3b"
-)
+# llm = Ollama(
+#     model="llama3.2:3b"
+# )
 
 def retrieve_relevant_chunks(
     vector_store: Chroma,
@@ -121,7 +135,7 @@ def main(query):
     print(f"Error: {str(e)}")
 
     formatted_prompt = f"""
-    You are an AI assistant. Your goal is to answer questions regarding student handbooks based on the following context provided. Make sure all the answers are within the given context:
+    You are an AI assistant. Your goal is to answer questions regarding degree information based on the following context provided. Make sure all the answers are within the given context and act like you are a representative of IIT so do not mention anthing for users to know that you are reading something:
    {prompt}
 
     Based on the above, answer the following question:
@@ -129,7 +143,7 @@ def main(query):
     Give the answer in a clear and concise manner
     """
 
-    response = llm.predict(formatted_prompt)
+    response = generator(formatted_prompt, return_full_text=False)
 
     return response
 
@@ -153,5 +167,4 @@ with gr.Blocks() as demo:
     login_btn.click(main, inputs=[query], outputs=answer)
 
 # demo.launch(share = True, auth=authenticate)
-demo.launch(share = True)
-
+demo.launch()
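
Note on the new generation path: the commit replaces the Ollama client with a Transformers text-generation pipeline. A pipeline call returns a list of dicts rather than a plain string, so the value returned to the Gradio Textbox is the raw list unless it is unwrapped first. A minimal sketch of that unwrapping, mirroring the settings in the diff above; the helper name generate_answer is illustrative and not part of the commit, and the Llama 3.2 checkpoint may require access approval on the Hub:

from transformers import pipeline

# Illustrative sketch, not part of the commit: build the same kind of
# text-generation pipeline and unwrap its output into a plain string.
generator = pipeline(
    "text-generation",
    model="meta-llama/Llama-3.2-1B-Instruct",  # may require an accepted licence / HF token
    max_new_tokens=500,
    temperature=0.5,
)

def generate_answer(formatted_prompt: str) -> str:
    # return_full_text=False drops the echoed prompt, but the pipeline still
    # returns a list like [{"generated_text": "..."}], so index the first candidate.
    outputs = generator(formatted_prompt, return_full_text=False)
    return outputs[0]["generated_text"].strip()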
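
Note on the embedding swap: the persisted Chroma store is now queried with HuggingFaceEmbeddings("google/gemma-2b") instead of OllamaEmbeddings("gemma:2b"). A Chroma collection is only meaningful when queried with the same embedding model (and vector dimensionality) used to build it, so an existing chroma_db indexed with the old embeddings would need to be rebuilt. A minimal re-indexing sketch under that assumption; the "data" directory and glob are hypothetical, not taken from the repo:

from langchain_chroma import Chroma
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_community.document_loaders import DirectoryLoader, TextLoader

# Hypothetical rebuild, not part of the commit: re-embed the source documents
# with the embedding model now used at query time so stored vectors and query
# vectors live in the same space.
embeddings = HuggingFaceEmbeddings(model_name="google/gemma-2b")
docs = DirectoryLoader("data", glob="**/*.txt", loader_cls=TextLoader).load()
vectorstore = Chroma.from_documents(docs, embeddings, persist_directory="chroma_db")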