Small update, seems to be working (outside of one small change) out of the box
- app.py +13 -12
- files/Full_Pamplet.pdf +0 -0
- flagged/log.csv +4 -0
app.py
CHANGED
@@ -39,6 +39,10 @@ As a derivate work of [Llama-2-7b-chat](https://huggingface.co/meta-llama/Llama-
 this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat/blob/main/USE_POLICY.md).
 """
 
+SYSTEM_PROMPT = """<s>[INST] <<SYS>>
+
+<</SYS>>"""
+
 def read_pdf_to_documents(file_path):
     doc = fitz.open(file_path)
     documents = []
@@ -50,16 +54,17 @@ def read_pdf_to_documents(file_path):
 
 # Function to update the global system prompt
 def update_system_prompt(new_prompt):
-    global
-
+    global SYSTEM_PROMPT
+    SYSTEM_PROMPT = new_prompt
     query_wrapper_prompt = SimpleInputPrompt("{query_str} [/INST]")
     return "System prompt updated."
 
+@spaces.GPU(duration=240)
 def query_model(question):
     llm = HuggingFaceLLM(
         context_window=4096,
         max_new_tokens=256,
-        system_prompt=
+        system_prompt=SYSTEM_PROMPT,
         query_wrapper_prompt=query_wrapper_prompt,
         model=model,
         tokenizer=tokenizer
@@ -101,21 +106,17 @@ if torch.cuda.is_available():
     model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     tokenizer.use_default_system_prompt = False
-
-    system_prompt = """<s>[INST] <<SYS>>
-
-    <</SYS>>"""
     # Throw together the query wrapper
     query_wrapper_prompt = SimpleInputPrompt("{query_str} [/INST]")
     llm = HuggingFaceLLM(context_window=4096,
                          max_new_tokens=256,
-                         system_prompt=
+                         system_prompt=SYSTEM_PROMPT,
                          query_wrapper_prompt=query_wrapper_prompt,
                          model=model, tokenizer=tokenizer)
     embeddings = LangchainEmbedding(HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2"))
     service_context = ServiceContext.from_defaults(chunk_size=1024, llm=llm, embed_model=embeddings)
     set_global_service_context(service_context)
-    file_path = Path(
+    file_path = Path("files/Full Pamplet.pdf")
    documents = read_pdf_to_documents(file_path)
     index = VectorStoreIndex.from_documents(documents)
     query_engine = index.as_query_engine()
@@ -123,7 +124,7 @@ if torch.cuda.is_available():
 
 update_prompt_interface = gr.Interface(
     fn=update_system_prompt,
-    inputs=gr.Textbox(lines=5, placeholder="Enter the system prompt here...", label="System Prompt", value=
+    inputs=gr.Textbox(lines=5, placeholder="Enter the system prompt here...", label="System Prompt", value=SYSTEM_PROMPT),
     outputs=gr.Textbox(label="Status"),
     title="System Prompt Updater",
     description="Update the system prompt used for context."
@@ -142,7 +143,7 @@ query_interface = gr.Interface(
 combined_interface = gr.TabbedInterface([update_prompt_interface, query_interface], ["Update System Prompt", "Query Assistant"])
 
 # Launch the combined interface
-combined_interface.launch()
+#combined_interface.launch()
 
 """
 @spaces.GPU(duration=240)
@@ -248,4 +249,4 @@ with gr.Blocks(css="style.css") as demo:
     gr.Markdown(LICENSE)
 
 if __name__ == "__main__":
-    demo.queue(max_size=20).launch()
+    demo.queue(max_size=20).launch(debug=True)
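Note: the hunks above only show the first two lines of read_pdf_to_documents, so the loop that turns PDF pages into documents never appears in the diff. A minimal sketch of how a fitz-based loader like this typically continues, assuming one llama_index Document per page (the loop body, the page.get_text() call, and the Document(text=...) constructor are reconstructions, not the Space's actual code):

import fitz  # PyMuPDF
from pathlib import Path
from llama_index import Document  # assumption: legacy import path, matching the ServiceContext-era API used above

def read_pdf_to_documents(file_path):
    # Open the PDF and wrap each page's extracted text in a Document.
    doc = fitz.open(file_path)
    documents = []
    for page in doc:  # assumption: one Document per PDF page
        text = page.get_text()  # plain-text extraction per page
        documents.append(Document(text=text))
    doc.close()
    return documents

One detail worth flagging: the new file_path points at "files/Full Pamplet.pdf" (with a space), while the file added in this commit is files/Full_Pamplet.pdf (with an underscore), so the loader may fail to find the PDF unless one of the two names is changed.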
files/Full_Pamplet.pdf
ADDED
Binary file (570 kB)
flagged/log.csv
ADDED
@@ -0,0 +1,4 @@
+System Prompt,Status,flag,username,timestamp
+"<s>[INST] <<SYS>>
+
+<</SYS>>",,,,2024-08-15 07:51:11.445523
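These rows are consistent with gr.Interface's built-in flagging: clicking the Flag button appends the interface's inputs and outputs, plus flag/username/timestamp columns, to flagged/log.csv, which is why the header reads System Prompt,Status,flag,username,timestamp. A minimal sketch of the mechanism, assuming the Gradio 4.x allow_flagging argument (the function body and labels mirror the diff above):

import gradio as gr

def update_system_prompt(new_prompt):
    return "System prompt updated."

# Flagged examples are appended to flagged/log.csv as
# <inputs>,<outputs>,flag,username,timestamp by Gradio's default CSV logger.
demo = gr.Interface(
    fn=update_system_prompt,
    inputs=gr.Textbox(lines=5, label="System Prompt"),
    outputs=gr.Textbox(label="Status"),
    allow_flagging="manual",  # assumption: makes the default Flag button explicit
)

if __name__ == "__main__":
    demo.launch()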