Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -45,7 +45,12 @@ if not HF_TOKEN:
|
|
45 |
|
46 |
# --- UI Settings ---
|
47 |
TITLE = "<h1 style='text-align:center; margin-bottom: 20px;'>Local RAG Llama 3.1 8B</h1>"
|
48 |
-
|
|
|
|
|
|
|
|
|
|
|
49 |
|
50 |
CSS = """
|
51 |
.upload-section {
|
@@ -222,21 +227,14 @@ def create_or_update_index(files, request: gr.Request):
|
|
222 |
}
|
223 |
|
224 |
node_parser = HierarchicalNodeParser.from_defaults(
|
225 |
-
chunk_sizes=[2048, 512, 128],
|
226 |
-
chunk_overlap=20
|
227 |
)
|
228 |
logger.info(f"Parsing {len(new_documents)} documents into hierarchical nodes")
|
229 |
new_nodes = node_parser.get_nodes_from_documents(new_documents)
|
230 |
new_leaf_nodes = get_leaf_nodes(new_nodes)
|
231 |
new_root_nodes = get_root_nodes(new_nodes)
|
232 |
logger.info(f"Generated {len(new_nodes)} total nodes ({len(new_root_nodes)} root, {len(new_leaf_nodes)} leaf)")
|
233 |
-
node_ancestry = {}
|
234 |
-
for node in new_nodes:
|
235 |
-
if hasattr(node, 'metadata') and 'file_name' in node.metadata:
|
236 |
-
file_origin = node.metadata['file_name']
|
237 |
-
if file_origin not in node_ancestry:
|
238 |
-
node_ancestry[file_origin] = 0
|
239 |
-
node_ancestry[file_origin] += 1
|
240 |
|
241 |
if os.path.exists(save_dir):
|
242 |
logger.info(f"Loading existing index from {save_dir}")
|
@@ -411,9 +409,11 @@ def stream_chat(
|
|
411 |
|
412 |
def create_demo():
|
413 |
with gr.Blocks(css=CSS, theme=gr.themes.Soft()) as demo:
|
414 |
-
# Title
|
415 |
gr.HTML(TITLE)
|
416 |
-
|
|
|
|
|
417 |
with gr.Row(elem_classes="main-container"):
|
418 |
with gr.Column(elem_classes="upload-section"):
|
419 |
file_upload = gr.File(
|
@@ -457,7 +457,7 @@ def create_demo():
|
|
457 |
|
458 |
with gr.Accordion("Advanced Settings", open=False):
|
459 |
system_prompt = gr.Textbox(
|
460 |
-
value="You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside <think> </think> tags, and then provide your solution or response to the problem. As a knowledgeable assistant, provide detailed answers using the relevant information from all uploaded documents.",
|
461 |
label="System Prompt",
|
462 |
lines=3
|
463 |
)
|
@@ -474,7 +474,7 @@ def create_demo():
|
|
474 |
minimum=128,
|
475 |
maximum=8192,
|
476 |
step=64,
|
477 |
-
value=
|
478 |
label="Max New Tokens",
|
479 |
)
|
480 |
top_p = gr.Slider(
|
|
|
45 |
|
46 |
# --- UI Settings ---
|
47 |
TITLE = "<h1 style='text-align:center; margin-bottom: 20px;'>Local RAG Llama 3.1 8B</h1>"
|
48 |
+
DISCORD_BADGE = """<p style="text-align:center; margin-top: -10px;">
|
49 |
+
<a href="https://discord.gg/openfreeai" target="_blank">
|
50 |
+
<img src="https://img.shields.io/static/v1?label=Discord&message=Openfree%20AI&color=%230000ff&labelColor=%23800080&logo=discord&logoColor=white&style=for-the-badge" alt="badge">
|
51 |
+
</a>
|
52 |
+
</p>
|
53 |
+
"""
|
54 |
|
55 |
CSS = """
|
56 |
.upload-section {
|
|
|
227 |
}
|
228 |
|
229 |
node_parser = HierarchicalNodeParser.from_defaults(
|
230 |
+
chunk_sizes=[2048, 512, 128],
|
231 |
+
chunk_overlap=20
|
232 |
)
|
233 |
logger.info(f"Parsing {len(new_documents)} documents into hierarchical nodes")
|
234 |
new_nodes = node_parser.get_nodes_from_documents(new_documents)
|
235 |
new_leaf_nodes = get_leaf_nodes(new_nodes)
|
236 |
new_root_nodes = get_root_nodes(new_nodes)
|
237 |
logger.info(f"Generated {len(new_nodes)} total nodes ({len(new_root_nodes)} root, {len(new_leaf_nodes)} leaf)")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
238 |
|
239 |
if os.path.exists(save_dir):
|
240 |
logger.info(f"Loading existing index from {save_dir}")
|
|
|
409 |
|
410 |
def create_demo():
|
411 |
with gr.Blocks(css=CSS, theme=gr.themes.Soft()) as demo:
|
412 |
+
# Title
|
413 |
gr.HTML(TITLE)
|
414 |
+
# Discord badge immediately under the title
|
415 |
+
gr.HTML(DISCORD_BADGE)
|
416 |
+
|
417 |
with gr.Row(elem_classes="main-container"):
|
418 |
with gr.Column(elem_classes="upload-section"):
|
419 |
file_upload = gr.File(
|
|
|
457 |
|
458 |
with gr.Accordion("Advanced Settings", open=False):
|
459 |
system_prompt = gr.Textbox(
|
460 |
+
value="You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside <think> </think> tags, and then provide your solution or response to the problem. As a knowledgeable assistant, provide detailed answers using the relevant information from all uploaded documents.",
|
461 |
label="System Prompt",
|
462 |
lines=3
|
463 |
)
|
|
|
474 |
minimum=128,
|
475 |
maximum=8192,
|
476 |
step=64,
|
477 |
+
value=1024,
|
478 |
label="Max New Tokens",
|
479 |
)
|
480 |
top_p = gr.Slider(
|