Update app.py
app.py CHANGED
@@ -14,7 +14,7 @@ import csv
 FEEDBACK_LOG = "feedback_log.csv"
 
 # 🧠 Ask Evo
-def ask_evo(question, option1, option2, history):
+def ask_evo(question, option1, option2, history, user_vote):
     options = [option1.strip(), option2.strip()]
     result = evo_chat_predict(history, question.strip(), options)
 
@@ -25,7 +25,8 @@ def ask_evo(question, option1, option2, history):
         "evo_answer": result["answer"],
         "confidence": result["confidence"],
         "reasoning": result["reasoning"],
-        "context": result["context_used"]
+        "context": result["context_used"],
+        "user_preference": user_vote or ""
     }
 
     # Log feedback
@@ -44,9 +45,7 @@ def ask_evo(question, option1, option2, history):
     stats = get_model_config()
     sys_stats = get_system_stats()
 
-    # ✅ FIXED PHASE ERROR HERE
     stats_text = f"Layers: {stats.get('num_layers', '?')} | Heads: {stats.get('num_heads', '?')} | FFN: {stats.get('ffn_dim', '?')} | Memory: {stats.get('memory_enabled', '?')} | Accuracy: {stats.get('accuracy', '?')}"
-
     sys_text = f"Device: {sys_stats['device']} | CPU: {sys_stats['cpu_usage_percent']}% | RAM: {sys_stats['memory_used_gb']}GB / {sys_stats['memory_total_gb']}GB | GPU: {sys_stats['gpu_name']} ({sys_stats['gpu_memory_used_gb']}GB / {sys_stats['gpu_memory_total_gb']}GB)"
 
     return evo_output, gpt_output, stats_text, sys_text, history
@@ -65,7 +64,7 @@ def export_feedback():
 
 # 🧹 Clear
 def clear_all():
-    return "", "", "", "", []
+    return "", "", "", "", [], None
 
 # 🖼️ UI
 with gr.Blocks(title="🧠 Evo – Reasoning AI") as demo:
@@ -87,8 +86,7 @@ with gr.Blocks(title="🧠 Evo – Reasoning AI") as demo:
     stats = gr.Textbox(label="📊 Evo Stats")
    system = gr.Textbox(label="🔵 Status")
 
-
-    evo_radio = gr.Radio(["Evo", "GPT"], label="🧠 Who was better?", info="Optional – fuels evolution")
+    evo_radio = gr.Radio(["Evo", "GPT"], label="🧠 Who was better?", info="Optional – fuels evolution")
 
     history = gr.State([])
 
@@ -100,9 +98,9 @@ with gr.Blocks(title="🧠 Evo – Reasoning AI") as demo:
 
     export_table = gr.Dataframe(label="📜 Conversation History")
 
-    ask_btn.click(fn=ask_evo, inputs=[question, option1, option2, history], outputs=[evo_ans, gpt_ans, stats, system, history])
+    ask_btn.click(fn=ask_evo, inputs=[question, option1, option2, history, evo_radio], outputs=[evo_ans, gpt_ans, stats, system, history])
     retrain_btn.click(fn=retrain_evo, inputs=[], outputs=[stats])
-    clear_btn.click(fn=clear_all, inputs=[], outputs=[question, option1, option2, evo_ans, gpt_ans, stats, system, history])
+    clear_btn.click(fn=clear_all, inputs=[], outputs=[question, option1, option2, evo_ans, gpt_ans, stats, system, history, evo_radio])
     export_btn.click(fn=export_feedback, inputs=[], outputs=[export_table])
 
 if __name__ == "__main__":
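For context, below is a minimal, self-contained sketch of the wiring this commit introduces: the "Who was better?" Radio is passed as an extra input to ask_evo, its value is stored as a new user_preference field in the FEEDBACK_LOG CSV, and clear_all returns None so the vote is reset along with the text fields. This is an illustration under assumptions, not the Space's actual code: evo_chat_predict is stubbed, the component set is reduced, and the feedback-writing helper is hypothetical.

```python
# Sketch of the feedback wiring added by this commit (names simplified; model call stubbed).
import csv
import os
import gradio as gr

FEEDBACK_LOG = "feedback_log.csv"

def evo_chat_predict(history, question, options):
    # Stub standing in for the Space's real model call (assumption, not the actual implementation).
    return {"answer": options[0] if options else "", "confidence": 0.5,
            "reasoning": "stub", "context_used": ""}

def ask_evo(question, option1, option2, history, user_vote):
    options = [option1.strip(), option2.strip()]
    result = evo_chat_predict(history, question.strip(), options)
    row = {
        "question": question.strip(),
        "evo_answer": result["answer"],
        "confidence": result["confidence"],
        "reasoning": result["reasoning"],
        "context": result["context_used"],
        "user_preference": user_vote or "",  # the new field introduced by this commit
    }
    # Append the feedback row to the CSV log, writing a header on first use.
    write_header = not os.path.exists(FEEDBACK_LOG)
    with open(FEEDBACK_LOG, "a", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=list(row.keys()))
        if write_header:
            writer.writeheader()
        writer.writerow(row)
    history = history + [(question, result["answer"])]
    return result["answer"], result["reasoning"], history

def clear_all():
    # Returning None for the Radio output clears the previous vote, mirroring the commit.
    return "", "", "", "", "", [], None

with gr.Blocks(title="Evo – feedback sketch") as demo:
    question = gr.Textbox(label="Question")
    option1 = gr.Textbox(label="Option 1")
    option2 = gr.Textbox(label="Option 2")
    evo_radio = gr.Radio(["Evo", "GPT"], label="Who was better?")
    answer = gr.Textbox(label="Evo Answer")
    reasoning = gr.Textbox(label="Reasoning")
    history = gr.State([])
    ask_btn = gr.Button("Ask")
    clear_btn = gr.Button("Clear")

    # The Radio is an extra *input* to ask_evo and an extra *output* of clear_all.
    ask_btn.click(fn=ask_evo,
                  inputs=[question, option1, option2, history, evo_radio],
                  outputs=[answer, reasoning, history])
    clear_btn.click(fn=clear_all, inputs=[],
                    outputs=[question, option1, option2, answer, reasoning, history, evo_radio])

if __name__ == "__main__":
    demo.launch()
```

Because the vote travels through the click handler rather than a separate submit button, it is captured at the moment the question is asked; an empty string is logged when no option was selected.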