Update app.py via AI Editor
app.py (CHANGED)
@@ -38,9 +38,8 @@ shredded_documents = {}
 shredded_document = None
 generated_response = None
 
-# Streaming and cancel globals
 gemini_lock = Lock()
-stream_buffer = {"preview": ""}
+stream_buffer = {"preview": ""}
 stream_event = Event()
 
 def decode_document(decoded_bytes):
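For reference, the three globals this hunk touches are plain objects from Python's standard threading module; below is a minimal sketch of the coordination state the later hunks rely on (the names come from the diff, and the import is assumed to appear earlier in app.py):

from threading import Event, Lock

gemini_lock = Lock()              # only one Gemini generation may run at a time
stream_buffer = {"preview": ""}   # partial output the worker writes and the UI polls
stream_event = Event()            # set by the cancel button to stop the worker early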
@@ -92,7 +91,6 @@ def upload_to_gemini_file(decoded_bytes, filename):
     return None
 
 def gemini_generate_content_stream(prompt, file_id=None, chat_input=None, cancel_event=None):
-    # Streaming Gemini not officially supported, so simulate by splitting result into manageable chunks
     try:
         files = []
         if file_id:
@@ -107,7 +105,6 @@ def gemini_generate_content_stream(prompt, file_id=None, chat_input=None, cancel_event=None):
         content_list.append("\n\n")
         content_list.append(prompt)
         model = genai.GenerativeModel(GEMINI_MODEL)
-        # No streaming in SDK, so emulate
         response = model.generate_content(
             contents=content_list,
             generation_config=genai.types.GenerationConfig(
@@ -115,7 +112,7 @@ def gemini_generate_content_stream(prompt, file_id=None, chat_input=None, cancel_event=None):
             )
         )
         result = response.text if hasattr(response, "text") else str(response)
-        chunk_size = 400
+        chunk_size = 400
         for i in range(0, len(result), chunk_size):
             if cancel_event is not None and cancel_event.is_set():
                 logging.info("Gemini stream cancelled by user.")
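The comments removed in the previous two hunks described the workaround this hunk still keeps: the response is produced by a single generate_content call and then replayed in 400-character slices so the UI can show progress and honour cancellation (recent google-generativeai releases also accept stream=True on generate_content, but the commit keeps the chunked simulation). A stripped-down sketch of that loop, assuming the chunks are appended to the shared stream_buffer; whether app.py appends there or yields to the caller is not visible in this hunk:

import time
from threading import Event

stream_buffer = {"preview": ""}

def simulated_stream(result, cancel_event, chunk_size=400):
    # Replay an already-complete response in fixed-size slices.
    for i in range(0, len(result), chunk_size):
        if cancel_event is not None and cancel_event.is_set():
            break
        stream_buffer["preview"] += result[i:i + chunk_size]
        time.sleep(0.05)  # give the polling interval a chance to render

simulated_stream("x" * 1000, Event())
print(len(stream_buffer["preview"]))  # 1000, since nothing cancelled the loop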
@@ -151,6 +148,7 @@ def save_proposal_as_docx(proposal_text, base_filename):
 
 def process_document(action, selected_filename=None, chat_input=None, rfp_decoded_bytes=None, cancel_event=None, stream=True):
     global shredded_document, generated_response, stream_buffer
+
     logging.info(f"Process document called with action: {action}")
 
     doc_content = None
@@ -224,7 +222,6 @@ def process_document(action, selected_filename=None, chat_input=None, rfp_decoded_bytes=None, cancel_event=None, stream=True):
         return "No RFP/SOW/PWS/RFI document selected.", None, None, None
     rfp_filename = selected_filename
     rfp_fileid = uploaded_documents_fileid.get(selected_filename)
-    # Upload file to Gemini if not already uploaded (if rfp_decoded_bytes provided and no fileid)
     if not rfp_fileid and rfp_decoded_bytes is not None:
         try:
             fileid = upload_to_gemini_file(rfp_decoded_bytes, rfp_filename)
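Only a comment is dropped in this hunk; the surrounding code is a cache-then-upload pattern: reuse a stored Gemini file id when one exists, otherwise upload the decoded bytes once and remember the id. A minimal sketch under that assumption (ensure_gemini_file is a hypothetical helper; uploaded_documents_fileid and upload_to_gemini_file are the app's own names):

uploaded_documents_fileid = {}  # filename -> Gemini file id

def ensure_gemini_file(filename, decoded_bytes, upload_fn):
    # Return a cached file id, uploading only when we have raw bytes and no id yet.
    file_id = uploaded_documents_fileid.get(filename)
    if not file_id and decoded_bytes is not None:
        file_id = upload_fn(decoded_bytes, filename)  # e.g. upload_to_gemini_file
        if file_id:
            uploaded_documents_fileid[filename] = file_id
    return file_id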
@@ -569,7 +566,9 @@ app.layout = dbc.Container([
         State('select-document-dropdown', 'value'),
         State('select-proposal-dropdown', 'value'),
         State('select-generated-dropdown', 'value'),
-        Input('cancel-action-btn', 'n_clicks')
+        Input('cancel-action-btn', 'n_clicks'),
+        Input('stream-interval', 'n_intervals'),
+        State('stream-status', 'data')
     ],
     prevent_initial_call=True
 )
@@ -581,7 +580,8 @@ def master_callback(
     shredded_delete_clicks, shredded_doc_children,
     select_generated_value,
     chat_input, selected_filename, selected_proposal_dropdown, selected_generated_dropdown_state,
-    cancel_clicks
+    cancel_clicks,
+    stream_n_intervals, stream_status
 ):
     ctx = callback_context
     triggered_id = ctx.triggered[0]['prop_id'].split('.')[0] if ctx.triggered else None
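The two hunks above are the registration half of the change: the dcc.Interval tick and the dcc.Store state now feed the existing master callback instead of driving a separate one. A cut-down sketch of that wiring with only the streaming-related components (the real callback declares many more Outputs, Inputs and States; the interval period and initial store data here are assumptions):

import dash
from dash import Dash, Input, Output, State, callback_context, dcc, html

app = Dash(__name__)
app.layout = html.Div([
    html.Div(id='output-data-upload'),
    html.Button('Cancel', id='cancel-action-btn'),
    dcc.Interval(id='stream-interval', interval=1000, disabled=True),
    dcc.Store(id='stream-status', data={'streaming': False}),
])

@app.callback(
    [
        Output('output-data-upload', 'children'),
        Output('stream-status', 'data'),
        Output('stream-interval', 'disabled'),
    ],
    [
        Input('cancel-action-btn', 'n_clicks'),
        Input('stream-interval', 'n_intervals'),
        State('stream-status', 'data'),
    ],
    prevent_initial_call=True,
)
def master_callback(cancel_clicks, stream_n_intervals, stream_status):
    triggered_id = callback_context.triggered[0]['prop_id'].split('.')[0] if callback_context.triggered else None
    if triggered_id == 'stream-interval':
        # Poll shared state here; disable the interval once streaming is over.
        return dash.no_update, stream_status, not stream_status.get('streaming')
    return html.Div("Cancelled."), {'streaming': False}, True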
@@ -607,6 +607,33 @@
 
     global gemini_lock, stream_buffer, stream_event
 
+    if triggered_id == 'stream-interval':
+        if not stream_status or not stream_status.get('streaming'):
+            return dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, stream_status, True
+        preview = stream_buffer.get("preview", "")
+        still_streaming = gemini_lock.locked()
+        if not still_streaming:
+            stream_status['streaming'] = False
+            output_data_upload = dcc.Markdown(preview, style={"whiteSpace": "pre-wrap", "wordWrap": "break-word"})
+            doc_options = [{'label': fn, 'value': fn} for fn in uploaded_documents.keys()]
+            doc_value = selected_doc if selected_doc in uploaded_documents else (next(iter(uploaded_documents), None) if uploaded_documents else None)
+            shredded_doc_list_items = get_shredded_doc_list(shredded_documents)
+            uploaded_doc_list = get_uploaded_doc_list(uploaded_documents)
+            generated_doc_list = get_generated_doc_list(generated_documents)
+            generated_doc_options = [{'label': fn, 'value': fn} for fn in generated_documents.keys()]
+            generated_doc_value = select_generated_value if select_generated_value in generated_documents else (next(iter(generated_documents), None) if generated_documents else None)
+            generated_rfp_section = get_generated_rfp_download_section(generated_doc_value)
+            return (
+                shred_store, proposal_store, output_data_upload,
+                uploaded_doc_list, doc_options, doc_value,
+                shredded_doc_list_items,
+                generated_doc_list, generated_doc_options, generated_doc_value,
+                generated_rfp_section,
+                stream_status,
+                True
+            )
+        return dash.no_update, dash.no_update, dcc.Markdown(preview, style={"whiteSpace": "pre-wrap", "wordWrap": "break-word"}), dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, dash.no_update, stream_status, False
+
     if triggered_id == 'cancel-action-btn':
         stream_event.set()
         streaming = False
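The long return statements in the new stream-interval branch exist because a multi-output callback must return one value per declared Output; dash.no_update fills every slot the branch does not want to touch, so while streaming only the preview, the status store and the interval flag actually change. A small illustration of that padding idea for a hypothetical callback with five outputs:

import dash

def interval_tick(preview, still_streaming):
    # Outputs: (preview children, doc list, doc options, stream-status, interval-disabled)
    if still_streaming:
        # Leave everything except the preview alone and keep the interval running.
        return preview, dash.no_update, dash.no_update, dash.no_update, False
    # Finished: push the final text, touch nothing else here, stop polling.
    return preview, dash.no_update, dash.no_update, {'streaming': False}, True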
@@ -735,7 +762,6 @@ def master_callback(
 
     output_data_upload = html.Div("No action taken yet.", style={"wordWrap": "break-word"})
 
-    # Stream logic: only one at a time
     if triggered_id in ['shred-action-btn', 'proposal-action-btn']:
         got_lock = gemini_lock.acquire(blocking=False)
         if not got_lock:
@@ -752,7 +778,6 @@ def master_callback(
         stream_event.clear()
         stream_buffer["preview"] = ""
         streaming = True
-        # Launch thread and let interval update the preview
         def stream_gemini_thread(action, doc_value, chat_input, rfp_decoded_bytes):
             try:
                 process_document(action, doc_value, chat_input, rfp_decoded_bytes, cancel_event=stream_event, stream=True)
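The two hunks above again only drop comments; the logic they sit in is the worker pattern the whole change depends on: take gemini_lock non-blocking so a second click cannot start a parallel generation, clear the cancel event, reset the preview buffer, then run process_document in a background thread while the interval polls stream_buffer. A runnable sketch of that pattern with a stand-in job; releasing the lock in a finally block is an assumption, implied by the polling branch using gemini_lock.locked() to detect completion:

import time
from threading import Event, Lock, Thread

gemini_lock = Lock()
stream_event = Event()
stream_buffer = {"preview": ""}

def slow_job(cancel_event):
    # Stand-in for process_document(..., cancel_event=stream_event, stream=True).
    for i in range(5):
        if cancel_event.is_set():
            return
        stream_buffer["preview"] += f"chunk {i} "
        time.sleep(0.2)

def start_stream():
    if not gemini_lock.acquire(blocking=False):
        return False  # another generation is already running
    stream_event.clear()
    stream_buffer["preview"] = ""

    def worker():
        try:
            slow_job(cancel_event=stream_event)
        finally:
            gemini_lock.release()  # assumed: lets the poller see the lock go free

    Thread(target=worker, daemon=True).start()
    return True

if __name__ == "__main__":
    start_stream()
    time.sleep(1.5)
    print(stream_buffer["preview"])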
@@ -812,26 +837,6 @@ def master_callback(
         True
     )
 
-@app.callback(
-    Output('output-data-upload', 'children'),
-    Output('stream-status', 'data'),
-    Output('stream-interval', 'disabled'),
-    Input('stream-interval', 'n_intervals'),
-    State('stream-status', 'data'),
-    prevent_initial_call=True
-)
-def stream_output_callback(n_intervals, stream_status):
-    global stream_buffer, gemini_lock
-    if not stream_status.get('streaming'):
-        return dash.no_update, stream_status, True
-    preview = stream_buffer.get("preview", "")
-    # If lock is free, means operation done
-    still_streaming = gemini_lock.locked()
-    if not still_streaming:
-        stream_status['streaming'] = False
-        return dcc.Markdown(preview, style={"whiteSpace": "pre-wrap", "wordWrap": "break-word"}), stream_status, True
-    return dcc.Markdown(preview, style={"whiteSpace": "pre-wrap", "wordWrap": "break-word"}), stream_status, False
-
 if __name__ == '__main__':
     print("Starting the Dash application...")
     app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)
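The deleted stream_output_callback wrote to Output('output-data-upload', 'children'), which the master callback also writes, and Dash will not register two callbacks for the same Output unless the duplicate is explicitly allowed; that conflict is the likely motivation for folding the polling branch into the master callback, as the earlier hunks do. For comparison only, a keep-the-second-callback alternative would have to mark its outputs as allowed duplicates (Dash 2.9+), roughly like this (not what this commit does):

import dash
from dash import Dash, Input, Output, State, dcc, html

app = Dash(__name__)
app.layout = html.Div([
    html.Div(id='output-data-upload'),
    dcc.Interval(id='stream-interval', interval=1000),
    dcc.Store(id='stream-status', data={'streaming': False}),
])

@app.callback(
    Output('output-data-upload', 'children', allow_duplicate=True),
    Output('stream-status', 'data', allow_duplicate=True),
    Output('stream-interval', 'disabled', allow_duplicate=True),
    Input('stream-interval', 'n_intervals'),
    State('stream-status', 'data'),
    prevent_initial_call=True,
)
def poll_stream(n_intervals, stream_status):
    # A second writer for the same outputs, tolerated only via allow_duplicate.
    return dash.no_update, stream_status, not stream_status.get('streaming')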