Spaces:
Sleeping
Sleeping
File size: 5,159 Bytes
971e38a 335600f 971e38a e3c954b 971e38a 335600f e3c954b 971e38a e3c954b 971e38a e3c954b 971e38a 335600f 971e38a e3c954b 971e38a e3c954b 971e38a 335600f 971e38a e3c954b 335600f e3c954b 971e38a 335600f e3c954b 971e38a e3c954b 335600f 971e38a 335600f e3c954b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 |
from __future__ import annotations
import os, json, asyncio, tempfile
import gradio as gr
from dotenv import load_dotenv
load_dotenv()
from genesis.pipeline import research_once
from genesis.providers import postprocess_summary, synthesize_tts
from genesis.graph import build_preview_graph_html
from genesis.graphdb import write_topic_and_papers
# Title and description rendered at the top of the Gradio UI.
# NOTE(review): the "β" in user-visible strings below looks like mojibake for
# an em dash ("—") introduced by extraction — confirm against the original
# source before changing, since these strings are shown to users as-is.
APP_TITLE = "GENESIS-AI β Synthetic Biology Deep Research (Safety-First)"
APP_DESC = (
    "High-level synthetic biology literature synthesis with citations. "
    "This app **never** produces operational protocols."
)
# Default post-processor engine for the dropdown; lowercased to match the
# dropdown choices ("none", "gemini", "deepseek").
DEFAULT_POST = os.getenv("POSTPROCESSOR_DEFAULT", "none").lower()
# Reranker model identifier forwarded to research_once().
DEFAULT_RERANK_MODEL = os.getenv("RERANK_MODEL", "mixedbread-ai/mxbai-rerank-large-v1")
async def run_pipeline(query: str, fast: bool, postprocessor: str, want_graph: bool, state: dict) -> tuple:
    """Run one deep-research pass and update the shared session state.

    Args:
        query: High-level research request typed by the user.
        fast: When True, the faster research model is requested.
        postprocessor: "none", or an engine name ("gemini"/"deepseek") used to
            polish the summary text ONLY — no lab steps are ever produced.
        want_graph: When True, also build the HTML citation-graph preview.
        state: Gradio per-session state dict; mutated in place so the TTS and
            graph-writer handlers can reuse the results.

    Returns:
        (report_markdown, citations_markdown, json_export, graph_html_or_None,
        state) — matching the Gradio output components in order.
    """
    out = await research_once(query, fast=fast, rerank_model=DEFAULT_RERANK_MODEL)
    # Optional post-processing (Gemini/DeepSeek) for polish ONLY (no lab steps)
    if postprocessor and postprocessor != "none":
        out["final_output"] = await postprocess_summary(
            base_text=out.get("final_output") or "",
            citations=out.get("citations", []),
            engine=postprocessor,
        )
    # Save into state for follow-ups (TTS, Graph Writer)
    state["final_text"] = out.get("final_output") or ""
    state["citations"] = out.get("citations", [])
    state["query"] = query
    # Optional graph preview
    graph_html = build_preview_graph_html(state["citations"]) if want_graph else None
    final_md = state["final_text"] or "_No output_"
    cites_list = [f"- [{c.get('title','link')}]({c.get('url','')})" for c in state["citations"]]
    # BUG FIX: the separator was a string literal broken across a physical
    # line (a syntax error); the intent is a newline-joined bullet list.
    cites_md = "\n".join(cites_list) if cites_list else "_None detected_"
    json_blob = json.dumps(out, indent=2)
    return final_md, cites_md, json_blob, graph_html, state
async def do_tts(state: dict) -> tuple:
    """Synthesize narration audio for the last research summary.

    Returns:
        (audio_path, status_message) for the Gradio Audio and Markdown
        outputs; audio_path is None whenever no narration was produced.
    """
    summary = (state or {}).get("final_text") or ""
    if not summary.strip():
        return None, "Nothing to narrate yet β run research first."
    try:
        audio_bytes, mime = await synthesize_tts(summary)
        if not audio_bytes:
            return None, "TTS not configured or failed. Ensure ELEVEN_LABS_API_KEY/VOICE_ID are set."
        # Pick the file extension from the MIME type so Gradio plays it back.
        extension = ".mp3" if "mpeg" in (mime or "") else ".wav"
        with tempfile.NamedTemporaryFile(delete=False, suffix=extension) as tmp:
            tmp.write(audio_bytes)
            audio_path = tmp.name
        return audio_path, "Narration ready."
    except Exception as e:
        # Best-effort: surface the failure as a status message, never raise.
        return None, f"TTS error: {e}"
async def do_graph_write(state: dict) -> str:
    """Persist the current topic and its cited papers into Neo4j.

    Returns:
        A human-readable status string for the Markdown output.
    """
    session = state or {}
    topic = session.get("query") or "Untitled Topic"
    citations = session.get("citations") or []
    if not citations:
        return "No citations present β run research first."
    try:
        counts = await write_topic_and_papers(topic, citations)
        return (
            f"Wrote to Neo4j: nodes={counts.get('nodes',0)}, "
            f"rels={counts.get('rels',0)}"
        )
    except Exception as e:
        # Best-effort: report the failure instead of raising into the UI.
        return f"Neo4j write error: {e}. Ensure NEO4J_* env vars are set."
# ---- Gradio UI -------------------------------------------------------------
# NOTE(review): the original file's indentation was lost in extraction; the
# nesting below (want_graph inside the second Row, the "go" button at Blocks
# level) is reconstructed — confirm against the original layout.
with gr.Blocks(theme=gr.themes.Soft(), fill_height=True) as demo:
    gr.Markdown(f"# {APP_TITLE}")
    gr.Markdown(APP_DESC)
    # Per-session state shared by the research, TTS and graph-writer handlers.
    state = gr.State({"final_text": "", "citations": [], "query": ""})
    with gr.Row():
        query = gr.Textbox(
            label="Your high-level research request",
            lines=5,
            placeholder=(
                "e.g., High-level synthesis of CRISPR base-editing trends in oncology (last 2 years). "
                "Summarize mechanisms, targets, ethics, and provide citations."
            ),
        )
    with gr.Row():
        fast = gr.Checkbox(label="Fast mode (o4-mini-deep-research)", value=False)
        post = gr.Dropdown(
            label="Post-processor",
            choices=["none", "gemini", "deepseek"],
            value=DEFAULT_POST,
            allow_custom_value=False,
        )
        want_graph = gr.Checkbox(label="Build graph preview", value=False)
    go = gr.Button("Run Deep Research", variant="primary")
    # One tab per output artifact of the pipeline.
    with gr.Tabs():
        with gr.Tab("Research Report"):
            report = gr.Markdown()
        with gr.Tab("Citations"):
            citations = gr.Markdown()
        with gr.Tab("JSON Export"):
            json_out = gr.Code(language="json")
        with gr.Tab("Graph Preview"):
            graph_html = gr.HTML()
        with gr.Tab("Graph Writer (Neo4j)"):
            write_btn = gr.Button("Write Topic & Papers to Neo4j", variant="secondary")
            write_status = gr.Markdown()
        with gr.Tab("Narration (ElevenLabs)"):
            tts_btn = gr.Button("Narrate Summary", variant="secondary")
            tts_audio = gr.Audio(label="Narration", autoplay=False)
            tts_status = gr.Markdown()
    # Wire the event handlers; the async handlers are passed directly.
    go.click(
        fn=run_pipeline,
        inputs=[query, fast, post, want_graph, state],
        outputs=[report, citations, json_out, graph_html, state],
    )
    tts_btn.click(fn=do_tts, inputs=[state], outputs=[tts_audio, tts_status])
    write_btn.click(fn=do_graph_write, inputs=[state], outputs=[write_status])
if __name__ == "__main__":
    # Launch the Gradio server when run as a script.
    # BUG FIX: removed a stray trailing " |" extraction artifact after the
    # call, which made this line a syntax error.
    demo.launch()