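"""Gradio chat app that streams intermediate node updates from a LangGraph workflow.

Each node update is shown as a collapsible "thought" message (via ChatMessage
metadata) while the graph runs, followed by the workflow's final answer.
"""
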
import gradio as gr
import time
from gradio import ChatMessage
from langchain_core.runnables import RunnableConfig
from langchain_teddynote.messages import random_uuid
from langchain_core.messages import BaseMessage, HumanMessage
from pprint import pprint
from graph import app as workflow
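
# `graph.py` is not included in this view; `app` is assumed to be a compiled
# LangGraph graph whose state carries a "messages" list, which is what the
# streaming loop below relies on.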


def format_namespace(namespace):
    """Return the subgraph name a node belongs to, or "root graph" for the top level."""
    return namespace[-1].split(":")[0] if len(namespace) > 0 else "root graph"


def generate_response(message, history):
    """Stream the workflow and yield a growing list of ChatMessages to the UI."""
    inputs = {
        "messages": [HumanMessage(content=message)],
    }

    node_names = []
    response = []

    for namespace, chunk in workflow.stream(
        inputs, stream_mode="updates", subgraphs=True
    ):
        for node_name, node_chunk in chunk.items():
            # Filter by node name only when node_names is non-empty
            if len(node_names) > 0 and node_name not in node_names:
                continue

            # Mark the previous "thought" message as finished
            if len(response) > 0:
                response[-1].metadata["status"] = "done"

            # print("\n" + "=" * 50)
            formatted_namespace = format_namespace(namespace)
            if formatted_namespace == "root graph":
                print(f"🔄 Node: \033[1;36m{node_name}\033[0m 🔄")
                meta_title = f"🤖 `{node_name}`"
            else:
                print(
                    f"🔄 Node: \033[1;36m{node_name}\033[0m in [\033[1;33m{formatted_namespace}\033[0m] 🔄"
                )
                meta_title = f"🤖 `{node_name}` in `{formatted_namespace}`"

            # Show a pending "thought" bubble for the node that is about to run
            response.append(
                ChatMessage(
                    content="",
                    metadata={"title": meta_title, "status": "pending"},
                )
            )
            yield response
            print("- " * 25)

            # Print the node's chunk data
            out_str = []
            if isinstance(node_chunk, dict):
                for k, v in node_chunk.items():
                    if isinstance(v, BaseMessage):
                        v.pretty_print()
                        out_str.append(v.pretty_repr())
                    elif isinstance(v, list):
                        for list_item in v:
                            if isinstance(list_item, BaseMessage):
                                list_item.pretty_print()
                                out_str.append(list_item.pretty_repr())
                            else:
                                out_str.append(list_item)
                                print(list_item)
                    elif isinstance(v, dict):
                        for node_chunk_key, node_chunk_value in v.items():
                            out_str.append(f"{node_chunk_key}:\n{node_chunk_value}")
                            print(f"{node_chunk_key}:\n{node_chunk_value}")
                    else:
                        out_str.append(f"{k}:\n{v}")
                        print(f"\033[1;32m{k}\033[0m:\n{v}")
                response[-1].content = "\n".join(out_str)
                yield response
            else:
                if node_chunk is not None:
                    for item in node_chunk:
                        out_str.append(item)
                        print(item)
                    response[-1].content = "\n".join(out_str)
                    yield response
            yield response
            print("=" * 50)

    # Close the last "thought" and append the workflow's final answer
    response[-1].metadata["status"] = "done"
    response.append(ChatMessage(content=node_chunk["messages"][-1].content))
    yield response
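

# With type="messages", gr.ChatInterface accepts a generator that yields lists of
# ChatMessage objects; a "title" in a message's metadata renders it as a collapsible
# "thought" box, and "status" ("pending"/"done") controls its loading indicator.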
demo = gr.ChatInterface(
    generate_response,
    type="messages",
    title="Nested Thoughts Chat Interface",
    examples=[
        "Please give an analysis of the total workforce size of the FAANG companies in 2024, in Korean!"
    ],
)

if __name__ == "__main__":
    demo.launch(ssr_mode=False)