from langchain_core.tools import tool
from langgraph.graph import StateGraph, START, MessagesState
from langgraph.prebuilt import tools_condition, ToolNode
from langchain_groq import ChatGroq
from langchain_core.messages import HumanMessage, SystemMessage
import math
import os
from dotenv import load_dotenv
load_dotenv()
groq_api_key = os.getenv("GROQ_API_KEY")
serpapi_api_key = os.getenv("SERPAPI_API_KEY")
# -------------------------
# Tools
# -------------------------
@tool
def add(a: float, b: float) -> float:
    """Adds two numbers and returns the sum."""
    return a + b


@tool
def subtract(a: float, b: float) -> float:
    """Subtracts second number from first and returns the difference."""
    return a - b


@tool
def multiply(a: float, b: float) -> float:
    """Multiplies two numbers and returns the product."""
    return a * b


@tool
def divide(a: float, b: float) -> float:
    """Divides first number by second and returns the quotient. Returns infinity if divisor is zero."""
    if b == 0:
        return float('inf')
    return a / b


@tool
def modulus(a: int, b: int) -> int:
    """Returns the modulus (remainder) of a divided by b."""
    return a % b


@tool
def python_eval(code: str) -> str:
    """Evaluates a Python expression and returns the result or error message."""
    try:
        result = eval(code)
        return f"Result: {result}"
    except Exception as e:
        return f"Error: {str(e)}"


@tool
def translate_to_arabic(text: str) -> str:
    """Returns a placeholder Arabic translation of the input text."""
    return f"Arabic translation of '{text}'"


@tool
def translate_to_english(text: str) -> str:
    """Returns a placeholder English translation of the input text."""
    return f"English translation of '{text}'"


@tool
def summarize_text(text: str) -> str:
    """Returns a summary (first 100 characters) of the input text."""
    return f"Summary: {text[:100]}..."


@tool
def analyze_sentiment(text: str) -> str:
    """Analyzes the sentiment of the text and returns Positive, Negative, or Neutral."""
    if any(word in text.lower() for word in ["good", "great", "excellent", "happy"]):
        return "Sentiment: Positive"
    elif any(word in text.lower() for word in ["bad", "terrible", "sad", "hate"]):
        return "Sentiment: Negative"
    return "Sentiment: Neutral"


@tool
def speech_to_text_stub(audio: str) -> str:
    """A placeholder tool to convert audio input to text."""
    return "Converted audio to text: (This is a placeholder result)"
# -------------------------
# System Prompt
# -------------------------
system_prompt = """
You are DeepSeek, a thoughtful and curious AI assistant. You analyze before answering.
You always reflect step by step, consider using tools intelligently, and aim for precision and clarity.
Behaviors:
- Think deeply about the user's question.
- Decide if you need tools to calculate, search, translate, or analyze.
- If no tool is needed, answer directly with your own knowledge.
Respond in a helpful, concise, and accurate way.
"""
sys_msg = SystemMessage(content=system_prompt)
# -------------------------
# Build LangGraph Agent
# -------------------------
def build_deepseek_graph():
    llm = ChatGroq(model="deepseek-r1-distill-llama-70b", groq_api_key=groq_api_key)

    all_tools = [
        add, subtract, multiply, divide, modulus,
        translate_to_arabic, translate_to_english,
        summarize_text, analyze_sentiment,
        python_eval, speech_to_text_stub
    ]
    llm_with_tools = llm.bind_tools(all_tools)

    def assistant(state: MessagesState):
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(all_tools))
    builder.add_edge(START, "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    ninu = builder.compile()
    return ninu
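

# --- Illustrative usage (a minimal sketch, not part of the original Space) ---
# Shows one way to run the compiled graph locally; assumes GROQ_API_KEY is set
# in the environment. The question below is an arbitrary example, and the
# system message is passed in explicitly since the graph itself does not
# prepend it.
if __name__ == "__main__":
    agent = build_deepseek_graph()
    result = agent.invoke(
        {"messages": [sys_msg, HumanMessage(content="What is 12.5 multiplied by 8?")]}
    )
    for message in result["messages"]:
        message.pretty_print()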