Update app.py
app.py CHANGED
@@ -1,10 +1,9 @@
-from langchain_huggingface import HuggingFacePipeline as HF, ChatHuggingFace as Ch
 from subprocess import Popen, PIPE as P
 from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
 from langchain.agents import load_tools, create_structured_chat_agent as Agent,AgentExecutor as Ex, AgentType as Type
 from langchain.agents.agent_toolkits import create_retriever_tool as crt
 from langchain_community.agent_toolkits import FileManagementToolkit as FMT
-from langchain.tools import Tool
+from langchain.tools import Tool,YouTubeSearchTool as YTS
 from langchain.memory import ConversationBufferMemory as MEM,RedisChatMessageHistory as HIS
 from langchain.schema import SystemMessage as SM,HumanMessage as HM, AIMessage as AM
 from langchain import hub
@@ -484,7 +483,7 @@ import torch
 #m=M.from_pretrained("peterpeter8585/syai4.3")
 #t=T.from_pretrained("peterpeter8585/syai4.3")
 #pipe=pipeline(model=m,tokenizer=t,task="text-generation")
-
+llm=HuggingFacePipeline.from_model_id(model_id="peterpeter8585/deepseek_1",task="text-generation")
 from langchain.retrievers import WikipediaRetriever as Wiki
 import gradio as gr
 chatbot = gr.Chatbot(
@@ -497,6 +496,7 @@ def terminal(c):
     return a.stdout.read()+a.stderr.read()
 tools=FMT().get_tools()
 tools.append(PYT())
+tools.append(YTS())
 tools.extend(load_tools(["requests_all"],allow_dangerous_tools=True))
 tools.extend(load_tools(["llm-math","ddg-search"],llm=llm))
 tools.append(Tool.from_function(func=terminal,name="terminal",description="터미널 명령어 실행에 적합함"))
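
The new line 486 calls HuggingFacePipeline.from_model_id even though this commit removes the only HuggingFacePipeline import shown (the aliased "as HF" one), so the un-aliased class presumably comes from an import elsewhere in app.py. A minimal sketch of how the changed lines fit together under that assumption; the terminal body is reconstructed from the hunk header and its single context line, and everything else mirrors the hunks above:

# Sketch (assumption): the changed lines in context, assuming HuggingFacePipeline
# is imported un-aliased elsewhere in app.py.
from subprocess import Popen, PIPE as P
from langchain_huggingface import HuggingFacePipeline   # assumed import, not shown in this diff
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
from langchain_community.agent_toolkits import FileManagementToolkit as FMT
from langchain.agents import load_tools
from langchain.tools import Tool, YouTubeSearchTool as YTS

# New in this commit: a local HF text-generation pipeline used as the agent LLM.
llm = HuggingFacePipeline.from_model_id(
    model_id="peterpeter8585/deepseek_1",
    task="text-generation",
)

def terminal(c):
    # Run a shell command and return combined stdout + stderr,
    # matching the "return a.stdout.read()+a.stderr.read()" context line.
    a = Popen(c, shell=True, stdout=P, stderr=P)
    return a.stdout.read() + a.stderr.read()

tools = FMT().get_tools()               # file-management tools
tools.append(PYT())                     # Python REPL
tools.append(YTS())                     # new in this commit: YouTube search
tools.extend(load_tools(["requests_all"], allow_dangerous_tools=True))
tools.extend(load_tools(["llm-math", "ddg-search"], llm=llm))
tools.append(Tool.from_function(
    func=terminal,
    name="terminal",
    description="터미널 명령어 실행에 적합함",   # "suited to running terminal commands"
))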
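
The imports kept at the top of the file (create_structured_chat_agent as Agent, AgentExecutor as Ex, hub, ConversationBufferMemory as MEM) suggest how llm and tools are consumed, but that code sits outside the shown hunks. A hedged sketch of the usual structured-chat wiring for those imports, continuing from the sketch above; the hub prompt name and the executor options are assumptions, not part of this commit:

from langchain import hub
from langchain.agents import create_structured_chat_agent as Agent, AgentExecutor as Ex
from langchain.memory import ConversationBufferMemory as MEM

# `llm` and `tools` are the objects assembled in the sketch above.
prompt = hub.pull("hwchase17/structured-chat-agent")   # assumed prompt; app.py may pull a different one

agent = Agent(llm=llm, tools=tools, prompt=prompt)
executor = Ex(
    agent=agent,
    tools=tools,
    memory=MEM(memory_key="chat_history", return_messages=True),
    handle_parsing_errors=True,   # small text-generation models often emit malformed tool JSON
    verbose=True,
)

# Example call:
# executor.invoke({"input": "List the files in the current directory."})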