Upload folder using huggingface_hub
- README.md +1 -7
- app.py +75 -0
- requirements.txt +2 -0
README.md
CHANGED
@@ -1,12 +1,6 @@
 ---
 title: Testing
-
-colorFrom: indigo
-colorTo: pink
+app_file: app.py
 sdk: gradio
 sdk_version: 4.19.2
-app_file: app.py
-pinned: false
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py
ADDED
@@ -0,0 +1,75 @@
+from langchain.tools import AIPluginTool
+from langchain.utilities import WikipediaAPIWrapper
+from langchain.schema import (
+    AIMessage,
+    HumanMessage,
+    SystemMessage
+)
+from langchain.tools import MoveFileTool, format_tool_to_openai_function
+from langchain.tools import BaseTool, StructuredTool, Tool, tool
+from langchain.chat_models import ChatOpenAI
+from langchain.agents import AgentType, initialize_agent, load_tools
+from langchain import LLMMathChain, SerpAPIWrapper
+import gradio as gr
+import os
+import openai
+import gradio as gr
+from gradio import ChatInterface
+import time
+
+# Get the value of the openai_api_key from environment variable
+openai.api_key = os.getenv("OPENAI_API_KEY")
+
+# Import things that are needed generically from langchain
+
+
+def predict(inputs, chatbot):
+
+    messages = []
+    for conv in chatbot:
+        user = conv[0]
+        messages.append({"role": "user", "content": user})
+        assistant = conv[1]
+        messages.append({"role": "assistant", "content": assistant})
+    messages.append({"role": "user", "content": inputs})
+
+    # a ChatCompletion request
+    client = openai.OpenAI(
+        base_url="https://anybody-statewide-morrison-sizes.trycloudflare.com/", api_key="not-needed")
+    completion = client.chat.completions.create(
+        model="ttt",  # this field is currently unused
+        messages=messages,
+        temperature=0.7,
+        stream=True,
+    )
+    new_message = {"role": "assistant", "content": ""}
+
+    for chunk in completion:
+        if chunk.choices[0].delta.content:
+            print(chunk.choices[0].delta.content, end="", flush=True)
+            new_message["content"] += chunk.choices[0].delta.content
+            yield new_message
+
+    messages.append(new_message)
+
+
+interface = gr.ChatInterface(predict)
+with gr.Blocks() as demo:
+    gr.Markdown("""
+    # GPT 3.5 Discord Bot powered by gradio!
+    To use this space as a discord bot, first install the gradio_client
+
+    ```bash
+    pip install gradio_client
+    ```
+
+    Then run the following command
+
+    ```python
+    client = grc.Client.duplicate("gradio-discord-bots/gpt-35-turbo", private=False, secrets={"OPENAI_API_KEY": "<your-key-here>"}, sleep_timeout=2880)
+    client.deploy_discord(api_names=["chat"])
+    """)
+    with gr.Row(visible=False):
+        interface.render()
+
+demo.queue(concurrency_count=100).launch()
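The streaming loop inside `predict` is plain OpenAI-compatible client code, so it can be exercised outside Gradio. Below is a minimal sketch, assuming a generic OpenAI-compatible server at a placeholder `base_url`; the trycloudflare URL hard-coded in app.py is a short-lived tunnel and is not expected to resolve for other users.

```python
# Minimal sketch (not part of the commit): stream a reply from an
# OpenAI-compatible server the same way predict() does.
# The base_url is a placeholder assumption, e.g. a local llama.cpp or vLLM server.
import openai

client = openai.OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed")

completion = client.chat.completions.create(
    model="ttt",          # ignored by most OpenAI-compatible local servers
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    temperature=0.7,
    stream=True,
)

reply = ""
for chunk in completion:
    delta = chunk.choices[0].delta.content
    if delta:             # role-only / empty chunks carry no text
        reply += delta
print(reply)
```

The guard on `chunk.choices[0].delta.content` matters because the first and last chunks of a stream typically carry no text, which is why both the Space code and this sketch check it before appending.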
requirements.txt
ADDED
@@ -0,0 +1,2 @@
+openai
+langchain
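requirements.txt pins only openai and langchain; gradio itself comes from the Space runtime (`sdk: gradio` in README.md), and the Discord instructions rendered by app.py expect gradio_client on the caller's side. A hypothetical usage sketch for querying this Space's chat endpoint with gradio_client follows; the Space id "user/testing" is a stand-in, since the commit does not name the deployed Space.

```python
# Hypothetical sketch: query the gr.ChatInterface endpoint of this Space with
# gradio_client. "user/testing" is a placeholder Space id, not from the commit.
from gradio_client import Client

client = Client("user/testing")
reply = client.predict(
    "Hello there!",       # the user message passed through to predict()
    api_name="/chat",     # gr.ChatInterface exposes its chat function at /chat
)
print(reply)
```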