Update app.py
app.py CHANGED
@@ -28,9 +28,10 @@ from gradio import inputs, outputs
 import huggingface_hub
 from huggingface_hub import Repository, hf_hub_download, upload_file
 from datetime import datetime
-
-
-
+# created new dataset as awacke1/MindfulStory.csv
+DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/MindfulStory.csv"
+DATASET_REPO_ID = "awacke1/MindfulStory.csv"
+DATA_FILENAME = "MindfulStory.csv"
 DATA_FILE = os.path.join("data", DATA_FILENAME)
 HF_TOKEN = os.environ.get("HF_TOKEN")

@@ -44,6 +45,7 @@ if (!window.hasBeenRun) {
 </script>
 """

+# Download dataset repo using hub download
 try:
     hf_hub_download(
         repo_id=DATASET_REPO_ID,
@@ -53,6 +55,8 @@ try:
     )
 except:
     print("file not found")
+
+# Set up cloned dataset from repo for operations
 repo = Repository(
     local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
 )
@@ -76,7 +80,7 @@ def generate_html() -> str:
     html += "</div>"
     return html

-def store_message(name: str, message: str):
+def persist_memory(name: str, message: str):
     if name and message:
         with open(DATA_FILE, "a") as csvfile:
             writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
@@ -87,7 +91,7 @@ def store_message(name: str, message: str):
     return ""

 iface = gr.Interface(
-    store_message,
+    persist_memory,
     [
         inputs.Textbox(placeholder="Your name"),
         inputs.Textbox(placeholder="Your message", lines=2),
@@ -100,9 +104,19 @@ iface = gr.Interface(

 #store_message(message, response) # Save to dataset

+#generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
+#generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
+#generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
+
+#greeter_1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 1"))
+#greeter_2 = gr.Interface(lambda name: f"Greetings {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 2"))
+#demo = gr.Parallel(greeter_1, greeter_2)
+
+generator1 = gr.Interface(lambda name: f"Hello {name}!", outputs=gr.Textbox(label="GPT2-Large")).load("huggingface/gpt2-large", api_key=HF_TOKEN)
 generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
 generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
-
+
+
 gr.Parallel(generator1,
             generator2,
             generator3,
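
For context, the persistence flow this commit is assembling looks roughly like the sketch below: clone the dataset repo into ./data, append a (name, message, time) row to MindfulStory.csv, and push the clone back to the Hub. This is a minimal sketch under assumptions, not the Space's exact code: the repo.push_to_hub() call and its commit message are assumed, since the diff only shows the local CSV write; the imported upload_file helper would be an alternative upload path.

import csv
import os
from datetime import datetime
from huggingface_hub import Repository

DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/MindfulStory.csv"
DATA_FILE = os.path.join("data", "MindfulStory.csv")
HF_TOKEN = os.environ.get("HF_TOKEN")

# Clone (or reuse) the dataset repo locally, mirroring the Repository(...) call in the diff.
repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN)

def persist_memory(name: str, message: str) -> str:
    # Append one row; fieldnames match the DictWriter call in the diff.
    if name and message:
        with open(DATA_FILE, "a") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
            writer.writerow({"name": name, "message": message, "time": str(datetime.now())})
        # Assumed step: push the updated clone back to the dataset repo.
        # The diff does not show this part of the flow.
        repo.push_to_hub(commit_message="Append a new MindfulStory row")
    return ""

Calling persist_memory("Ada", "Hello") would then append one row and create one commit on the dataset repo, assuming HF_TOKEN has write access to awacke1/MindfulStory.csv.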