import gradio as gr
import os
from transformers import pipeline
title = "❤️🧠MindfulStory📖💾MemoryMaker"
examples = [
    ["Music and art make me feel"],
    ["Feel better each day when you awake by"],
    ["Feel better physically by"],
    ["Practicing mindfulness each day"],
    ["Be happier by"],
    ["Meditation can improve health"],
    ["Spending time outdoors"],
    ["Stress is relieved by quieting your mind, getting exercise and time with nature"],
    ["Break the cycle of stress and anxiety"],
    ["Feel calm in stressful situations"],
    ["Deal with work pressure"],
    ["Learn to reduce feelings of overwhelmed"]
]
from gradio import inputs, outputs  # legacy Gradio input/output component namespaces

# PersistDataset -----
import csv
from huggingface_hub import Repository, hf_hub_download, upload_file
from datetime import datetime
# created new dataset as awacke1/MindfulStory.csv
DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/MindfulStory.csv"
DATASET_REPO_ID = "awacke1/MindfulStory.csv"
DATA_FILENAME = "MindfulStory.csv"
DATA_DIRNAME = "data"  # local clone directory; referenced below but previously never defined
DATA_FILE = os.path.join(DATA_DIRNAME, DATA_FILENAME)
HF_TOKEN = os.environ.get("HF_TOKEN")

SCRIPT = """
<script>
if (!window.hasBeenRun) {
    window.hasBeenRun = true;
    console.log("should only happen once");
    document.querySelector("button.submit").click();
}
</script>
"""

# Download the dataset file from the Hub (ignore failure on first run when it does not exist yet)
try:
    hf_hub_download(
        repo_id=DATASET_REPO_ID,
        repo_type="dataset",
        filename=DATA_FILENAME,
        cache_dir=DATA_DIRNAME,
        force_filename=DATA_FILENAME
    )
except Exception:
    print("file not found")

# Set up cloned dataset from repo for operations
repo = Repository(
    local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
)

def generate_html() -> str:
    # Render saved memory rows (newest first) as simple chat-style HTML.
    # Field names follow the writer in persist_memory below.
    with open(DATA_FILE) as csvfile:
        reader = csv.DictReader(csvfile)
        rows = list(reader)
    rows.reverse()
    if len(rows) == 0:
        return "no messages yet"
    html = "<div class='chatbot'>"
    for row in rows:
        html += "<div>"
        html += f"<span>{row['name']}</span>"
        html += f"<span class='message'>{row['message']}</span>"
        html += "</div>"
    html += "</div>"
    return html
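# Illustrative only (not wired into this app): the saved history could be shown with a
# minimal HTML interface, e.g. gr.Interface(fn=generate_html, inputs=[], outputs="html").launch()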
            
def persist_memory(name: str, message: str):
    # Append one row to the local CSV clone and push the commit back to the dataset repo.
    if name and message:
        with open(DATA_FILE, "a") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
            writer.writerow(
                {"name": name, "message": message, "time": str(datetime.now())}
            )
        commit_url = repo.push_to_hub()
    return {"name": name, "message": message, "time": str(datetime.now())}
    
# Standalone memory-saving interface (defined here but never launched; the Parallel demo below is launched instead)
iface = gr.Interface(
    persist_memory,
    [
        inputs.Textbox(placeholder="Your name"),
        inputs.Textbox(placeholder="Your message", lines=2),
    ],
    "html",
    css="""
    .message {background-color:cornflowerblue;color:white; padding:4px;margin:4px;border-radius:4px; }
    """,
)

    #store_message(message, response) # Save to dataset
    
#generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
#generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
#generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)

#greeter_1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 1"))
#greeter_2 = gr.Interface(lambda name: f"Greetings {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 2"))
#demo = gr.Parallel(greeter_1, greeter_2)

#generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="GPT2-Large")).load("huggingface/gpt2-large", api_key=HF_TOKEN)


# Load three text-generation models from the Hub; gr.Interface.load is a class method,
# so the placeholder lambda Interface on the first line is effectively discarded.
tbOutput = gr.Textbox(label="GPT Output")
generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=[tbOutput]).load("huggingface/gpt2-large", api_key=HF_TOKEN)
generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)

#MemoryChange=tbOutput.change(persist_memory,inputs=[tbOutput],outputs=gr.Textbox(label="PersistMemoryOutput"))

parallelModel = gr.Parallel(
    generator1,
    generator2,
    generator3,
    inputs=gr.inputs.Textbox(lines=5, label="Enter a sentence to get another sentence."),
    examples=examples,
    title="Mindfulness Story Generation with Persistent Dataset Memory",
    description="Mindfulness Story Generation with Persistent Dataset Memory",
    article=f"Memory Dataset URL: [{DATASET_REPO_URL}]({DATASET_REPO_URL})"
)

# Note: these components and the click event are created outside any gr.Blocks context, so they
# are not rendered with the Parallel demo (persist_memory also expects two inputs, not one);
# see the commented sketch after launch for one way to hook the save step up.
tbMemoryOutput = gr.Textbox(label="Memory Output")
btnSave = gr.Button("Save")
btnSave.click(fn=persist_memory, inputs=tbOutput, outputs=tbMemoryOutput)
            
parallelModel.launch(share=False)
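
# A minimal sketch (commented out; assumes a Gradio version with Blocks support) of how the save
# step could be wired so a button actually renders and calls persist_memory; the component names
# below are illustrative and not part of the original app.
# with gr.Blocks() as memory_demo:
#     tb_name = gr.Textbox(label="Your name")
#     tb_message = gr.Textbox(label="Your message", lines=2)
#     tb_saved = gr.Textbox(label="Memory Output")
#     gr.Button("Save").click(fn=persist_memory, inputs=[tb_name, tb_message], outputs=[tb_saved])
# memory_demo.launch()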