File size: 6,241 Bytes
a255fdc
c43e925
a255fdc
1f51e41
a255fdc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c43e925
 
 
 
 
 
 
 
bb86c51
 
 
 
c43e925
 
 
 
 
 
 
 
 
 
 
 
 
bb86c51
c43e925
 
 
 
 
 
 
 
 
bb86c51
 
c43e925
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb86c51
 
 
 
 
 
 
 
ce0495a
e22e4cc
a59e149
5d2f200
320044c
0f8469c
 
 
 
 
 
 
e59763c
2e22219
 
e59763c
c41696c
 
bb86c51
7a308f8
1279ad7
bb86c51
6b3bddd
658534a
 
 
 
 
 
 
c41696c
658534a
 
 
 
 
 
 
 
 
6b3bddd
658534a
6b3bddd
 
658534a
6b3bddd
658534a
 
fbc8d88
 
 
320044c
fbc8d88
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import gradio as gr
import os
from transformers import pipeline
# App title shown by the Gradio interface (emoji are intentional branding).
title = "❤️🧠MindfulStory📖💾MemoryMaker"
# Example prompts offered in the UI; each inner list is one Gradio example row
# (single-input interfaces take one-element lists).
examples = [
    ["Music and art make me feel"],
    ["Feel better each day when you awake by"],
    ["Feel better physically by"],
    ["Practicing mindfulness each day"],
    ["Be happier by"],
    ["Meditation can improve health"],
    ["Spending time outdoors"],
    ["Stress is relieved by quieting your mind, getting exercise and time with nature"],
    ["Break the cycle of stress and anxiety"],
    ["Feel calm in stressful situations"],
    ["Deal with work pressure"],
    ["Learn to reduce feelings of overwhelmed"]
]
from gradio import inputs
from gradio.inputs import Textbox
from gradio import outputs

# PersistDataset -----
import os
import csv
import gradio as gr
from gradio import inputs, outputs
import huggingface_hub
from huggingface_hub import Repository, hf_hub_download, upload_file
from datetime import datetime
# created new dataset as awacke1/MindfulStory.csv
# Hugging Face dataset used as persistent storage for generated rows.
DATASET_REPO_URL = "https://huggingface.co/datasets/awacke1/MindfulStory.csv"
DATASET_REPO_ID = "awacke1/MindfulStory.csv"
DATA_FILENAME = "MindfulStory.csv"
# Local path inside the cloned repo directory ("data/") where rows are appended.
DATA_FILE = os.path.join("data", DATA_FILENAME)
# Auth token for pushing commits to the dataset repo; None when the env var is unset.
HF_TOKEN = os.environ.get("HF_TOKEN")

# JS snippet that auto-clicks the submit button once per page load.
# NOTE(review): SCRIPT is not referenced anywhere in the visible code — presumably
# intended to be injected into the page; verify whether it is still needed.
SCRIPT = """
<script>
if (!window.hasBeenRun) {
    window.hasBeenRun = true;
    console.log("should only happen once");
    document.querySelector("button.submit").click();
}
</script>
"""

# Download the CSV from the dataset repo so a local copy exists.
# BUG FIX: the original passed cache_dir=DATA_DIRNAME, a name that is never
# defined, so every run raised NameError — which the bare `except` silently
# swallowed and the download never happened. Use the same "data" directory
# that DATA_FILE and the Repository clone below use.
try:
    hf_hub_download(
        repo_id=DATASET_REPO_ID,
        filename=DATA_FILENAME,
        cache_dir="data",
        force_filename=DATA_FILENAME,
    )
except Exception as err:  # narrowed from bare except: (don't swallow SystemExit/KeyboardInterrupt)
    print(f"file not found: {err}")

# Clone the dataset repo into ./data so rows can be appended and pushed back.
repo = Repository(
    local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
)

def generate_html() -> str:
    """Render the persisted CSV rows as a chat-style HTML fragment, newest first.

    Reads DATA_FILE and expects each row to carry 'inputs' and 'outputs'
    columns. Returns the placeholder string "no messages yet" when the file
    holds no rows.
    NOTE(review): AIMemory writes columns name/message/time, not
    inputs/outputs — confirm which schema the dataset actually uses.
    """
    with open(DATA_FILE) as csvfile:
        rows = list(csv.DictReader(csvfile))
    if not rows:
        return "no messages yet"
    # Build the fragment with join instead of repeated string concatenation.
    parts = ["<div class='chatbot'>"]
    for row in reversed(rows):  # newest entries first
        parts.append("<div>")
        parts.append(f"<span>{row['inputs']}</span>")
        parts.append(f"<span class='outputs'>{row['outputs']}</span>")
        parts.append("</div>")
    parts.append("</div>")
    return "".join(parts)

    #store_message(message, response) # Save to dataset
    
# --- Dead experiments (commented out): alternative model loads, Parallel
# --- wiring, and persist_memory hookups that were never finished. Only the
# --- two uncommented lines below are live.
#generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
#generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
#generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)

#greeter_1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 1"))
#greeter_2 = gr.Interface(lambda name: f"Greetings {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 2"))
#demo = gr.Parallel(greeter_1, greeter_2)

#generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="GPT2-Large")).load("huggingface/gpt2-large", api_key=HF_TOKEN)


#tbOutput = gr.Textbox(label="GPT Output")
#generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=[tbOutput]).load("huggingface/gpt2-large", api_key=HF_TOKEN)
#generator1 = generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
#generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
#generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)

#model_1_iface = gr.Interface(    fn=your_function_1,    inputs=gr.inputs.Textbox(),    outputs=gr.outputs.Label(num_top_classes=10))
#model_2_iface = gr.Interface(    fn= your_function_2,    inputs=gr.inputs.Textbox(),    outputs=gr.outputs.Label(num_top_classes=10),)

#generator1 = gr.Interface(fn=persist_memory,inputs=gr.inputs.Textbox(),outputs=gr.outputs.Label(num_top_classes=10) ).load("huggingface/gpt2-large",api_key=HF_TOKEN)
#OutputsGen=gr.outputs.Label(num_top_classes=10) 
#generator1 = gr.Interface(fn=persist_memory,inputs=[OutputsGen, OutputsGen],outputs=OutputsGen).load("huggingface/gpt2-large",api_key=HF_TOKEN)
# Live: load GPT-2 Large as a Gradio interface (performs a network call at
# import time; requires HF_TOKEN for the api_key).
generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
#generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
#generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)

#MemoryChange=tbOutput.change(persist_memory,inputs=[tbOutput],outputs=gr.Textbox(label="PersistMemoryOutput"))
# Live: shared multi-line input box. NOTE(review): gr.inputs is the legacy
# pre-3.x Gradio namespace; this box is only referenced by commented-out code.
SplitterInputBox = gr.inputs.Textbox(lines=5, label="Enter a sentence to get another sentence.")

def AIMemory(name: str, message: str) -> dict:
    """Persist one (name, message, time) row to the dataset CSV and push it.

    Skips persistence entirely when either field is empty/falsy; always
    returns the record dict so the UI can display it.

    Args:
        name: first text field to store.
        message: second text field to store.

    Returns:
        The record as ``{"name": ..., "message": ..., "time": ...}``.
    """
    # Compute the timestamp once so the value written to the CSV and the one
    # returned to the caller are identical (the original called datetime.now()
    # twice and could return a different time than it stored).
    now = str(datetime.now())
    if name and message:
        # newline="" per the csv module docs — avoids blank lines on Windows.
        with open(DATA_FILE, "a", newline="") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
            writer.writerow({"name": name, "message": message, "time": now})
        repo.push_to_hub()  # the original bound the result to an unused commit_url
    return {"name": name, "message": message, "time": now}
    
# UI: two input boxes, three output boxes, and a Run button that persists the
# input via AIMemory.
with gr.Blocks() as Memory:
    gr.Markdown("Mindfulness Story Generation with Persistent Dataset Memory")
    with gr.Row():
        inp = gr.Textbox(placeholder="What text would you like to extend with generation?")
        inp2 = gr.Textbox(placeholder="What text would you like to extend with generation?")
        out = gr.Textbox()
        out2 = gr.Textbox()
        out3 = gr.Textbox()
    btn = gr.Button("Run")

    def _run_memory(name, message):
        # AIMemory returns one dict, but the click handler is wired to three
        # output boxes — unpack the record so the output arity matches.
        rec = AIMemory(name, message)
        return rec["name"], rec["message"], rec["time"]

    # BUG FIX: the original passed inputs=[inp, inp] (inp2 created but never
    # used — a clear typo) and fed AIMemory's single dict to three outputs.
    btn.click(fn=_run_memory, inputs=[inp, inp2], outputs=[out, out2, out3])

Memory.launch()

# Stray widgets created outside any Blocks/Interface context; kept so the
# commented-out save wiring below still refers to existing names.
tbMemoryOutput = gr.Textbox(label="Memory Output")
btnSave = gr.Button("Save")
#btnSave.click(fn=persist_memory, inputs=[SplitterInputBox, tbOutput], outputs=tbMemoryOutput)

# BUG FIX: the original ended with parallelModel.launch(share=False), but
# parallelModel is only defined in commented-out code, so that line always
# raised NameError once Memory.launch() returned. Removed.