awacke1 committed on
Commit
756935c
·
1 Parent(s): ffb5d20

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -67
app.py CHANGED
@@ -61,55 +61,10 @@ repo = Repository(
61
  local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
62
  )
63
 
64
def generate_html(data_file=None) -> str:
    """Render the stored chat rows as an HTML transcript, newest first.

    Parameters
    ----------
    data_file : str | None
        Path of the CSV log to read. Defaults to the module-level
        ``DATA_FILE`` so existing zero-argument callers are unchanged.

    Returns
    -------
    str
        An HTML ``<div class='chatbot'>`` transcript, or the literal
        string ``"no messages yet"`` when the CSV holds no data rows.
    """
    import html  # local import: escaping is only needed here

    if data_file is None:
        data_file = DATA_FILE
    with open(data_file) as csvfile:
        rows = list(csv.DictReader(csvfile))
    if not rows:
        return "no messages yet"
    rows.reverse()  # newest message first
    # Build with a list + join (linear) instead of repeated += (quadratic),
    # and escape stored text so it cannot inject markup into the page.
    parts = ["<div class='chatbot'>"]
    for row in rows:
        parts.append(
            "<div>"
            f"<span>{html.escape(row['inputs'])}</span>"
            f"<span class='outputs'>{html.escape(row['outputs'])}</span>"
            "</div>"
        )
    parts.append("</div>")
    return "".join(parts)
82
-
83
- #store_message(message, response) # Save to dataset
84
-
85
- #generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
86
- #generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
87
- #generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
88
-
89
- #greeter_1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 1"))
90
- #greeter_2 = gr.Interface(lambda name: f"Greetings {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 2"))
91
- #demo = gr.Parallel(greeter_1, greeter_2)
92
-
93
- #generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="GPT2-Large")).load("huggingface/gpt2-large", api_key=HF_TOKEN)
94
-
95
-
96
- #tbOutput = gr.Textbox(label="GPT Output")
97
- #generator1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=[tbOutput]).load("huggingface/gpt2-large", api_key=HF_TOKEN)
98
- #generator1 = generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
99
- #generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
100
- #generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
101
-
102
- #model_1_iface = gr.Interface( fn=your_function_1, inputs=gr.inputs.Textbox(), outputs=gr.outputs.Label(num_top_classes=10))
103
- #model_2_iface = gr.Interface( fn= your_function_2, inputs=gr.inputs.Textbox(), outputs=gr.outputs.Label(num_top_classes=10),)
104
-
105
- #generator1 = gr.Interface(fn=persist_memory,inputs=gr.inputs.Textbox(),outputs=gr.outputs.Label(num_top_classes=10) ).load("huggingface/gpt2-large",api_key=HF_TOKEN)
106
- #OutputsGen=gr.outputs.Label(num_top_classes=10)
107
- #generator1 = gr.Interface(fn=persist_memory,inputs=[OutputsGen, OutputsGen],outputs=OutputsGen).load("huggingface/gpt2-large",api_key=HF_TOKEN)
108
  generator1 = gr.Interface.load("huggingface/gpt2-large", api_key=HF_TOKEN)
109
- #generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B", api_key=HF_TOKEN)
110
- #generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=HF_TOKEN)
111
 
112
- #MemoryChange=tbOutput.change(persist_memory,inputs=[tbOutput],outputs=gr.Textbox(label="PersistMemoryOutput"))
113
  SplitterInputBox = gr.inputs.Textbox(lines=5, label="Enter a sentence to get another sentence.")
114
 
115
  def AIMemory(name: str, message: str):
@@ -120,27 +75,8 @@ def AIMemory(name: str, message: str):
120
  commit_url = repo.push_to_hub()
121
  return {"name": name, "message": message, "time": str(datetime.now())}
122
 
123
- #with gr.Blocks() as Memory:
124
-
125
  parallelModel = gr.Parallel(generator1, generator2, generator3, inputs = SplitterInputBox, examples=examples,
126
  title="Mindfulness Story Generation with Persistent Dataset Memory",
127
  description=f"Mindfulness Story Generation with Persistent Dataset Memory",
128
  article=f"Memory Dataset URL: [{DATASET_REPO_URL}]({DATASET_REPO_URL})" )
129
- #
130
- # gr.Markdown("Mindfulness Story Generation with Persistent Dataset Memory")
131
- # with gr.Row():
132
- # inp = gr.Textbox(placeholder="What text would you like to extend with generation?")
133
- # inp2 = gr.Textbox(placeholder="What text would you like to extend with generation?")
134
- # out = gr.Textbox()
135
- # out2 = gr.Textbox()
136
- # out3 = gr.Textbox()
137
- # btn = gr.Button("Run")
138
- # btn.click(fn=AIMemory, inputs=[inp,inp], outputs=[out,out2,out3])
139
-
140
- parallelModel.launch()
141
-
142
- #tbMemoryOutput = gr.Textbox(label="Memory Output")
143
- #btnSave = gr.Button("Save")
144
- #btnSave.click(fn=persist_memory, inputs=[SplitterInputBox, tbOutput], outputs=tbMemoryOutput)
145
-
146
- #parallelModel.launch(share=False)
 
61
  local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
62
  )
63
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
# Three hosted text-generation backends, all loaded through the Gradio
# Hugging Face hub loader with the same auth token.
_MODEL_IDS = ("gpt2-large", "EleutherAI/gpt-neo-2.7B", "EleutherAI/gpt-j-6B")
generator1, generator2, generator3 = (
    gr.Interface.load(f"huggingface/{model_id}", api_key=HF_TOKEN)
    for model_id in _MODEL_IDS
)

# Shared multi-line prompt box fed to all three generators.
SplitterInputBox = gr.inputs.Textbox(lines=5, label="Enter a sentence to get another sentence.")
69
 
70
  def AIMemory(name: str, message: str):
 
75
  commit_url = repo.push_to_hub()
76
  return {"name": name, "message": message, "time": str(datetime.now())}
77
 
 
 
78
# Fan the same prompt out to all three generators side-by-side and serve it.
_APP_TITLE = "Mindfulness Story Generation with Persistent Dataset Memory"
parallelModel = gr.Parallel(
    generator1, generator2, generator3,
    inputs=SplitterInputBox,
    examples=examples,
    title=_APP_TITLE,
    description=_APP_TITLE,  # same text deliberately reused as the subtitle
    article=f"Memory Dataset URL: [{DATASET_REPO_URL}]({DATASET_REPO_URL})",
)
parallelModel.launch(share=False)  # no public share link; serve locally/in-Space