chansung committed
Commit 5f0f907 · 1 Parent(s): 6101050

upload v1687590401 model

Files changed (3):
  1. .ipynb_checkpoints/app-checkpoint.py +1 -1
  2. app.py +12 -377
  3. requirements.txt +1 -2
.ipynb_checkpoints/app-checkpoint.py CHANGED
@@ -8,7 +8,7 @@ from huggingface_hub import Repository
 
 local_path = "hf_model"
 
-model_version = "v1687507574"
+model_version = "v1687590401"
 model_repo_id = "chansung/kerasnlp-gpt2-alpaca-pipeline"
 model_repo_url = f"https://huggingface.co/{model_repo_id}"
 
app.py CHANGED
@@ -1,21 +1,14 @@
 from typing import Text, Any, Dict, Optional
 
-import json
-import copy
-
+import gradio as gr
 import tensorflow as tf
 import tensorflow_text
 from tensorflow.python.saved_model import tag_constants
 from huggingface_hub import Repository
 
-import gradio as gr
-from pingpong import PingPong
-from pingpong.gradio import GradioAlpacaChatPPManager
-from pingpong.context import CtxLastWindowStrategy
-
 local_path = "hf_model"
 
-model_version = "v1687507574"
+model_version = "v1687590401"
 model_repo_id = "chansung/kerasnlp-gpt2-alpaca-pipeline"
 model_repo_url = f"https://huggingface.co/{model_repo_id}"
 
@@ -30,382 +23,24 @@ _ = _clone_and_checkout(model_repo_url, local_path, model_version)
 model = tf.saved_model.load(local_path, tags=[tag_constants.SERVING])
 gpt_lm_predict_fn = model.signatures["serving_default"]
 
-STYLE = """
-.custom-btn {
-    border: none !important;
-    background: none !important;
-    box-shadow: none !important;
-    display: block !important;
-    text-align: left !important;
-}
-.custom-btn:hover {
-    background: rgb(243 244 246) !important;
-}
-
-.custom-btn-highlight {
-    border: none !important;
-    background: rgb(243 244 246) !important;
-    box-shadow: none !important;
-    display: block !important;
-    text-align: left !important;
-}
-
-#prompt-txt > label > span {
-    display: none !important;
-}
-#prompt-txt > label > textarea {
-    border: transparent;
-    box-shadow: none;
-}
-#chatbot {
-    height: 800px;
-    overflow: auto;
-    box-shadow: none !important;
-    border: none !important;
-}
-#chatbot > .wrap {
-    max-height: 780px;
-}
-#chatbot + div {
-    border-radius: 35px !important;
-    width: 80% !important;
-    margin: auto !important;
-}
-
-#left-pane {
-    background-color: #f9fafb;
-    border-radius: 15px;
-    padding: 10px;
-}
-
-#left-top {
-    padding-left: 10px;
-    padding-right: 10px;
-    text-align: center;
-    font-weight: bold;
-    font-size: large;
-}
-
-#chat-history-accordion {
-    background: transparent;
-    border: 0.8px !important;
-}
-
-#right-pane {
-    margin-left: 20px;
-    margin-right: 70px;
-}
-
-#initial-popup {
-    z-index: 100;
-    position: absolute;
-    width: 50%;
-    top: 50%;
-    height: 50%;
-    left: 50%;
-    transform: translate(-50%, -50%);
-    border-radius: 35px;
-    padding: 15px;
-}
-
-#initial-popup-title {
-    text-align: center;
-    font-size: 18px;
-    font-weight: bold;
-}
-
-#initial-popup-left-pane {
-    min-width: 150px !important;
-}
-
-#initial-popup-right-pane {
-    text-align: right;
-}
-
-.example-btn {
-    padding-top: 20px !important;
-    padding-bottom: 20px !important;
-    padding-left: 5px !important;
-    padding-right: 5px !important;
-    background: linear-gradient(to bottom right, #f7faff, #ffffff) !important;
-    box-shadow: none !important;
-    border-radius: 20px !important;
-}
-
-.example-btn:hover {
-    box-shadow: 0.3px 0.3px 0.3px gray !important;
-}
-
-#example-title {
-    margin-bottom: 15px;
-}
-
-#aux-btns-popup {
-    z-index: 200;
-    position: absolute !important;
-    bottom: 75px !important;
-    right: 15px !important;
-}
-
-#aux-btns-popup > div {
-    flex-wrap: nowrap;
-    width: auto;
-    margin: auto;
-}
-
-.aux-btn {
-    height: 30px !important;
-    flex-wrap: initial !important;
-    flex: none !important;
-    min-width: min(100px,100%) !important;
-    font-weight: unset !important;
-    font-size: 10pt !important;
-
-    background: linear-gradient(to bottom right, #f7faff, #ffffff) !important;
-    box-shadow: none !important;
-    border-radius: 20px !important;
-}
-
-.aux-btn:hover {
-    box-shadow: 0.3px 0.3px 0.3px gray !important;
-}
-"""
-
-get_local_storage = """
-function() {
-    globalThis.setStorage = (key, value)=>{
-        localStorage.setItem(key, JSON.stringify(value));
-    }
-    globalThis.getStorage = (key, value)=>{
-        return JSON.parse(localStorage.getItem(key));
-    }
-
-    var local_data = getStorage('local_data');
-    var history = [];
-
-    if(local_data) {
-        local_data[0].pingpongs.forEach(element =>{
-            history.push([element.ping, element.pong]);
-        });
-    }
-    else {
-        local_data = [];
-        for (let step = 0; step < 10; step++) {
-            local_data.push({'ctx': '', 'pingpongs':[]});
-        }
-        setStorage('local_data', local_data);
-    }
-
-    if(history.length == 0) {
-        document.querySelector("#initial-popup").classList.remove('hide');
-    }
-
-    return [history, local_data];
-}
-"""
-
-update_left_btns_state = """
-(v)=>{
-    document.querySelector('.custom-btn-highlight').classList.add('custom-btn');
-    document.querySelector('.custom-btn-highlight').classList.remove('custom-btn-highlight');
-
-    const elements = document.querySelectorAll(".custom-btn");
-
-    for(var i=0; i < elements.length; i++) {
-        const element = elements[i];
-        if(element.textContent == v) {
-            console.log(v);
-            element.classList.add('custom-btn-highlight');
-            element.classList.remove('custom-btn');
-            break;
-        }
-    }
-}"""
-
-channels = [
-    "1st Channel",
-    "2nd Channel",
-    "3rd Channel",
-    "4th Channel",
-    "5th Channel",
-    "6th Channel",
-    "7th Channel",
-    "8th Channel",
-    "9th Channel",
-    "10th Channel"
-]
-channel_btns = []
-
-examples = [
-    "hello world",
-    "what's up?",
-    "this is GradioChat"
-]
-ex_btns = []
-
-def reset_chat(idx, ld):
-    res = [GradioAlpacaChatPPManager.from_json(json.dumps(ppm)) for ppm in ld]
-    res[idx].pingpongs = []
-
-    return (
-        "",
-        [],
-        str(res),
-        gr.update(visible=True),
-        gr.update(interactive=False),
-    )
-
-def build_prompts(ppmanager):
-    dummy_ppm = copy.deepcopy(ppmanager)
-
-    dummy_ppm.ctx = """Below are a series of dialogues between human and an AI assistant.
-The AI tries to answer the given instruction as in response.
-The AI MUST not generate any text containing `### Response` or `### Instruction`.
-The AI MUST be helpful, polite, honest, sophisticated, emotionally aware, and humble-but-knowledgeable.
-The assistant MUST be happy to help with almost anything, and will do its best to understand exactly what is needed.
-It also MUST avoid giving false or misleading information, and it caveats when it isn’t entirely sure about the right answer.
-That said, the assistant is practical and really does its best, and doesn’t let caution get too much in the way of being useful.
-"""
-    return CtxLastWindowStrategy(3)(dummy_ppm)
-
-def add_pingpong(idx, ld, ping):
-    res = [GradioAlpacaChatPPManager.from_json(json.dumps(ppm)) for ppm in ld]
-    ppm = res[idx]
-
-    ppm.add_pingpong(
-        PingPong(ping, "")
-    )
-
-    prompt = tf.constant(build_prompts(ppm))
-    max_length = tf.constant(512, dtype="int64")
-    print(f"Prompt:\n{prompt}")
+def gen_text(prompt, max_length=256):
+    prompt = tf.constant(f"### Instruction:\n{prompt}\n\n### Response:\n")
+    max_length = tf.constant(max_length, dtype="int64")
 
     result = gpt_lm_predict_fn(
         prompt=prompt,
         max_length=max_length,
-    )['result'].numpy().decode('UTF-8')
-    result = result.split("### Response:")[-1].strip()
-
-    ppm.add_pong(result)
-    print(f"res:\n{str(res)}")
-    return "", ppm.build_uis(), str(res)
-
-def channel_num(btn_title):
-    choice = 0
-
-    for idx, channel in enumerate(channels):
-        if channel == btn_title:
-            choice = idx
-
-    return choice
-
-def set_chatbot(btn, ld):
-    choice = channel_num(btn)
-
-    res = [
-        GradioAlpacaChatPPManager.from_json(json.dumps(ppm_str))
-        for ppm_str in ld
-    ]
-    empty = len(res[choice].pingpongs) == 0
-    return (
-        res[choice].build_uis(),
-        choice,
-        gr.update(visible=empty)
     )
 
-def set_example(btn):
-    return btn, gr.update(visible=False)
-
-def set_popup_visibility(ld, example_block):
-    return example_block
-
-with gr.Blocks(css=STYLE, elem_id='container-col') as demo:
-    idx = gr.State(0)
-    local_data = gr.JSON({},visible=False)
-
-    with gr.Row():
-        with gr.Column(scale=1, min_width=180):
-            gr.Markdown("GradioChat", elem_id="left-top")
-
-            with gr.Column(elem_id="left-pane"):
-                with gr.Accordion("Histories", elem_id="chat-history-accordion"):
-                    channel_btns.append(gr.Button(channels[0], elem_classes=["custom-btn-highlight"]))
-
-                    for channel in channels[1:]:
-                        channel_btns.append(gr.Button(channel, elem_classes=["custom-btn"]))
-
-        with gr.Column(scale=8, elem_id="right-pane"):
-            with gr.Column(elem_id="initial-popup", visible=False) as example_block:
-                with gr.Row(scale=1):
-                    with gr.Column(elem_id="initial-popup-left-pane"):
-                        gr.Markdown("GradioChat", elem_id="initial-popup-title")
-                        gr.Markdown("Making the community's best AI chat models available to everyone.")
-                    with gr.Column(elem_id="initial-popup-right-pane"):
-                        gr.Markdown("Chat UI is now open sourced on Hugging Face Hub")
-                        gr.Markdown("check out the [↗ repository](https://huggingface.co/spaces/chansung/test-multi-conv)")
-
-                with gr.Column(scale=1):
-                    gr.Markdown("Examples")
-                    with gr.Row() as text_block:
-                        for example in examples:
-                            ex_btns.append(gr.Button(example, elem_classes=["example-btn"]))
-
-            with gr.Column(elem_id="aux-btns-popup", visible=True):
-                with gr.Row():
-                    # stop = gr.Button("Stop", elem_classes=["aux-btn"])
-                    # regenerate = gr.Button("Regenerate", elem_classes=["aux-btn"])
-                    clean = gr.Button("Clean", elem_classes=["aux-btn"])
+    return result['result'].numpy().decode('UTF-8').split("### Response:")[-1].strip()
 
-            chatbot = gr.Chatbot(elem_id='chatbot')
-            instruction_txtbox = gr.Textbox(
-                placeholder="Ask anything", label="",
-                elem_id="prompt-txt"
-            )
-
-    for btn in channel_btns:
-        btn.click(
-            set_chatbot,
-            [btn, local_data],
-            [chatbot, idx, example_block]
-        ).then(
-            None, btn, None,
-            _js=update_left_btns_state
-        )
-
-    for btn in ex_btns:
-        btn.click(
-            set_example,
-            [btn],
-            [instruction_txtbox, example_block]
-        )
+with gr.Blocks() as demo:
+    instruction = gr.Textbox("Instruction")
+    output = gr.Textbox("Output", lines=5)
 
-    instruction_txtbox.submit(
-        lambda: gr.update(visible=False),
-        None,
-        example_block
-    ).then(
-        add_pingpong,
-        [idx, local_data, instruction_txtbox],
-        [instruction_txtbox, chatbot, local_data]
-    ).then(
-        None, local_data, None,
-        _js="(v)=>{ setStorage('local_data',v) }"
+    instruction.submit(
+        lambda prompt: gen_text(prompt),
+        instruction, output
     )
 
-    clean.click(
-        reset_chat,
-        [idx, local_data],
-        [instruction_txtbox, chatbot, local_data, example_block]
-    ).then(
-        None, local_data, None,
-        _js="(v)=>{ setStorage('local_data',v) }"
-    )
-
-    demo.load(
-        None,
-        inputs=None,
-        outputs=[chatbot, local_data],
-        _js=get_local_storage,
-    )
-
 demo.launch()
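Note: the second hunk header references `_clone_and_checkout(model_repo_url, local_path, model_version)`, which is defined earlier in app.py and left untouched by this commit, so its body does not appear in the diff. A minimal sketch of what such a helper could look like with the `huggingface_hub.Repository` class that app.py already imports; this is a hypothetical reconstruction, not the committed implementation:

    # Hypothetical sketch; not part of this commit's diff.
    def _clone_and_checkout(repo_url, local_path, version):
        # Clone the model repository into local_path and check out the requested revision.
        repo = Repository(local_dir=local_path, clone_from=repo_url)
        repo.git_checkout(version)
        return repo

With a helper along these lines, bumping `model_version` to "v1687590401" is what points the Space at the newly uploaded SavedModel.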
requirements.txt CHANGED
@@ -1,4 +1,3 @@
 tensorflow
 tensorflow_text
-huggingface_hub
-bingbong
+huggingface_hub