awacke1 committed
Commit fd2f3cc · 1 Parent(s): 81b4017

Update app.py

Files changed (1):
  1. app.py +15 -12
app.py CHANGED
@@ -27,6 +27,7 @@ def SaveResult(text, outputfileName):
             f.write('\n')
     else:
         with open(outputfileName, "w") as f: #write
+            f.write(str("time, message, text\n")) # one time only to get column headers for CSV file
             f.write(str(text.replace("\n"," ")))
             f.write('\n')
     return
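Note on this hunk: the only change is the one-time header row written when the CSV file is first created. A self-contained sketch of the resulting save pattern (the append branch above line 27 is not shown in this diff, so its exact wording here is an assumption):

    import os

    def save_result(text, output_filename):
        # Append a row if the CSV already exists; otherwise create it and write the
        # "time, message, text" column headers once before the first data row.
        if os.path.exists(output_filename):
            with open(output_filename, "a") as f:    # append
                f.write(str(text.replace("\n", " ")))
                f.write("\n")
        else:
            with open(output_filename, "w") as f:    # write
                f.write("time, message, text\n")     # one-time column headers
                f.write(str(text.replace("\n", " ")))
                f.write("\n")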
@@ -66,12 +67,18 @@ title = "💬ChatBack🧠💾"
 description = """Chatbot With persistent memory dataset allowing multiagent system AI to access a shared dataset as memory pool with stored interactions.
 Current Best SOTA Chatbot: https://huggingface.co/facebook/blenderbot-400M-distill?text=Hey+my+name+is+ChatBack%21+Are+you+ready+to+rock%3F """
 
+def get_base(filename):
+    basedir = os.path.dirname(__file__)
+    loadPath = basedir + "\\" + filename
+    return loadPath
+
 def chat(message, history):
     history = history or []
     if history:
         history_useful = ['</s> <s>'.join([str(a[0])+'</s> <s>'+str(a[1]) for a in history])]
     else:
         history_useful = []
+
     history_useful = add_note_to_history(message, history_useful)
     inputs = tokenizer(history_useful, return_tensors="pt")
     inputs, history_useful, history = take_last_tokens(inputs, history_useful, history)
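Note on this hunk: the new get_base helper joins basedir and filename with a hard-coded "\\", a Windows-style separator, even though Spaces run on Linux. A portable alternative sketch (not what this commit does) would lean on os.path.join:

    import os

    def get_base(filename):
        # Alternative sketch: os.path.join picks the separator for the host OS,
        # so the same path logic works on both Linux and Windows.
        basedir = os.path.dirname(__file__)
        return os.path.join(basedir, filename)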
@@ -86,15 +93,13 @@ def chat(message, history):
     if UseMemory:
         outputfileName = 'File.csv'
         df = store_message(message, response, outputfileName) # Save to dataset
-        basedir = os.path.dirname(__file__)
-        savePath = outputfileName
+        basedir = get_base(outputfileName)
 
-    #return history, df, outputfileName
-    return history, df
+    return history, df, basedir
 
+
 with gr.Blocks() as demo:
-    gr.Markdown("<h1><center>🍰Gradio chatbot backed by memory in a dataset repository.🎨</center></h1>")
-    #gr.Markdown("The memory dataset for saves is [{DATASET_REPO_URL}]({DATASET_REPO_URL}) And here: https://huggingface.co/spaces/awacke1/DatasetAnalyzer Code and datasets on chat are here hf tk: https://paperswithcode.com/datasets?q=chat&v=lst&o=newest")
+    gr.Markdown("<h1><center>🍰Gradio chatbot backed by memory in a local CSV file.🎨</center></h1>")
 
     with gr.Row():
         t1 = gr.Textbox(lines=1, default="", label="Chat Text:")
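Note on this hunk: chat now returns three values instead of two, and basedir is only assigned inside the UseMemory branch. A toy stub of the new return contract (the None fallback and stand-in values below are assumptions for illustration, not app.py behavior):

    def chat_stub(message, history, use_memory=True):
        # Shape of the values handed back to Gradio after this commit:
        # updated history for gr.State, rows for gr.Dataframe, a CSV path for gr.File.
        history = (history or []) + [(message, "stub response")]
        df = [[m, r] for m, r in history]              # stand-in for store_message()'s dataframe
        basedir = "File.csv" if use_memory else None   # assumption: the real code only sets this when UseMemory is on
        return history, df, basedir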
@@ -102,13 +107,11 @@ with gr.Blocks() as demo:
 
     with gr.Row(): # inputs and buttons
         s1 = gr.State([])
-        s2 = gr.Markdown()
-    with gr.Row():
-        file = gr.File(label="File"),
         df1 = gr.Dataframe(wrap=True, max_rows=1000, overflow_row_behaviour= "paginate")
+    with gr.Row(): # inputs and buttons
+        file = gr.File(label="File")
+        s2 = gr.Markdown()
 
-
-    #b1.click(fn=chat, inputs=[t1, s1], outputs=[s1, df1, file])
-    b1.click(fn=chat, inputs=[t1, s1], outputs=[s1, df1])
+    b1.click(fn=chat, inputs=[t1, s1], outputs=[s1, df1, file])
 
 demo.launch(debug=True, show_error=True)
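Note on this hunk: the click handler now routes three outputs, matching chat's new return value. A minimal runnable sketch of that wiring with a toy chat function (b1 is defined in a part of app.py not shown in this diff; the echo function and a pre-existing File.csv are assumptions):

    import gradio as gr

    def chat(message, history):
        # Toy stand-in for the model-backed chat: echo the message, grow the history,
        # and hand back a file path for the gr.File output.
        history = (history or []) + [(message, "echo: " + message)]
        df = [[m, r] for m, r in history]   # rendered by the gr.Dataframe
        return history, df, "File.csv"      # "File.csv" must already exist on disk

    with gr.Blocks() as demo:
        s1 = gr.State([])
        with gr.Row():
            t1 = gr.Textbox(lines=1, label="Chat Text:")
            b1 = gr.Button("Send")
        with gr.Row():
            df1 = gr.Dataframe(wrap=True)
        with gr.Row():
            file = gr.File(label="File")
        b1.click(fn=chat, inputs=[t1, s1], outputs=[s1, df1, file])

    # demo.launch(debug=True, show_error=True)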
 