winglian committed on
Commit
aa0011f
·
1 Parent(s): 96c5a1f

Update tabbed.py

Browse files
Files changed (1) hide show
  1. tabbed.py +2 -2
tabbed.py CHANGED
@@ -74,7 +74,7 @@ start_message = """
74
 
75
  def generate_text_instruct(input_text):
76
  response = ""
77
- for output in llm(f"### Instruction:\n{input_text}\n\n### Response:\n", echo=False, stream=True, **config['chat']):
78
  answer = output['choices'][0]['text']
79
  response += answer
80
  yield response
@@ -125,7 +125,7 @@ with gr.Blocks() as demo:
125
  repeat_penalty = gr.Slider(0.0, 2.0, label="Repetition Penalty", step=0.1, value=1.1)
126
 
127
  system_msg = gr.Textbox(
128
- start_message, label="System Message", interactive=False, visible=False)
129
 
130
  chat_history_state = gr.State()
131
  clear.click(clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message], queue=False)
 
74
 
75
  def generate_text_instruct(input_text):
76
  response = ""
77
+ for output in llm(f"USER: {input_text}\nASSISTANT:", echo=False, stream=True, **config['chat']):
78
  answer = output['choices'][0]['text']
79
  response += answer
80
  yield response
 
125
  repeat_penalty = gr.Slider(0.0, 2.0, label="Repetition Penalty", step=0.1, value=1.1)
126
 
127
  system_msg = gr.Textbox(
128
+ start_message, label="System Message", interactive=False, visible=True, placeholder="system prompt, useful for RP")
129
 
130
  chat_history_state = gr.State()
131
  clear.click(clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message], queue=False)