freeCS-dot-org committed
Commit 6e8269f · verified · 1 Parent(s): bacf4cd

Update app.py

Files changed (1): app.py +18 -0
app.py CHANGED
@@ -37,6 +37,11 @@ class ConversationManager:
     def add_exchange(self, user_message, assistant_response, formatted_response):
         self.model_history.append((user_message, assistant_response))
         self.user_history.append((user_message, formatted_response))
+        # Log the exchange
+        print(f"\nModel History Exchange:")
+        print(f"User: {user_message}")
+        print(f"Assistant (Original): {assistant_response}")
+        print(f"Assistant (Formatted): {formatted_response}")

     def get_model_history(self):
         return self.model_history
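For context, a minimal sketch of where the new logging sits inside the class. The constructor shown here is an assumption (it is not part of this diff); the two methods mirror the hunk above.

class ConversationManager:
    def __init__(self):
        # Assumed constructor: two parallel histories, one fed back to the
        # model and one shown in the UI.
        self.model_history = []   # (user_message, assistant_response)
        self.user_history = []    # (user_message, formatted_response)

    def add_exchange(self, user_message, assistant_response, formatted_response):
        self.model_history.append((user_message, assistant_response))
        self.user_history.append((user_message, formatted_response))
        # Log the exchange (added in this commit)
        print(f"\nModel History Exchange:")
        print(f"User: {user_message}")
        print(f"Assistant (Original): {assistant_response}")
        print(f"Assistant (Formatted): {formatted_response}")

    def get_model_history(self):
        return self.model_history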
@@ -76,7 +81,14 @@ def stream_chat(
     top_k: int = 1,
     penalty: float = 1.1,
 ):
+    print(f'\nNew Chat Request:')
+    print(f'Message: {message}')
+    print(f'History from UI: {history}')
+    print(f'System Prompt: {system_prompt}')
+    print(f'Parameters: temp={temperature}, max_tokens={max_new_tokens}, top_p={top_p}, top_k={top_k}, penalty={penalty}')
+
     model_history = conversation_manager.get_model_history()
+    print(f'Model History: {model_history}')

     conversation = []
     for prompt, answer in model_history:
@@ -86,6 +98,8 @@ def stream_chat(
         ])

     conversation.append({"role": "user", "content": message})
+    print(f'\nFormatted Conversation for Model:')
+    print(conversation)

     input_ids = tokenizer.apply_chat_template(
         conversation,
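To make the newly printed structure concrete, a small self-contained sketch of the history-to-messages conversion. The loop body is assumed to use the usual role/content message dicts (only its closing brackets are visible in the hunks above), and the sample values are invented for illustration.

# Sketch: turning the stored (prompt, answer) pairs into the conversation
# list that the new print statements expose. Sample data is hypothetical.
model_history = [("Hi", "Hello! How can I help?")]
message = "What does this commit change?"

conversation = []
for prompt, answer in model_history:
    conversation.extend([
        {"role": "user", "content": prompt},
        {"role": "assistant", "content": answer},
    ])
conversation.append({"role": "user", "content": message})

print(f'\nFormatted Conversation for Model:')
print(conversation)
# -> [{'role': 'user', 'content': 'Hi'},
#     {'role': 'assistant', 'content': 'Hello! How can I help?'},
#     {'role': 'user', 'content': 'What does this commit change?'}]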
@@ -126,6 +140,10 @@ def stream_chat(
         formatted_buffer = format_response(buffer)

         if thread.is_alive() is False:
+            print(f'\nGeneration Complete:')
+            print(f'Original Response: {original_response}')
+            print(f'Formatted Response: {formatted_buffer}')
+
             conversation_manager.add_exchange(
                 message,
                 original_response,  # Original for model
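The last hunk only logs once the generation thread has stopped. A rough sketch of that pattern follows, with a stand-in worker instead of the model and the format_response step omitted; it assumes app.py runs generation on a background thread and accumulates the streamed text into a buffer.

import threading
import time

chunks_out = []

def fake_generate():
    # Stand-in for the real generation call running on the worker thread.
    for piece in ["Hello", ", ", "world", "!"]:
        time.sleep(0.05)
        chunks_out.append(piece)

thread = threading.Thread(target=fake_generate)
thread.start()

buffer = ""
while True:
    finished = thread.is_alive() is False   # same completion check as in stream_chat
    buffer = "".join(chunks_out)            # in app.py this is built from the token streamer
    if finished:
        # Only now is the exchange logged and handed to ConversationManager.
        print(f'\nGeneration Complete:')
        print(f'Original Response: {buffer}')
        break
    time.sleep(0.01)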
 