Raiff1982 committed on
Commit
3a26be5
·
verified ·
1 Parent(s): 4cd4274

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -5,7 +5,7 @@ from codette_core import Code7eCQURE
5
 
6
  openai.api_key = os.getenv("OPENAI_API_KEY")
7
 
8
- # Initialize local Codette core
9
  codette_cqure = Code7eCQURE(
10
  perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
11
  ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
@@ -38,7 +38,7 @@ def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=
38
  err_msg = str(e)
39
  if "Response ended prematurely" in err_msg:
40
  err_note = "[FT model network timeout. Using local core to continue the conversation.]"
41
- internal_flag = "⚠️ Potential proxy interference or timeout from OpenAI stream detected."
42
  else:
43
  err_note = f"[Error calling FT model]: {err_msg}"
44
  internal_flag = "⚠️ Unknown FT error occurred."
@@ -57,8 +57,8 @@ def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=
57
  )
58
 
59
  description_text = '''
60
- A sovereign AI capable of emotional, ethical, and reflective reasoning.
61
- Choose your engine and engage her in ongoing dialogue.
62
  '''
63
 
64
  chat = gr.ChatInterface(
@@ -68,9 +68,9 @@ chat = gr.ChatInterface(
68
  gr.Checkbox(label="Enable Dynamic Recursion", value=True),
69
  gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True)
70
  ],
71
- title="Codette Conversation",
72
  description=description_text,
73
  )
74
 
75
  if __name__ == "__main__":
76
- chat.launch()
 
5
 
6
  openai.api_key = os.getenv("OPENAI_API_KEY")
7
 
8
+ # Initialize Codette's local fallback
9
  codette_cqure = Code7eCQURE(
10
  perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
11
  ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
 
38
  err_msg = str(e)
39
  if "Response ended prematurely" in err_msg:
40
  err_note = "[FT model network timeout. Using local core to continue the conversation.]"
41
+ internal_flag = "⚠️ Potential proxy interference or stream timeout detected."
42
  else:
43
  err_note = f"[Error calling FT model]: {err_msg}"
44
  internal_flag = "⚠️ Unknown FT error occurred."
 
57
  )
58
 
59
  description_text = '''
60
+ Codette is running with her fine-tuned reasoning model (v5 @ step 220).
61
+ Embeddings are disabled. Local fallback activates if FT fails.
62
  '''
63
 
64
  chat = gr.ChatInterface(
 
68
  gr.Checkbox(label="Enable Dynamic Recursion", value=True),
69
  gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True)
70
  ],
71
+ title="Codette (FT Model, No Embeddings)",
72
  description=description_text,
73
  )
74
 
75
  if __name__ == "__main__":
76
+ chat.launch()