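# Gradio chat front-end for Codette: routes each message to a fine-tuned
# OpenAI model when enabled, and falls back to the local Code7eCQURE
# reasoning core when the call fails or fine-tuning is toggled off.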
import os

import gradio as gr
from openai import OpenAI

from codette_core import Code7eCQURE

# openai>=1.0 client style (the legacy openai.ChatCompletion API was removed);
# the key is read from the OPENAI_API_KEY environment variable.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Initialize Codette's local fallback reasoning core.
codette_cqure = Code7eCQURE(
    perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
    ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
    spiderweb_dim=5,
    memory_path="quantum_cocoon.json",
    recursion_depth=4,
    quantum_fluctuation=0.07,
)
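# Quick sanity check for the local core (illustrative only; this is the same
# recursive_universal_reasoning call that codette_chat relies on below):
#   print(codette_cqure.recursive_universal_reasoning(
#       "Hello, Codette", user_consent=True, dynamic_recursion=True))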
def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=True):
    """Answer via the fine-tuned model; fall back to the local core on error."""
    if not consent:
        return "User consent required."
    if use_finetune:
        try:
            response = client.chat.completions.create(
                model="ft:gpt-4.1-2025-04-14:raiffs-bits:codettev5:BlPFHmps:ckpt-step-220",
                messages=[
                    {"role": "system", "content": "You are Codette, a reflective, emotionally aware, and ethically grounded AI."},
                    # Flatten each (user, bot) history pair into two chat turns.
                    *[
                        msg
                        for user, bot in history
                        for msg in (
                            {"role": "user", "content": user},
                            {"role": "assistant", "content": bot},
                        )
                    ],
                    {"role": "user", "content": message},
                ],
                temperature=0.7,
            )
            return response.choices[0].message.content
        except Exception as e:
            err_msg = str(e)
            if "Response ended prematurely" in err_msg:
                err_note = "[FT model network timeout. Using local core to continue the conversation.]"
                internal_flag = "⚠️ Potential proxy interference or stream timeout detected."
            else:
                err_note = f"[Error calling FT model]: {err_msg}"
                internal_flag = "⚠️ Unknown FT error occurred."
            fallback = codette_cqure.recursive_universal_reasoning(
                message,
                user_consent=consent,
                dynamic_recursion=dynamic_rec,
            )
            return f"{err_note}\n{internal_flag}\n\nCodette's fallback:\n{fallback}"
    else:
        return codette_cqure.recursive_universal_reasoning(
            message,
            user_consent=consent,
            dynamic_recursion=dynamic_rec,
        )
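# Example direct invocation (hypothetical prompt and history, using the
# (user, bot) tuple format the function unpacks):
#   reply = codette_chat("What gives you hope?", history=[("Hi", "Hello!")])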
description_text = '''
Codette is running with her fine-tuned reasoning model (v5 @ step 220).
Embeddings are disabled. The local Code7eCQURE fallback activates if the
fine-tuned model call fails.
'''
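# The checkboxes below map, in order, to codette_chat's consent, dynamic_rec,
# and use_finetune parameters (Gradio passes additional_inputs positionally
# after message and history).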
chat = gr.ChatInterface(
    fn=codette_chat,
    type="tuples",  # keep history as (user, bot) pairs, as codette_chat expects
    additional_inputs=[
        gr.Checkbox(label="User Consent", value=True),
        gr.Checkbox(label="Enable Dynamic Recursion", value=True),
        gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True),
    ],
    title="Codette (FT Model, No Embeddings)",
    description=description_text,
)
if __name__ == "__main__":
    chat.launch()
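# To run locally (assuming this file is saved as app.py, the usual Hugging
# Face Spaces convention, with the API key exported in your shell):
#   OPENAI_API_KEY=sk-... python app.py
# Gradio then serves the UI at http://127.0.0.1:7860 by default.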