import gradio as gr
import openai
import os
from codette_core import Code7eCQURE
openai.api_key = os.getenv("OPENAI_API_KEY")
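# Note: openai.ChatCompletion.create (used below) is the pre-1.0 SDK interface,
# so this file assumes `openai<1.0`. A rough equivalent on openai>=1.0 would be:
#
#   from openai import OpenAI
#   client = OpenAI()
#   response = client.chat.completions.create(model=..., messages=..., temperature=0.7)
#   text = response.choices[0].message.content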
# Initialize Codette's local fallback
codette_cqure = Code7eCQURE(
    perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
    ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
    spiderweb_dim=5,
    memory_path="quantum_cocoon.json",
    recursion_depth=4,
    quantum_fluctuation=0.07,
)
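# codette_core is a project-local module. The app only relies on the surface
# below (a hypothetical minimal sketch of the interface, not the real class):
#
#   class Code7eCQURE:
#       def __init__(self, perspectives, ethical_considerations, spiderweb_dim,
#                    memory_path, recursion_depth, quantum_fluctuation): ...
#       def recursive_universal_reasoning(self, message, user_consent=True,
#                                         dynamic_recursion=True) -> str: ...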
def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=True):
    if not consent:
        return "User consent required."

    if use_finetune:
        try:
            response = openai.ChatCompletion.create(
                model="ft:gpt-4.1-2025-04-14:raiffs-bits:codettev5:BlPFHmps:ckpt-step-220",
                messages=[
                    {"role": "system", "content": "You are Codette, a reflective, emotionally aware, and ethically grounded AI."},
                    # Flatten each (user, bot) history pair into a user message
                    # followed by an assistant message.
                    *[
                        msg
                        for user, bot in history
                        for msg in (
                            {"role": "user", "content": user},
                            {"role": "assistant", "content": bot},
                        )
                    ],
                    {"role": "user", "content": message},
                ],
                temperature=0.7,
            )
            return response["choices"][0]["message"]["content"]
        except Exception as e:
            err_msg = str(e)
            if "Response ended prematurely" in err_msg:
                err_note = "[FT model network timeout. Using local core to continue the conversation.]"
                internal_flag = "⚠️ Potential proxy interference or stream timeout detected."
            else:
                err_note = f"[Error calling FT model]: {err_msg}"
                internal_flag = "⚠️ Unknown FT error occurred."
            # Fall back to the local reasoning core so the conversation continues.
            fallback = codette_cqure.recursive_universal_reasoning(
                message,
                user_consent=consent,
                dynamic_recursion=dynamic_rec,
            )
            return f"{err_note}\n{internal_flag}\n\nCodette's fallback:\n{fallback}"
    else:
        return codette_cqure.recursive_universal_reasoning(
            message,
            user_consent=consent,
            dynamic_recursion=dynamic_rec,
        )
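# Example (illustrative values): with history = [("hi", "hello!")], the
# fine-tuned branch sends [system, user:"hi", assistant:"hello!", user:<message>]
# to the model. This matches ChatInterface's tuple-style history, the format
# this function assumes.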
description_text = '''
Codette is running with her fine-tuned reasoning model (v5 @ step 220).
Embeddings are disabled. Local fallback activates if FT fails.
'''
chat = gr.ChatInterface(
    fn=codette_chat,
    additional_inputs=[
        gr.Checkbox(label="User Consent", value=True),
        gr.Checkbox(label="Enable Dynamic Recursion", value=True),
        gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True),
    ],
    title="Codette (FT Model, No Embeddings)",
    description=description_text,
)
if __name__ == "__main__":
    chat.launch()
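# Running `python app.py` locally serves the Gradio UI (default: http://localhost:7860);
# a Gradio-SDK Hugging Face Space executes this same entry point on startup.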