import gradio as gr
import openai
import os
from codette_core import Code7eCQURE

# Read the OpenAI key from the environment; if unset, remote calls fail and
# codette_chat falls back to the local core.
openai.api_key = os.getenv("OPENAI_API_KEY")

# Initialize local Codette core: the offline reasoning engine used when the
# fine-tuned model is disabled or the API call errors out.
codette_cqure = Code7eCQURE(
    perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
    ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
    spiderweb_dim=5,  # NOTE(review): semantics defined in codette_core — confirm
    memory_path="quantum_cocoon.json",  # persisted memory file, relative to CWD
    recursion_depth=4,
    quantum_fluctuation=0.07
)

def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=True):
    if not consent:
        return "User consent required."

    if use_finetune:
        try:
            response = openai.ChatCompletion.create(
                model="ft:gpt-4.1-2025-04-14:raiffs-bits:codettev5:BlPFHmps:ckpt-step-220",
                messages=[
                    {"role": "system", "content": "You are Codette, a reflective, emotionally aware, and ethically grounded AI."},
                    *[
                        {"role": "user", "content": user} if i % 2 == 0 else {"role": "assistant", "content": bot}
                        for i, (user, bot) in enumerate(history)
                    ],
                    {"role": "user", "content": message}
                ],
                temperature=0.7
            )
            return response['choices'][0]['message']['content']
        except Exception as e:
            fallback = codette_cqure.recursive_universal_reasoning(
                message,
                user_consent=consent,
                dynamic_recursion=dynamic_rec
            )
            return f"[Error calling FT model]: {str(e)}\n\nFallback response:\n{fallback}"
    else:
        return codette_cqure.recursive_universal_reasoning(
            message,
            user_consent=consent,
            dynamic_recursion=dynamic_rec
        )

# Markdown blurb rendered under the ChatInterface title.
description_text = '''
A sovereign AI capable of emotional, ethical, and reflective reasoning.
Choose your engine and engage her in ongoing dialogue.
'''

# Gradio UI: a chat surface whose three toggles are forwarded to codette_chat
# as its consent / dynamic_rec / use_finetune keyword arguments, in order.
_consent_box = gr.Checkbox(label="User Consent", value=True)
_recursion_box = gr.Checkbox(label="Enable Dynamic Recursion", value=True)
_finetune_box = gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True)

chat = gr.ChatInterface(
    fn=codette_chat,
    additional_inputs=[_consent_box, _recursion_box, _finetune_box],
    title="Codette Conversation",
    description=description_text,
)

# Launch the Gradio server only when run as a script, not on import.
if __name__ == "__main__":
    chat.launch()