Raiff1982 committed on
Commit
d8fda6f
·
verified ·
1 Parent(s): 3a26be5

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -76
app.py DELETED
@@ -1,76 +0,0 @@
1
- import gradio as gr
2
- import openai
3
- import os
4
- from codette_core import Code7eCQURE
5
-
6
# API key comes from the environment; may be None if unset — TODO confirm the
# FT path is meant to fail over to the local core in that case.
openai.api_key = os.getenv("OPENAI_API_KEY")

# Local fallback reasoning core.  Used whenever the fine-tuned OpenAI model is
# disabled or raises; configuration gathered here so it reads as one unit.
_CQURE_CONFIG = dict(
    perspectives=["Newton", "DaVinci", "Ethical", "Quantum", "Memory"],
    ethical_considerations="Codette Manifesto: kindness, inclusion, safety, hope.",
    spiderweb_dim=5,
    memory_path="quantum_cocoon.json",
    recursion_depth=4,
    quantum_fluctuation=0.07,
)
codette_cqure = Code7eCQURE(**_CQURE_CONFIG)
17
-
18
def codette_chat(message, history, consent=True, dynamic_rec=True, use_finetune=True):
    """Chat handler for the Gradio interface.

    Tries the fine-tuned OpenAI model first (when ``use_finetune`` is True)
    and falls back to the local Code7eCQURE core on any API failure.

    Args:
        message: Latest user utterance.
        history: Gradio chat history as a list of (user, assistant) pairs.
        consent: Gate flag; without it no processing happens at all.
        dynamic_rec: Forwarded to the local core as ``dynamic_recursion``.
        use_finetune: When False, skip the API and use the local core directly.

    Returns:
        The assistant reply as a string; when the fine-tuned call failed the
        reply is prefixed with error notes and comes from the local fallback.
    """
    if not consent:
        return "User consent required."

    if not use_finetune:
        return codette_cqure.recursive_universal_reasoning(
            message,
            user_consent=consent,
            dynamic_recursion=dynamic_rec,
        )

    # Rebuild the full conversation for the API call.  Each history entry is a
    # (user, assistant) pair and must expand to TWO messages.  The previous
    # version emitted only one message per pair (alternating on the pair
    # index), silently dropping half of the conversation context.
    messages = [
        {"role": "system", "content": "You are Codette, a reflective, emotionally aware, and ethically grounded AI."},
    ]
    for user_turn, bot_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": message})

    try:
        # NOTE: legacy openai<1.0 surface (openai.ChatCompletion) — kept as-is
        # to match whatever openai version this file was written against.
        response = openai.ChatCompletion.create(
            model="ft:gpt-4.1-2025-04-14:raiffs-bits:codettev5:BlPFHmps:ckpt-step-220",
            messages=messages,
            temperature=0.7
        )
        return response['choices'][0]['message']['content']
    except Exception as e:  # deliberate broad catch: any FT failure falls back locally
        err_msg = str(e)
        if "Response ended prematurely" in err_msg:
            err_note = "[FT model network timeout. Using local core to continue the conversation.]"
            internal_flag = "⚠️ Potential proxy interference or stream timeout detected."
        else:
            err_note = f"[Error calling FT model]: {err_msg}"
            internal_flag = "⚠️ Unknown FT error occurred."

        fallback = codette_cqure.recursive_universal_reasoning(
            message,
            user_consent=consent,
            dynamic_recursion=dynamic_rec
        )
        return f"{err_note}\n{internal_flag}\n\nCodette's fallback:\n{fallback}"
58
-
59
# Shown beneath the chat box in the Gradio UI.
description_text = '''
Codette is running with her fine-tuned reasoning model (v5 @ step 220).
Embeddings are disabled. Local fallback activates if FT fails.
'''

# Extra toggles surfaced next to the chat input.  Their values arrive as the
# consent / dynamic_rec / use_finetune arguments of codette_chat, in order.
_extra_inputs = [
    gr.Checkbox(label="User Consent", value=True),
    gr.Checkbox(label="Enable Dynamic Recursion", value=True),
    gr.Checkbox(label="Use Fine-Tuned Model (Codette v5 @ step 220)", value=True),
]

chat = gr.ChatInterface(
    fn=codette_chat,
    additional_inputs=_extra_inputs,
    title="Codette (FT Model, No Embeddings)",
    description=description_text,
)

if __name__ == "__main__":
    chat.launch()